Dataset fields: file_name (string, 3–137 chars), prefix (string, 0–918k chars), suffix (string, 0–962k chars), middle (string, 0–812k chars).
register.go
package api
import (
"k8s.io/kubernetes/pkg/api/unversioned"
"k8s.io/kubernetes/pkg/runtime"
)
const GroupName = ""
const FutureGroupName = "oauth.openshift.io"
// SchemeGroupVersion is group version used to register these objects
var SchemeGroupVersion = unversioned.GroupVersion{Group: GroupName, Version: runtime.APIVersionInternal}
// Kind takes an unqualified kind and returns back a Group qualified GroupKind
func Kind(kind string) unversioned.GroupKind {
return SchemeGroupVersion.WithKind(kind).GroupKind()
}
// Resource takes an unqualified resource and returns back a Group qualified GroupResource
func Resource(resource string) unversioned.GroupResource {
return SchemeGroupVersion.WithResource(resource).GroupResource()
}
func AddToScheme(scheme *runtime.Scheme) {
// Add the API to Scheme.
addKnownTypes(scheme)
}
// Adds the list of known types to api.Scheme.
func addKnownTypes(scheme *runtime.Scheme) {
scheme.AddKnownTypes(SchemeGroupVersion,
&OAuthAccessToken{},
&OAuthAccessTokenList{},
&OAuthAuthorizeToken{},
&OAuthAuthorizeTokenList{},
&OAuthClient{},
&OAuthClientList{},
&OAuthClientAuthorization{},
&OAuthClientAuthorizationList{},
)
}
func (obj *OAuthClientAuthorizationList) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthClientAuthorization) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthClientList) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthClient) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthAuthorizeTokenList) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthAuthorizeToken) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthAccessTokenList) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
func (obj *OAuthAccessToken) GetObjectKind() unversioned.ObjectKind { return &obj.TypeMeta }
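The package above is typically consumed by registering its types into a scheme at startup; a minimal, hypothetical sketch (the import path and printed values are assumptions, not part of the file itself):

// Sketch: wiring the internal OAuth types into a runtime.Scheme (hypothetical caller).
package main

import (
	"fmt"

	"k8s.io/kubernetes/pkg/runtime"

	oauthapi "github.com/openshift/origin/pkg/oauth/api" // assumed import path for the package above
)

func main() {
	scheme := runtime.NewScheme()
	// Registers OAuthAccessToken, OAuthClient, and the other known types with the internal group version.
	oauthapi.AddToScheme(scheme)

	// Kind and Resource qualify bare names with the legacy (empty) group.
	fmt.Println(oauthapi.Kind("OAuthClient"))
	fmt.Println(oauthapi.Resource("oauthclients"))
}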
loading.ts
import { Observable, startWith, distinctUntilChanged, shareReplay, map, OperatorFunction, first } from 'rxjs';
/**
* Operator that emits true until the source emits its first item, then emits false.
*
* @returns An operator that maps any source observable to a boolean loading indicator.
*/
export function isLoading<T>(): OperatorFunction<T, boolean> {
return (source: Observable<T>) => {
return source.pipe(
first(),
map(() => false),
startWith(true),
distinctUntilChanged(),
shareReplay(1)
);
};
}
create_enrollment.go
// Code generated by go-swagger; DO NOT EDIT.
//
// Copyright NetFoundry Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// __ __ _
// \ \ / / (_)
// \ \ /\ / /_ _ _ __ _ __ _ _ __ __ _
// \ \/ \/ / _` | '__| '_ \| | '_ \ / _` |
// \ /\ / (_| | | | | | | | | | | (_| | : This file is generated, do not edit it.
// \/ \/ \__,_|_| |_| |_|_|_| |_|\__, |
// __/ |
// |___/
package enrollment
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"net/http"
"github.com/go-openapi/runtime/middleware"
)
// CreateEnrollmentHandlerFunc turns a function with the right signature into a create enrollment handler
type CreateEnrollmentHandlerFunc func(CreateEnrollmentParams, interface{}) middleware.Responder
// Handle executing the request and returning a response
func (fn CreateEnrollmentHandlerFunc) Handle(params CreateEnrollmentParams, principal interface{}) middleware.Responder {
return fn(params, principal)
}
// CreateEnrollmentHandler is the interface that can handle valid create enrollment params
type CreateEnrollmentHandler interface {
Handle(CreateEnrollmentParams, interface{}) middleware.Responder
}
// NewCreateEnrollment creates a new http.Handler for the create enrollment operation
func NewCreateEnrollment(ctx *middleware.Context, handler CreateEnrollmentHandler) *CreateEnrollment {
return &CreateEnrollment{Context: ctx, Handler: handler}
}
/* CreateEnrollment swagger:route POST /enrollments Enrollment createEnrollment
Create an outstanding enrollment for an identity
Creates a new OTT, OTTCA, or UPDB enrollment for a specific identity. If an enrollment of the same type is already outstanding the request will fail with a 409 conflict. If desired, an existing enrollment can be refreshed by `enrollments/:id/refresh` or deleted.
*/
type CreateEnrollment struct {
Context *middleware.Context
Handler CreateEnrollmentHandler
}
func (o *CreateEnrollment) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
route, rCtx, _ := o.Context.RouteInfo(r)
if rCtx != nil {
*r = *rCtx
}
var Params = NewCreateEnrollmentParams()
uprinc, aCtx, err := o.Context.Authorize(r, route)
if err != nil {
o.Context.Respond(rw, r, route.Produces, route, err)
return
}
if aCtx != nil {
*r = *aCtx
}
var principal interface{}
if uprinc != nil {
principal = uprinc.(interface{}) // this is really an interface{}, I promise
}
if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
o.Context.Respond(rw, r, route.Produces, route, err)
return
}
res := o.Handler.Handle(Params, principal) // actually handle the request
o.Context.Respond(rw, r, route.Produces, route, res)
}
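For orientation, a hedged sketch of how a server supplies a handler for this generated operation; the function name and the NotImplemented stub are placeholder assumptions (this would normally sit in the generated API's configuration code, alongside the package's existing types and the middleware import above):

// Sketch: adapting an ordinary function into a CreateEnrollmentHandler via the generated HandlerFunc type.
func configureCreateEnrollment() CreateEnrollmentHandler {
	return CreateEnrollmentHandlerFunc(func(params CreateEnrollmentParams, principal interface{}) middleware.Responder {
		// Validate params, create the OTT, OTTCA, or UPDB enrollment for the identity,
		// and return a success responder; this stub only reports "not implemented".
		return middleware.NotImplemented("operation enrollment.CreateEnrollment has not yet been implemented")
	})
}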
mid5.rs
#[doc = "Reader of register MID5"]
pub type R = crate::R<u32, super::MID5>;
#[doc = "Writer for register MID5"]
pub type W = crate::W<u32, super::MID5>;
#[doc = "Register MID5 `reset()`'s with value 0"]
impl crate::ResetValue for super::MID5 {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
#[doc = "Reader of field `MIDvB`"]
pub type MIDVB_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `MIDvB`"]
pub struct MIDVB_W<'a> {
w: &'a mut W,
}
impl<'a> MIDVB_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0003_ffff) | ((value as u32) & 0x0003_ffff);
self.w
}
}
#[doc = "Reader of field `MIDvA`"]
pub type MIDVA_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `MIDvA`"]
pub struct MIDVA_W<'a> {
w: &'a mut W,
}
impl<'a> MIDVA_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07ff << 18)) | (((value as u32) & 0x07ff) << 18);
self.w
}
}
#[doc = "Reader of field `MIDE`"]
pub type MIDE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MIDE`"]
pub struct MIDE_W<'a> {
w: &'a mut W,
}
impl<'a> MIDE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
impl R {
#[doc = "Bits 0:17 - Complementary bits for identifier in extended frame mode"]
#[inline(always)]
pub fn midv_b(&self) -> MIDVB_R {
MIDVB_R::new((self.bits & 0x0003_ffff) as u32)
}
#[doc = "Bits 18:28 - Identifier for standard frame mode"]
#[inline(always)]
pub fn midv_a(&self) -> MIDVA_R {
MIDVA_R::new(((self.bits >> 18) & 0x07ff) as u16)
}
#[doc = "Bit 29 - Identifier Version"]
#[inline(always)]
pub fn mide(&self) -> MIDE_R {
MIDE_R::new(((self.bits >> 29) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:17 - Complementary bits for identifier in extended frame mode"]
#[inline(always)]
pub fn midv_b(&mut self) -> MIDVB_W {
MIDVB_W { w: self }
}
#[doc = "Bits 18:28 - Identifier for standard frame mode"]
#[inline(always)]
pub fn midv_a(&mut self) -> MIDVA_W {
MIDVA_W { w: self }
}
#[doc = "Bit 29 - Identifier Version"]
#[inline(always)]
pub fn mide(&mut self) -> MIDE_W {
MIDE_W { w: self }
}
}
presenter_test.go
package json
import (
"bytes"
"flag"
"testing"
"github.com/anchore/stereoscope/pkg/filetree"
"github.com/anchore/go-testutils"
"github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
"github.com/sergi/go-diff/diffmatchpatch"
)
var update = flag.Bool("update", false, "update the *.golden files for json presenters")
func must(c pkg.CPE, e error) pkg.CPE {
if e != nil {
panic(e)
}
return c
}
func TestJsonDirsPresenter(t *testing.T) {
var buffer bytes.Buffer
catalog := pkg.NewCatalog()
// populate catalog with test data
catalog.Add(pkg.Package{
ID: "package-1-id",
Name: "package-1",
Version: "1.0.1",
Type: pkg.PythonPkg,
FoundBy: "the-cataloger-1",
Locations: []source.Location{
{RealPath: "/some/path/pkg1"},
},
Language: pkg.Python,
MetadataType: pkg.PythonPackageMetadataType,
Licenses: []string{"MIT"},
Metadata: pkg.PythonPackageMetadata{
Name: "package-1",
Version: "1.0.1",
},
PURL: "a-purl-2",
CPEs: []pkg.CPE{
must(pkg.NewCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*")),
},
})
catalog.Add(pkg.Package{
ID: "package-2-id",
Name: "package-2",
Version: "2.0.1",
Type: pkg.DebPkg,
FoundBy: "the-cataloger-2",
Locations: []source.Location{
{RealPath: "/some/path/pkg1"},
},
MetadataType: pkg.DpkgMetadataType,
Metadata: pkg.DpkgMetadata{
Package: "package-2",
Version: "2.0.1",
},
PURL: "a-purl-2",
CPEs: []pkg.CPE{
must(pkg.NewCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*")),
},
})
var d *distro.Distro
s, err := source.NewFromDirectory("/some/path")
if err != nil {
t.Fatal(err)
}
pres := NewPresenter(catalog, s.Metadata, d)
// run presenter
err = pres.Present(&buffer)
if err != nil {
t.Fatal(err)
}
actual := buffer.Bytes()
if *update {
testutils.UpdateGoldenFileContents(t, actual)
}
var expected = testutils.GetGoldenFileContents(t)
if !bytes.Equal(expected, actual) {
dmp := diffmatchpatch.New()
diffs := dmp.DiffMain(string(expected), string(actual), true)
t.Errorf("mismatched output:\n%s", dmp.DiffPrettyText(diffs))
}
}
func TestJsonImgsPresenter(t *testing.T) {
var buffer bytes.Buffer
testImage := "image-simple"
if *update {
imagetest.UpdateGoldenFixtureImage(t, testImage)
}
catalog := pkg.NewCatalog()
img := imagetest.GetGoldenFixtureImage(t, testImage)
_, ref1, _ := img.SquashedTree().File("/somefile-1.txt", filetree.FollowBasenameLinks)
_, ref2, _ := img.SquashedTree().File("/somefile-2.txt", filetree.FollowBasenameLinks)
// populate catalog with test data
catalog.Add(pkg.Package{
ID: "package-1-id",
Name: "package-1",
Version: "1.0.1",
Locations: []source.Location{
source.NewLocationFromImage(string(ref1.RealPath), *ref1, img),
},
Type: pkg.PythonPkg,
FoundBy: "the-cataloger-1",
Language: pkg.Python,
MetadataType: pkg.PythonPackageMetadataType,
Licenses: []string{"MIT"},
Metadata: pkg.PythonPackageMetadata{
Name: "package-1",
Version: "1.0.1",
},
PURL: "a-purl-1",
CPEs: []pkg.CPE{
must(pkg.NewCPE("cpe:2.3:*:some:package:1:*:*:*:*:*:*:*")),
},
})
catalog.Add(pkg.Package{
ID: "package-2-id",
Name: "package-2",
Version: "2.0.1",
Locations: []source.Location{
source.NewLocationFromImage(string(ref2.RealPath), *ref2, img),
},
Type: pkg.DebPkg,
FoundBy: "the-cataloger-2",
MetadataType: pkg.DpkgMetadataType,
Metadata: pkg.DpkgMetadata{
Package: "package-2",
Version: "2.0.1",
},
PURL: "a-purl-2",
CPEs: []pkg.CPE{
must(pkg.NewCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*")),
},
})
// this is a hard coded value that is not given by the fixture helper and must be provided manually
img.Metadata.ManifestDigest = "sha256:2731251dc34951c0e50fcc643b4c5f74922dad1a5d98f302b504cf46cd5d9368"
s, err := source.NewFromImage(img, source.SquashedScope, "user-image-input")
if err != nil {
t.Fatal(err)
}
var d *distro.Distro
pres := NewPresenter(catalog, s.Metadata, d)
// run presenter
err = pres.Present(&buffer)
if err != nil {
t.Fatal(err)
}
actual := buffer.Bytes()
if *update {
testutils.UpdateGoldenFileContents(t, actual)
}
var expected = testutils.GetGoldenFileContents(t)
if !bytes.Equal(expected, actual) {
dmp := diffmatchpatch.New()
diffs := dmp.DiffMain(string(expected), string(actual), true)
t.Errorf("mismatched output:\n%s", dmp.DiffPrettyText(diffs))
}
}
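Both tests above follow the golden-file pattern: running with -update rewrites the expected output, otherwise the test diffs against the stored file. A generic, self-contained sketch of that pattern (names are illustrative, not the testutils API):

package golden_test

import (
	"bytes"
	"flag"
	"os"
	"testing"
)

var update = flag.Bool("update", false, "update the golden files")

// assertGolden rewrites the golden file when -update is set, then compares actual output against it.
func assertGolden(t *testing.T, goldenPath string, actual []byte) {
	t.Helper()
	if *update {
		if err := os.WriteFile(goldenPath, actual, 0o644); err != nil {
			t.Fatal(err)
		}
	}
	expected, err := os.ReadFile(goldenPath)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(expected, actual) {
		t.Errorf("output does not match golden file %s", goldenPath)
	}
}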
__init__.py
import os, inspect
from lib import navpy
from util import transformations as tr
from util import SRTM, common, file_tools, mavlink_meta
from shapely.geometry import LineString
from shapely import affinity
# Samuel Dudley
# September 2018
# Mission planning tool for mavlink enabled vehicles
# Setup logging
# generic mission object
class BaseMission(object):
def __init__(self, missionID, takeoffAlt, takeoffLoiterTime, outputDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'missions'), logName = 'mission'):
# setup logger
self.logger = common.setupLogger(logName)
# set output directory
self.outputDir = outputDir
# TODO: Check to make sure the dir exists
self.latRef = None
self.lonRef = None
self.altRef = None
self.frame = 'ned'
self.points = []
# setup altitude types
self.altitudeTypes = {'relative':3, 'terrain':10}
self.availableFrames = ['ned', 'lla']
self.missionID = missionID
self.takeoffAlt = takeoffAlt
self.takeoffLoiterTime = takeoffLoiterTime
self.filePath = os.path.join(self.outputDir, self.missionID+'.txt')
self.fid = None
self.missionLine = 0
self.autoContinue = 1
self.mavlinkEnums = mavlink_meta.getMavlinkEnums()
def writeWaypointFile(self, actions):
file_tools.makePath(self.outputDir)
with open(self.filePath, 'w+') as self.fid:
for action in actions:
if inspect.ismethod(action):
action()
else:
self.writeGenericAction(action)
def writeWaypointLine(self, line, newLine = True):
if newLine:
line +="\n"
if not self.fid.closed:
self.fid.write(line)
self.missionLine += 1
else:
# The waypoint file is closed
self.logger.error('Failed to write to waypoint file')
def writeHomeLLA(self):
line = "{0} 0 0 {1} 0.0 0.0 0.0 0.0 {2} {3} {4} {5}".format(self.missionLine, self.mavlinkEnums['MAV_CMD']['NAV_WAYPOINT']['value'],
self.latRef, self.lonRef, self.altRef,
self.autoContinue)
self.writeWaypointLine(line)
def writeTakeoffLLA(self):
line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 {3} {4} {5} {6}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_TAKEOFF']['value'],
self.latRef, self.lonRef, self.takeoffAlt,
self.autoContinue)
self.writeWaypointLine(line)
def writeLoiterTime(self):
line = "{0} 0 {1} {2} {3} 0.0 0.0 0.0 {4} {5} {6} {7}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_LOITER_TIME']['value'],
self.takeoffLoiterTime,
self.latRef, self.lonRef, self.takeoffAlt,
self.autoContinue)
self.writeWaypointLine(line)
def writeReturnToLaunch(self):
line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 0.0 0.0 0.0 {3}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_RETURN_TO_LAUNCH']['value'],
self.autoContinue)
self.writeWaypointLine(line)
def writeWaypointLLA(self, lla):
line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 {3} {4} {5} {6}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_WAYPOINT']['value'],
lla[0], lla[1], lla[2],
self.autoContinue)
self.writeWaypointLine(line)
def writePreamble(self):
self.writeWaypointLine("QGC WPL 110")
self.missionLine = 0
def writeGenericAction(self, action):
line = "{0} {1}".format(self.missionLine, action)
self.writeWaypointLine(line)
def checkFrame(self):
if self.frame.lower() in self.availableFrames:
return True
else:
return False
def setReferenceLLA(self, LLA=[]):
# TODO: check LLA length
self.latRef = LLA[0]
self.lonRef = LLA[1]
self.altRef = LLA[2]
sss = SRTM.NEDGround(lla_ref = LLA , width_m = 10000 , height_m = 10000 , step_m = 30, logger = self.logger)
def setReferenceLatitude(self, lat):
self.latRef = lat
def setReferenceLongitude(self, lon):
self.lonRef = lon
def setReferenceAltitude(self, alt):
self.altRef = alt
def getPointsNED(self, lla):
ned = navpy.lla2ned(lla[0], lla[1], lla[2], lat_ref = self.latRef, lon_ref = self.lonRef, alt_ref = self.altRef)
return list(ned)
def getPointsLLA(self, ned):
lla = navpy.ned2lla(ned, lat_ref = self.latRef, lon_ref= self.lonRef , alt_ref = self.altRef)
return list(lla)
class GridMission(BaseMission):
def __init__(self, missionID, takeoffAlt = 10, takeoffLoiterTime = 5, append = False):
super(GridMission, self).__init__(missionID, takeoffAlt, takeoffLoiterTime)
self.logger.debug(missionID)
def generateGrid(self, out = 100, right = 50, yaw = 45, alt = 25):
# TODO: dynamically calculate lane width from sensor FoV and alt
laneWidth = 10
points = []
if right < 0:
laneWidth = -laneWidth
for k in range(0, 50, 2):
points.append((0, k*laneWidth))
points.append((out, k*laneWidth))
points.append((out, (k+1)*laneWidth))
points.append((0, (k+1)*laneWidth))
if abs(laneWidth*(k+1)) > abs(right) :
break
line = LineString(points)
# line = affinity.rotate(line, angle=yaw, origin=list(line.coords)[0], use_radians=False)
llas = [self.getPointsLLA([point[0], point[1], 0]) for point in list(line.coords)]
for lla in llas:
self.writeWaypointLLA([lla[0], lla[1], alt])
if __name__ == '__main__':
mission = GridMission('grid_test')
mission.setReferenceLLA([-35.3615074158, 149.163650513, 500])
actions = [mission.writePreamble,
mission.writeHomeLLA,
mission.writeTakeoffLLA,
mission.writeLoiterTime,
mission.generateGrid,
mission.writeReturnToLaunch]
mission.writeWaypointFile(actions)
Principal.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class Principal(object):
def __init__(self):
self._cert_no = None
self._cert_type = None
self._signer_type = None
self._user_name = None
self._verify_type = None
@property
def cert_no(self):
return self._cert_no
@cert_no.setter
def cert_no(self, value):
self._cert_no = value
@property
def cert_type(self):
return self._cert_type
@cert_type.setter
def cert_type(self, value):
self._cert_type = value
@property
def signer_type(self):
return self._signer_type
@signer_type.setter
def signer_type(self, value):
self._signer_type = value
@property
def user_name(self):
return self._user_name
@user_name.setter
def user_name(self, value):
self._user_name = value
@property
def verify_type(self):
return self._verify_type
@verify_type.setter
def verify_type(self, value):
self._verify_type = value
def to_alipay_dict(self):
params = dict()
if self.cert_no:
if hasattr(self.cert_no, 'to_alipay_dict'):
params['cert_no'] = self.cert_no.to_alipay_dict()
else:
params['cert_no'] = self.cert_no
if self.cert_type:
if hasattr(self.cert_type, 'to_alipay_dict'):
params['cert_type'] = self.cert_type.to_alipay_dict()
else:
params['cert_type'] = self.cert_type
if self.signer_type:
if hasattr(self.signer_type, 'to_alipay_dict'):
params['signer_type'] = self.signer_type.to_alipay_dict()
else:
params['signer_type'] = self.signer_type
if self.user_name:
if hasattr(self.user_name, 'to_alipay_dict'):
params['user_name'] = self.user_name.to_alipay_dict()
else:
params['user_name'] = self.user_name
if self.verify_type:
if hasattr(self.verify_type, 'to_alipay_dict'):
params['verify_type'] = self.verify_type.to_alipay_dict()
else:
params['verify_type'] = self.verify_type
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = Principal()
if 'cert_no' in d:
o.cert_no = d['cert_no']
if 'cert_type' in d:
o.cert_type = d['cert_type']
if 'signer_type' in d:
o.signer_type = d['signer_type']
if 'user_name' in d:
o.user_name = d['user_name']
if 'verify_type' in d:
o.verify_type = d['verify_type']
return o
violent.py
import cv2
import numpy as np
from VideoProcess import PreProcess
from OpticalFlow import OptFlow
import math
class VioFlow:
def __init__(self,video_name):
self.height = 0
self.width = 0
self.B_height = 0
self.B_width = 0
self.bins = np.arange(0.0,1.05,0.05,dtype=np.float64)
self.video_name = video_name
def getViolentFlow(self):
vid = PreProcess()
vid.read_video(self.video_name)
flow = OptFlow()
vid.setVideoDimension(100)
index = 0
temp_flows = []
for each_frame_index in range(3,vid.total_frames - vid.FRAME_GAP - 5,vid.FRAME_GAP):
PREV_F = vid.getFrameFromIndex(each_frame_index)
CURRENT_F = vid.getFrameFromIndex(each_frame_index + vid.MOVEMENT_INTERVAL)
NEXT_F = vid.getFrameFromIndex(each_frame_index + (2 * vid.MOVEMENT_INTERVAL))
PREV_F = vid.resize_frame(PREV_F)
CURRENT_F = vid.resize_frame(CURRENT_F)
NEXT_F = vid.resize_frame(NEXT_F)
(vx1,vy1,w1) = flow.sorFlow(PREV_F,CURRENT_F)
(vx2,vy2,w2) = flow.sorFlow(CURRENT_F,NEXT_F)
m1 = flow.getFlowMagnitude(vx1,vy1)
index = index + 1
m2 = flow.getFlowMagnitude(vx2,vy2)
change_mag = abs(m2-m1)
binary_mag = np.ones(change_mag.shape,dtype=np.float64)
threshold = np.mean(change_mag , dtype=np.float64)
temp_flows.append(np.where(change_mag < threshold,0,binary_mag))
flow_video = np.zeros(change_mag.shape,dtype=np.float64)
for each_flow in temp_flows:
flow_video = flow_video + each_flow
flow_video = flow_video / index
self.height = flow_video.shape[0]
self.width = flow_video.shape[1]
self.B_height = int(math.floor((self.height - 11)/4))
self.B_width = int(math.floor((self.width - 11)/4))
return flow_video
def histc(self, X, bins):
map_to_bins = np.digitize(X,bins)
r = np.zeros(bins.shape,dtype=np.float64)
for i in map_to_bins:
r[i-1] += 1
return r
def getBlockHist(self,flow_video):
flow_vec = np.reshape(flow_video,(flow_video.shape[0]*flow_video.shape[1],1))
count_of_bins = self.histc(flow_vec,self.bins)
return count_of_bins/np.sum(count_of_bins)
def getFeatureVector(self):
frame_hist = []
flow_video = self.getViolentFlow()
for y in range(6,self.height-self.B_height-4,self.B_height):
for x in range(6,self.width-self.B_width-4,self.B_width):
block_hist = self.getBlockHist(flow_video[y:y+self.B_height,x:x+self.B_width])
frame_hist = np.append(frame_hist,block_hist,axis = 0)
return frame_hist
def writeFeatureToFile(self,filename):
np.savetxt(filename, self.getFeatureVector(), delimiter=',')
main_test.go
package main
import (
"bytes"
"context"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"sort"
"strings"
"testing"
"time"
"github.com/google/go-cmp/cmp"
coreapi "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/client-go/kubernetes/fake"
coreclientset "k8s.io/client-go/kubernetes/typed/core/v1"
"k8s.io/client-go/rest"
"github.com/openshift/ci-tools/pkg/api/secretbootstrap"
"github.com/openshift/ci-tools/pkg/bitwarden"
"github.com/openshift/ci-tools/pkg/secrets"
"github.com/openshift/ci-tools/pkg/testhelper"
"github.com/openshift/ci-tools/pkg/vaultclient"
)
func TestParseOptions(t *testing.T) {
testCases := []struct {
name string
given []string
expected options
}{
{
name: "basic case",
given: []string{"cmd", "--dry-run=false", "--config=/tmp/config"},
expected: options{
configPath: "/tmp/config",
},
},
{
name: "with kubeconfig",
given: []string{"cmd", "--dry-run=false", "--config=/tmp/config", "--kubeconfig=/tmp/kubeconfig"},
expected: options{
configPath: "/tmp/config",
kubeConfigPath: "/tmp/kubeconfig",
},
},
}
censor := secrets.NewDynamicCensor()
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
oldArgs := os.Args
defer func() { os.Args = oldArgs }()
os.Args = tc.given
actual, err := parseOptions(&censor)
if err != nil {
t.Fatal(err)
}
if actual.dryRun != tc.expected.dryRun {
t.Errorf("%q: (dryRun) actual differs from expected:\n%s", tc.name, cmp.Diff(actual.dryRun, tc.expected.dryRun))
}
if actual.kubeConfigPath != tc.expected.kubeConfigPath {
t.Errorf("%q: (kubeConfigPath) actual differs from expected:\n%s", tc.name, cmp.Diff(actual.kubeConfigPath, tc.expected.kubeConfigPath))
}
})
}
}
func TestValidateOptions(t *testing.T) {
testCases := []struct {
name string
given options
expected error
}{
{
name: "basic case",
given: options{
logLevel: "info",
configPath: "/tmp/config",
secrets: secrets.CLIOptions{
BwUser: "username",
BwPasswordPath: "/tmp/bw-password",
},
},
},
{
name: "empty bw user",
given: options{
logLevel: "info",
configPath: "/tmp/config",
secrets: secrets.CLIOptions{
BwPasswordPath: "/tmp/bw-password",
},
},
expected: fmt.Errorf("[--bw-user and --bw-password-path must be specified together, must specify credentials for exactly one of vault or bitwarden, got credentials for: []]"),
},
{
name: "empty bw user password path",
given: options{
logLevel: "info",
configPath: "/tmp/config",
secrets: secrets.CLIOptions{
BwUser: "username",
},
},
expected: fmt.Errorf("[--bw-user and --bw-password-path must be specified together, must specify credentials for exactly one of vault or bitwarden, got credentials for: []]"),
},
{
name: "empty config path",
given: options{
logLevel: "info",
secrets: secrets.CLIOptions{
BwUser: "username",
BwPasswordPath: "/tmp/bw-password",
},
},
expected: fmt.Errorf("--config is required"),
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
actual := tc.given.validateOptions()
equalError(t, tc.expected, actual)
})
}
}
const (
configContent = `---
secret_configs:
- from:
key-name-1:
bw_item: item-name-1
field: field-name-1
key-name-2:
bw_item: item-name-1
field: field-name-2
key-name-3:
bw_item: item-name-1
attachment: attachment-name-1
key-name-4:
bw_item: item-name-2
field: field-name-1
key-name-5:
bw_item: item-name-2
attachment: attachment-name-1
key-name-6:
bw_item: item-name-3
attachment: attachment-name-2
key-name-7:
bw_item: item-name-3
attribute: password
to:
- cluster: default
namespace: namespace-1
name: prod-secret-1
- cluster: build01
namespace: namespace-2
name: prod-secret-2
- from:
.dockerconfigjson:
bw_item: quay.io
field: Pull Credentials
to:
- cluster: default
namespace: ci
name: ci-pull-credentials
type: kubernetes.io/dockerconfigjson
`
configContentWithTypo = `---
secret_configs:
- from:
key-name-1:
bw_item: item-name-1
field: field-name-1
key-name-2:
bw_item: item-name-1
field: field-name-2
key-name-3:
bw_item: item-name-1
attachment: attachment-name-1
key-name-4:
bw_item: item-name-2
field: field-name-1
key-name-5:
bw_item: item-name-2
attachment: attachment-name-1
key-name-6:
bw_item: item-name-3
attachment: attachment-name-2
to:
- cluster: default
namespace: namespace-1
name: prod-secret-1
- cluster: bla
namespace: namespace-2
name: prod-secret-2
`
configContentWithNonPasswordAttribute = `---
secret_configs:
- from:
key-name-1:
bw_item: item-name-1
field: field-name-1
key-name-2:
bw_item: item-name-1
attribute: not-password
to:
- cluster: default
namespace: namespace-1
name: prod-secret-1
- cluster: build01
namespace: namespace-2
name: prod-secret-2
`
configWithGroups = `
cluster_groups:
group-a:
- default
secret_configs:
- from:
key-name-1:
bw_item: item-name-1
field: field-name-1
to:
- cluster_groups:
- group-a
namespace: ns
name: name
`
kubeConfigContent = `---
apiVersion: v1
clusters:
- cluster:
server: https://api.ci.openshift.org:443
name: api-ci-openshift-org:443
- cluster:
server: https://api.build01.ci.devcluster.openshift.com:6443
name: api-build01-ci-devcluster-openshift-com:6443
contexts:
- context:
cluster: api-build01-ci-devcluster-openshift-com:6443
namespace: ci
user: system:serviceaccount:ci:tool/api-build01-ci-devcluster-openshift-com:6443
name: build01
- context:
cluster: api-ci-openshift-org:443
namespace: ci
user: system:serviceaccount:ci:tool/api-ci-openshift-org:443
name: default
current-context: default
kind: Config
preferences: {}
users:
- name: system:serviceaccount:ci:tool/api-ci-openshift-org:443
user:
token: token1
- name: system:serviceaccount:ci:tool/api-build01-ci-devcluster-openshift-com:6443
user:
token: token2
`
)
var (
configDefault = rest.Config{
Host: "https://api.ci.openshift.org:443",
BearerToken: "token1",
}
configBuild01 = rest.Config{
Host: "https://api.build01.ci.devcluster.openshift.com:6443",
BearerToken: "token2",
}
defaultConfig = secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
"key-name-2": {
BWItem: "item-name-1",
Field: "field-name-2",
},
"key-name-3": {
BWItem: "item-name-1",
Attachment: "attachment-name-1",
},
"key-name-4": {
BWItem: "item-name-2",
Field: "field-name-1",
},
"key-name-5": {
BWItem: "item-name-2",
Attachment: "attachment-name-1",
},
"key-name-6": {
BWItem: "item-name-3",
Attachment: "attachment-name-2",
},
"key-name-7": {
BWItem: "item-name-3",
Attribute: "password",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
{
Cluster: "build01",
Namespace: "namespace-2",
Name: "prod-secret-2",
},
},
},
{
From: map[string]secretbootstrap.BitWardenContext{
".dockerconfigjson": {
BWItem: "quay.io",
Field: "Pull Credentials",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "ci",
Name: "ci-pull-credentials",
Type: "kubernetes.io/dockerconfigjson",
},
},
},
},
}
defaultConfigWithoutDefaultCluster = secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
"key-name-2": {
BWItem: "item-name-1",
Field: "field-name-2",
},
"key-name-3": {
BWItem: "item-name-1",
Attachment: "attachment-name-1",
},
"key-name-4": {
BWItem: "item-name-2",
Field: "field-name-1",
},
"key-name-5": {
BWItem: "item-name-2",
Attachment: "attachment-name-1",
},
"key-name-6": {
BWItem: "item-name-3",
Attachment: "attachment-name-2",
},
"key-name-7": {
BWItem: "item-name-3",
Attribute: "password",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "build01",
Namespace: "namespace-2",
Name: "prod-secret-2",
},
},
},
},
}
)
func TestCompleteOptions(t *testing.T) {
dir, err := ioutil.TempDir("", "test")
if err != nil {
t.Errorf("Failed to create temp dir")
}
defer func() {
if err := os.RemoveAll(dir); err != nil {
t.Errorf("Failed to remove temp dir")
}
}()
bwPasswordPath := filepath.Join(dir, "bwPasswordPath")
configPath := filepath.Join(dir, "configPath")
kubeConfigPath := filepath.Join(dir, "kubeConfigPath")
configWithTypoPath := filepath.Join(dir, "configWithTypoPath")
configWithGroupsPath := filepath.Join(dir, "configWithGroups")
configWithNonPasswordAttributePath := filepath.Join(dir, "configContentWithNonPasswordAttribute")
fileMap := map[string][]byte{
bwPasswordPath: []byte("topSecret"),
configPath: []byte(configContent),
kubeConfigPath: []byte(kubeConfigContent),
configWithTypoPath: []byte(configContentWithTypo),
configWithGroupsPath: []byte(configWithGroups),
configWithNonPasswordAttributePath: []byte(configContentWithNonPasswordAttribute),
}
for k, v := range fileMap {
if err := ioutil.WriteFile(k, v, 0755); err != nil {
t.Errorf("Failed to remove temp dir")
}
}
testCases := []struct {
name string
given options
expectedError error
expectedBWPassword string
expectedConfig secretbootstrap.Config
expectedClusters []string
}{
{
name: "basic case",
given: options{
logLevel: "info",
configPath: configPath,
kubeConfigPath: kubeConfigPath,
},
expectedBWPassword: "topSecret",
expectedConfig: defaultConfig,
expectedClusters: []string{"build01", "default"},
},
{
name: "missing context in kubeconfig",
given: options{
logLevel: "info",
configPath: configWithTypoPath,
kubeConfigPath: kubeConfigPath,
},
expectedConfig: defaultConfig,
expectedError: fmt.Errorf("config[0].to[1]: failed to find cluster context \"bla\" in the kubeconfig"),
},
{
name: "only configured cluster is used",
given: options{
logLevel: "info",
configPath: configPath,
kubeConfigPath: kubeConfigPath,
cluster: "build01",
},
expectedBWPassword: "topSecret",
expectedConfig: defaultConfigWithoutDefaultCluster,
expectedClusters: []string{"build01"},
},
{
name: "attribute is not password",
given: options{
logLevel: "info",
configPath: configWithNonPasswordAttributePath,
kubeConfigPath: kubeConfigPath,
},
expectedConfig: defaultConfig,
expectedError: fmt.Errorf("failed to validate the config: config[0].from[key-name-2].attribute: only the 'password' is supported, not not-password"),
},
{
name: "group is resolved",
given: options{
logLevel: "info",
configPath: configWithGroupsPath,
kubeConfigPath: kubeConfigPath,
},
expectedBWPassword: "topSecret",
expectedConfig: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{{
From: map[string]secretbootstrap.BitWardenContext{"key-name-1": {BWItem: "item-name-1", Field: "field-name-1"}},
To: []secretbootstrap.SecretContext{{Cluster: "default", Namespace: "ns", Name: "name"}},
}},
},
expectedClusters: []string{"default"},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
censor := secrets.NewDynamicCensor()
actualError := tc.given.completeOptions(&censor)
equalError(t, tc.expectedError, actualError)
if tc.expectedError == nil {
equal(t, "config", tc.expectedConfig, tc.given.config)
var actualClusters []string
for k := range tc.given.secretsGetters {
actualClusters = append(actualClusters, k)
}
sort.Strings(actualClusters)
equal(t, "clusters", tc.expectedClusters, actualClusters)
}
})
}
}
func TestValidateCompletedOptions(t *testing.T) {
testCases := []struct {
name string
given options
kubeConfigs map[string]rest.Config
expected error
}{
{
name: "basic case",
given: options{
logLevel: "info",
config: defaultConfig,
},
kubeConfigs: map[string]rest.Config{
"default": configDefault,
"build01": configBuild01,
},
},
{
name: "empty config",
given: options{},
expected: fmt.Errorf("no secrets found to sync"),
},
{
name: "empty config with cluster filter",
given: options{cluster: "cluster"},
expected: fmt.Errorf("no secrets found to sync for --cluster=cluster"),
},
{
name: "empty to",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].to is empty"),
},
{
name: "empty from",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{{}},
},
},
expected: fmt.Errorf("config[0].from is empty"),
},
{
name: "empty key",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from: empty key is not allowed"),
},
{
name: "empty bw item",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from[key-name-1]: empty value is not allowed"),
},
{
name: "empty field and empty attachment",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from[key-name-1]: one of [field, attachment, attribute] must be set"),
},
{
name: "non-empty field and non-empty attachment",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
Attachment: "attachment-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from[key-name-1]: cannot use more than one in [field, attachment, attribute]"),
},
{
name: "empty cluster",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].to[0].cluster: empty value is not allowed"),
},
{
name: "empty namespace",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Attachment: "attachment-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Name: "prod-secret-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].to[0].namespace: empty value is not allowed"),
},
{
name: "empty name",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].to[0].name: empty value is not allowed"),
},
{
name: "conflicting secrets in same TO",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
".dockerconfigjson": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
Type: "kubernetes.io/dockerconfigjson",
},
{
Cluster: "build01",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
Type: "kubernetes.io/dockerconfigjson",
},
},
},
},
},
},
kubeConfigs: map[string]rest.Config{
"default": configDefault,
"build01": configBuild01,
},
expected: errors.New("config[0].to[2]: secret namespace-1/prod-secret-1 in cluster default listed more than once in the config"),
},
{
name: "conflicting secrets in different TOs",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "build01",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
BWItem: "item-name-1",
Field: "field-name-1",
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
{
Cluster: "build01",
Namespace: "namespace-1",
Name: "prod-secret-1",
},
},
},
},
},
},
kubeConfigs: map[string]rest.Config{
"default": configDefault,
"build01": configBuild01,
},
expected: errors.New("config[1].to[0]: secret namespace-1/prod-secret-1 in cluster default listed more than once in the config"),
},
{
name: "happy dockerconfigJSON configuration",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
".dockerconfigjson": {
DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "bitwarden-item",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
{
BWItem: "bitwarden-item2",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Name: "docker-config-json-secret",
Namespace: "namespace-1",
Type: "kubernetes.io/dockerconfigjson",
},
},
},
},
},
},
},
{
name: "happy dockerconfigJSON configuration: use RegistryURL",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
".dockerconfigjson": {
DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "bitwarden-item",
RegistryURL: "quay.io",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
{
BWItem: "bitwarden-item2",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Name: "docker-config-json-secret",
Namespace: "namespace-1",
Type: "kubernetes.io/dockerconfigjson",
},
},
},
},
},
},
},
{
name: "sad dockerconfigJSON configuration: cannot set both RegistryURL and RegistryURLBitwardenField",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
".dockerconfigjson": {
DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "bitwarden-item",
RegistryURL: "quay.io",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Name: "docker-config-json-secret",
Namespace: "namespace-1",
Type: "kubernetes.io/dockerconfigjson",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from[.dockerconfigjson]: registry_url_bw_field and registry_url are mutualy exclusive"),
},
{
name: "sad dockerconfigJSON configuration",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "bitwarden-item",
RegistryURLBitwardenField: "registryURL",
},
{
BWItem: "bitwarden-item2",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Name: "docker-config-json-secret",
Namespace: "namespace-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from[key-name-1]: auth_bw_attachment is missing"),
},
{
name: "sad dockerconfigJSON configuration: cannot determine registry URL",
given: options{
logLevel: "info",
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"key-name-1": {
DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "bitwarden-item2",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
},
},
To: []secretbootstrap.SecretContext{
{
Cluster: "default",
Name: "docker-config-json-secret",
Namespace: "namespace-1",
},
},
},
},
},
},
expected: fmt.Errorf("config[0].from[key-name-1]: either registry_url_bw_field or registry_url must be set"),
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
actual := tc.given.validateCompletedOptions()
equalError(t, tc.expected, actual)
})
}
}
func TestConstructSecrets(t *testing.T) {
testCases := []struct {
name string
config secretbootstrap.Config
bwItems []bitwarden.Item
// id -> content
attachments map[string]string
expected map[string][]*coreapi.Secret
expectedBitwardenErr string
expectedVaultErr string
}{
{
name: "basic case",
config: defaultConfig,
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value1",
},
{
Name: "field-name-2",
Value: "value2",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-1-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-1-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value3",
},
{
Name: "field-name-2",
Value: "value2",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-2-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-2-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "3",
Name: "item-name-3",
Login: &bitwarden.Login{
Password: "yyy",
},
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value1",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-3-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-3-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "a",
Name: "quay.io",
Fields: []bitwarden.Field{
{
Name: "Pull Credentials",
Value: "123",
},
},
},
},
attachments: map[string]string{
"a-id-1-1": "attachment-name-1-1-value",
"a-id-2-1": "attachment-name-2-1-value",
"a-id-3-2": "attachment-name-3-2-value",
},
expected: map[string][]*coreapi.Secret{
"default": {
{
TypeMeta: metav1.TypeMeta{Kind: "Secret", APIVersion: "v1"},
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
Type: "Opaque",
},
{
TypeMeta: metav1.TypeMeta{Kind: "Secret", APIVersion: "v1"},
ObjectMeta: metav1.ObjectMeta{
Name: "ci-pull-credentials",
Namespace: "ci",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
".dockerconfigjson": []byte("123"),
},
Type: "kubernetes.io/dockerconfigjson",
},
},
"build01": {
{
TypeMeta: metav1.TypeMeta{Kind: "Secret", APIVersion: "v1"},
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
Type: "Opaque",
},
},
},
},
{
name: "error: no such field",
config: defaultConfig,
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{
{
Name: "field-name-2",
Value: "value2",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-1-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-1-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value3",
},
{
Name: "field-name-2",
Value: "value2",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-2-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-2-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "3",
Name: "item-name-3",
Login: &bitwarden.Login{
Password: "yyy",
},
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value1",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-3-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-3-2",
FileName: "attachment-name-2",
},
},
},
},
attachments: map[string]string{
"a-id-1-1": "attachment-name-1-1-value",
"a-id-2-1": "attachment-name-2-1-value",
"a-id-3-2": "attachment-name-3-2-value",
},
expectedBitwardenErr: `[config.0."key-name-1": failed to find field field-name-1 in item item-name-1, config.1.".dockerconfigjson": no item quay.io found]`,
expectedVaultErr: `[config.0."key-name-1": item at path "prefix/item-name-1" has no key "field-name-1", config.1.".dockerconfigjson": no data at path prefix/quay.io]`,
},
{
name: "error: no such attachment",
config: defaultConfig,
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Login: &bitwarden.Login{Password: "abc"},
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value1",
},
{
Name: "field-name-2",
Value: "value2",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-1-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-1-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value3",
},
{
Name: "field-name-2",
Value: "value2",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-2-2",
FileName: "attachment-name-2",
},
},
},
{
ID: "3",
Name: "item-name-3",
Fields: []bitwarden.Field{
{
Name: "field-name-1",
Value: "value1",
},
},
Attachments: []bitwarden.Attachment{
{
ID: "a-id-3-1",
FileName: "attachment-name-1",
},
{
ID: "a-id-3-2",
FileName: "attachment-name-2",
},
},
},
},
attachments: map[string]string{
"a-id-1-1": "attachment-name-1-1-value",
"a-id-3-2": "attachment-name-3-2-value",
},
expectedBitwardenErr: `[config.0."key-name-5": failed to find attachment attachment-name-1 in item item-name-2, config.0."key-name-7": failed to find password in item item-name-3, config.1.".dockerconfigjson": no item quay.io found]`,
expectedVaultErr: `[config.0."key-name-5": item at path "prefix/item-name-2" has no key "attachment-name-1", config.0."key-name-7": item at path "prefix/item-name-3" has no key "password", config.1.".dockerconfigjson": no data at path prefix/quay.io]`,
},
{
name: "Usersecret, simple happy case",
bwItems: []bitwarden.Item{{Name: "my/vault/secret", Fields: bwFieldsFromMap(map[string]string{
"secretsync/target-namespace": "some-namespace",
"secretsync/target-name": "some-name",
"some-data-key": "a-secret",
})}},
config: secretbootstrap.Config{UserSecretsTargetClusters: []string{"a", "b"}},
expected: map[string][]*coreapi.Secret{
"a": {{
ObjectMeta: metav1.ObjectMeta{Namespace: "some-namespace", Name: "some-name", Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"}},
Type: coreapi.SecretTypeOpaque,
Data: map[string][]byte{
"some-data-key": []byte("a-secret"),
"secretsync-vault-source-path": []byte("prefix/my/vault/secret"),
},
}},
"b": {{
ObjectMeta: metav1.ObjectMeta{Namespace: "some-namespace", Name: "some-name", Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"}},
Type: coreapi.SecretTypeOpaque,
Data: map[string][]byte{
"some-data-key": []byte("a-secret"),
"secretsync-vault-source-path": []byte("prefix/my/vault/secret"),
},
}},
},
},
{
name: "Secret gets combined from user- and dptp secret ",
bwItems: []bitwarden.Item{
{Name: "my/vault/secret", Fields: bwFieldsFromMap(map[string]string{
"secretsync/target-namespace": "some-namespace",
"secretsync/target-name": "some-name",
"some-data-key": "a-secret",
})},
{Name: "dptp-item", Fields: []bitwarden.Field{{Name: "dptp-key", Value: "dptp-secret"}}},
},
config: secretbootstrap.Config{
UserSecretsTargetClusters: []string{"a", "b"},
Secrets: []secretbootstrap.SecretConfig{{
From: map[string]secretbootstrap.BitWardenContext{"dptp-key": {BWItem: "dptp-item", Field: "dptp-key"}},
To: []secretbootstrap.SecretContext{
{Cluster: "a", Namespace: "some-namespace", Name: "some-name"},
{Cluster: "b", Namespace: "some-namespace", Name: "some-name"},
},
}},
},
expected: map[string][]*coreapi.Secret{
"a": {{
ObjectMeta: metav1.ObjectMeta{Namespace: "some-namespace", Name: "some-name", Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"}},
Type: coreapi.SecretTypeOpaque,
Data: map[string][]byte{
"dptp-key": []byte("dptp-secret"),
"some-data-key": []byte("a-secret"),
"secretsync-vault-source-path": []byte("prefix/my/vault/secret"),
},
}},
"b": {{
ObjectMeta: metav1.ObjectMeta{Namespace: "some-namespace", Name: "some-name", Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"}},
Type: coreapi.SecretTypeOpaque,
Data: map[string][]byte{
"dptp-key": []byte("dptp-secret"),
"some-data-key": []byte("a-secret"),
"secretsync-vault-source-path": []byte("prefix/my/vault/secret"),
},
}},
},
},
{
name: "Usersecret would override dptp key, error",
bwItems: []bitwarden.Item{
{Name: "my/vault/secret", Fields: bwFieldsFromMap(map[string]string{
"secretsync/target-namespace": "some-namespace",
"secretsync/target-name": "some-name",
"dptp-key": "user-value",
})},
{Name: "dptp-item", Fields: []bitwarden.Field{{Name: "dptp-key", Value: "dptp-secret"}}},
},
config: secretbootstrap.Config{
UserSecretsTargetClusters: []string{"a", "b"},
Secrets: []secretbootstrap.SecretConfig{{
From: map[string]secretbootstrap.BitWardenContext{"dptp-key": {BWItem: "dptp-item", Field: "dptp-key"}},
To: []secretbootstrap.SecretContext{
{Cluster: "a", Namespace: "some-namespace", Name: "some-name"},
{Cluster: "b", Namespace: "some-namespace", Name: "some-name"},
},
}},
},
expectedVaultErr: `[key dptp-key in secret some-namespace/some-name in cluster a is targeted by ci-secret-bootstrap config and by vault item in path prefix/my/vault/secret, key dptp-key in secret some-namespace/some-name in cluster b is targeted by ci-secret-bootstrap config and by vault item in path prefix/my/vault/secret]`,
},
{
name: "dptp secret isn't of opaque type, error",
bwItems: []bitwarden.Item{
{Name: "my/vault/secret", Fields: bwFieldsFromMap(map[string]string{
"secretsync/target-namespace": "some-namespace",
"secretsync/target-name": "some-name",
"dptp-key": "user-value",
})},
{Name: "dptp-item", Fields: []bitwarden.Field{{Name: "dptp-key", Value: "dptp-secret"}}},
},
config: secretbootstrap.Config{
UserSecretsTargetClusters: []string{"a", "b"},
Secrets: []secretbootstrap.SecretConfig{{
From: map[string]secretbootstrap.BitWardenContext{"dptp-key": {BWItem: "dptp-item", Field: "dptp-key"}},
To: []secretbootstrap.SecretContext{
{Cluster: "a", Namespace: "some-namespace", Name: "some-name", Type: coreapi.SecretTypeBasicAuth},
{Cluster: "b", Namespace: "some-namespace", Name: "some-name", Type: coreapi.SecretTypeBasicAuth},
},
}},
},
expectedVaultErr: `[secret some-namespace/some-name in cluster a has ci-secret-bootstrap config as non-opaque type and is targeted by user sync from key prefix/my/vault/secret, secret some-namespace/some-name in cluster b has ci-secret-bootstrap config as non-opaque type and is targeted by user sync from key prefix/my/vault/secret]`,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
for _, clientTestCase := range []clientTestCase{
{name: "bitwarden", client: secrets.NewBitwardenClient(bitwarden.NewFakeClient(tc.bwItems, tc.attachments)), expectedError: tc.expectedBitwardenErr},
{name: "vault", client: vaultClientFromBitwardenItems(tc.bwItems, tc.attachments), expectedError: tc.expectedVaultErr},
} {
t.Run(clientTestCase.name, func(t *testing.T) {
if len(tc.config.UserSecretsTargetClusters) != 0 && clientTestCase.name == "bitwarden" {
t.Skip("Bitwarden doesn't implement a user secret sync functionality")
}
var actualErrorMsg string
actual, actualError := constructSecrets(context.TODO(), tc.config, clientTestCase.client, 10)
if actualError != nil {
actualErrorMsg = actualError.Error()
}
if actualErrorMsg != clientTestCase.expectedError {
t.Fatalf("expected error message %s, got %s", clientTestCase.expectedError, actualErrorMsg)
}
if actualError != nil {
return
}
for key := range actual {
sort.Slice(actual[key], func(i, j int) bool {
return actual[key][i].Name < actual[key][j].Name
})
}
for key := range tc.expected {
sort.Slice(tc.expected[key], func(i, j int) bool {
return tc.expected[key][i].Name < tc.expected[key][j].Name
})
}
equal(t, "secrets", tc.expected, actual)
})
}
})
}
}
func bwFieldsFromMap(m map[string]string) []bitwarden.Field {
var res []bitwarden.Field
for k, v := range m {
res = append(res, bitwarden.Field{Name: k, Value: v})
}
return res
}
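The equalError and equal helpers called throughout these tests are defined outside this excerpt; a plausible minimal shape, offered purely as an assumption for orientation (it reuses the file's existing testing and go-cmp imports):

// Assumed shapes only; the real helpers may differ.
func equalError(t *testing.T, expected, actual error) {
	t.Helper()
	if (expected == nil) != (actual == nil) || (expected != nil && actual != nil && expected.Error() != actual.Error()) {
		t.Errorf("expected error %v, got %v", expected, actual)
	}
}

func equal(t *testing.T, what string, expected, actual interface{}) {
	t.Helper()
	if diff := cmp.Diff(expected, actual); diff != "" {
		t.Errorf("%s: actual differs from expected:\n%s", what, diff)
	}
}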
func TestUpdateSecrets(t *testing.T) {
testCases := []struct {
name string
existSecretsOnDefault []runtime.Object
existSecretsOnBuild01 []runtime.Object
secretsMap map[string][]*coreapi.Secret
force bool
expected error
expectedSecretsOnDefault []coreapi.Secret
expectedSecretsOnBuild01 []coreapi.Secret
}{
{
name: "basic case with force",
existSecretsOnDefault: []runtime.Object{
&coreapi.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
},
},
},
secretsMap: map[string][]*coreapi.Secret{
"default": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
"build01": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
},
force: true,
expectedSecretsOnDefault: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
expectedSecretsOnBuild01: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
},
{
name: "basic case with force, unrelated keys are kept",
existSecretsOnDefault: []runtime.Object{
&coreapi.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
"unmanaged": []byte("data"),
},
},
},
secretsMap: map[string][]*coreapi.Secret{
"default": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
"build01": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
},
force: true,
expectedSecretsOnDefault: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
"unmanaged": []byte("data"),
},
},
},
expectedSecretsOnBuild01: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
"key-name-7": []byte("yyy"),
},
},
},
},
{
name: "basic case without force: not semantically equal",
existSecretsOnDefault: []runtime.Object{
&coreapi.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
},
},
},
secretsMap: map[string][]*coreapi.Secret{
"default": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
},
},
},
},
expected: fmt.Errorf("secret default:namespace-1/prod-secret-1 needs updating in place, use --force to do so"),
expectedSecretsOnDefault: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
},
},
},
},
{
name: "basic case without force: semantically equal",
existSecretsOnDefault: []runtime.Object{
&coreapi.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
CreationTimestamp: metav1.NewTime(time.Now()),
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
},
},
},
secretsMap: map[string][]*coreapi.Secret{
"default": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
},
},
},
},
expectedSecretsOnDefault: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("abc"),
},
},
},
},
{
name: "change secret type with force",
existSecretsOnDefault: []runtime.Object{
&coreapi.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
},
Data: map[string][]byte{
"key-name-1": []byte(`{
"auths": {
"quay.io": {
"auth": "aaa",
"email": ""
}
}
}`),
},
Type: coreapi.SecretTypeDockerConfigJson,
},
},
secretsMap: map[string][]*coreapi.Secret{
"default": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
},
Data: map[string][]byte{
"key-name-1": []byte(`{
"auths": {
"quay.io": {
"auth": "aaa",
"email": ""
}
}
}`),
},
Type: coreapi.SecretTypeOpaque,
},
},
},
force: true,
expectedSecretsOnDefault: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
},
Data: map[string][]byte{
"key-name-1": []byte(`{
"auths": {
"quay.io": {
"auth": "aaa",
"email": ""
}
}
}`),
},
Type: coreapi.SecretTypeOpaque,
},
},
},
{
name: "change secret type without force",
existSecretsOnDefault: []runtime.Object{
&coreapi.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
},
Data: map[string][]byte{
"key-name-1": []byte(`{
"auths": {
"quay.io": {
"auth": "aaa",
"email": ""
}
}
}`),
},
Type: coreapi.SecretTypeDockerConfigJson,
},
},
secretsMap: map[string][]*coreapi.Secret{
"default": {
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
},
Data: map[string][]byte{
"key-name-1": []byte(`{
"auths": {
"quay.io": {
"auth": "aaa",
"email": ""
}
}
}`),
},
},
},
},
expected: fmt.Errorf("cannot change secret type from \"kubernetes.io/dockerconfigjson\" to \"\" (immutable field): default:namespace-2/prod-secret-2"),
expectedSecretsOnDefault: []coreapi.Secret{
{
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
},
Data: map[string][]byte{
"key-name-1": []byte(`{
"auths": {
"quay.io": {
"auth": "aaa",
"email": ""
}
}
}`),
},
Type: coreapi.SecretTypeDockerConfigJson,
},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fkcDefault := fake.NewSimpleClientset(tc.existSecretsOnDefault...)
fkcBuild01 := fake.NewSimpleClientset(tc.existSecretsOnBuild01...)
clients := map[string]coreclientset.SecretsGetter{
"default": fkcDefault.CoreV1(),
"build01": fkcBuild01.CoreV1(),
}
actual := updateSecrets(clients, tc.secretsMap, tc.force)
equalError(t, tc.expected, actual)
actualSecretsOnDefault, err := fkcDefault.CoreV1().Secrets("").List(context.TODO(), metav1.ListOptions{})
equalError(t, nil, err)
equal(t, "secrets in default cluster", tc.expectedSecretsOnDefault, actualSecretsOnDefault.Items)
actualSecretsOnBuild01, err := fkcBuild01.CoreV1().Secrets("").List(context.TODO(), metav1.ListOptions{})
equalError(t, nil, err)
equal(t, "secrets in build01 cluster", tc.expectedSecretsOnBuild01, actualSecretsOnBuild01.Items)
})
}
}
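// TestWriteSecrets checks that secrets are rendered to a single YAML stream with documents
// separated by '---' markers.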
func TestWriteSecrets(t *testing.T) {
testCases := []struct {
name string
secrets []*coreapi.Secret
w *bytes.Buffer
expected string
expectedError error
}{
{
name: "basic case",
secrets: []*coreapi.Secret{
{
TypeMeta: metav1.TypeMeta{Kind: "Secret", APIVersion: "v1"},
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-1",
Namespace: "namespace-1",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
},
},
{
TypeMeta: metav1.TypeMeta{Kind: "Secret", APIVersion: "v1"},
ObjectMeta: metav1.ObjectMeta{
Name: "prod-secret-2",
Namespace: "namespace-2",
Labels: map[string]string{"dptp.openshift.io/requester": "ci-secret-bootstrap"},
},
Data: map[string][]byte{
"key-name-1": []byte("value1"),
"key-name-2": []byte("value2"),
"key-name-3": []byte("attachment-name-1-1-value"),
"key-name-4": []byte("value3"),
"key-name-5": []byte("attachment-name-2-1-value"),
"key-name-6": []byte("attachment-name-3-2-value"),
},
},
},
w: &bytes.Buffer{},
expected: `apiVersion: v1
data:
key-name-1: dmFsdWUx
key-name-2: dmFsdWUy
key-name-3: YXR0YWNobWVudC1uYW1lLTEtMS12YWx1ZQ==
key-name-4: dmFsdWUz
key-name-5: YXR0YWNobWVudC1uYW1lLTItMS12YWx1ZQ==
key-name-6: YXR0YWNobWVudC1uYW1lLTMtMi12YWx1ZQ==
kind: Secret
metadata:
creationTimestamp: null
labels:
dptp.openshift.io/requester: ci-secret-bootstrap
name: prod-secret-1
namespace: namespace-1
---
apiVersion: v1
data:
key-name-1: dmFsdWUx
key-name-2: dmFsdWUy
key-name-3: YXR0YWNobWVudC1uYW1lLTEtMS12YWx1ZQ==
key-name-4: dmFsdWUz
key-name-5: YXR0YWNobWVudC1uYW1lLTItMS12YWx1ZQ==
key-name-6: YXR0YWNobWVudC1uYW1lLTMtMi12YWx1ZQ==
kind: Secret
metadata:
creationTimestamp: null
labels:
dptp.openshift.io/requester: ci-secret-bootstrap
name: prod-secret-2
namespace: namespace-2
---
`,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
actualError := writeSecretsToFile(tc.secrets, tc.w)
equalError(t, tc.expectedError, actualError)
equal(t, "result", tc.expected, tc.w.String())
})
}
}
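// equalError fails the test when exactly one of expected/actual is nil or when their messages differ.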
func equalError(t *testing.T, expected, actual error) {
t.Helper()
	if (expected != nil && actual == nil) || (expected == nil && actual != nil) {
t.Errorf("expecting error \"%v\", got \"%v\"", expected, actual)
}
if expected != nil && actual != nil && expected.Error() != actual.Error() {
t.Errorf("expecting error msg %q, got %q", expected.Error(), actual.Error())
}
}
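// equal fails the test when expected and actual differ, ignoring resource versions and type meta.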
func equal(t *testing.T, what string, expected, actual interface{}) {
	t.Helper()
if diff := cmp.Diff(expected, actual, testhelper.RuntimObjectIgnoreRvTypeMeta); diff != "" {
t.Errorf("%s differs from expected:\n%s", what, diff)
}
}
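// TestConstructDockerConfigJSON verifies that docker config JSON payloads are assembled from
// Bitwarden items, including registry URL overrides and base64 validation of the auth attachment.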
func TestConstructDockerConfigJSON(t *testing.T) {
type attachment struct {
bwItem string
filename string
contents []byte
}
testCases := []struct {
id string
bwClient bitwarden.Client
dockerConfigJSONData []secretbootstrap.DockerConfigJSONData
attachments []attachment
expectedJSON []byte
expectedError string
}{
{
id: "happy case",
attachments: []attachment{
{
bwItem: "item-name-1",
filename: "auth",
contents: []byte("c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ=="),
},
},
dockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "item-name-1",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
bwClient: bitwarden.NewFakeClient(
[]bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Attachments: []bitwarden.Attachment{
{
ID: "12345678",
FileName: "auth",
},
},
Fields: []bitwarden.Field{
{
Name: "registryURL",
Value: "quay.io",
},
{
Name: "email",
Value: "[email protected]",
},
},
},
}, make(map[string]string)),
expectedJSON: []byte(`{"auths":{"quay.io":{"auth":"c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ==","email":"[email protected]"}}}`),
},
{
			id: "invalid contents, parsing fails",
attachments: []attachment{
{
bwItem: "item-name-1",
filename: "auth",
contents: []byte("123456789"),
},
},
dockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "item-name-1",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
bwClient: bitwarden.NewFakeClient(
[]bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Attachments: []bitwarden.Attachment{
{
ID: "12345678",
FileName: "auth",
},
},
Fields: []bitwarden.Field{
{
Name: "registryURL",
Value: "quay.io",
},
{
Name: "email",
Value: "[email protected]",
},
},
},
}, make(map[string]string)),
expectedJSON: []byte(`{"auths":{"quay.io":{"auth":"123456789","email":"[email protected]"}}}`),
expectedError: "the constructed dockerconfigJSON doesn't parse: illegal base64 data at input byte 8",
},
{
id: "RegistryURL overrides RegistryURLBitwardenField",
attachments: []attachment{
{
bwItem: "item-name-1",
filename: "auth",
contents: []byte("c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ=="),
},
},
dockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "item-name-1",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
RegistryURL: "cool-url",
},
},
bwClient: bitwarden.NewFakeClient(
[]bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Attachments: []bitwarden.Attachment{
{
ID: "12345678",
FileName: "auth",
},
},
Fields: []bitwarden.Field{
{
Name: "registryURL",
Value: "quay.io",
},
{
Name: "email",
Value: "[email protected]",
},
},
},
}, make(map[string]string)),
expectedJSON: []byte(`{"auths":{"cool-url":{"auth":"c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ==","email":"[email protected]"}}}`),
},
{
id: "happy multiple case",
attachments: []attachment{
{
bwItem: "item-name-1",
filename: "auth",
contents: []byte("c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ=="),
},
{
bwItem: "item-name-2",
filename: "auth",
contents: []byte("c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ=="),
},
},
dockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "item-name-1",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
{
BWItem: "item-name-2",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
bwClient: bitwarden.NewFakeClient(
[]bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{
{
Name: "registryURL",
Value: "quay.io",
},
{
Name: "auth",
Value: "123456789",
},
{
Name: "email", | },
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{
{
Name: "registryURL",
Value: "cloud.redhat.com",
},
{
Name: "auth",
Value: "987654321",
},
{
Name: "email",
Value: "[email protected]",
},
},
},
}, make(map[string]string)),
expectedJSON: []byte(`{"auths":{"cloud.redhat.com":{"auth":"c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ==","email":"[email protected]"},"quay.io":{"auth":"c2VydmljZWFjY291bnQ6ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklrRndTekF0YjBaNGJXMUZURXRHTVMwMFVEa3djbEEwUTJWQlRUZERNMGRXUkZwdmJGOVllaTFEUW5NaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUpoYkhaaGNtOHRkR1Z6ZENJc0ltdDFZbVZ5Ym1WMFpYTXVhVzh2YzJWeWRtbGpaV0ZqWTI5MWJuUXZjMlZqY21WMExtNWhiV1VpT2lKa1pXWmhkV3gwTFhSdmEyVnVMV1EwT1d4aUlpd2lhM1ZpWlhKdVpYUmxjeTVwYnk5elpYSjJhV05sWVdOamIzVnVkQzl6WlhKMmFXTmxMV0ZqWTI5MWJuUXVibUZ0WlNJNkltUmxabUYxYkhRaUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNTFhV1FpT2lJM05tVTRZMlpsTmkxbU1HWXhMVFF5WlRNdFlqUm1NQzFoTXpjM1pUbGhOemxrWWpRaUxDSnpkV0lpT2lKemVYTjBaVzA2YzJWeWRtbGpaV0ZqWTI5MWJuUTZZV3gyWVhKdkxYUmxjM1E2WkdWbVlYVnNkQ0o5LnMyajh6X2JfT3NMOHY5UGlLR1NUQmFuZDE0MHExMHc3VTlMdU9JWmZlUG1SeF9OMHdKRkZPcVN0MGNjdmtVaUVGV0x5QWNSU2k2cUt3T1FSVzE2MVUzSU52UEY4Q0pDZ2d2R3JHUnMzeHp6N3hjSmgzTWRpcXhzWGViTmNmQmlmWWxXUTU2U1RTZDlUeUh1RkN6c1poNXBlSHVzS3hOa2hJRTNyWHp5ZHNoMkhCaTZMYTlYZ1l4R1VjM0x3NWh4RnB5bXFyajFJNzExbWZLcUV2bUN0a0J4blJtMlhIZmFKalNVRkswWWdoY0lMbkhuWGhMOEx2MUl0bnU4SzlvWFRfWVZIQWY1R3hlaERjZ3FBMmw1NUZyYkJMTGVfNi1DV2V2N2RQZU5PbFlaWE5xbEtkUG5KbW9BREdsOEktTlhKN2x5ZXl2a2hfZ3JkanhXdVVqQ3lQUQ==","email":"[email protected]"}}}`),
},
{
id: "sad case, field is missing",
dockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{
{
BWItem: "item-name-1",
RegistryURLBitwardenField: "registryURL",
AuthBitwardenAttachment: "auth",
EmailBitwardenField: "email",
},
},
bwClient: bitwarden.NewFakeClient(
[]bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{
{
Name: "registryURL",
Value: "quay.io",
},
{
Name: "email",
Value: "[email protected]",
},
},
},
}, nil),
expectedError: "couldn't get attachment 'auth' from bw item item-name-1: failed to find attachment auth in item item-name-1",
},
}
for _, tc := range testCases {
t.Run(tc.id, func(t *testing.T) {
if len(tc.attachments) > 0 {
for _, attachment := range tc.attachments {
if err := tc.bwClient.SetAttachmentOnItem(attachment.bwItem, attachment.filename, attachment.contents); err != nil {
t.Fatalf("couldn't create attachments: %v", err)
}
}
}
actual, err := constructDockerConfigJSON(secrets.NewBitwardenClient(tc.bwClient), tc.dockerConfigJSONData)
if tc.expectedError != "" && err != nil {
if !reflect.DeepEqual(err.Error(), tc.expectedError) {
t.Fatal(cmp.Diff(err.Error(), tc.expectedError))
}
} else if tc.expectedError == "" && err != nil {
t.Fatalf("Error not expected: %v", err)
} else {
if !reflect.DeepEqual(actual, tc.expectedJSON) {
t.Fatal(cmp.Diff(actual, tc.expectedJSON))
}
}
})
}
}
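// TestGetUnusedBWItems ensures that items, fields, and attachments not referenced by any secret
// config are reported, honoring the allow list and the revision-time threshold.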
func TestGetUnusedBWItems(t *testing.T) {
threshold := time.Now()
dayAfter := threshold.AddDate(0, 0, 1)
dayBefore := threshold.AddDate(0, 0, -1)
testCases := []struct {
id string
config secretbootstrap.Config
bwItems []bitwarden.Item
bwAllowItems sets.String
expectedBitwardenErr string
expectedVaultErr string
}{
{
id: "all used, no unused items expected",
bwAllowItems: sets.NewString(),
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{{Name: "field-name-1"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-1"}},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{{Name: "field-name-2"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-2"}},
},
},
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"1": {BWItem: "item-name-1", Field: "field-name-1"},
"2": {BWItem: "item-name-2", Field: "field-name-2"},
"3": {BWItem: "item-name-1", Attachment: "attachment-name-1"},
"4": {BWItem: "item-name-2", Attachment: "attachment-name-2"},
},
},
},
},
},
{
id: "partly used, unused items expected",
bwAllowItems: sets.NewString(),
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{{Name: "field-name-1"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-1"}},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{{Name: "field-name-2"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-2"}},
},
},
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"1": {BWItem: "item-name-1", Field: "field-name-1"},
"2": {BWItem: "item-name-2", Attachment: "attachment-name-2"},
},
},
},
},
expectedBitwardenErr: "[Unused bw item: 'item-name-1' with Attachments: 'attachment-name-1', Unused bw item: 'item-name-2' with Fields: 'field-name-2']",
expectedVaultErr: "[Unused bw item: 'item-name-1' with SuperfluousFields: [attachment-name-1], Unused bw item: 'item-name-2' with SuperfluousFields: [field-name-2]]",
},
{
id: "partly used with docker json config, unused items expected",
bwAllowItems: sets.NewString(),
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{{Name: "field-name-1"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-1"}},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{{Name: "field-name-2"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-2"}},
},
{
ID: "3",
Name: "item-name-3",
Fields: []bitwarden.Field{{Name: "registry-url"}, {Name: "email"}},
Attachments: []bitwarden.Attachment{{FileName: "auth"}},
},
},
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"1": {BWItem: "item-name-1", Field: "field-name-1"},
"2": {BWItem: "item-name-2", Attachment: "attachment-name-2"},
"3": {DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{{BWItem: "item-name-3", RegistryURLBitwardenField: "registry-url"}}},
},
},
},
},
expectedBitwardenErr: "[Unused bw item: 'item-name-1' with Attachments: 'attachment-name-1', Unused bw item: 'item-name-2' with Fields: 'field-name-2', Unused bw item: 'item-name-3' with Fields: 'email' Attachments: 'auth']",
expectedVaultErr: "[Unused bw item: 'item-name-1' with SuperfluousFields: [attachment-name-1], Unused bw item: 'item-name-2' with SuperfluousFields: [field-name-2], Unused bw item: 'item-name-3' with SuperfluousFields: [auth email]]",
},
{
id: "partly used with an allow list, no unused items expected",
bwAllowItems: sets.NewString([]string{"item-name-2"}...),
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{{Name: "field-name-1"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-1"}},
},
{
ID: "2",
Name: "item-name-2",
Fields: []bitwarden.Field{{Name: "field-name-2"}},
Attachments: []bitwarden.Attachment{{FileName: "attachment-name-2"}},
},
{
ID: "3",
Name: "item-name-3",
Fields: []bitwarden.Field{{Name: "registry-url"}},
Attachments: []bitwarden.Attachment{{FileName: "auth"}},
},
},
config: secretbootstrap.Config{
Secrets: []secretbootstrap.SecretConfig{
{
From: map[string]secretbootstrap.BitWardenContext{
"1": {BWItem: "item-name-1", Field: "field-name-1"},
"2": {BWItem: "item-name-1", Attachment: "attachment-name-1"},
"3": {DockerConfigJSONData: []secretbootstrap.DockerConfigJSONData{{BWItem: "item-name-3", RegistryURLBitwardenField: "registry-url", AuthBitwardenAttachment: "auth"}}},
},
},
},
},
},
{
id: "unused item last modified after threshold is not reported",
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{{Name: "field-name-1", Value: "value-1"}},
RevisionTime: &dayAfter,
},
},
},
{
id: "unused item last modified before threshold is reported",
bwItems: []bitwarden.Item{
{
ID: "1",
Name: "item-name-1",
Fields: []bitwarden.Field{{Name: "field-name-1", Value: "value-1"}},
RevisionTime: &dayBefore,
},
},
expectedBitwardenErr: "Unused bw item: 'item-name-1'",
expectedVaultErr: "Unused bw item: 'item-name-1'",
},
}
for _, tc := range testCases {
t.Run(tc.id, func(t *testing.T) {
for _, clientTestCase := range []clientTestCase{{
name: "bitwarden",
client: secrets.NewBitwardenClient(bitwarden.NewFakeClient(tc.bwItems, nil)),
expectedError: tc.expectedBitwardenErr,
}, {
name: "vault",
client: vaultClientFromBitwardenItems(tc.bwItems, nil),
expectedError: tc.expectedVaultErr,
}} {
t.Run(clientTestCase.name, func(t *testing.T) {
var actualErrMsg string
actualErr := getUnusedBWItems(tc.config, clientTestCase.client, tc.bwAllowItems, threshold)
if actualErr != nil {
actualErrMsg = actualErr.Error()
}
if actualErrMsg != clientTestCase.expectedError {
t.Errorf("expected error: %s\ngot error: %s", clientTestCase.expectedError, actualErr)
}
})
}
})
}
}
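// clientTestCase runs the same expectations against both the Bitwarden-backed and the
// Vault-backed secrets.Client implementations.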
type clientTestCase struct {
name string
client secrets.Client
expectedError string
}
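// vaultClientFromBitwardenItems mirrors the given Bitwarden items into an in-memory Vault KV
// store so the same test data can drive the Vault-backed client.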
func vaultClientFromBitwardenItems(items []bitwarden.Item, attachments map[string]string) secrets.Client {
const prefix = "prefix"
data := make(map[string]*vaultclient.KVData, len(items))
for _, item := range items {
kvItem := &vaultclient.KVData{Data: map[string]string{}}
for _, field := range item.Fields {
kvItem.Data[field.Name] = field.Value
}
for _, attachment := range item.Attachments {
attachmentContent := "some-data"
if val, ok := attachments[attachment.ID]; ok {
attachmentContent = val
}
kvItem.Data[attachment.FileName] = attachmentContent
}
if item.Login != nil && item.Login.Password != "" {
kvItem.Data["password"] = item.Login.Password
}
if item.RevisionTime != nil {
kvItem.Metadata.CreatedTime = *item.RevisionTime
}
data[prefix+"/"+item.Name] = kvItem
}
censor := secrets.NewDynamicCensor()
return secrets.NewVaultClient(&fakeVaultClient{items: data}, prefix, &censor)
}
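// fakeVaultClient is a minimal in-memory stub of the Vault KV API, keyed by secret path.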
type fakeVaultClient struct {
items map[string]*vaultclient.KVData
}
func (f *fakeVaultClient) GetKV(path string) (*vaultclient.KVData, error) {
if item, ok := f.items[path]; ok {
return item, nil
}
return nil, fmt.Errorf("no data at path %s", path)
}
func (f *fakeVaultClient) ListKVRecursively(prefix string) ([]string, error) {
var result []string
for key := range f.items {
if !strings.HasPrefix(key, prefix) {
continue
}
result = append(result, key)
}
return result, nil
}
func (f *fakeVaultClient) UpsertKV(_ string, _ map[string]string) error {
return nil
} | Value: "[email protected]",
}, |
index.ios.ts | import { TextFieldBase, secureProperty } from './text-field-common';
import { textProperty } from '../text-base';
import { hintProperty, placeholderColorProperty, _updateCharactersInRangeReplacementString } from '../editable-text-base';
import { Color } from '../../color';
import { colorProperty, Length, paddingTopProperty, paddingRightProperty, paddingBottomProperty, paddingLeftProperty } from '../styling/style-properties';
import { layout } from '../../utils';
import { profile } from '../../profiling';
export * from './text-field-common';
const zeroLength: Length = {
value: 0,
unit: 'px',
};
@NativeClass
class UITextFieldDelegateImpl extends NSObject implements UITextFieldDelegate {
public static ObjCProtocols = [UITextFieldDelegate];
private _owner: WeakRef<TextField>;
private firstEdit: boolean;
public static initWithOwner(owner: WeakRef<TextField>): UITextFieldDelegateImpl {
const delegate = <UITextFieldDelegateImpl>UITextFieldDelegateImpl.new();
delegate._owner = owner;
return delegate;
}
public textFieldShouldBeginEditing(textField: UITextField): boolean {
const owner = this._owner.get();
if (owner) {
return owner.textFieldShouldBeginEditing(textField);
}
return true;
}
public textFieldDidBeginEditing(textField: UITextField): void {
const owner = this._owner.get();
if (owner) {
owner.textFieldDidBeginEditing(textField);
}
}
public textFieldDidEndEditing(textField: UITextField) {
const owner = this._owner.get();
if (owner) {
owner.textFieldDidEndEditing(textField);
}
}
public textFieldShouldClear(textField: UITextField) {
const owner = this._owner.get();
if (owner) {
return owner.textFieldShouldClear(textField);
}
return true;
}
public textFieldShouldReturn(textField: UITextField): boolean {
// Called when the user presses the return button.
const owner = this._owner.get();
if (owner) {
return owner.textFieldShouldReturn(textField);
}
return true;
}
public textFieldShouldChangeCharactersInRangeReplacementString(textField: UITextField, range: NSRange, replacementString: string): boolean {
const owner = this._owner.get();
if (owner) {
return owner.textFieldShouldChangeCharactersInRangeReplacementString(textField, range, replacementString);
}
return true;
}
}
@NativeClass
class UITextFieldImpl extends UITextField {
private _owner: WeakRef<TextField>;
public static initWithOwner(owner: WeakRef<TextField>): UITextFieldImpl {
const handler = <UITextFieldImpl>UITextFieldImpl.new();
handler._owner = owner;
return handler;
}
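	// Shrinks the given bounds by the view's effective border widths and padding so that the
	// text and editing rects honor CSS padding (see the padding*Property setters below).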
private _getTextRectForBounds(bounds: CGRect): CGRect {
const owner = this._owner ? this._owner.get() : null;
if (!owner) {
return bounds;
}
const size = bounds.size;
const x = layout.toDeviceIndependentPixels(owner.effectiveBorderLeftWidth + owner.effectivePaddingLeft);
const y = layout.toDeviceIndependentPixels(owner.effectiveBorderTopWidth + owner.effectivePaddingTop);
const width = layout.toDeviceIndependentPixels(layout.toDevicePixels(size.width) - (owner.effectiveBorderLeftWidth + owner.effectivePaddingLeft + owner.effectivePaddingRight + owner.effectiveBorderRightWidth));
const height = layout.toDeviceIndependentPixels(layout.toDevicePixels(size.height) - (owner.effectiveBorderTopWidth + owner.effectivePaddingTop + owner.effectivePaddingBottom + owner.effectiveBorderBottomWidth));
return CGRectMake(x, y, width, height);
}
public textRectForBounds(bounds: CGRect): CGRect {
return this._getTextRectForBounds(bounds); | return this._getTextRectForBounds(bounds);
}
}
export class TextField extends TextFieldBase {
nativeViewProtected: UITextField;
private _delegate: UITextFieldDelegateImpl;
createNativeView() {
return UITextFieldImpl.initWithOwner(new WeakRef(this));
}
initNativeView() {
super.initNativeView();
this._delegate = UITextFieldDelegateImpl.initWithOwner(new WeakRef(this));
}
disposeNativeView() {
this._delegate = null;
super.disposeNativeView();
}
@profile
public onLoaded() {
super.onLoaded();
this.ios.delegate = this._delegate;
}
public onUnloaded() {
this.ios.delegate = null;
super.onUnloaded();
}
get ios(): UITextField {
return this.nativeViewProtected;
}
private firstEdit: boolean;
public textFieldShouldBeginEditing(textField: UITextField): boolean {
this.firstEdit = true;
return this.editable;
}
public textFieldDidBeginEditing(textField: UITextField): void {
this.notify({ eventName: TextField.focusEvent, object: this });
}
public textFieldDidEndEditing(textField: UITextField) {
if (this.updateTextTrigger === 'focusLost') {
textProperty.nativeValueChange(this, textField.text);
}
this.dismissSoftInput();
}
public textFieldShouldClear(textField: UITextField) {
this.firstEdit = false;
textProperty.nativeValueChange(this, '');
return true;
}
public textFieldShouldReturn(textField: UITextField): boolean {
// Called when the user presses the return button.
if (this.closeOnReturn) {
this.dismissSoftInput();
}
this.notify({ eventName: TextField.returnPressEvent, object: this });
return true;
}
public textFieldShouldChangeCharactersInRangeReplacementString(textField: UITextField, range: NSRange, replacementString: string): boolean {
if (this.secureWithoutAutofill && !textField.secureTextEntry) {
/**
* Helps avoid iOS 12+ autofill strong password suggestion prompt
* Discussed in several circles but for example:
* https://github.com/expo/expo/issues/2571#issuecomment-473347380
*/
textField.secureTextEntry = true;
}
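		// Reject the edit if it would push the text past maxLength.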
const delta = replacementString.length - range.length;
if (delta > 0) {
if (textField.text.length + delta > this.maxLength) {
return false;
}
}
if (this.updateTextTrigger === 'textChanged') {
if (textField.secureTextEntry && this.firstEdit) {
textProperty.nativeValueChange(this, replacementString);
} else {
if (range.location <= textField.text.length) {
const newText = NSString.stringWithString(textField.text).stringByReplacingCharactersInRangeWithString(range, replacementString);
textProperty.nativeValueChange(this, newText);
}
}
}
if (this.formattedText) {
_updateCharactersInRangeReplacementString(this.formattedText, range.location, range.length, replacementString);
}
this.firstEdit = false;
return true;
}
[hintProperty.getDefault](): string {
return this.nativeTextViewProtected.placeholder;
}
[hintProperty.setNative](value: string) {
this._updateAttributedPlaceholder();
}
[secureProperty.getDefault](): boolean {
return this.nativeTextViewProtected.secureTextEntry;
}
[secureProperty.setNative](value: boolean) {
this.nativeTextViewProtected.secureTextEntry = value;
}
[colorProperty.getDefault](): { textColor: UIColor; tintColor: UIColor } {
return {
textColor: this.nativeTextViewProtected.textColor,
tintColor: this.nativeTextViewProtected.tintColor,
};
}
[colorProperty.setNative](value: Color | { textColor: UIColor; tintColor: UIColor }) {
if (value instanceof Color) {
			const color = value.ios;
this.nativeTextViewProtected.textColor = color;
this.nativeTextViewProtected.tintColor = color;
} else {
this.nativeTextViewProtected.textColor = value.textColor;
this.nativeTextViewProtected.tintColor = value.tintColor;
}
}
[placeholderColorProperty.getDefault](): UIColor {
return null;
}
[placeholderColorProperty.setNative](value: UIColor | Color) {
this._updateAttributedPlaceholder();
}
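	// Rebuilds the attributed placeholder from the current hint and placeholder color.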
_updateAttributedPlaceholder(): void {
let stringValue = this.hint;
if (stringValue === null || stringValue === void 0) {
stringValue = '';
} else {
stringValue = stringValue + '';
}
if (stringValue === '') {
			// we do not use an empty string since initWithStringAttributes does not return a proper value and
			// nativeView.attributedPlaceholder will be null
stringValue = ' ';
}
const attributes: any = {};
if (this.style.placeholderColor) {
attributes[NSForegroundColorAttributeName] = this.style.placeholderColor.ios;
}
const attributedPlaceholder = NSAttributedString.alloc().initWithStringAttributes(stringValue, attributes);
this.nativeTextViewProtected.attributedPlaceholder = attributedPlaceholder;
}
[paddingTopProperty.getDefault](): Length {
return zeroLength;
}
[paddingTopProperty.setNative](value: Length) {
// Padding is realized via UITextFieldImpl.textRectForBounds method
}
[paddingRightProperty.getDefault](): Length {
return zeroLength;
}
[paddingRightProperty.setNative](value: Length) {
// Padding is realized via UITextFieldImpl.textRectForBounds method
}
[paddingBottomProperty.getDefault](): Length {
return zeroLength;
}
[paddingBottomProperty.setNative](value: Length) {
// Padding is realized via UITextFieldImpl.textRectForBounds method
}
[paddingLeftProperty.getDefault](): Length {
return zeroLength;
}
[paddingLeftProperty.setNative](value: Length) {
// Padding is realized via UITextFieldImpl.textRectForBounds method
}
} | }
public editingRectForBounds(bounds: CGRect): CGRect { |
initializers.py | # coding: utf-8
import re
import numpy as np
from scipy import stats
from abc import ABCMeta, abstractmethod
from .utils import mk_class_get
from .utils import handleKeyError
from .utils import handleRandomState
class KerasyAbstInitializer(metaclass=ABCMeta):
def __init__(self):
self.name = re.sub(r"([a-z])([A-Z])", r"\1_\2", self.__class__.__name__).lower()
@abstractmethod
def __call__(self, shape, dtype=None):
raise NotImplementedError
class Zeros(KerasyAbstInitializer):
def __call__(self, shape, dtype=None):
return np.zeros(shape=shape, dtype=dtype)
class Ones(KerasyAbstInitializer):
def __call__(self, shape, dtype=None):
return np.ones(shape=shape, dtype=dtype)
class Constant(KerasyAbstInitializer):
def __call__(self, shape, value=0, dtype=None):
|
class RandomNormal(KerasyAbstInitializer):
def __call__(self, shape, mean=0, stddev=0.05, dtype=None, seed=None):
rnd = handleRandomState(seed)
return rnd.normal(size=shape, loc=mean, scale=stddev).astype(dtype)
class RandomUniform(KerasyAbstInitializer):
def __call__(self, shape, minval=-0.05, maxval=0.05, dtype=None, seed=None):
rnd = handleRandomState(seed)
return rnd.uniform(size=shape, low=minval, high=maxval)
class TruncatedNormal(KerasyAbstInitializer):
def __call__(self, shape, mean=0.0, stddev=0.05, dtype=None, seed=None):
X = stats.truncnorm(
(-stddev - mean) / stddev,
(stddev - mean) / stddev,
loc=mean,
scale=stddev,
)
return X.rvs(size=shape,random_state=seed).astype(dtype)
class VarianceScaling(KerasyAbstInitializer):
def __call__(self, shape, scale=1.0, mode='fan_in', distribution='normal', dtype=None, seed=None):
fan_in, fan_out = _compute_fans(shape)
n = fan_in if mode=="fan_in" else fan_out if mode=="fan_out" else (fan_in+fan_out)/2
scale /= max(1., n)
if distribution=='normal':
# 0.879... = scipy.stats.truncnorm.std(a=-2, b=2, loc=0., scale=1.)
stddev = np.sqrt(scale) / .87962566103423978
return TruncatedNormal()(shape=shape, mean=0.0, stddev=stddev, dtype=dtype, seed=seed)
else:
limit = np.sqrt(3 * scale)
return RandomUniform()(shape=shape, minval=-limit, maxval=limit, dtype=dtype, seed=seed)
class Orthogonal(KerasyAbstInitializer):
def __call__(self, shape, gain=1.0, dtype=None, seed=None):
rnd = handleRandomState(seed)
num_rows = 1
for dim in shape[:-1]:
num_rows *= dim
num_cols = shape[-1]
flat_shape = (num_rows, num_cols)
a = rnd.normal(loc=0.0, scale=1.0, size=flat_shape).astype(dtype)
u, _, v = np.linalg.svd(a, full_matrices=False)
# Pick the one with the correct shape.
q = u if u.shape == flat_shape else v
q = q.reshape(shape)
return gain * q[:shape[0], :shape[1]]
class Identity(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, gain=1.0):
if len(shape) != 2 or shape[0]!=shape[1]:
raise ValueError('Identity matrix initializer can only be used for 2D Square matrices.')
return gain * np.eye(N=shape[0], dtype=dtype)
class GlorotNormal(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, seed=None):
return VarianceScaling()(
shape=shape,
scale=1.,
mode='fan_avg',
distribution='normal',
dtype=dtype,
seed=seed
)
class GlorotUniform(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, seed=None):
return VarianceScaling()(
shape=shape,
scale=1.,
mode='fan_avg',
distribution='uniform',
dtype=dtype,
seed=seed
)
class HeNormal(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, seed=None):
return VarianceScaling()(
shape=shape,
scale=2.,
mode='fan_in',
distribution='normal',
dtype=dtype,
seed=seed
)
class LeCunNormal(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, seed=None):
return VarianceScaling()(
shape=shape,
scale=1.,
mode='fan_in',
distribution='normal',
dtype=dtype,
seed=seed
)
class HeUniform(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, seed=None):
return VarianceScaling()(
shape=shape,
scale=2.,
mode='fan_in',
distribution='uniform',
dtype=dtype,
seed=seed
)
class LeCunUniform(KerasyAbstInitializer):
def __call__(self, shape, dtype=None, seed=None):
return VarianceScaling()(
shape=shape,
scale=1.,
mode='fan_in',
distribution='uniform',
dtype=dtype,
seed=seed
)
def _compute_fans(shape, data_format='channels_last'):
"""Computes the number of input and output units for a weight shape.
@param shape : Integer shape tuple.
@param data_format: Image data format to use for convolution kernels.
@return fan_in : size of the input shape.
@return fan_out : size of the output shape.
"""
if len(shape) == 2:
fan_in,fan_out = shape
elif len(shape) in {3, 4, 5}:
# Assuming convolution kernels (1D, 2D or 3D).
# TH kernel shape: (depth, input_depth, ...)
# TF kernel shape: (..., input_depth, depth)
if data_format == 'channels_first':
receptive_field_size = np.prod(shape[2:])
fan_in = shape[1] * receptive_field_size
fan_out = shape[0] * receptive_field_size
elif data_format == 'channels_last':
receptive_field_size = np.prod(shape[:-2])
fan_in = shape[-2] * receptive_field_size
fan_out = shape[-1] * receptive_field_size
else:
raise ValueError('Invalid data_format: ' + data_format)
else:
# No specific assumptions.
fan_in = np.sqrt(np.prod(shape))
fan_out = np.sqrt(np.prod(shape))
return fan_in, fan_out
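# Worked example for _compute_fans: a 3x3 convolution kernel with 16 input and 32 output
# channels in the default 'channels_last' layout has shape (3, 3, 16, 32), so
# fan_in = 3*3*16 = 144 and fan_out = 3*3*32 = 288.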
all = KerasyInitializerFunctions = {
'zeros' : Zeros,
'ones' : Ones,
'constant' : Constant,
'random_normal' : RandomNormal,
'random_uniform' : RandomUniform,
'truncated_normal' : TruncatedNormal,
'variance_scaling' : VarianceScaling,
'orthogonal' : Orthogonal,
'identity' : Identity,
'glorot_normal' : GlorotNormal,
'glorot_uniform' : GlorotUniform,
'he_normal' : HeNormal,
'lecun_normal' : LeCunNormal,
'he_uniform' : HeUniform,
'lecun_uniform' : LeCunUniform,
}
get = mk_class_get(
all_classes=KerasyInitializerFunctions,
kerasy_abst_class=[KerasyAbstInitializer],
genre="initializer"
)
| return np.full(shape=shape, fill_value=value, dtype=dtype) |
ListDeliverabilityTestReportsCommand.ts | import { PinpointEmailClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../PinpointEmailClient";
import { ListDeliverabilityTestReportsRequest, ListDeliverabilityTestReportsResponse } from "../models/index";
import {
deserializeAws_restJson1ListDeliverabilityTestReportsCommand,
serializeAws_restJson1ListDeliverabilityTestReportsCommand,
} from "../protocols/Aws_restJson1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
export type ListDeliverabilityTestReportsCommandInput = ListDeliverabilityTestReportsRequest;
export type ListDeliverabilityTestReportsCommandOutput = ListDeliverabilityTestReportsResponse & __MetadataBearer;
export class | extends $Command<
ListDeliverabilityTestReportsCommandInput,
ListDeliverabilityTestReportsCommandOutput,
PinpointEmailClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: ListDeliverabilityTestReportsCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: PinpointEmailClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<ListDeliverabilityTestReportsCommandInput, ListDeliverabilityTestReportsCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const handlerExecutionContext: HandlerExecutionContext = {
logger,
inputFilterSensitiveLog: ListDeliverabilityTestReportsRequest.filterSensitiveLog,
outputFilterSensitiveLog: ListDeliverabilityTestReportsResponse.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: ListDeliverabilityTestReportsCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_restJson1ListDeliverabilityTestReportsCommand(input, context);
}
private deserialize(
output: __HttpResponse,
context: __SerdeContext
): Promise<ListDeliverabilityTestReportsCommandOutput> {
return deserializeAws_restJson1ListDeliverabilityTestReportsCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
}
| ListDeliverabilityTestReportsCommand |
options.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! User facing options for the file formats readers
use std::sync::Arc;
use arrow::datatypes::{Schema, SchemaRef};
use crate::datasource::file_format::json::DEFAULT_JSON_EXTENSION;
use crate::datasource::{
file_format::{avro::AvroFormat, csv::CsvFormat},
listing::ListingOptions,
};
/// CSV file read option
#[derive(Copy, Clone)]
pub struct CsvReadOptions<'a> {
/// Does the CSV file have a header?
///
/// If schema inference is run on a file with no headers, default column names
/// are created.
pub has_header: bool,
/// An optional column delimiter. Defaults to `b','`.
pub delimiter: u8,
/// An optional schema representing the CSV files. If None, CSV reader will try to infer it
/// based on data in file.
pub schema: Option<&'a Schema>,
/// Max number of rows to read from CSV files for schema inference if needed. Defaults to 1000.
pub schema_infer_max_records: usize,
/// File extension; only files with this extension are selected for data input.
/// Defaults to ".csv".
pub file_extension: &'a str,
}
impl<'a> Default for CsvReadOptions<'a> {
fn default() -> Self {
Self::new()
}
}
impl<'a> CsvReadOptions<'a> {
/// Create a CSV read option with default presets
pub fn new() -> Self {
Self {
has_header: true,
schema: None,
schema_infer_max_records: 1000,
delimiter: b',',
file_extension: ".csv",
}
}
/// Configure has_header setting
pub fn has_header(mut self, has_header: bool) -> Self {
self.has_header = has_header;
self
}
/// Specify delimiter to use for CSV read
pub fn delimiter(mut self, delimiter: u8) -> Self {
self.delimiter = delimiter;
self
}
/// Specify the file extension for CSV file selection
pub fn file_extension(mut self, file_extension: &'a str) -> Self {
self.file_extension = file_extension;
self
}
/// Configure delimiter setting with Option, None value will be ignored
pub fn delimiter_option(mut self, delimiter: Option<u8>) -> Self {
if let Some(d) = delimiter {
self.delimiter = d;
}
self
}
/// Specify schema to use for CSV read
pub fn schema(mut self, schema: &'a Schema) -> Self {
self.schema = Some(schema);
self
}
/// Configure number of max records to read for schema inference
pub fn schema_infer_max_records(mut self, max_records: usize) -> Self {
self.schema_infer_max_records = max_records;
self
}
/// Helper to convert these user facing options to `ListingTable` options
pub fn to_listing_options(&self, target_partitions: usize) -> ListingOptions {
let file_format = CsvFormat::default()
.with_has_header(self.has_header)
.with_delimiter(self.delimiter)
.with_schema_infer_max_rec(Some(self.schema_infer_max_records));
ListingOptions {
format: Arc::new(file_format),
collect_stat: false,
file_extension: self.file_extension.to_owned(),
target_partitions,
table_partition_cols: vec![],
}
}
}
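// Hypothetical usage sketch: semicolon-delimited files without a header, converted into
// listing options with 8 target partitions:
//     let opts = CsvReadOptions::new().has_header(false).delimiter(b';').file_extension(".txt");
//     let listing_options = opts.to_listing_options(8);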
/// Avro read options
#[derive(Clone)]
pub struct AvroReadOptions<'a> {
/// The data source schema.
pub schema: Option<SchemaRef>,
/// File extension; only files with this extension are selected for data input.
/// Defaults to ".avro".
pub file_extension: &'a str,
}
impl<'a> Default for AvroReadOptions<'a> {
fn default() -> Self {
Self {
schema: None,
file_extension: ".avro",
}
}
}
impl<'a> AvroReadOptions<'a> {
/// Helper to convert these user facing options to `ListingTable` options
pub fn | (&self, target_partitions: usize) -> ListingOptions {
let file_format = AvroFormat::default();
ListingOptions {
format: Arc::new(file_format),
collect_stat: false,
file_extension: self.file_extension.to_owned(),
target_partitions,
table_partition_cols: vec![],
}
}
}
/// Line-delimited JSON read options
#[derive(Clone)]
pub struct NdJsonReadOptions<'a> {
/// The data source schema.
pub schema: Option<SchemaRef>,
    /// Max number of rows to read from JSON files for schema inference if needed. Defaults to 1000.
pub schema_infer_max_records: usize,
/// File extension; only files with this extension are selected for data input.
/// Defaults to ".json".
pub file_extension: &'a str,
}
impl<'a> Default for NdJsonReadOptions<'a> {
fn default() -> Self {
Self {
schema: None,
schema_infer_max_records: 1000,
file_extension: DEFAULT_JSON_EXTENSION,
}
}
}
| to_listing_options |
main.py | import sys
import os
import ode
import logging
import threading
from time import sleep, time
from genie_python.genie_startup import *
import pv_server
import render
from configurations import config_zoom as config
from collide import collide, CollisionDetector
from geometry import GeometryBox
from move import move_all
sys.path.insert(0, os.path.abspath(os.environ["MYDIRCD"]))
from monitor import Monitor
from server_common.loggers.isis_logger import IsisLogger
logging.basicConfig(level=logging.INFO,
format='%(asctime)s (%(threadName)-2s) %(message)s',
)
def auto_seek(start_step_size, start_values, end_value, geometries, moves, axis_index, ignore, fine_step=None):
limit = end_value
current_value = start_values[axis_index]
if current_value == end_value:
return end_value
values = start_values[:]
last_value = None
old_points = None
step_checked = False
if current_value < end_value:
# Going up
def comp(a, b):
return a < b
step_size = abs(start_step_size)
else:
# Going down
def comp(a, b):
return a > b
step_size = -abs(start_step_size)
while last_value is None or comp(last_value, end_value):
# Move if we need to
if last_value is not None:
current_value += step_size
# print "Using step size of %f" % step_size
else:
current_value = start_values[axis_index]
if not comp(current_value, end_value):
current_value = end_value
values[axis_index] = current_value
move_all(geometries, moves, values=values[:])
# Check nothing moved too far
if step_checked is False:
new_points = [g.get_vertices() for g in geometries]
if old_points is not None:
delta = max_delta(geometries, new_points, old_points)
if delta > start_step_size:
# Work out a new step size
step_size *= start_step_size/delta
last_value = None
continue
step_checked = True
# Check for collisions
collisions = collide(geometries, ignore)
if any(collisions):
if current_value == start_values[axis_index]:
# There was already a collision
limit = current_value
break
elif fine_step and fine_step < step_size:
start_values[axis_index] = last_value
limit = auto_seek(fine_step, start_values, current_value, geometries, moves, axis_index, ignore)
else:
limit = last_value
break
old_points = new_points[:]
last_value = current_value
# print "Found limits for axis %d using step size of %f" % (axis_index, step_size)
if limit is None:
raise ValueError("Null limit")
return limit
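# Illustrative call (a sketch only; the step sizes and axis index are assumptions,
# not values taken from this script). Starting from the live axis positions, step
# axis 0 towards its configured upper bound in 1.0 increments, refining with 0.1
# steps once a collision is approached:
#
#     upper = auto_seek(1.0, [m.value() for m in monitors], max(config.hardlimits[0]),
#                       geometries, moves, 0, config.ignore, fine_step=0.1)
#
# The return value is the furthest collision-free position found for that axis;
# auto_seek_limits below simply repeats this in both directions for every axis.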
def max_delta(geometries, new_points, old_points):
# Calculate the greatest position deltas
delta = 0
for j in range(len(geometries)):
old = old_points[j]
new = new_points[j]
deltas = [map(float, n - o) for n, o in zip(new, old)]
for i, (x, y, z) in enumerate(deltas):
mag = float(x) ** 2 + float(y) ** 2 + float(z) ** 2
if mag > delta:
delta = mag
# print "New max delta of %f (%f, %f, %f) for body %d at %s from %s" % \
# (mag ** 0.5, x, y, z, j, new[i], old[i])
delta = float(delta) ** 0.5
return delta
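# Worked example of the calculation above: with a single geometry whose only vertex
# moves from (0, 0, 0) to (3, 4, 0), the squared displacement is 3**2 + 4**2 = 25,
# so max_delta returns 5.0, the largest Euclidean distance any vertex travelled
# between the two snapshots. auto_seek and look_ahead use this value to keep each
# step within their coarse step size or movement budget.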
def compare(sign):
if sign > 0:
return lambda a, b: a > b
else:
return lambda a, b: a < b
def auto_seek_limits(geometries, ignore, moves, values, limits, coarse=1.0, fine=0.1):
dynamic_limits = []
for i in range(len(values)):
logging.debug("Seeking for axis %d" % i)
lower_limit = auto_seek(coarse, values[:], min(limits[i]), geometries, moves, i, ignore, fine)
upper_limit = auto_seek(coarse, values[:], max(limits[i]), geometries, moves, i, ignore, fine)
dynamic_limits.append([lower_limit, upper_limit])
logging.debug("Found limits for axis %d at %s, %s" % (i, upper_limit, lower_limit))
return dynamic_limits
def look_ahead(start_values, pvs, is_moving, geometries, moves, ignore, max_movement=1.0, max_time=10., time_step=0.1):
# Get the indices of the axes currently moving
    moving = [i for i, m in enumerate(is_moving) if m]  # is_moving holds True for axes that are currently in motion
msg = "No collisions predicted in the next %fs" % max_time
safe_time = max_time
safe = True
# Only worth calculating if more than one axis is moving
if len(moving) > 1:
set_points = [None] * len(pvs)
speeds = [None] * len(pvs)
directions = [None] * len(pvs)
# Assume everything has finished moving
move_complete = [True] * len(pvs)
# Get some settings:
for i in moving:
pv = pvs[i]
set_point = get_pv(pv + '.DVAL')
speed = get_pv(pv + '.VELO')
direction = 0.
move = set_point - start_values[i]
if move > 0:
direction = 1.
if move < 0:
direction = -1.
set_points[i] = set_point
speeds[i] = speed
directions[i] = direction
# This axis has not finished moving!
move_complete[i] = False
current_time = 0.
values = start_values[:]
old_points = None
step_checked = False
last_time = None
while current_time < max_time:
if last_time is None:
values = start_values[:]
current_time = 0.
old_points = None
else:
current_time += time_step
for i in moving:
if move_complete[i] is False:
values[i] = start_values[i] + (directions[i] * speeds[i] * current_time)
comp = compare(directions[i])(values[i], set_points[i])
if comp:
values[i] = set_points[i]
# Move the bodies
move_all(geometries, moves, values=values)
if step_checked is False:
new_points = [g.get_vertices() for g in geometries]
if old_points is not None:
delta = max_delta(geometries, new_points, old_points)
if delta > max_movement:
# Reduce the size of the time step
time_step *= max_movement/delta
# Reset to starting point
last_time = None
old_points = None
continue
step_checked = True
# Check for collisions
collisions = collide(geometries, ignore)
if any(collisions):
if last_time is None:
msg = "There is already a collision"
safe_time = 0.
else:
msg = "Collision expected in %.1fs - %.1fs" % (last_time, current_time)
safe_time = last_time
safe = False
break
old_points = new_points[:]
last_time = current_time
return msg, safe_time, safe
# Set the high and low dial limits for each motor
def set_limits(limits, pvs):
for limit, pv in zip(limits, pvs):
set_pv(pv + '.DLLM', limit[0])
set_pv(pv + '.DHLM', limit[1])
# Contains operating mode events
class OperatingMode(object):
def __init__(self):
# Close event to be triggered by the render thread
self.close = threading.Event()
# Set dynamic limits automatically
self.set_limits = threading.Event()
# Stop the motors on a collision
self.auto_stop = threading.Event()
# Re-calculate limits on demand
self.calc_limits = threading.Event()
def get_operation_mode(self):
return self.auto_stop.is_set(), self.set_limits.is_set(), self.close.is_set()
def set_operation_mode(self, auto_stop, set_limits, close):
if auto_stop:
self.auto_stop.set()
else:
self.auto_stop.clear()
if set_limits:
self.set_limits.set()
else:
self.set_limits.clear()
if close:
self.close.set()
else:
self.close.clear()
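# Minimal usage sketch for OperatingMode (the flag values here are illustrative only):
#
#     op_mode = OperatingMode()
#     op_mode.set_operation_mode(auto_stop=True, set_limits=True, close=False)
#     auto_stop, set_limits, close = op_mode.get_operation_mode()
#
# The same instance is shared with the PV server, the renderer, the collision
# detector and the main loop below, so toggling an event from any thread changes
# behaviour everywhere.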
# The main routine to execute
def main():
# Load config:
colors = config.colors
moves = config.moves
ignore = config.ignore
pvs = config.pvs
config_limits = config.hardlimits
old_limits = config_limits[:]
# Create space objects for the live and rendered world
space = ode.Space()
render_space = ode.Space()
collision_space = ode.Space()
# Create and populate lists of geometries
geometries = []
render_geometries = []
collision_geometries = []
for i, geometry in enumerate(config.geometries):
geometries.append(GeometryBox(space, oversize=config.oversize, **geometry))
render_geometries.append(GeometryBox(render_space, **geometry))
collision_geometries.append(GeometryBox(collision_space, oversize=config.oversize, **geometry))
# Create and populate two lists of monitors
monitors = []
is_moving = []
for pv in pvs:
m = Monitor(pv + ".DRBV")
m.start()
monitors.append(m)
        dmov_monitor = Monitor(pv + ".DMOV")
        dmov_monitor.start()
        is_moving.append(dmov_monitor)
# Create a shared operating mode object to control the main thread
op_mode = OperatingMode()
# Set the default behaviour to set_limits as calculated, and auto_stop on collision
op_mode.set_limits.set()
op_mode.auto_stop.set()
# Start a logger
logger = IsisLogger()
# Create a shared render parameter object to update the render thread
parameters = render.RenderParams()
if 'blind' not in sys.argv:
# Initialise the render thread, and set it to daemon - won't prevent the main thread from exiting
renderer = render.Renderer(parameters, render_geometries, colors, monitors, pvs, moves, op_mode)
renderer.daemon = True
# Need to know if this is the first execution of the main loop
op_mode.calc_limits.set()
# Initialise the pv server
    # Loop over the pvdb and update the counts based on the number of axes/bodies
for pv in pv_server.pvdb:
for key, val in pv_server.pvdb[pv].items():
if key == 'count':
if val is pv_server.axis_count:
pv_server.pvdb[pv]['count'] = len(config.pvs)
if val is pv_server.body_count:
pv_server.pvdb[pv]['count'] = len(config.geometries)
driver = pv_server.start_thread(config.control_pv, op_mode)
driver.setParam('OVERSIZE', config.oversize)
driver.setParam('COARSE', config.coarse)
driver.setParam('FINE', config.fine)
driver.setParam('NAMES', [g['name'] for g in config.geometries])
# Only report for new collisions
collision_detector = CollisionDetector(driver, collision_geometries, config.moves, monitors, config.ignore,
is_moving, logger, op_mode, config.pvs)
collision_detector.start()
# Main loop
while True:
# Freeze the positions of our current monitors by creating some dummies
        # This stops the threads from trying to read each monitor sequentially and holding each other up
frozen = [m.value() for m in monitors]
# Execute the move
move_all(geometries, moves, values=frozen)
# Check if the oversize has been changed, ahead of any collision calcs
if driver.new_data.isSet():
for geometry, collision_geometry in zip(geometries, collision_geometries):
geometry.set_size(oversize=driver.getParam('OVERSIZE'))
collision_geometry.set_size(oversize=driver.getParam('OVERSIZE'))
driver.new_data.clear()
op_mode.calc_limits.set()
if driver.getParam("CALC") != 0:
op_mode.calc_limits.set()
collisions = collision_detector.collisions[:]
collision_message = collision_detector.message[:]
# Check if there have been any changes to the .MOVN monitors
fresh = any([m.fresh() for m in is_moving])
        # Check whether any of the motor monitors report movement
moving = [not m.value() for m in is_moving] # Invert because DMOV is inverted from MOVN
any_moving = any(moving)
new_limits = []
if fresh or any_moving or op_mode.calc_limits.isSet():
# Look ahead some time to see if any collisions are going to happen in the future
msg, safe_time, safe = look_ahead(frozen, config.pvs, moving, geometries, moves, ignore,
max_movement=driver.getParam('COARSE'))
if not safe and not any(collisions):
logger.write_to_log(msg, "MAJOR", "COLLIDE")
driver.setParam('MSG', msg)
else:
driver.setParam('MSG', collision_message)
logging.info(msg)
# Start timing for diagnostics
time_passed = time()
# Seek the correct limit values
dynamic_limits = auto_seek_limits(geometries, ignore, moves, frozen, config_limits,
coarse=driver.getParam('COARSE'), fine=driver.getParam('FINE'))
# Calculate and log the time taken to calculate
time_passed = (time() - time_passed) * 1000
# Log the new limits
logging.info("New limits calculated in %dms, are %s" % (time_passed, dynamic_limits))
# Set the limits according to the set_limits operating mode
if op_mode.set_limits.is_set():
# Apply the calculated limits
new_limits = dynamic_limits[:]
else:
# Restore the configuration limits
new_limits = config_limits[:]
# Update the render thread parameters
parameters.update_params(dynamic_limits, collisions, time_passed)
            # Update the PVs
driver.setParam('TIME', time_passed)
driver.setParam('HI_LIM', [l[1] for l in dynamic_limits])
driver.setParam('LO_LIM', [l[0] for l in dynamic_limits])
driver.setParam('TRAVEL', [min([l[0] - m, l[1] - m], key=abs)
for l, m in zip(dynamic_limits, frozen)])
driver.setParam('TRAV_F', [l[1] - m for l, m in zip(dynamic_limits, frozen)])
driver.setParam('TRAV_R', [l[0] - m for l, m in zip(dynamic_limits, frozen)])
driver.updatePVs()
if 'blind' not in sys.argv:
# On the first run, start the renderer
if renderer.is_alive() is False:
renderer.start()
op_mode.calc_limits.clear()
driver.setParam("CALC", False)
else:
# Restore the configuration limits
if op_mode.set_limits.is_set() is False:
new_limits = config_limits[:]
# Stop us overloading the limits
if not new_limits == old_limits:
threading.Thread(target=set_limits, args=(new_limits, pvs)).start()
old_limits = new_limits[:]
# Exit the program
if op_mode.close.is_set():
# Restore the configuration limits
set_limits(config_limits, pvs)
return
# Give the CPU a break
sleep(0.01)
if 'return' in sys.argv:
return
# Execute main
main()
generated.rs
// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
use std::error::Error;
use std::fmt;
#[allow(warnings)]
use futures::future;
use futures::Future;
use rusoto_core::credential::ProvideAwsCredentials;
use rusoto_core::region;
use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest};
use rusoto_core::{Client, RusotoError, RusotoFuture};
use rusoto_core::proto;
use rusoto_core::signature::SignedRequest;
use serde_json;
/// <p>The <code>ActivatedRule</code> object in an <a>UpdateWebACL</a> request specifies a <code>Rule</code> that you want to insert or delete, the priority of the <code>Rule</code> in the <code>WebACL</code>, and the action that you want AWS WAF to take when a web request matches the <code>Rule</code> (<code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>).</p> <p>To specify whether to insert or delete a <code>Rule</code>, use the <code>Action</code> parameter in the <a>WebACLUpdate</a> data type.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ActivatedRule {
/// <p>Specifies the action that CloudFront or AWS WAF takes when a web request matches the conditions in the <code>Rule</code>. Valid values for <code>Action</code> include the following:</p> <ul> <li> <p> <code>ALLOW</code>: CloudFront responds with the requested object.</p> </li> <li> <p> <code>BLOCK</code>: CloudFront responds with an HTTP 403 (Forbidden) status code.</p> </li> <li> <p> <code>COUNT</code>: AWS WAF increments a counter of requests that match the conditions in the rule and then continues to inspect the web request based on the remaining rules in the web ACL. </p> </li> </ul> <p> <code>ActivatedRule|OverrideAction</code> applies only when updating or adding a <code>RuleGroup</code> to a <code>WebACL</code>. In this case, you do not use <code>ActivatedRule|Action</code>. For all other update requests, <code>ActivatedRule|Action</code> is used instead of <code>ActivatedRule|OverrideAction</code>.</p>
#[serde(rename = "Action")]
#[serde(skip_serializing_if = "Option::is_none")]
pub action: Option<WafAction>,
/// <p><p>An array of rules to exclude from a rule group. This is applicable only when the <code>ActivatedRule</code> refers to a <code>RuleGroup</code>.</p> <p>Sometimes it is necessary to troubleshoot rule groups that are blocking traffic unexpectedly (false positives). One troubleshooting technique is to identify the specific rule within the rule group that is blocking the legitimate traffic and then disable (exclude) that particular rule. You can exclude rules from both your own rule groups and AWS Marketplace rule groups that have been associated with a web ACL.</p> <p>Specifying <code>ExcludedRules</code> does not remove those rules from the rule group. Rather, it changes the action for the rules to <code>COUNT</code>. Therefore, requests that match an <code>ExcludedRule</code> are counted but not blocked. The <code>RuleGroup</code> owner will receive COUNT metrics for each <code>ExcludedRule</code>.</p> <p>If you want to exclude rules from a rule group that is already associated with a web ACL, perform the following steps:</p> <ol> <li> <p>Use the AWS WAF logs to identify the IDs of the rules that you want to exclude. For more information about the logs, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/logging.html">Logging Web ACL Traffic Information</a>.</p> </li> <li> <p>Submit an <a>UpdateWebACL</a> request that has two actions:</p> <ul> <li> <p>The first action deletes the existing rule group from the web ACL. That is, in the <a>UpdateWebACL</a> request, the first <code>Updates:Action</code> should be <code>DELETE</code> and <code>Updates:ActivatedRule:RuleId</code> should be the rule group that contains the rules that you want to exclude.</p> </li> <li> <p>The second action inserts the same rule group back in, but specifying the rules to exclude. That is, the second <code>Updates:Action</code> should be <code>INSERT</code>, <code>Updates:ActivatedRule:RuleId</code> should be the rule group that you just removed, and <code>ExcludedRules</code> should contain the rules that you want to exclude.</p> </li> </ul> </li> </ol></p>
#[serde(rename = "ExcludedRules")]
#[serde(skip_serializing_if = "Option::is_none")]
pub excluded_rules: Option<Vec<ExcludedRule>>,
/// <p>Use the <code>OverrideAction</code> to test your <code>RuleGroup</code>.</p> <p>Any rule in a <code>RuleGroup</code> can potentially block a request. If you set the <code>OverrideAction</code> to <code>None</code>, the <code>RuleGroup</code> will block a request if any individual rule in the <code>RuleGroup</code> matches the request and is configured to block that request. However if you first want to test the <code>RuleGroup</code>, set the <code>OverrideAction</code> to <code>Count</code>. The <code>RuleGroup</code> will then override any block action specified by individual rules contained within the group. Instead of blocking matching requests, those requests will be counted. You can view a record of counted requests using <a>GetSampledRequests</a>. </p> <p> <code>ActivatedRule|OverrideAction</code> applies only when updating or adding a <code>RuleGroup</code> to a <code>WebACL</code>. In this case you do not use <code>ActivatedRule|Action</code>. For all other update requests, <code>ActivatedRule|Action</code> is used instead of <code>ActivatedRule|OverrideAction</code>.</p>
#[serde(rename = "OverrideAction")]
#[serde(skip_serializing_if = "Option::is_none")]
pub override_action: Option<WafOverrideAction>,
/// <p>Specifies the order in which the <code>Rules</code> in a <code>WebACL</code> are evaluated. Rules with a lower value for <code>Priority</code> are evaluated before <code>Rules</code> with a higher value. The value must be a unique integer. If you add multiple <code>Rules</code> to a <code>WebACL</code>, the values don't need to be consecutive.</p>
#[serde(rename = "Priority")]
pub priority: i64,
/// <p>The <code>RuleId</code> for a <code>Rule</code>. You use <code>RuleId</code> to get more information about a <code>Rule</code> (see <a>GetRule</a>), update a <code>Rule</code> (see <a>UpdateRule</a>), insert a <code>Rule</code> into a <code>WebACL</code> or delete a one from a <code>WebACL</code> (see <a>UpdateWebACL</a>), or delete a <code>Rule</code> from AWS WAF (see <a>DeleteRule</a>).</p> <p> <code>RuleId</code> is returned by <a>CreateRule</a> and by <a>ListRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
/// <p>The rule type, either <code>REGULAR</code>, as defined by <a>Rule</a>, <code>RATE_BASED</code>, as defined by <a>RateBasedRule</a>, or <code>GROUP</code>, as defined by <a>RuleGroup</a>. The default is REGULAR. Although this field is optional, be aware that if you try to add a RATE_BASED rule to a web ACL without setting the type, the <a>UpdateWebACL</a> request will fail because the request tries to add a REGULAR rule with the specified ID, which does not exist. </p>
#[serde(rename = "Type")]
#[serde(skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
/// <p>In a <a>GetByteMatchSet</a> request, <code>ByteMatchSet</code> is a complex type that contains the <code>ByteMatchSetId</code> and <code>Name</code> of a <code>ByteMatchSet</code>, and the values that you specified when you updated the <code>ByteMatchSet</code>. </p> <p>A complex type that contains <code>ByteMatchTuple</code> objects, which specify the parts of web requests that you want AWS WAF to inspect and the values that you want AWS WAF to search for. If a <code>ByteMatchSet</code> contains more than one <code>ByteMatchTuple</code> object, a request needs to match the settings in only one <code>ByteMatchTuple</code> to be considered a match.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ByteMatchSet {
/// <p>The <code>ByteMatchSetId</code> for a <code>ByteMatchSet</code>. You use <code>ByteMatchSetId</code> to get information about a <code>ByteMatchSet</code> (see <a>GetByteMatchSet</a>), update a <code>ByteMatchSet</code> (see <a>UpdateByteMatchSet</a>), insert a <code>ByteMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>ByteMatchSet</code> from AWS WAF (see <a>DeleteByteMatchSet</a>).</p> <p> <code>ByteMatchSetId</code> is returned by <a>CreateByteMatchSet</a> and by <a>ListByteMatchSets</a>.</p>
#[serde(rename = "ByteMatchSetId")]
pub byte_match_set_id: String,
/// <p>Specifies the bytes (typically a string that corresponds with ASCII characters) that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings.</p>
#[serde(rename = "ByteMatchTuples")]
pub byte_match_tuples: Vec<ByteMatchTuple>,
/// <p>A friendly name or description of the <a>ByteMatchSet</a>. You can't change <code>Name</code> after you create a <code>ByteMatchSet</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
}
/// <p>Returned by <a>ListByteMatchSets</a>. Each <code>ByteMatchSetSummary</code> object includes the <code>Name</code> and <code>ByteMatchSetId</code> for one <a>ByteMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ByteMatchSetSummary {
/// <p>The <code>ByteMatchSetId</code> for a <code>ByteMatchSet</code>. You use <code>ByteMatchSetId</code> to get information about a <code>ByteMatchSet</code>, update a <code>ByteMatchSet</code>, remove a <code>ByteMatchSet</code> from a <code>Rule</code>, and delete a <code>ByteMatchSet</code> from AWS WAF.</p> <p> <code>ByteMatchSetId</code> is returned by <a>CreateByteMatchSet</a> and by <a>ListByteMatchSets</a>.</p>
#[serde(rename = "ByteMatchSetId")]
pub byte_match_set_id: String,
/// <p>A friendly name or description of the <a>ByteMatchSet</a>. You can't change <code>Name</code> after you create a <code>ByteMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
/// <p>In an <a>UpdateByteMatchSet</a> request, <code>ByteMatchSetUpdate</code> specifies whether to insert or delete a <a>ByteMatchTuple</a> and includes the settings for the <code>ByteMatchTuple</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ByteMatchSetUpdate {
/// <p>Specifies whether to insert or delete a <a>ByteMatchTuple</a>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>Information about the part of a web request that you want AWS WAF to inspect and the value that you want AWS WAF to search for. If you specify <code>DELETE</code> for the value of <code>Action</code>, the <code>ByteMatchTuple</code> values must exactly match the values in the <code>ByteMatchTuple</code> that you want to delete from the <code>ByteMatchSet</code>.</p>
#[serde(rename = "ByteMatchTuple")]
pub byte_match_tuple: ByteMatchTuple,
}
/// <p>The bytes (typically a string that corresponds with ASCII characters) that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ByteMatchTuple {
/// <p>The part of a web request that you want AWS WAF to search, such as a specified header or a query string. For more information, see <a>FieldToMatch</a>.</p>
#[serde(rename = "FieldToMatch")]
pub field_to_match: FieldToMatch,
/// <p>Within the portion of a web request that you want to search (for example, in the query string, if any), specify where you want AWS WAF to search. Valid values include the following:</p> <p> <b>CONTAINS</b> </p> <p>The specified part of the web request must include the value of <code>TargetString</code>, but the location doesn't matter.</p> <p> <b>CONTAINS_WORD</b> </p> <p>The specified part of the web request must include the value of <code>TargetString</code>, and <code>TargetString</code> must contain only alphanumeric characters or underscore (A-Z, a-z, 0-9, or _). In addition, <code>TargetString</code> must be a word, which means one of the following:</p> <ul> <li> <p> <code>TargetString</code> exactly matches the value of the specified part of the web request, such as the value of a header.</p> </li> <li> <p> <code>TargetString</code> is at the beginning of the specified part of the web request and is followed by a character other than an alphanumeric character or underscore (_), for example, <code>BadBot;</code>.</p> </li> <li> <p> <code>TargetString</code> is at the end of the specified part of the web request and is preceded by a character other than an alphanumeric character or underscore (_), for example, <code>;BadBot</code>.</p> </li> <li> <p> <code>TargetString</code> is in the middle of the specified part of the web request and is preceded and followed by characters other than alphanumeric characters or underscore (_), for example, <code>-BadBot;</code>.</p> </li> </ul> <p> <b>EXACTLY</b> </p> <p>The value of the specified part of the web request must exactly match the value of <code>TargetString</code>.</p> <p> <b>STARTS_WITH</b> </p> <p>The value of <code>TargetString</code> must appear at the beginning of the specified part of the web request.</p> <p> <b>ENDS_WITH</b> </p> <p>The value of <code>TargetString</code> must appear at the end of the specified part of the web request.</p>
#[serde(rename = "PositionalConstraint")]
pub positional_constraint: String,
/// <p>The value that you want AWS WAF to search for. AWS WAF searches for the specified string in the part of web requests that you specified in <code>FieldToMatch</code>. The maximum length of the value is 50 bytes.</p> <p>Valid values depend on the values that you specified for <code>FieldToMatch</code>:</p> <ul> <li> <p> <code>HEADER</code>: The value that you want AWS WAF to search for in the request header that you specified in <a>FieldToMatch</a>, for example, the value of the <code>User-Agent</code> or <code>Referer</code> header.</p> </li> <li> <p> <code>METHOD</code>: The HTTP method, which indicates the type of operation specified in the request. CloudFront supports the following methods: <code>DELETE</code>, <code>GET</code>, <code>HEAD</code>, <code>OPTIONS</code>, <code>PATCH</code>, <code>POST</code>, and <code>PUT</code>.</p> </li> <li> <p> <code>QUERY_STRING</code>: The value that you want AWS WAF to search for in the query string, which is the part of a URL that appears after a <code>?</code> character.</p> </li> <li> <p> <code>URI</code>: The value that you want AWS WAF to search for in the part of a URL that identifies a resource, for example, <code>/images/daily-ad.jpg</code>.</p> </li> <li> <p> <code>BODY</code>: The part of a request that contains any additional data that you want to send to your web server as the HTTP request body, such as data from a form. The request body immediately follows the request headers. Note that only the first <code>8192</code> bytes of the request body are forwarded to AWS WAF for inspection. To allow or block requests based on the length of the body, you can create a size constraint set. For more information, see <a>CreateSizeConstraintSet</a>. </p> </li> <li> <p> <code>SINGLE_QUERY_ARG</code>: The parameter in the query string that you will inspect, such as <i>UserName</i> or <i>SalesRegion</i>. The maximum length for <code>SINGLE_QUERY_ARG</code> is 30 characters.</p> </li> <li> <p> <code>ALL_QUERY_ARGS</code>: Similar to <code>SINGLE_QUERY_ARG</code>, but instead of inspecting a single parameter, AWS WAF inspects all parameters within the query string for the value or regex pattern that you specify in <code>TargetString</code>.</p> </li> </ul> <p>If <code>TargetString</code> includes alphabetic characters A-Z and a-z, note that the value is case sensitive.</p> <p> <b>If you're using the AWS WAF API</b> </p> <p>Specify a base64-encoded version of the value. The maximum length of the value before you base64-encode it is 50 bytes.</p> <p>For example, suppose the value of <code>Type</code> is <code>HEADER</code> and the value of <code>Data</code> is <code>User-Agent</code>. If you want to search the <code>User-Agent</code> header for the value <code>BadBot</code>, you base64-encode <code>BadBot</code> using MIME base64-encoding and include the resulting value, <code>QmFkQm90</code>, in the value of <code>TargetString</code>.</p> <p> <b>If you're using the AWS CLI or one of the AWS SDKs</b> </p> <p>The value that you want AWS WAF to search for. The SDK automatically base64 encodes the value.</p>
#[serde(rename = "TargetString")]
#[serde(
deserialize_with = "::rusoto_core::serialization::SerdeBlob::deserialize_blob",
serialize_with = "::rusoto_core::serialization::SerdeBlob::serialize_blob",
default
)]
pub target_string: bytes::Bytes,
/// <p>Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. If you specify a transformation, AWS WAF performs the transformation on <code>TargetString</code> before inspecting a request for a match.</p> <p>You can only specify a single type of TextTransformation.</p> <p> <b>CMD_LINE</b> </p> <p>When you're concerned that attackers are injecting an operating system command line command and using unusual formatting to disguise some or all of the command, use this option to perform the following transformations:</p> <ul> <li> <p>Delete the following characters: \ " ' ^</p> </li> <li> <p>Delete spaces before the following characters: / (</p> </li> <li> <p>Replace the following characters with a space: , ;</p> </li> <li> <p>Replace multiple spaces with one space</p> </li> <li> <p>Convert uppercase letters (A-Z) to lowercase (a-z)</p> </li> </ul> <p> <b>COMPRESS_WHITE_SPACE</b> </p> <p>Use this option to replace the following characters with a space character (decimal 32):</p> <ul> <li> <p>\f, formfeed, decimal 12</p> </li> <li> <p>\t, tab, decimal 9</p> </li> <li> <p>\n, newline, decimal 10</p> </li> <li> <p>\r, carriage return, decimal 13</p> </li> <li> <p>\v, vertical tab, decimal 11</p> </li> <li> <p>non-breaking space, decimal 160</p> </li> </ul> <p> <code>COMPRESS_WHITE_SPACE</code> also replaces multiple spaces with one space.</p> <p> <b>HTML_ENTITY_DECODE</b> </p> <p>Use this option to replace HTML-encoded characters with unencoded characters. <code>HTML_ENTITY_DECODE</code> performs the following operations:</p> <ul> <li> <p>Replaces <code>(ampersand)quot;</code> with <code>"</code> </p> </li> <li> <p>Replaces <code>(ampersand)nbsp;</code> with a non-breaking space, decimal 160</p> </li> <li> <p>Replaces <code>(ampersand)lt;</code> with a "less than" symbol</p> </li> <li> <p>Replaces <code>(ampersand)gt;</code> with <code>></code> </p> </li> <li> <p>Replaces characters that are represented in hexadecimal format, <code>(ampersand)#xhhhh;</code>, with the corresponding characters</p> </li> <li> <p>Replaces characters that are represented in decimal format, <code>(ampersand)#nnnn;</code>, with the corresponding characters</p> </li> </ul> <p> <b>LOWERCASE</b> </p> <p>Use this option to convert uppercase letters (A-Z) to lowercase (a-z).</p> <p> <b>URL_DECODE</b> </p> <p>Use this option to decode a URL-encoded value.</p> <p> <b>NONE</b> </p> <p>Specify <code>NONE</code> if you don't want to perform any text transformations.</p>
#[serde(rename = "TextTransformation")]
pub text_transformation: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateByteMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description of the <a>ByteMatchSet</a>. You can't change <code>Name</code> after you create a <code>ByteMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateByteMatchSetResponse {
/// <p>A <a>ByteMatchSet</a> that contains no <code>ByteMatchTuple</code> objects.</p>
#[serde(rename = "ByteMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub byte_match_set: Option<ByteMatchSet>,
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateByteMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateGeoMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description of the <a>GeoMatchSet</a>. You can't change <code>Name</code> after you create the <code>GeoMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateGeoMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateGeoMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>The <a>GeoMatchSet</a> returned in the <code>CreateGeoMatchSet</code> response. The <code>GeoMatchSet</code> contains no <code>GeoMatchConstraints</code>.</p>
#[serde(rename = "GeoMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub geo_match_set: Option<GeoMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateIPSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description of the <a>IPSet</a>. You can't change <code>Name</code> after you create the <code>IPSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateIPSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateIPSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>The <a>IPSet</a> returned in the <code>CreateIPSet</code> response.</p>
#[serde(rename = "IPSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ip_set: Option<IPSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateRateBasedRuleRequest {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateRateBasedRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description for the metrics for this <code>RateBasedRule</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change the name of the metric after you create the <code>RateBasedRule</code>.</p>
#[serde(rename = "MetricName")]
pub metric_name: String,
/// <p>A friendly name or description of the <a>RateBasedRule</a>. You can't change the name of a <code>RateBasedRule</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>The field that AWS WAF uses to determine if requests are likely arriving from a single source and thus subject to rate monitoring. The only valid value for <code>RateKey</code> is <code>IP</code>. <code>IP</code> indicates that requests that arrive from the same IP address are subject to the <code>RateLimit</code> that is specified in the <code>RateBasedRule</code>.</p>
#[serde(rename = "RateKey")]
pub rate_key: String,
/// <p>The maximum number of requests, which have an identical value in the field that is specified by <code>RateKey</code>, allowed in a five-minute period. If the number of requests exceeds the <code>RateLimit</code> and the other predicates specified in the rule are also met, AWS WAF triggers the action that is specified for this rule.</p>
#[serde(rename = "RateLimit")]
pub rate_limit: i64,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateRateBasedRuleResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateRateBasedRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>The <a>RateBasedRule</a> that is returned in the <code>CreateRateBasedRule</code> response.</p>
#[serde(rename = "Rule")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule: Option<RateBasedRule>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateRegexMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description of the <a>RegexMatchSet</a>. You can't change <code>Name</code> after you create a <code>RegexMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateRegexMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateRegexMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>A <a>RegexMatchSet</a> that contains no <code>RegexMatchTuple</code> objects.</p>
#[serde(rename = "RegexMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_match_set: Option<RegexMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateRegexPatternSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description of the <a>RegexPatternSet</a>. You can't change <code>Name</code> after you create a <code>RegexPatternSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateRegexPatternSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateRegexPatternSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>A <a>RegexPatternSet</a> that contains no objects.</p>
#[serde(rename = "RegexPatternSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_pattern_set: Option<RegexPatternSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateRuleGroupRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description for the metrics for this <code>RuleGroup</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change the name of the metric after you create the <code>RuleGroup</code>.</p>
#[serde(rename = "MetricName")]
pub metric_name: String,
/// <p>A friendly name or description of the <a>RuleGroup</a>. You can't change <code>Name</code> after you create a <code>RuleGroup</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateRuleGroupResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateRuleGroup</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>An empty <a>RuleGroup</a>.</p>
#[serde(rename = "RuleGroup")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule_group: Option<RuleGroup>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateRuleRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description for the metrics for this <code>Rule</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain white space. You can't change the name of the metric after you create the <code>Rule</code>.</p>
#[serde(rename = "MetricName")]
pub metric_name: String,
/// <p>A friendly name or description of the <a>Rule</a>. You can't change the name of a <code>Rule</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateRuleResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>The <a>Rule</a> returned in the <code>CreateRule</code> response.</p>
#[serde(rename = "Rule")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule: Option<Rule>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateSizeConstraintSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description of the <a>SizeConstraintSet</a>. You can't change <code>Name</code> after you create a <code>SizeConstraintSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateSizeConstraintSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateSizeConstraintSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>A <a>SizeConstraintSet</a> that contains no <code>SizeConstraint</code> objects.</p>
#[serde(rename = "SizeConstraintSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub size_constraint_set: Option<SizeConstraintSet>,
}
/// <p>A request to create a <a>SqlInjectionMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateSqlInjectionMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description for the <a>SqlInjectionMatchSet</a> that you're creating. You can't change <code>Name</code> after you create the <code>SqlInjectionMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
/// <p>The response to a <code>CreateSqlInjectionMatchSet</code> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateSqlInjectionMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateSqlInjectionMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>A <a>SqlInjectionMatchSet</a>.</p>
#[serde(rename = "SqlInjectionMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sql_injection_match_set: Option<SqlInjectionMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateWebACLRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The action that you want AWS WAF to take when a request doesn't match the criteria specified in any of the <code>Rule</code> objects that are associated with the <code>WebACL</code>.</p>
#[serde(rename = "DefaultAction")]
pub default_action: WafAction,
/// <p>A friendly name or description for the metrics for this <code>WebACL</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain white space. You can't change <code>MetricName</code> after you create the <code>WebACL</code>.</p>
#[serde(rename = "MetricName")]
pub metric_name: String,
/// <p>A friendly name or description of the <a>WebACL</a>. You can't change <code>Name</code> after you create the <code>WebACL</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateWebACLResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateWebACL</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>The <a>WebACL</a> returned in the <code>CreateWebACL</code> response.</p>
#[serde(rename = "WebACL")]
#[serde(skip_serializing_if = "Option::is_none")]
pub web_acl: Option<WebACL>,
}
/// <p>A request to create an <a>XssMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateXssMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A friendly name or description for the <a>XssMatchSet</a> that you're creating. You can't change <code>Name</code> after you create the <code>XssMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
}
/// <p>The response to a <code>CreateXssMatchSet</code> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateXssMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>CreateXssMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
/// <p>An <a>XssMatchSet</a>.</p>
#[serde(rename = "XssMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub xss_match_set: Option<XssMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteByteMatchSetRequest {
/// <p>The <code>ByteMatchSetId</code> of the <a>ByteMatchSet</a> that you want to delete. <code>ByteMatchSetId</code> is returned by <a>CreateByteMatchSet</a> and by <a>ListByteMatchSets</a>.</p>
#[serde(rename = "ByteMatchSetId")]
pub byte_match_set_id: String,
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteByteMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteByteMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteGeoMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>GeoMatchSetID</code> of the <a>GeoMatchSet</a> that you want to delete. <code>GeoMatchSetId</code> is returned by <a>CreateGeoMatchSet</a> and by <a>ListGeoMatchSets</a>.</p>
#[serde(rename = "GeoMatchSetId")]
pub geo_match_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteGeoMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteGeoMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteIPSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>IPSetId</code> of the <a>IPSet</a> that you want to delete. <code>IPSetId</code> is returned by <a>CreateIPSet</a> and by <a>ListIPSets</a>.</p>
#[serde(rename = "IPSetId")]
pub ip_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteIPSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteIPSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteLoggingConfigurationRequest {
/// <p>The Amazon Resource Name (ARN) of the web ACL from which you want to delete the <a>LoggingConfiguration</a>.</p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteLoggingConfigurationResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeletePermissionPolicyRequest {
/// <p>The Amazon Resource Name (ARN) of the RuleGroup from which you want to delete the policy.</p> <p>The user making the request must be the owner of the RuleGroup.</p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeletePermissionPolicyResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteRateBasedRuleRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RuleId</code> of the <a>RateBasedRule</a> that you want to delete. <code>RuleId</code> is returned by <a>CreateRateBasedRule</a> and by <a>ListRateBasedRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteRateBasedRuleResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteRateBasedRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteRegexMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RegexMatchSetId</code> of the <a>RegexMatchSet</a> that you want to delete. <code>RegexMatchSetId</code> is returned by <a>CreateRegexMatchSet</a> and by <a>ListRegexMatchSets</a>.</p>
#[serde(rename = "RegexMatchSetId")]
pub regex_match_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteRegexMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteRegexMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteRegexPatternSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RegexPatternSetId</code> of the <a>RegexPatternSet</a> that you want to delete. <code>RegexPatternSetId</code> is returned by <a>CreateRegexPatternSet</a> and by <a>ListRegexPatternSets</a>.</p>
#[serde(rename = "RegexPatternSetId")]
pub regex_pattern_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteRegexPatternSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteRegexPatternSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteRuleGroupRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RuleGroupId</code> of the <a>RuleGroup</a> that you want to delete. <code>RuleGroupId</code> is returned by <a>CreateRuleGroup</a> and by <a>ListRuleGroups</a>.</p>
#[serde(rename = "RuleGroupId")]
pub rule_group_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteRuleGroupResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteRuleGroup</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteRuleRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RuleId</code> of the <a>Rule</a> that you want to delete. <code>RuleId</code> is returned by <a>CreateRule</a> and by <a>ListRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteRuleResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteSizeConstraintSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>SizeConstraintSetId</code> of the <a>SizeConstraintSet</a> that you want to delete. <code>SizeConstraintSetId</code> is returned by <a>CreateSizeConstraintSet</a> and by <a>ListSizeConstraintSets</a>.</p>
#[serde(rename = "SizeConstraintSetId")]
pub size_constraint_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteSizeConstraintSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteSizeConstraintSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
/// <p>A request to delete a <a>SqlInjectionMatchSet</a> from AWS WAF.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteSqlInjectionMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>SqlInjectionMatchSetId</code> of the <a>SqlInjectionMatchSet</a> that you want to delete. <code>SqlInjectionMatchSetId</code> is returned by <a>CreateSqlInjectionMatchSet</a> and by <a>ListSqlInjectionMatchSets</a>.</p>
#[serde(rename = "SqlInjectionMatchSetId")]
pub sql_injection_match_set_id: String,
}
/// <p>The response to a request to delete a <a>SqlInjectionMatchSet</a> from AWS WAF.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteSqlInjectionMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteSqlInjectionMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteWebACLRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>WebACLId</code> of the <a>WebACL</a> that you want to delete. <code>WebACLId</code> is returned by <a>CreateWebACL</a> and by <a>ListWebACLs</a>.</p>
#[serde(rename = "WebACLId")]
pub web_acl_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteWebACLResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteWebACL</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
/// <p>A request to delete an <a>XssMatchSet</a> from AWS WAF.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteXssMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>XssMatchSetId</code> of the <a>XssMatchSet</a> that you want to delete. <code>XssMatchSetId</code> is returned by <a>CreateXssMatchSet</a> and by <a>ListXssMatchSets</a>.</p>
#[serde(rename = "XssMatchSetId")]
pub xss_match_set_id: String,
}
/// <p>The response to a request to delete an <a>XssMatchSet</a> from AWS WAF.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteXssMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>DeleteXssMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
/// <p>The rule to exclude from a rule group. This is applicable only when the <code>ActivatedRule</code> refers to a <code>RuleGroup</code>. The rule must belong to the <code>RuleGroup</code> that is specified by the <code>ActivatedRule</code>. </p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ExcludedRule {
/// <p>The unique identifier for the rule to exclude from the rule group.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
/// <p>Specifies where in a web request to look for <code>TargetString</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct FieldToMatch {
/// <p>When the value of <code>Type</code> is <code>HEADER</code>, enter the name of the header that you want AWS WAF to search, for example, <code>User-Agent</code> or <code>Referer</code>. The name of the header is not case sensitive.</p> <p>When the value of <code>Type</code> is <code>SINGLE_QUERY_ARG</code>, enter the name of the parameter that you want AWS WAF to search, for example, <code>UserName</code> or <code>SalesRegion</code>. The parameter name is not case sensitive.</p> <p>If the value of <code>Type</code> is any other value, omit <code>Data</code>.</p>
#[serde(rename = "Data")]
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<String>,
    /// <p><p>The part of the web request that you want AWS WAF to search for a specified string. Parts of a request that you can search include the following:</p> <ul> <li> <p> <code>HEADER</code>: A specified request header, for example, the value of the <code>User-Agent</code> or <code>Referer</code> header. If you choose <code>HEADER</code> for the type, specify the name of the header in <code>Data</code>.</p> </li> <li> <p> <code>METHOD</code>: The HTTP method, which indicates the type of operation that the request is asking the origin to perform. Amazon CloudFront supports the following methods: <code>DELETE</code>, <code>GET</code>, <code>HEAD</code>, <code>OPTIONS</code>, <code>PATCH</code>, <code>POST</code>, and <code>PUT</code>.</p> </li> <li> <p> <code>QUERY_STRING</code>: A query string, which is the part of a URL that appears after a <code>?</code> character, if any.</p> </li> <li> <p> <code>URI</code>: The part of a web request that identifies a resource, for example, <code>/images/daily-ad.jpg</code>.</p> </li> <li> <p> <code>BODY</code>: The part of a request that contains any additional data that you want to send to your web server as the HTTP request body, such as data from a form. The request body immediately follows the request headers. Note that only the first <code>8192</code> bytes of the request body are forwarded to AWS WAF for inspection. To allow or block requests based on the length of the body, you can create a size constraint set. For more information, see <a>CreateSizeConstraintSet</a>. </p> </li> <li> <p> <code>SINGLE_QUERY_ARG</code>: The parameter in the query string that you will inspect, such as <i>UserName</i> or <i>SalesRegion</i>. The maximum length for <code>SINGLE_QUERY_ARG</code> is 30 characters.</p> </li> <li> <p> <code>ALL_QUERY_ARGS</code>: Similar to <code>SINGLE_QUERY_ARG</code>, but rather than inspecting a single parameter, AWS WAF will inspect all parameters within the query for the value or regex pattern that you specify in <code>TargetString</code>.</p> </li> </ul></p>
#[serde(rename = "Type")]
pub type_: String,
}
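// Illustrative sketch (not part of the generated API surface): constructing a
// `FieldToMatch`. For `HEADER` and `SINGLE_QUERY_ARG` the `data` field names the
// header or parameter to inspect; for types such as `QUERY_STRING` or `URI` it is
// omitted. The header name below is an arbitrary example value.
#[allow(dead_code)]
fn example_field_to_match() -> FieldToMatch {
    FieldToMatch {
        // Inspect the value of the User-Agent request header.
        type_: "HEADER".to_owned(),
        data: Some("User-Agent".to_owned()),
    }
}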
/// <p>The country from which web requests originate that you want AWS WAF to search for.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct GeoMatchConstraint {
/// <p>The type of geographical area you want AWS WAF to search for. Currently <code>Country</code> is the only valid value.</p>
#[serde(rename = "Type")]
pub type_: String,
/// <p>The country that you want AWS WAF to search for.</p>
#[serde(rename = "Value")]
pub value: String,
}
/// <p>Contains one or more countries that AWS WAF will search for.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GeoMatchSet {
/// <p>An array of <a>GeoMatchConstraint</a> objects, which contain the country that you want AWS WAF to search for.</p>
#[serde(rename = "GeoMatchConstraints")]
pub geo_match_constraints: Vec<GeoMatchConstraint>,
    /// <p>The <code>GeoMatchSetId</code> for a <code>GeoMatchSet</code>. You use <code>GeoMatchSetId</code> to get information about a <code>GeoMatchSet</code> (see <a>GeoMatchSet</a>), update a <code>GeoMatchSet</code> (see <a>UpdateGeoMatchSet</a>), insert a <code>GeoMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>GeoMatchSet</code> from AWS WAF (see <a>DeleteGeoMatchSet</a>).</p> <p> <code>GeoMatchSetId</code> is returned by <a>CreateGeoMatchSet</a> and by <a>ListGeoMatchSets</a>.</p>
#[serde(rename = "GeoMatchSetId")]
pub geo_match_set_id: String,
    /// <p>A friendly name or description of the <a>GeoMatchSet</a>. You can't change the name of a <code>GeoMatchSet</code> after you create it.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
}
/// <p>Contains the identifier and the name of the <code>GeoMatchSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GeoMatchSetSummary {
    /// <p>The <code>GeoMatchSetId</code> for a <a>GeoMatchSet</a>. You can use <code>GeoMatchSetId</code> in a <a>GetGeoMatchSet</a> request to get detailed information about a <a>GeoMatchSet</a>.</p>
#[serde(rename = "GeoMatchSetId")]
pub geo_match_set_id: String,
    /// <p>A friendly name or description of the <a>GeoMatchSet</a>. You can't change the name of a <code>GeoMatchSet</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
}
/// <p>Specifies the type of update to perform to an <a>GeoMatchSet</a> with <a>UpdateGeoMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GeoMatchSetUpdate {
/// <p>Specifies whether to insert or delete a country with <a>UpdateGeoMatchSet</a>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>The country from which web requests originate that you want AWS WAF to search for.</p>
#[serde(rename = "GeoMatchConstraint")]
pub geo_match_constraint: GeoMatchConstraint,
}
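// Illustrative sketch (not part of the generated API surface): a
// `GeoMatchSetUpdate` that inserts one country constraint. `Type` is always
// "Country" and `Value` is an ISO 3166-1 alpha-2 code; "US" here is just an
// example. A vector of these updates would be passed to `UpdateGeoMatchSet`.
#[allow(dead_code)]
fn example_geo_match_set_update() -> GeoMatchSetUpdate {
    GeoMatchSetUpdate {
        // "INSERT" adds the constraint; "DELETE" removes an existing one.
        action: "INSERT".to_owned(),
        geo_match_constraint: GeoMatchConstraint {
            type_: "Country".to_owned(),
            value: "US".to_owned(),
        },
    }
}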
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetByteMatchSetRequest {
/// <p>The <code>ByteMatchSetId</code> of the <a>ByteMatchSet</a> that you want to get. <code>ByteMatchSetId</code> is returned by <a>CreateByteMatchSet</a> and by <a>ListByteMatchSets</a>.</p>
#[serde(rename = "ByteMatchSetId")]
pub byte_match_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetByteMatchSetResponse {
/// <p><p>Information about the <a>ByteMatchSet</a> that you specified in the <code>GetByteMatchSet</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>ByteMatchSet</a>: Contains <code>ByteMatchSetId</code>, <code>ByteMatchTuples</code>, and <code>Name</code> </p> </li> <li> <p> <code>ByteMatchTuples</code>: Contains an array of <a>ByteMatchTuple</a> objects. Each <code>ByteMatchTuple</code> object contains <a>FieldToMatch</a>, <code>PositionalConstraint</code>, <code>TargetString</code>, and <code>TextTransformation</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "ByteMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub byte_match_set: Option<ByteMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetChangeTokenRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetChangeTokenResponse {
/// <p>The <code>ChangeToken</code> that you used in the request. Use this value in a <code>GetChangeTokenStatus</code> request to get the current status of the request. </p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetChangeTokenStatusRequest {
/// <p>The change token for which you want to get the status. This change token was previously returned in the <code>GetChangeToken</code> response.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetChangeTokenStatusResponse {
/// <p>The status of the change token.</p>
#[serde(rename = "ChangeTokenStatus")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token_status: Option<String>,
}
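// Illustrative sketch (not part of the generated API surface) of the
// change-token flow described in the doc comments above: request a token with
// `GetChangeToken`, submit it with a mutating call such as `DeleteRule`, then
// poll `GetChangeTokenStatus`. The rule ID is a placeholder; the token would
// come from a real `GetChangeTokenResponse`.
#[allow(dead_code)]
fn example_change_token_flow(
    token: String,
) -> (GetChangeTokenRequest, DeleteRuleRequest, GetChangeTokenStatusRequest) {
    (
        // Step 1: an empty request that asks AWS WAF for a fresh change token.
        GetChangeTokenRequest {},
        // Step 2: use the returned token in the mutating request.
        DeleteRuleRequest {
            change_token: token.clone(),
            rule_id: "example-rule-id".to_owned(),
        },
        // Step 3: query the status of that change token.
        GetChangeTokenStatusRequest {
            change_token: token,
        },
    )
}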
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetGeoMatchSetRequest {
/// <p>The <code>GeoMatchSetId</code> of the <a>GeoMatchSet</a> that you want to get. <code>GeoMatchSetId</code> is returned by <a>CreateGeoMatchSet</a> and by <a>ListGeoMatchSets</a>.</p>
#[serde(rename = "GeoMatchSetId")]
pub geo_match_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetGeoMatchSetResponse {
    /// <p>Information about the <a>GeoMatchSet</a> that you specified in the <code>GetGeoMatchSet</code> request. This includes the <code>Type</code>, which for a <code>GeoMatchConstraint</code> is always <code>Country</code>, as well as the <code>Value</code>, which is the identifier for a specific country.</p>
#[serde(rename = "GeoMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub geo_match_set: Option<GeoMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetIPSetRequest {
/// <p>The <code>IPSetId</code> of the <a>IPSet</a> that you want to get. <code>IPSetId</code> is returned by <a>CreateIPSet</a> and by <a>ListIPSets</a>.</p>
#[serde(rename = "IPSetId")]
pub ip_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetIPSetResponse {
/// <p><p>Information about the <a>IPSet</a> that you specified in the <code>GetIPSet</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>IPSet</a>: Contains <code>IPSetDescriptors</code>, <code>IPSetId</code>, and <code>Name</code> </p> </li> <li> <p> <code>IPSetDescriptors</code>: Contains an array of <a>IPSetDescriptor</a> objects. Each <code>IPSetDescriptor</code> object contains <code>Type</code> and <code>Value</code> </p> </li> </ul></p>
#[serde(rename = "IPSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ip_set: Option<IPSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetLoggingConfigurationRequest {
/// <p>The Amazon Resource Name (ARN) of the web ACL for which you want to get the <a>LoggingConfiguration</a>.</p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetLoggingConfigurationResponse {
/// <p>The <a>LoggingConfiguration</a> for the specified web ACL.</p>
#[serde(rename = "LoggingConfiguration")]
#[serde(skip_serializing_if = "Option::is_none")]
pub logging_configuration: Option<LoggingConfiguration>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetPermissionPolicyRequest {
/// <p>The Amazon Resource Name (ARN) of the RuleGroup for which you want to get the policy.</p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetPermissionPolicyResponse {
/// <p>The IAM policy attached to the specified RuleGroup.</p>
#[serde(rename = "Policy")]
#[serde(skip_serializing_if = "Option::is_none")]
pub policy: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetRateBasedRuleManagedKeysRequest {
/// <p>A null value and not currently used. Do not include this in your request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>The <code>RuleId</code> of the <a>RateBasedRule</a> for which you want to get a list of <code>ManagedKeys</code>. <code>RuleId</code> is returned by <a>CreateRateBasedRule</a> and by <a>ListRateBasedRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetRateBasedRuleManagedKeysResponse {
/// <p>An array of IP addresses that currently are blocked by the specified <a>RateBasedRule</a>. </p>
#[serde(rename = "ManagedKeys")]
#[serde(skip_serializing_if = "Option::is_none")]
pub managed_keys: Option<Vec<String>>,
/// <p>A null value and not currently used.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetRateBasedRuleRequest {
/// <p>The <code>RuleId</code> of the <a>RateBasedRule</a> that you want to get. <code>RuleId</code> is returned by <a>CreateRateBasedRule</a> and by <a>ListRateBasedRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetRateBasedRuleResponse {
/// <p>Information about the <a>RateBasedRule</a> that you specified in the <code>GetRateBasedRule</code> request.</p>
#[serde(rename = "Rule")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule: Option<RateBasedRule>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetRegexMatchSetRequest {
/// <p>The <code>RegexMatchSetId</code> of the <a>RegexMatchSet</a> that you want to get. <code>RegexMatchSetId</code> is returned by <a>CreateRegexMatchSet</a> and by <a>ListRegexMatchSets</a>.</p>
#[serde(rename = "RegexMatchSetId")]
pub regex_match_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetRegexMatchSetResponse {
/// <p>Information about the <a>RegexMatchSet</a> that you specified in the <code>GetRegexMatchSet</code> request. For more information, see <a>RegexMatchTuple</a>.</p>
#[serde(rename = "RegexMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_match_set: Option<RegexMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetRegexPatternSetRequest {
/// <p>The <code>RegexPatternSetId</code> of the <a>RegexPatternSet</a> that you want to get. <code>RegexPatternSetId</code> is returned by <a>CreateRegexPatternSet</a> and by <a>ListRegexPatternSets</a>.</p>
#[serde(rename = "RegexPatternSetId")]
pub regex_pattern_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetRegexPatternSetResponse {
/// <p>Information about the <a>RegexPatternSet</a> that you specified in the <code>GetRegexPatternSet</code> request, including the identifier of the pattern set and the regular expression patterns you want AWS WAF to search for. </p>
#[serde(rename = "RegexPatternSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_pattern_set: Option<RegexPatternSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetRuleGroupRequest {
/// <p>The <code>RuleGroupId</code> of the <a>RuleGroup</a> that you want to get. <code>RuleGroupId</code> is returned by <a>CreateRuleGroup</a> and by <a>ListRuleGroups</a>.</p>
#[serde(rename = "RuleGroupId")]
pub rule_group_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetRuleGroupResponse {
/// <p>Information about the <a>RuleGroup</a> that you specified in the <code>GetRuleGroup</code> request. </p>
#[serde(rename = "RuleGroup")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule_group: Option<RuleGroup>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetRuleRequest {
/// <p>The <code>RuleId</code> of the <a>Rule</a> that you want to get. <code>RuleId</code> is returned by <a>CreateRule</a> and by <a>ListRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetRuleResponse {
/// <p><p>Information about the <a>Rule</a> that you specified in the <code>GetRule</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>Rule</a>: Contains <code>MetricName</code>, <code>Name</code>, an array of <code>Predicate</code> objects, and <code>RuleId</code> </p> </li> <li> <p> <a>Predicate</a>: Each <code>Predicate</code> object contains <code>DataId</code>, <code>Negated</code>, and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Rule")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule: Option<Rule>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetSampledRequestsRequest {
/// <p>The number of requests that you want AWS WAF to return from among the first 5,000 requests that your AWS resource received during the time range. If your resource received fewer requests than the value of <code>MaxItems</code>, <code>GetSampledRequests</code> returns information about all of them. </p>
#[serde(rename = "MaxItems")]
pub max_items: i64,
/// <p><p> <code>RuleId</code> is one of three values:</p> <ul> <li> <p>The <code>RuleId</code> of the <code>Rule</code> or the <code>RuleGroupId</code> of the <code>RuleGroup</code> for which you want <code>GetSampledRequests</code> to return a sample of requests.</p> </li> <li> <p> <code>Default_Action</code>, which causes <code>GetSampledRequests</code> to return a sample of the requests that didn't match any of the rules in the specified <code>WebACL</code>.</p> </li> </ul></p>
#[serde(rename = "RuleId")]
pub rule_id: String,
/// <p>The start date and time and the end date and time of the range for which you want <code>GetSampledRequests</code> to return a sample of requests. Specify the date and time in the following format: <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.</p>
#[serde(rename = "TimeWindow")]
pub time_window: TimeWindow,
/// <p>The <code>WebACLId</code> of the <code>WebACL</code> for which you want <code>GetSampledRequests</code> to return a sample of requests.</p>
#[serde(rename = "WebAclId")]
pub web_acl_id: String,
}
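// Illustrative sketch (not part of the generated API surface): a
// `GetSampledRequests` request that samples up to 100 of the first 5,000
// requests evaluated by a web ACL's default action. The `TimeWindow` (defined
// elsewhere in this module) is taken as a parameter and must cover a range
// within the previous three hours; the web ACL ID is a placeholder.
#[allow(dead_code)]
fn example_get_sampled_requests(window: TimeWindow) -> GetSampledRequestsRequest {
    GetSampledRequestsRequest {
        max_items: 100,
        // "Default_Action" samples requests that did not match any rule.
        rule_id: "Default_Action".to_owned(),
        time_window: window,
        web_acl_id: "example-web-acl-id".to_owned(),
    }
}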
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetSampledRequestsResponse {
/// <p>The total number of requests from which <code>GetSampledRequests</code> got a sample of <code>MaxItems</code> requests. If <code>PopulationSize</code> is less than <code>MaxItems</code>, the sample includes every request that your AWS resource received during the specified time range.</p>
#[serde(rename = "PopulationSize")]
#[serde(skip_serializing_if = "Option::is_none")]
pub population_size: Option<i64>,
/// <p>A complex type that contains detailed information about each of the requests in the sample.</p>
#[serde(rename = "SampledRequests")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sampled_requests: Option<Vec<SampledHTTPRequest>>,
/// <p>Usually, <code>TimeWindow</code> is the time range that you specified in the <code>GetSampledRequests</code> request. However, if your AWS resource received more than 5,000 requests during the time range that you specified in the request, <code>GetSampledRequests</code> returns the time range for the first 5,000 requests.</p>
#[serde(rename = "TimeWindow")]
#[serde(skip_serializing_if = "Option::is_none")]
pub time_window: Option<TimeWindow>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetSizeConstraintSetRequest {
/// <p>The <code>SizeConstraintSetId</code> of the <a>SizeConstraintSet</a> that you want to get. <code>SizeConstraintSetId</code> is returned by <a>CreateSizeConstraintSet</a> and by <a>ListSizeConstraintSets</a>.</p>
#[serde(rename = "SizeConstraintSetId")]
pub size_constraint_set_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetSizeConstraintSetResponse {
/// <p><p>Information about the <a>SizeConstraintSet</a> that you specified in the <code>GetSizeConstraintSet</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>SizeConstraintSet</a>: Contains <code>SizeConstraintSetId</code>, <code>SizeConstraints</code>, and <code>Name</code> </p> </li> <li> <p> <code>SizeConstraints</code>: Contains an array of <a>SizeConstraint</a> objects. Each <code>SizeConstraint</code> object contains <a>FieldToMatch</a>, <code>TextTransformation</code>, <code>ComparisonOperator</code>, and <code>Size</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "SizeConstraintSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub size_constraint_set: Option<SizeConstraintSet>,
}
/// <p>A request to get a <a>SqlInjectionMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetSqlInjectionMatchSetRequest {
/// <p>The <code>SqlInjectionMatchSetId</code> of the <a>SqlInjectionMatchSet</a> that you want to get. <code>SqlInjectionMatchSetId</code> is returned by <a>CreateSqlInjectionMatchSet</a> and by <a>ListSqlInjectionMatchSets</a>.</p>
#[serde(rename = "SqlInjectionMatchSetId")]
pub sql_injection_match_set_id: String,
}
/// <p>The response to a <a>GetSqlInjectionMatchSet</a> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetSqlInjectionMatchSetResponse {
/// <p><p>Information about the <a>SqlInjectionMatchSet</a> that you specified in the <code>GetSqlInjectionMatchSet</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>SqlInjectionMatchSet</a>: Contains <code>Name</code>, <code>SqlInjectionMatchSetId</code>, and an array of <code>SqlInjectionMatchTuple</code> objects</p> </li> <li> <p> <a>SqlInjectionMatchTuple</a>: Each <code>SqlInjectionMatchTuple</code> object contains <code>FieldToMatch</code> and <code>TextTransformation</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "SqlInjectionMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sql_injection_match_set: Option<SqlInjectionMatchSet>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetWebACLRequest {
/// <p>The <code>WebACLId</code> of the <a>WebACL</a> that you want to get. <code>WebACLId</code> is returned by <a>CreateWebACL</a> and by <a>ListWebACLs</a>.</p>
#[serde(rename = "WebACLId")]
pub web_acl_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetWebACLResponse {
/// <p><p>Information about the <a>WebACL</a> that you specified in the <code>GetWebACL</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>WebACL</a>: Contains <code>DefaultAction</code>, <code>MetricName</code>, <code>Name</code>, an array of <code>Rule</code> objects, and <code>WebACLId</code> </p> </li> <li> <p> <code>DefaultAction</code> (Data type is <a>WafAction</a>): Contains <code>Type</code> </p> </li> <li> <p> <code>Rules</code>: Contains an array of <code>ActivatedRule</code> objects, which contain <code>Action</code>, <code>Priority</code>, and <code>RuleId</code> </p> </li> <li> <p> <code>Action</code>: Contains <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "WebACL")]
#[serde(skip_serializing_if = "Option::is_none")]
pub web_acl: Option<WebACL>,
}
/// <p>A request to get an <a>XssMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetXssMatchSetRequest {
/// <p>The <code>XssMatchSetId</code> of the <a>XssMatchSet</a> that you want to get. <code>XssMatchSetId</code> is returned by <a>CreateXssMatchSet</a> and by <a>ListXssMatchSets</a>.</p>
#[serde(rename = "XssMatchSetId")]
pub xss_match_set_id: String,
}
/// <p>The response to a <a>GetXssMatchSet</a> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetXssMatchSetResponse {
/// <p><p>Information about the <a>XssMatchSet</a> that you specified in the <code>GetXssMatchSet</code> request. For more information, see the following topics:</p> <ul> <li> <p> <a>XssMatchSet</a>: Contains <code>Name</code>, <code>XssMatchSetId</code>, and an array of <code>XssMatchTuple</code> objects</p> </li> <li> <p> <a>XssMatchTuple</a>: Each <code>XssMatchTuple</code> object contains <code>FieldToMatch</code> and <code>TextTransformation</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "XssMatchSet")]
#[serde(skip_serializing_if = "Option::is_none")]
pub xss_match_set: Option<XssMatchSet>,
}
/// <p>The response from a <a>GetSampledRequests</a> request includes an <code>HTTPHeader</code> complex type that appears as <code>Headers</code> in the response syntax. <code>HTTPHeader</code> contains the names and values of all of the headers that appear in one of the web requests that were returned by <code>GetSampledRequests</code>. </p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct HTTPHeader {
/// <p>The name of one of the headers in the sampled web request.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The value of one of the headers in the sampled web request.</p>
#[serde(rename = "Value")]
#[serde(skip_serializing_if = "Option::is_none")]
pub value: Option<String>,
}
/// <p>The response from a <a>GetSampledRequests</a> request includes an <code>HTTPRequest</code> complex type that appears as <code>Request</code> in the response syntax. <code>HTTPRequest</code> contains information about one of the web requests that were returned by <code>GetSampledRequests</code>. </p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct HTTPRequest {
/// <p><p>The IP address that the request originated from. If the <code>WebACL</code> is associated with a CloudFront distribution, this is the value of one of the following fields in CloudFront access logs:</p> <ul> <li> <p> <code>c-ip</code>, if the viewer did not use an HTTP proxy or a load balancer to send the request</p> </li> <li> <p> <code>x-forwarded-for</code>, if the viewer did use an HTTP proxy or a load balancer to send the request</p> </li> </ul></p>
#[serde(rename = "ClientIP")]
#[serde(skip_serializing_if = "Option::is_none")]
pub client_ip: Option<String>,
/// <p>The two-letter country code for the country that the request originated from. For a current list of country codes, see the Wikipedia entry <a href="https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2">ISO 3166-1 alpha-2</a>.</p>
#[serde(rename = "Country")]
#[serde(skip_serializing_if = "Option::is_none")]
pub country: Option<String>,
/// <p>The HTTP version specified in the sampled web request, for example, <code>HTTP/1.1</code>.</p>
#[serde(rename = "HTTPVersion")]
#[serde(skip_serializing_if = "Option::is_none")]
pub http_version: Option<String>,
/// <p>A complex type that contains two values for each header in the sampled web request: the name of the header and the value of the header.</p>
#[serde(rename = "Headers")]
#[serde(skip_serializing_if = "Option::is_none")]
pub headers: Option<Vec<HTTPHeader>>,
/// <p>The HTTP method specified in the sampled web request. CloudFront supports the following methods: <code>DELETE</code>, <code>GET</code>, <code>HEAD</code>, <code>OPTIONS</code>, <code>PATCH</code>, <code>POST</code>, and <code>PUT</code>. </p>
#[serde(rename = "Method")]
#[serde(skip_serializing_if = "Option::is_none")]
pub method: Option<String>,
/// <p>The part of a web request that identifies the resource, for example, <code>/images/daily-ad.jpg</code>.</p>
#[serde(rename = "URI")]
#[serde(skip_serializing_if = "Option::is_none")]
pub uri: Option<String>,
}
/// <p>Contains one or more IP addresses or blocks of IP addresses specified in Classless Inter-Domain Routing (CIDR) notation. AWS WAF supports IPv4 address ranges: /8 and any range from /16 through /32. AWS WAF supports IPv6 address ranges: /16, /24, /32, /48, /56, /64, and /128.</p> <p>To specify an individual IP address, you specify the four-part IP address followed by a <code>/32</code>, for example, 192.0.2.0/32. To block a range of IP addresses, you can specify /8 or any range from /16 through /32 (for IPv4) or /16, /24, /32, /48, /56, /64, or /128 (for IPv6). For more information about CIDR notation, see the Wikipedia entry <a href="https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing">Classless Inter-Domain Routing</a>. </p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct IPSet {
/// <p>The IP address type (<code>IPV4</code> or <code>IPV6</code>) and the IP address range (in CIDR notation) that web requests originate from. If the <code>WebACL</code> is associated with a CloudFront distribution and the viewer did not use an HTTP proxy or a load balancer to send the request, this is the value of the c-ip field in the CloudFront access logs.</p>
#[serde(rename = "IPSetDescriptors")]
pub ip_set_descriptors: Vec<IPSetDescriptor>,
/// <p>The <code>IPSetId</code> for an <code>IPSet</code>. You use <code>IPSetId</code> to get information about an <code>IPSet</code> (see <a>GetIPSet</a>), update an <code>IPSet</code> (see <a>UpdateIPSet</a>), insert an <code>IPSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete an <code>IPSet</code> from AWS WAF (see <a>DeleteIPSet</a>).</p> <p> <code>IPSetId</code> is returned by <a>CreateIPSet</a> and by <a>ListIPSets</a>.</p>
#[serde(rename = "IPSetId")]
pub ip_set_id: String,
/// <p>A friendly name or description of the <a>IPSet</a>. You can't change the name of an <code>IPSet</code> after you create it.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
}
/// <p>Specifies the IP address type (<code>IPV4</code> or <code>IPV6</code>) and the IP address range (in CIDR format) that web requests originate from.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct IPSetDescriptor {
/// <p>Specify <code>IPV4</code> or <code>IPV6</code>.</p>
#[serde(rename = "Type")]
pub type_: String,
/// <p><p>Specify an IPv4 address by using CIDR notation. For example:</p> <ul> <li> <p>To configure AWS WAF to allow, block, or count requests that originated from the IP address 192.0.2.44, specify <code>192.0.2.44/32</code>.</p> </li> <li> <p>To configure AWS WAF to allow, block, or count requests that originated from IP addresses from 192.0.2.0 to 192.0.2.255, specify <code>192.0.2.0/24</code>.</p> </li> </ul> <p>For more information about CIDR notation, see the Wikipedia entry <a href="https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing">Classless Inter-Domain Routing</a>.</p> <p>Specify an IPv6 address by using CIDR notation. For example:</p> <ul> <li> <p>To configure AWS WAF to allow, block, or count requests that originated from the IP address 1111:0000:0000:0000:0000:0000:0000:0111, specify <code>1111:0000:0000:0000:0000:0000:0000:0111/128</code>.</p> </li> <li> <p>To configure AWS WAF to allow, block, or count requests that originated from IP addresses 1111:0000:0000:0000:0000:0000:0000:0000 to 1111:0000:0000:0000:ffff:ffff:ffff:ffff, specify <code>1111:0000:0000:0000:0000:0000:0000:0000/64</code>.</p> </li> </ul></p>
#[serde(rename = "Value")]
pub value: String,
}
/// <p>Contains the identifier and the name of the <code>IPSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct IPSetSummary {
/// <p>The <code>IPSetId</code> for an <a>IPSet</a>. You can use <code>IPSetId</code> in a <a>GetIPSet</a> request to get detailed information about an <a>IPSet</a>.</p>
#[serde(rename = "IPSetId")]
pub ip_set_id: String,
/// <p>A friendly name or description of the <a>IPSet</a>. You can't change the name of an <code>IPSet</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
}
/// <p>Specifies the type of update to perform to an <a>IPSet</a> with <a>UpdateIPSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct IPSetUpdate {
/// <p>Specifies whether to insert or delete an IP address with <a>UpdateIPSet</a>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>The IP address type (<code>IPV4</code> or <code>IPV6</code>) and the IP address range (in CIDR notation) that web requests originate from.</p>
#[serde(rename = "IPSetDescriptor")]
pub ip_set_descriptor: IPSetDescriptor,
}
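// Illustrative sketch (not part of the generated API surface): an `IPSetUpdate`
// that inserts a single IPv4 address. An individual address uses a /32 suffix,
// as described on `IPSet` above; the address shown is the documentation
// example value.
#[allow(dead_code)]
fn example_ip_set_update() -> IPSetUpdate {
    IPSetUpdate {
        // "INSERT" adds the descriptor; "DELETE" removes an existing one.
        action: "INSERT".to_owned(),
        ip_set_descriptor: IPSetDescriptor {
            type_: "IPV4".to_owned(),
            // 192.0.2.44/32 matches exactly one address.
            value: "192.0.2.44/32".to_owned(),
        },
    }
}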
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListActivatedRulesInRuleGroupRequest {
/// <p>Specifies the number of <code>ActivatedRules</code> that you want AWS WAF to return for this request. If you have more <code>ActivatedRules</code> than the number that you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>ActivatedRules</code>.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>ActivatedRules</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>ActivatedRules</code>. For the second and subsequent <code>ListActivatedRulesInRuleGroup</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>ActivatedRules</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>The <code>RuleGroupId</code> of the <a>RuleGroup</a> for which you want to get a list of <a>ActivatedRule</a> objects.</p>
#[serde(rename = "RuleGroupId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule_group_id: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListActivatedRulesInRuleGroupResponse {
/// <p>An array of <code>ActivatedRules</code> objects.</p>
#[serde(rename = "ActivatedRules")]
#[serde(skip_serializing_if = "Option::is_none")]
pub activated_rules: Option<Vec<ActivatedRule>>,
/// <p>If you have more <code>ActivatedRules</code> than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>ActivatedRules</code>, submit another <code>ListActivatedRulesInRuleGroup</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListByteMatchSetsRequest {
    /// <p>Specifies the number of <code>ByteMatchSet</code> objects that you want AWS WAF to return for this request. If you have more <code>ByteMatchSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>ByteMatchSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>ByteMatchSets</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>ByteMatchSets</code>. For the second and subsequent <code>ListByteMatchSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>ByteMatchSets</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListByteMatchSetsResponse {
/// <p>An array of <a>ByteMatchSetSummary</a> objects.</p>
#[serde(rename = "ByteMatchSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub byte_match_sets: Option<Vec<ByteMatchSetSummary>>,
/// <p>If you have more <code>ByteMatchSet</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>ByteMatchSet</code> objects, submit another <code>ListByteMatchSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListGeoMatchSetsRequest {
/// <p>Specifies the number of <code>GeoMatchSet</code> objects that you want AWS WAF to return for this request. If you have more <code>GeoMatchSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>GeoMatchSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>GeoMatchSet</code>s than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>GeoMatchSet</code> objects. For the second and subsequent <code>ListGeoMatchSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>GeoMatchSet</code> objects.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListGeoMatchSetsResponse {
/// <p>An array of <a>GeoMatchSetSummary</a> objects.</p>
#[serde(rename = "GeoMatchSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub geo_match_sets: Option<Vec<GeoMatchSetSummary>>,
/// <p>If you have more <code>GeoMatchSet</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>GeoMatchSet</code> objects, submit another <code>ListGeoMatchSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListIPSetsRequest {
/// <p>Specifies the number of <code>IPSet</code> objects that you want AWS WAF to return for this request. If you have more <code>IPSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>IPSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>IPSets</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>IPSets</code>. For the second and subsequent <code>ListIPSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>IPSets</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListIPSetsResponse {
/// <p>An array of <a>IPSetSummary</a> objects.</p>
#[serde(rename = "IPSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ip_sets: Option<Vec<IPSetSummary>>,
/// <p>If you have more <code>IPSet</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>IPSet</code> objects, submit another <code>ListIPSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListLoggingConfigurationsRequest {
/// <p>Specifies the number of <code>LoggingConfigurations</code> that you want AWS WAF to return for this request. If you have more <code>LoggingConfigurations</code> than the number that you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>LoggingConfigurations</code>.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
    /// <p>If you specify a value for <code>Limit</code> and you have more <code>LoggingConfigurations</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>LoggingConfigurations</code>. For the second and subsequent <code>ListLoggingConfigurations</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>LoggingConfigurations</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListLoggingConfigurationsResponse {
/// <p>An array of <a>LoggingConfiguration</a> objects.</p>
#[serde(rename = "LoggingConfigurations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub logging_configurations: Option<Vec<LoggingConfiguration>>,
/// <p>If you have more <code>LoggingConfigurations</code> than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>LoggingConfigurations</code>, submit another <code>ListLoggingConfigurations</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListRateBasedRulesRequest {
/// <p>Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have more <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>Rules</code>. For the second and subsequent <code>ListRateBasedRules</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>Rules</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListRateBasedRulesResponse {
/// <p>If you have more <code>Rules</code> than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>Rules</code>, submit another <code>ListRateBasedRules</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>RuleSummary</a> objects.</p>
#[serde(rename = "Rules")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rules: Option<Vec<RuleSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListRegexMatchSetsRequest {
/// <p>Specifies the number of <code>RegexMatchSet</code> objects that you want AWS WAF to return for this request. If you have more <code>RegexMatchSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>RegexMatchSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
    /// <p>If you specify a value for <code>Limit</code> and you have more <code>RegexMatchSet</code> objects than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>RegexMatchSet</code> objects. For the second and subsequent <code>ListRegexMatchSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>RegexMatchSet</code> objects.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListRegexMatchSetsResponse {
/// <p>If you have more <code>RegexMatchSet</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>RegexMatchSet</code> objects, submit another <code>ListRegexMatchSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>RegexMatchSetSummary</a> objects.</p>
#[serde(rename = "RegexMatchSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_match_sets: Option<Vec<RegexMatchSetSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListRegexPatternSetsRequest {
/// <p>Specifies the number of <code>RegexPatternSet</code> objects that you want AWS WAF to return for this request. If you have more <code>RegexPatternSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>RegexPatternSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>RegexPatternSet</code> objects than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>RegexPatternSet</code> objects. For the second and subsequent <code>ListRegexPatternSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>RegexPatternSet</code> objects.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListRegexPatternSetsResponse {
/// <p>If you have more <code>RegexPatternSet</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>RegexPatternSet</code> objects, submit another <code>ListRegexPatternSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>RegexPatternSetSummary</a> objects.</p>
#[serde(rename = "RegexPatternSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_pattern_sets: Option<Vec<RegexPatternSetSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListRuleGroupsRequest {
/// <p>Specifies the number of <code>RuleGroups</code> that you want AWS WAF to return for this request. If you have more <code>RuleGroups</code> than the number that you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>RuleGroups</code>.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>RuleGroups</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>RuleGroups</code>. For the second and subsequent <code>ListRuleGroups</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>RuleGroups</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListRuleGroupsResponse {
/// <p>If you have more <code>RuleGroups</code> than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>RuleGroups</code>, submit another <code>ListRuleGroups</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>RuleGroup</a> objects.</p>
#[serde(rename = "RuleGroups")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule_groups: Option<Vec<RuleGroupSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListRulesRequest {
/// <p>Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have more <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>Rules</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListRulesResponse {
/// <p>If you have more <code>Rules</code> than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>Rules</code>, submit another <code>ListRules</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>RuleSummary</a> objects.</p>
#[serde(rename = "Rules")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rules: Option<Vec<RuleSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListSizeConstraintSetsRequest {
/// <p>Specifies the number of <code>SizeConstraintSet</code> objects that you want AWS WAF to return for this request. If you have more <code>SizeConstraintSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>SizeConstraintSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>SizeConstraintSets</code> than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>SizeConstraintSets</code>. For the second and subsequent <code>ListSizeConstraintSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>SizeConstraintSets</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListSizeConstraintSetsResponse {
/// <p>If you have more <code>SizeConstraintSet</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>SizeConstraintSet</code> objects, submit another <code>ListSizeConstraintSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>SizeConstraintSetSummary</a> objects.</p>
#[serde(rename = "SizeConstraintSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub size_constraint_sets: Option<Vec<SizeConstraintSetSummary>>,
}
/// <p>A request to list the <a>SqlInjectionMatchSet</a> objects created by the current AWS account.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListSqlInjectionMatchSetsRequest {
/// <p>Specifies the number of <a>SqlInjectionMatchSet</a> objects that you want AWS WAF to return for this request. If you have more <code>SqlInjectionMatchSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>SqlInjectionMatchSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <a>SqlInjectionMatchSet</a> objects than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>SqlInjectionMatchSets</code>. For the second and subsequent <code>ListSqlInjectionMatchSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>SqlInjectionMatchSets</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
/// <p>The response to a <a>ListSqlInjectionMatchSets</a> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListSqlInjectionMatchSetsResponse {
/// <p>If you have more <a>SqlInjectionMatchSet</a> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>SqlInjectionMatchSet</code> objects, submit another <code>ListSqlInjectionMatchSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>SqlInjectionMatchSetSummary</a> objects.</p>
#[serde(rename = "SqlInjectionMatchSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sql_injection_match_sets: Option<Vec<SqlInjectionMatchSetSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListSubscribedRuleGroupsRequest {
/// <p>Specifies the number of subscribed rule groups that you want AWS WAF to return for this request. If you have more objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more subscribed rule groups than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of subscribed rule groups. For the second and subsequent <code>ListSubscribedRuleGroups</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of subscribed rule groups.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListSubscribedRuleGroupsResponse {
/// <p>If you have more objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more objects, submit another <code>ListSubscribedRuleGroups</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>RuleGroup</a> objects.</p>
#[serde(rename = "RuleGroups")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule_groups: Option<Vec<SubscribedRuleGroupSummary>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListWebACLsRequest {
/// <p>Specifies the number of <code>WebACL</code> objects that you want AWS WAF to return for this request. If you have more <code>WebACL</code> objects than the number that you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>WebACL</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <code>WebACL</code> objects than the number that you specify for <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>WebACL</code> objects. For the second and subsequent <code>ListWebACLs</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>WebACL</code> objects.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListWebACLsResponse {
/// <p>If you have more <code>WebACL</code> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>WebACL</code> objects, submit another <code>ListWebACLs</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>WebACLSummary</a> objects.</p>
#[serde(rename = "WebACLs")]
#[serde(skip_serializing_if = "Option::is_none")]
pub web_ac_ls: Option<Vec<WebACLSummary>>,
}
/// <p>A request to list the <a>XssMatchSet</a> objects created by the current AWS account.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListXssMatchSetsRequest {
/// <p>Specifies the number of <a>XssMatchSet</a> objects that you want AWS WAF to return for this request. If you have more <code>XssMatchSet</code> objects than the number you specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value that you can use to get another batch of <code>XssMatchSet</code> objects.</p>
#[serde(rename = "Limit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limit: Option<i64>,
/// <p>If you specify a value for <code>Limit</code> and you have more <a>XssMatchSet</a> objects than the value of <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list another group of <code>XssMatchSets</code>. For the second and subsequent <code>ListXssMatchSets</code> requests, specify the value of <code>NextMarker</code> from the previous response to get information about another batch of <code>XssMatchSets</code>.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
}
/// <p>The response to a <a>ListXssMatchSets</a> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListXssMatchSetsResponse {
/// <p>If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in the request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code> objects, submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code> value from the response in the <code>NextMarker</code> value in the next request.</p>
#[serde(rename = "NextMarker")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_marker: Option<String>,
/// <p>An array of <a>XssMatchSetSummary</a> objects.</p>
#[serde(rename = "XssMatchSets")]
#[serde(skip_serializing_if = "Option::is_none")]
pub xss_match_sets: Option<Vec<XssMatchSetSummary>>,
}
/// <p>The Amazon Kinesis Data Firehose, <code>RedactedFields</code> information, and the web ACL Amazon Resource Name (ARN).</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct LoggingConfiguration {
/// <p>An array of Amazon Kinesis Data Firehose ARNs.</p>
#[serde(rename = "LogDestinationConfigs")]
pub log_destination_configs: Vec<String>,
/// <p>The parts of the request that you want redacted from the logs. For example, if you redact the cookie field, the cookie field in the firehose will be <code>xxx</code>. </p>
#[serde(rename = "RedactedFields")]
#[serde(skip_serializing_if = "Option::is_none")]
pub redacted_fields: Option<Vec<FieldToMatch>>,
/// <p>The Amazon Resource Name (ARN) of the web ACL that you want to associate with <code>LogDestinationConfigs</code>.</p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
/// <p>Specifies the <a>ByteMatchSet</a>, <a>IPSet</a>, <a>SqlInjectionMatchSet</a>, <a>XssMatchSet</a>, <a>RegexMatchSet</a>, <a>GeoMatchSet</a>, and <a>SizeConstraintSet</a> objects that you want to add to a <code>Rule</code> and, for each object, indicates whether you want to negate the settings, for example, requests that do NOT originate from the IP address 192.0.2.44. </p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Predicate {
/// <p>A unique identifier for a predicate in a <code>Rule</code>, such as <code>ByteMatchSetId</code> or <code>IPSetId</code>. The ID is returned by the corresponding <code>Create</code> or <code>List</code> command.</p>
#[serde(rename = "DataId")]
pub data_id: String,
/// <p>Set <code>Negated</code> to <code>False</code> if you want AWS WAF to allow, block, or count requests based on the settings in the specified <a>ByteMatchSet</a>, <a>IPSet</a>, <a>SqlInjectionMatchSet</a>, <a>XssMatchSet</a>, <a>RegexMatchSet</a>, <a>GeoMatchSet</a>, or <a>SizeConstraintSet</a>. For example, if an <code>IPSet</code> includes the IP address <code>192.0.2.44</code>, AWS WAF will allow or block requests based on that IP address.</p> <p>Set <code>Negated</code> to <code>True</code> if you want AWS WAF to allow or block a request based on the negation of the settings in the <a>ByteMatchSet</a>, <a>IPSet</a>, <a>SqlInjectionMatchSet</a>, <a>XssMatchSet</a>, <a>RegexMatchSet</a>, <a>GeoMatchSet</a>, or <a>SizeConstraintSet</a>. For example, if an <code>IPSet</code> includes the IP address <code>192.0.2.44</code>, AWS WAF will allow, block, or count requests based on all IP addresses <i>except</i> <code>192.0.2.44</code>.</p>
#[serde(rename = "Negated")]
pub negated: bool,
/// <p>The type of predicate in a <code>Rule</code>, such as <code>ByteMatch</code> or <code>IPSet</code>.</p>
#[serde(rename = "Type")]
pub type_: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PutLoggingConfigurationRequest {
/// <p>The Amazon Kinesis Data Firehose that contains the inspected traffic information, the redacted fields details, and the Amazon Resource Name (ARN) of the web ACL to monitor.</p>
#[serde(rename = "LoggingConfiguration")]
pub logging_configuration: LoggingConfiguration,
}
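// Illustrative sketch (not generated): a PutLoggingConfiguration request that wires a
// web ACL to a Kinesis Data Firehose delivery stream. Both ARNs are caller-supplied
// placeholders; redacted_fields may optionally list FieldToMatch entries to mask.
#[allow(dead_code)]
fn example_put_logging_request(
    web_acl_arn: String,
    firehose_arn: String,
) -> PutLoggingConfigurationRequest {
    PutLoggingConfigurationRequest {
        logging_configuration: LoggingConfiguration {
            log_destination_configs: vec![firehose_arn],
            redacted_fields: None, // e.g. Some(vec![...]) to redact cookies from the logs
            resource_arn: web_acl_arn,
        },
    }
}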
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct PutLoggingConfigurationResponse {
/// <p>The <a>LoggingConfiguration</a> that you submitted in the request.</p>
#[serde(rename = "LoggingConfiguration")]
#[serde(skip_serializing_if = "Option::is_none")]
pub logging_configuration: Option<LoggingConfiguration>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PutPermissionPolicyRequest {
/// <p>The policy to attach to the specified RuleGroup.</p>
#[serde(rename = "Policy")]
pub policy: String,
/// <p>The Amazon Resource Name (ARN) of the RuleGroup to which you want to attach the policy.</p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct PutPermissionPolicyResponse {}
/// <p>A <code>RateBasedRule</code> is identical to a regular <a>Rule</a>, with one addition: a <code>RateBasedRule</code> counts the number of requests that arrive from a specified IP address every five minutes. For example, based on recent requests that you've seen from an attacker, you might create a <code>RateBasedRule</code> that includes the following conditions: </p> <ul> <li> <p>The requests come from 192.0.2.44.</p> </li> <li> <p>They contain the value <code>BadBot</code> in the <code>User-Agent</code> header.</p> </li> </ul> <p>In the rule, you also define the rate limit as 15,000.</p> <p>Requests that meet both of these conditions and exceed 15,000 requests every five minutes trigger the rule's action (block or count), which is defined in the web ACL.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RateBasedRule {
/// <p>The <code>Predicates</code> object contains one <code>Predicate</code> element for each <a>ByteMatchSet</a>, <a>IPSet</a>, or <a>SqlInjectionMatchSet</a> object that you want to include in a <code>RateBasedRule</code>.</p>
#[serde(rename = "MatchPredicates")]
pub match_predicates: Vec<Predicate>,
/// <p>A friendly name or description for the metrics for a <code>RateBasedRule</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change the name of the metric after you create the <code>RateBasedRule</code>.</p>
#[serde(rename = "MetricName")]
#[serde(skip_serializing_if = "Option::is_none")]
pub metric_name: Option<String>,
/// <p>A friendly name or description for a <code>RateBasedRule</code>. You can't change the name of a <code>RateBasedRule</code> after you create it.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The field that AWS WAF uses to determine if requests are likely arriving from a single source and thus subject to rate monitoring. The only valid value for <code>RateKey</code> is <code>IP</code>. <code>IP</code> indicates that requests arriving from the same IP address are subject to the <code>RateLimit</code> that is specified in the <code>RateBasedRule</code>.</p>
#[serde(rename = "RateKey")]
pub rate_key: String,
/// <p>The maximum number of requests, which have an identical value in the field specified by the <code>RateKey</code>, allowed in a five-minute period. If the number of requests exceeds the <code>RateLimit</code> and the other predicates specified in the rule are also met, AWS WAF triggers the action that is specified for this rule.</p>
#[serde(rename = "RateLimit")]
pub rate_limit: i64,
/// <p>A unique identifier for a <code>RateBasedRule</code>. You use <code>RuleId</code> to get more information about a <code>RateBasedRule</code> (see <a>GetRateBasedRule</a>), update a <code>RateBasedRule</code> (see <a>UpdateRateBasedRule</a>), insert a <code>RateBasedRule</code> into a <code>WebACL</code> or delete one from a <code>WebACL</code> (see <a>UpdateWebACL</a>), or delete a <code>RateBasedRule</code> from AWS WAF (see <a>DeleteRateBasedRule</a>).</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
/// <p>In a <a>GetRegexMatchSet</a> request, <code>RegexMatchSet</code> is a complex type that contains the <code>RegexMatchSetId</code> and <code>Name</code> of a <code>RegexMatchSet</code>, and the values that you specified when you updated the <code>RegexMatchSet</code>.</p> <p> The values are contained in a <code>RegexMatchTuple</code> object, which specifies the parts of web requests that you want AWS WAF to inspect and the values that you want AWS WAF to search for. If a <code>RegexMatchSet</code> contains more than one <code>RegexMatchTuple</code> object, a request needs to match the settings in only one <code>RegexMatchTuple</code> to be considered a match.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RegexMatchSet {
/// <p>A friendly name or description of the <a>RegexMatchSet</a>. You can't change <code>Name</code> after you create a <code>RegexMatchSet</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The <code>RegexMatchSetId</code> for a <code>RegexMatchSet</code>. You use <code>RegexMatchSetId</code> to get information about a <code>RegexMatchSet</code> (see <a>GetRegexMatchSet</a>), update a <code>RegexMatchSet</code> (see <a>UpdateRegexMatchSet</a>), insert a <code>RegexMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>RegexMatchSet</code> from AWS WAF (see <a>DeleteRegexMatchSet</a>).</p> <p> <code>RegexMatchSetId</code> is returned by <a>CreateRegexMatchSet</a> and by <a>ListRegexMatchSets</a>.</p>
#[serde(rename = "RegexMatchSetId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_match_set_id: Option<String>,
/// <p><p>Contains an array of <a>RegexMatchTuple</a> objects. Each <code>RegexMatchTuple</code> object contains: </p> <ul> <li> <p>The part of a web request that you want AWS WAF to inspect, such as a query string or the value of the <code>User-Agent</code> header. </p> </li> <li> <p>The identifier of the pattern (a regular expression) that you want AWS WAF to look for. For more information, see <a>RegexPatternSet</a>.</p> </li> <li> <p>Whether to perform any conversions on the request, such as converting it to lowercase, before inspecting it for the specified string.</p> </li> </ul></p>
#[serde(rename = "RegexMatchTuples")]
#[serde(skip_serializing_if = "Option::is_none")]
pub regex_match_tuples: Option<Vec<RegexMatchTuple>>,
}
/// <p>Returned by <a>ListRegexMatchSets</a>. Each <code>RegexMatchSetSummary</code> object includes the <code>Name</code> and <code>RegexMatchSetId</code> for one <a>RegexMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RegexMatchSetSummary {
/// <p>A friendly name or description of the <a>RegexMatchSet</a>. You can't change <code>Name</code> after you create a <code>RegexMatchSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>The <code>RegexMatchSetId</code> for a <code>RegexMatchSet</code>. You use <code>RegexMatchSetId</code> to get information about a <code>RegexMatchSet</code>, update a <code>RegexMatchSet</code>, remove a <code>RegexMatchSet</code> from a <code>Rule</code>, and delete a <code>RegexMatchSet</code> from AWS WAF.</p> <p> <code>RegexMatchSetId</code> is returned by <a>CreateRegexMatchSet</a> and by <a>ListRegexMatchSets</a>.</p>
#[serde(rename = "RegexMatchSetId")]
pub regex_match_set_id: String,
}
/// <p>In an <a>UpdateRegexMatchSet</a> request, <code>RegexMatchSetUpdate</code> specifies whether to insert or delete a <a>RegexMatchTuple</a> and includes the settings for the <code>RegexMatchTuple</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct RegexMatchSetUpdate {
/// <p>Specifies whether to insert or delete a <a>RegexMatchTuple</a>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>Information about the part of a web request that you want AWS WAF to inspect and the identifier of the regular expression (regex) pattern that you want AWS WAF to search for. If you specify <code>DELETE</code> for the value of <code>Action</code>, the <code>RegexMatchTuple</code> values must exactly match the values in the <code>RegexMatchTuple</code> that you want to delete from the <code>RegexMatchSet</code>.</p>
#[serde(rename = "RegexMatchTuple")]
pub regex_match_tuple: RegexMatchTuple,
}
/// <p><p>The regular expression pattern that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings. Each <code>RegexMatchTuple</code> object contains: </p> <ul> <li> <p>The part of a web request that you want AWS WAF to inspect, such as a query string or the value of the <code>User-Agent</code> header. </p> </li> <li> <p>The identifier of the pattern (a regular expression) that you want AWS WAF to look for. For more information, see <a>RegexPatternSet</a>. </p> </li> <li> <p>Whether to perform any conversions on the request, such as converting it to lowercase, before inspecting it for the specified string.</p> </li> </ul></p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RegexMatchTuple {
/// <p>Specifies where in a web request to look for the <code>RegexPatternSet</code>.</p>
#[serde(rename = "FieldToMatch")]
pub field_to_match: FieldToMatch,
/// <p>The <code>RegexPatternSetId</code> for a <code>RegexPatternSet</code>. You use <code>RegexPatternSetId</code> to get information about a <code>RegexPatternSet</code> (see <a>GetRegexPatternSet</a>), update a <code>RegexPatternSet</code> (see <a>UpdateRegexPatternSet</a>), insert a <code>RegexPatternSet</code> into a <code>RegexMatchSet</code> or delete one from a <code>RegexMatchSet</code> (see <a>UpdateRegexMatchSet</a>), and delete an <code>RegexPatternSet</code> from AWS WAF (see <a>DeleteRegexPatternSet</a>).</p> <p> <code>RegexPatternSetId</code> is returned by <a>CreateRegexPatternSet</a> and by <a>ListRegexPatternSets</a>.</p>
#[serde(rename = "RegexPatternSetId")]
pub regex_pattern_set_id: String,
/// <p>Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. If you specify a transformation, AWS WAF performs the transformation on <code>RegexPatternSet</code> before inspecting a request for a match.</p> <p>You can only specify a single type of TextTransformation.</p> <p> <b>CMD_LINE</b> </p> <p>When you're concerned that attackers are injecting an operating system commandline command and using unusual formatting to disguise some or all of the command, use this option to perform the following transformations:</p> <ul> <li> <p>Delete the following characters: \ " ' ^</p> </li> <li> <p>Delete spaces before the following characters: / (</p> </li> <li> <p>Replace the following characters with a space: , ;</p> </li> <li> <p>Replace multiple spaces with one space</p> </li> <li> <p>Convert uppercase letters (A-Z) to lowercase (a-z)</p> </li> </ul> <p> <b>COMPRESS_WHITE_SPACE</b> </p> <p>Use this option to replace the following characters with a space character (decimal 32):</p> <ul> <li> <p>\f, formfeed, decimal 12</p> </li> <li> <p>\t, tab, decimal 9</p> </li> <li> <p>\n, newline, decimal 10</p> </li> <li> <p>\r, carriage return, decimal 13</p> </li> <li> <p>\v, vertical tab, decimal 11</p> </li> <li> <p>non-breaking space, decimal 160</p> </li> </ul> <p> <code>COMPRESS_WHITE_SPACE</code> also replaces multiple spaces with one space.</p> <p> <b>HTML_ENTITY_DECODE</b> </p> <p>Use this option to replace HTML-encoded characters with unencoded characters. <code>HTML_ENTITY_DECODE</code> performs the following operations:</p> <ul> <li> <p>Replaces <code>(ampersand)quot;</code> with <code>"</code> </p> </li> <li> <p>Replaces <code>(ampersand)nbsp;</code> with a non-breaking space, decimal 160</p> </li> <li> <p>Replaces <code>(ampersand)lt;</code> with a "less than" symbol</p> </li> <li> <p>Replaces <code>(ampersand)gt;</code> with <code>></code> </p> </li> <li> <p>Replaces characters that are represented in hexadecimal format, <code>(ampersand)#xhhhh;</code>, with the corresponding characters</p> </li> <li> <p>Replaces characters that are represented in decimal format, <code>(ampersand)#nnnn;</code>, with the corresponding characters</p> </li> </ul> <p> <b>LOWERCASE</b> </p> <p>Use this option to convert uppercase letters (A-Z) to lowercase (a-z).</p> <p> <b>URL_DECODE</b> </p> <p>Use this option to decode a URL-encoded value.</p> <p> <b>NONE</b> </p> <p>Specify <code>NONE</code> if you don't want to perform any text transformations.</p>
#[serde(rename = "TextTransformation")]
pub text_transformation: String,
}
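// Illustrative sketch (not generated): a RegexMatchTuple that points at an existing
// RegexPatternSet and lowercases the inspected value before matching. FieldToMatch is
// defined earlier in this module; it is assumed here to derive Default like the other
// shapes, and its Type/Data would be filled in before real use.
#[allow(dead_code)]
fn example_regex_match_tuple(regex_pattern_set_id: String) -> RegexMatchTuple {
    RegexMatchTuple {
        field_to_match: FieldToMatch::default(), // e.g. the URI or a named header
        regex_pattern_set_id,
        text_transformation: "LOWERCASE".to_string(),
    }
}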
/// <p>The <code>RegexPatternSet</code> specifies the regular expression (regex) pattern that you want AWS WAF to search for, such as <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RegexPatternSet {
/// <p>A friendly name or description of the <a>RegexPatternSet</a>. You can't change <code>Name</code> after you create a <code>RegexPatternSet</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The identifier for the <code>RegexPatternSet</code>. You use <code>RegexPatternSetId</code> to get information about a <code>RegexPatternSet</code>, update a <code>RegexPatternSet</code>, remove a <code>RegexPatternSet</code> from a <code>RegexMatchSet</code>, and delete a <code>RegexPatternSet</code> from AWS WAF.</p> <p> <code>RegexMatchSetId</code> is returned by <a>CreateRegexPatternSet</a> and by <a>ListRegexPatternSets</a>.</p>
#[serde(rename = "RegexPatternSetId")]
pub regex_pattern_set_id: String,
/// <p>Specifies the regular expression (regex) patterns that you want AWS WAF to search for, such as <code>B[a@]dB[o0]t</code>.</p>
#[serde(rename = "RegexPatternStrings")]
pub regex_pattern_strings: Vec<String>,
}
/// <p>Returned by <a>ListRegexPatternSets</a>. Each <code>RegexPatternSetSummary</code> object includes the <code>Name</code> and <code>RegexPatternSetId</code> for one <a>RegexPatternSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RegexPatternSetSummary {
/// <p>A friendly name or description of the <a>RegexPatternSet</a>. You can't change <code>Name</code> after you create a <code>RegexPatternSet</code>.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>The <code>RegexPatternSetId</code> for a <code>RegexPatternSet</code>. You use <code>RegexPatternSetId</code> to get information about a <code>RegexPatternSet</code>, update a <code>RegexPatternSet</code>, remove a <code>RegexPatternSet</code> from a <code>RegexMatchSet</code>, and delete a <code>RegexPatternSet</code> from AWS WAF.</p> <p> <code>RegexPatternSetId</code> is returned by <a>CreateRegexPatternSet</a> and by <a>ListRegexPatternSets</a>.</p>
#[serde(rename = "RegexPatternSetId")]
pub regex_pattern_set_id: String,
}
/// <p>In an <a>UpdateRegexPatternSet</a> request, <code>RegexPatternSetUpdate</code> specifies whether to insert or delete a <code>RegexPatternString</code> and includes the settings for the <code>RegexPatternString</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct RegexPatternSetUpdate {
/// <p>Specifies whether to insert or delete a <code>RegexPatternString</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>Specifies the regular expression (regex) pattern that you want AWS WAF to search for, such as <code>B[a@]dB[o0]t</code>.</p>
#[serde(rename = "RegexPatternString")]
pub regex_pattern_string: String,
}
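// Illustrative sketch (not generated): an UpdateRegexPatternSet update that inserts
// one pattern string, such as the B[a@]dB[o0]t example from the docs above.
#[allow(dead_code)]
fn example_insert_pattern(pattern: String) -> RegexPatternSetUpdate {
    RegexPatternSetUpdate {
        action: "INSERT".to_string(),
        regex_pattern_string: pattern,
    }
}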
/// <p>A combination of <a>ByteMatchSet</a>, <a>IPSet</a>, and/or <a>SqlInjectionMatchSet</a> objects that identify the web requests that you want to allow, block, or count. For example, you might create a <code>Rule</code> that includes the following predicates:</p> <ul> <li> <p>An <code>IPSet</code> that causes AWS WAF to search for web requests that originate from the IP address <code>192.0.2.44</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that causes AWS WAF to search for web requests for which the value of the <code>User-Agent</code> header is <code>BadBot</code>.</p> </li> </ul> <p>To match the settings in this <code>Rule</code>, a request must originate from <code>192.0.2.44</code> AND include a <code>User-Agent</code> header for which the value is <code>BadBot</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Rule {
/// <p>A friendly name or description for the metrics for this <code>Rule</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change <code>MetricName</code> after you create the <code>Rule</code>.</p>
#[serde(rename = "MetricName")]
#[serde(skip_serializing_if = "Option::is_none")]
pub metric_name: Option<String>,
/// <p>The friendly name or description for the <code>Rule</code>. You can't change the name of a <code>Rule</code> after you create it.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The <code>Predicates</code> object contains one <code>Predicate</code> element for each <a>ByteMatchSet</a>, <a>IPSet</a>, or <a>SqlInjectionMatchSet</a> object that you want to include in a <code>Rule</code>.</p>
#[serde(rename = "Predicates")]
pub predicates: Vec<Predicate>,
/// <p>A unique identifier for a <code>Rule</code>. You use <code>RuleId</code> to get more information about a <code>Rule</code> (see <a>GetRule</a>), update a <code>Rule</code> (see <a>UpdateRule</a>), insert a <code>Rule</code> into a <code>WebACL</code> or delete one from a <code>WebACL</code> (see <a>UpdateWebACL</a>), or delete a <code>Rule</code> from AWS WAF (see <a>DeleteRule</a>).</p> <p> <code>RuleId</code> is returned by <a>CreateRule</a> and by <a>ListRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
/// <p><p>A collection of predefined rules that you can add to a web ACL.</p> <p>Rule groups are subject to the following limits:</p> <ul> <li> <p>Three rule groups per account. You can request an increase to this limit by contacting customer support.</p> </li> <li> <p>One rule group per web ACL.</p> </li> <li> <p>Ten rules per rule group.</p> </li> </ul></p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RuleGroup {
/// <p>A friendly name or description for the metrics for this <code>RuleGroup</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change the name of the metric after you create the <code>RuleGroup</code>.</p>
#[serde(rename = "MetricName")]
#[serde(skip_serializing_if = "Option::is_none")]
pub metric_name: Option<String>,
/// <p>The friendly name or description for the <code>RuleGroup</code>. You can't change the name of a <code>RuleGroup</code> after you create it.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>A unique identifier for a <code>RuleGroup</code>. You use <code>RuleGroupId</code> to get more information about a <code>RuleGroup</code> (see <a>GetRuleGroup</a>), update a <code>RuleGroup</code> (see <a>UpdateRuleGroup</a>), insert a <code>RuleGroup</code> into a <code>WebACL</code> or delete one from a <code>WebACL</code> (see <a>UpdateWebACL</a>), or delete a <code>RuleGroup</code> from AWS WAF (see <a>DeleteRuleGroup</a>).</p> <p> <code>RuleGroupId</code> is returned by <a>CreateRuleGroup</a> and by <a>ListRuleGroups</a>.</p>
#[serde(rename = "RuleGroupId")]
pub rule_group_id: String,
}
/// <p>Contains the identifier and the friendly name or description of the <code>RuleGroup</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RuleGroupSummary {
/// <p>A friendly name or description of the <a>RuleGroup</a>. You can't change the name of a <code>RuleGroup</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>A unique identifier for a <code>RuleGroup</code>. You use <code>RuleGroupId</code> to get more information about a <code>RuleGroup</code> (see <a>GetRuleGroup</a>), update a <code>RuleGroup</code> (see <a>UpdateRuleGroup</a>), insert a <code>RuleGroup</code> into a <code>WebACL</code> or delete one from a <code>WebACL</code> (see <a>UpdateWebACL</a>), or delete a <code>RuleGroup</code> from AWS WAF (see <a>DeleteRuleGroup</a>).</p> <p> <code>RuleGroupId</code> is returned by <a>CreateRuleGroup</a> and by <a>ListRuleGroups</a>.</p>
#[serde(rename = "RuleGroupId")]
pub rule_group_id: String,
}
/// <p>Specifies an <code>ActivatedRule</code> and indicates whether you want to add it to a <code>RuleGroup</code> or delete it from a <code>RuleGroup</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct RuleGroupUpdate {
/// <p>Specify <code>INSERT</code> to add an <code>ActivatedRule</code> to a <code>RuleGroup</code>. Use <code>DELETE</code> to remove an <code>ActivatedRule</code> from a <code>RuleGroup</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>The <code>ActivatedRule</code> object specifies a <code>Rule</code> that you want to insert or delete, the priority of the <code>Rule</code> in the <code>WebACL</code>, and the action that you want AWS WAF to take when a web request matches the <code>Rule</code> (<code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>).</p>
#[serde(rename = "ActivatedRule")]
pub activated_rule: ActivatedRule,
}
/// <p>Contains the identifier and the friendly name or description of the <code>Rule</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct RuleSummary {
/// <p>A friendly name or description of the <a>Rule</a>. You can't change the name of a <code>Rule</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>A unique identifier for a <code>Rule</code>. You use <code>RuleId</code> to get more information about a <code>Rule</code> (see <a>GetRule</a>), update a <code>Rule</code> (see <a>UpdateRule</a>), insert a <code>Rule</code> into a <code>WebACL</code> or delete one from a <code>WebACL</code> (see <a>UpdateWebACL</a>), or delete a <code>Rule</code> from AWS WAF (see <a>DeleteRule</a>).</p> <p> <code>RuleId</code> is returned by <a>CreateRule</a> and by <a>ListRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
}
/// <p>Specifies a <code>Predicate</code> (such as an <code>IPSet</code>) and indicates whether you want to add it to a <code>Rule</code> or delete it from a <code>Rule</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct RuleUpdate {
/// <p>Specify <code>INSERT</code> to add a <code>Predicate</code> to a <code>Rule</code>. Use <code>DELETE</code> to remove a <code>Predicate</code> from a <code>Rule</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>The ID of the <code>Predicate</code> (such as an <code>IPSet</code>) that you want to add to a <code>Rule</code>.</p>
#[serde(rename = "Predicate")]
pub predicate: Predicate,
}
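// Illustrative sketch (not generated): a RuleUpdate that inserts an IPSet predicate
// into a Rule. With negated set to true the Rule matches requests that do NOT
// originate from the addresses in the referenced IPSet; the id comes from
// CreateIPSet / ListIPSets.
#[allow(dead_code)]
fn example_insert_ip_predicate(ip_set_id: String) -> RuleUpdate {
    RuleUpdate {
        action: "INSERT".to_string(),
        predicate: Predicate {
            data_id: ip_set_id,
            negated: true,
            type_: "IPMatch".to_string(),
        },
    }
}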
/// <p>The response from a <a>GetSampledRequests</a> request includes a <code>SampledHTTPRequests</code> complex type that appears as <code>SampledRequests</code> in the response syntax. <code>SampledHTTPRequests</code> contains one <code>SampledHTTPRequest</code> object for each web request that is returned by <code>GetSampledRequests</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SampledHTTPRequest {
/// <p>The action for the <code>Rule</code> that the request matched: <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p>
#[serde(rename = "Action")]
#[serde(skip_serializing_if = "Option::is_none")]
pub action: Option<String>,
/// <p>A complex type that contains detailed information about the request.</p>
#[serde(rename = "Request")]
pub request: HTTPRequest,
/// <p>This value is returned if the <code>GetSampledRequests</code> request specifies the ID of a <code>RuleGroup</code> rather than the ID of an individual rule. <code>RuleWithinRuleGroup</code> is the rule within the specified <code>RuleGroup</code> that matched the request listed in the response.</p>
#[serde(rename = "RuleWithinRuleGroup")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rule_within_rule_group: Option<String>,
/// <p>The time at which AWS WAF received the request from your AWS resource, in Unix time format (in seconds).</p>
#[serde(rename = "Timestamp")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timestamp: Option<f64>,
/// <p>A value that indicates how one result in the response relates proportionally to other results in the response. A result that has a weight of <code>2</code> represents roughly twice as many CloudFront web requests as a result that has a weight of <code>1</code>.</p>
#[serde(rename = "Weight")]
pub weight: i64,
}
/// <p>Specifies a constraint on the size of a part of the web request. AWS WAF uses the <code>Size</code>, <code>ComparisonOperator</code>, and <code>FieldToMatch</code> to build an expression in the form of "<code>Size</code> <code>ComparisonOperator</code> size in bytes of <code>FieldToMatch</code>". If that expression is true, the <code>SizeConstraint</code> is considered to match.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SizeConstraint {
/// <p>The type of comparison you want AWS WAF to perform. AWS WAF uses this in combination with the provided <code>Size</code> and <code>FieldToMatch</code> to build an expression in the form of "<code>Size</code> <code>ComparisonOperator</code> size in bytes of <code>FieldToMatch</code>". If that expression is true, the <code>SizeConstraint</code> is considered to match.</p> <p> <b>EQ</b>: Used to test if the <code>Size</code> is equal to the size of the <code>FieldToMatch</code> </p> <p> <b>NE</b>: Used to test if the <code>Size</code> is not equal to the size of the <code>FieldToMatch</code> </p> <p> <b>LE</b>: Used to test if the <code>Size</code> is less than or equal to the size of the <code>FieldToMatch</code> </p> <p> <b>LT</b>: Used to test if the <code>Size</code> is strictly less than the size of the <code>FieldToMatch</code> </p> <p> <b>GE</b>: Used to test if the <code>Size</code> is greater than or equal to the size of the <code>FieldToMatch</code> </p> <p> <b>GT</b>: Used to test if the <code>Size</code> is strictly greater than the size of the <code>FieldToMatch</code> </p>
#[serde(rename = "ComparisonOperator")]
pub comparison_operator: String,
/// <p>Specifies where in a web request to look for the size constraint.</p>
#[serde(rename = "FieldToMatch")]
pub field_to_match: FieldToMatch,
/// <p>The size in bytes that you want AWS WAF to compare against the size of the specified <code>FieldToMatch</code>. AWS WAF uses this in combination with <code>ComparisonOperator</code> and <code>FieldToMatch</code> to build an expression in the form of "<code>Size</code> <code>ComparisonOperator</code> size in bytes of <code>FieldToMatch</code>". If that expression is true, the <code>SizeConstraint</code> is considered to match.</p> <p>Valid values for size are 0 - 21474836480 bytes (0 - 20 GB).</p> <p>If you specify <code>URI</code> for the value of <code>Type</code>, the / in the URI counts as one character. For example, the URI <code>/logo.jpg</code> is nine characters long.</p>
#[serde(rename = "Size")]
pub size: i64,
/// <p>Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. If you specify a transformation, AWS WAF performs the transformation on <code>FieldToMatch</code> before inspecting a request for a match.</p> <p>You can only specify a single type of TextTransformation.</p> <p>Note that if you choose <code>BODY</code> for the value of <code>Type</code>, you must choose <code>NONE</code> for <code>TextTransformation</code> because CloudFront forwards only the first 8192 bytes for inspection. </p> <p> <b>NONE</b> </p> <p>Specify <code>NONE</code> if you don't want to perform any text transformations.</p> <p> <b>CMD_LINE</b> </p> <p>When you're concerned that attackers are injecting an operating system command line command and using unusual formatting to disguise some or all of the command, use this option to perform the following transformations:</p> <ul> <li> <p>Delete the following characters: \ " ' ^</p> </li> <li> <p>Delete spaces before the following characters: / (</p> </li> <li> <p>Replace the following characters with a space: , ;</p> </li> <li> <p>Replace multiple spaces with one space</p> </li> <li> <p>Convert uppercase letters (A-Z) to lowercase (a-z)</p> </li> </ul> <p> <b>COMPRESS_WHITE_SPACE</b> </p> <p>Use this option to replace the following characters with a space character (decimal 32):</p> <ul> <li> <p>\f, formfeed, decimal 12</p> </li> <li> <p>\t, tab, decimal 9</p> </li> <li> <p>\n, newline, decimal 10</p> </li> <li> <p>\r, carriage return, decimal 13</p> </li> <li> <p>\v, vertical tab, decimal 11</p> </li> <li> <p>non-breaking space, decimal 160</p> </li> </ul> <p> <code>COMPRESS_WHITE_SPACE</code> also replaces multiple spaces with one space.</p> <p> <b>HTML_ENTITY_DECODE</b> </p> <p>Use this option to replace HTML-encoded characters with unencoded characters. <code>HTML_ENTITY_DECODE</code> performs the following operations:</p> <ul> <li> <p>Replaces <code>(ampersand)quot;</code> with <code>"</code> </p> </li> <li> <p>Replaces <code>(ampersand)nbsp;</code> with a non-breaking space, decimal 160</p> </li> <li> <p>Replaces <code>(ampersand)lt;</code> with a "less than" symbol</p> </li> <li> <p>Replaces <code>(ampersand)gt;</code> with <code>></code> </p> </li> <li> <p>Replaces characters that are represented in hexadecimal format, <code>(ampersand)#xhhhh;</code>, with the corresponding characters</p> </li> <li> <p>Replaces characters that are represented in decimal format, <code>(ampersand)#nnnn;</code>, with the corresponding characters</p> </li> </ul> <p> <b>LOWERCASE</b> </p> <p>Use this option to convert uppercase letters (A-Z) to lowercase (a-z).</p> <p> <b>URL_DECODE</b> </p> <p>Use this option to decode a URL-encoded value.</p>
#[serde(rename = "TextTransformation")]
pub text_transformation: String,
}
/// <p>A complex type that contains <code>SizeConstraint</code> objects, which specify the parts of web requests that you want AWS WAF to inspect the size of. If a <code>SizeConstraintSet</code> contains more than one <code>SizeConstraint</code> object, a request only needs to match one constraint to be considered a match.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SizeConstraintSet {
/// <p>The name, if any, of the <code>SizeConstraintSet</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>A unique identifier for a <code>SizeConstraintSet</code>. You use <code>SizeConstraintSetId</code> to get information about a <code>SizeConstraintSet</code> (see <a>GetSizeConstraintSet</a>), update a <code>SizeConstraintSet</code> (see <a>UpdateSizeConstraintSet</a>), insert a <code>SizeConstraintSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>SizeConstraintSet</code> from AWS WAF (see <a>DeleteSizeConstraintSet</a>).</p> <p> <code>SizeConstraintSetId</code> is returned by <a>CreateSizeConstraintSet</a> and by <a>ListSizeConstraintSets</a>.</p>
#[serde(rename = "SizeConstraintSetId")]
pub size_constraint_set_id: String,
/// <p>Specifies the parts of web requests that you want to inspect the size of.</p>
#[serde(rename = "SizeConstraints")]
pub size_constraints: Vec<SizeConstraint>,
}
/// <p>The <code>Id</code> and <code>Name</code> of a <code>SizeConstraintSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SizeConstraintSetSummary {
/// <p>The name of the <code>SizeConstraintSet</code>, if any.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>A unique identifier for a <code>SizeConstraintSet</code>. You use <code>SizeConstraintSetId</code> to get information about a <code>SizeConstraintSet</code> (see <a>GetSizeConstraintSet</a>), update a <code>SizeConstraintSet</code> (see <a>UpdateSizeConstraintSet</a>), insert a <code>SizeConstraintSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>SizeConstraintSet</code> from AWS WAF (see <a>DeleteSizeConstraintSet</a>).</p> <p> <code>SizeConstraintSetId</code> is returned by <a>CreateSizeConstraintSet</a> and by <a>ListSizeConstraintSets</a>.</p>
#[serde(rename = "SizeConstraintSetId")]
pub size_constraint_set_id: String,
}
/// <p>Specifies the part of a web request that you want to inspect the size of and indicates whether you want to add the specification to a <a>SizeConstraintSet</a> or delete it from a <code>SizeConstraintSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct SizeConstraintSetUpdate {
/// <p>Specify <code>INSERT</code> to add a <a>SizeConstraintSetUpdate</a> to a <a>SizeConstraintSet</a>. Use <code>DELETE</code> to remove a <code>SizeConstraintSetUpdate</code> from a <code>SizeConstraintSet</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>Specifies a constraint on the size of a part of the web request. AWS WAF uses the <code>Size</code>, <code>ComparisonOperator</code>, and <code>FieldToMatch</code> to build an expression in the form of "<code>Size</code> <code>ComparisonOperator</code> size in bytes of <code>FieldToMatch</code>". If that expression is true, the <code>SizeConstraint</code> is considered to match.</p>
#[serde(rename = "SizeConstraint")]
pub size_constraint: SizeConstraint,
}
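// Illustrative sketch (not generated): an update that adds a "request body larger
// than 8192 bytes" constraint to a SizeConstraintSet. NONE is used for the text
// transformation because, per the SizeConstraint docs above, BODY inspection requires
// it. FieldToMatch is assumed to derive Default; its Type would be set to BODY.
#[allow(dead_code)]
fn example_body_size_limit() -> SizeConstraintSetUpdate {
    SizeConstraintSetUpdate {
        action: "INSERT".to_string(),
        size_constraint: SizeConstraint {
            comparison_operator: "GT".to_string(), // match when the body exceeds Size
            field_to_match: FieldToMatch::default(),
            size: 8192,
            text_transformation: "NONE".to_string(),
        },
    }
}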
/// <p>A complex type that contains <code>SqlInjectionMatchTuple</code> objects, which specify the parts of web requests that you want AWS WAF to inspect for snippets of malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header. If a <code>SqlInjectionMatchSet</code> contains more than one <code>SqlInjectionMatchTuple</code> object, a request needs to include snippets of SQL code in only one of the specified parts of the request to be considered a match.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SqlInjectionMatchSet {
/// <p>The name, if any, of the <code>SqlInjectionMatchSet</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>A unique identifier for a <code>SqlInjectionMatchSet</code>. You use <code>SqlInjectionMatchSetId</code> to get information about a <code>SqlInjectionMatchSet</code> (see <a>GetSqlInjectionMatchSet</a>), update a <code>SqlInjectionMatchSet</code> (see <a>UpdateSqlInjectionMatchSet</a>), insert a <code>SqlInjectionMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>SqlInjectionMatchSet</code> from AWS WAF (see <a>DeleteSqlInjectionMatchSet</a>).</p> <p> <code>SqlInjectionMatchSetId</code> is returned by <a>CreateSqlInjectionMatchSet</a> and by <a>ListSqlInjectionMatchSets</a>.</p>
#[serde(rename = "SqlInjectionMatchSetId")]
pub sql_injection_match_set_id: String,
/// <p>Specifies the parts of web requests that you want to inspect for snippets of malicious SQL code.</p>
#[serde(rename = "SqlInjectionMatchTuples")]
pub sql_injection_match_tuples: Vec<SqlInjectionMatchTuple>,
}
/// <p>The <code>Id</code> and <code>Name</code> of a <code>SqlInjectionMatchSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SqlInjectionMatchSetSummary {
/// <p>The name of the <code>SqlInjectionMatchSet</code>, if any, specified by <code>Id</code>.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>A unique identifier for a <code>SqlInjectionMatchSet</code>. You use <code>SqlInjectionMatchSetId</code> to get information about a <code>SqlInjectionMatchSet</code> (see <a>GetSqlInjectionMatchSet</a>), update a <code>SqlInjectionMatchSet</code> (see <a>UpdateSqlInjectionMatchSet</a>), insert a <code>SqlInjectionMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete a <code>SqlInjectionMatchSet</code> from AWS WAF (see <a>DeleteSqlInjectionMatchSet</a>).</p> <p> <code>SqlInjectionMatchSetId</code> is returned by <a>CreateSqlInjectionMatchSet</a> and by <a>ListSqlInjectionMatchSets</a>.</p>
#[serde(rename = "SqlInjectionMatchSetId")]
pub sql_injection_match_set_id: String,
}
/// <p>Specifies the part of a web request that you want to inspect for snippets of malicious SQL code and indicates whether you want to add the specification to a <a>SqlInjectionMatchSet</a> or delete it from a <code>SqlInjectionMatchSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct SqlInjectionMatchSetUpdate {
/// <p>Specify <code>INSERT</code> to add a <a>SqlInjectionMatchSetUpdate</a> to a <a>SqlInjectionMatchSet</a>. Use <code>DELETE</code> to remove a <code>SqlInjectionMatchSetUpdate</code> from a <code>SqlInjectionMatchSet</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>Specifies the part of a web request that you want AWS WAF to inspect for snippets of malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header.</p>
#[serde(rename = "SqlInjectionMatchTuple")]
pub sql_injection_match_tuple: SqlInjectionMatchTuple,
}
/// <p>Specifies the part of a web request that you want AWS WAF to inspect for snippets of malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SqlInjectionMatchTuple {
/// <p>Specifies where in a web request to look for snippets of malicious SQL code.</p>
#[serde(rename = "FieldToMatch")]
pub field_to_match: FieldToMatch,
/// <p>Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. If you specify a transformation, AWS WAF performs the transformation on <code>FieldToMatch</code> before inspecting a request for a match.</p> <p>You can only specify a single type of TextTransformation.</p> <p> <b>CMD_LINE</b> </p> <p>When you're concerned that attackers are injecting an operating system command line command and using unusual formatting to disguise some or all of the command, use this option to perform the following transformations:</p> <ul> <li> <p>Delete the following characters: \ " ' ^</p> </li> <li> <p>Delete spaces before the following characters: / (</p> </li> <li> <p>Replace the following characters with a space: , ;</p> </li> <li> <p>Replace multiple spaces with one space</p> </li> <li> <p>Convert uppercase letters (A-Z) to lowercase (a-z)</p> </li> </ul> <p> <b>COMPRESS_WHITE_SPACE</b> </p> <p>Use this option to replace the following characters with a space character (decimal 32):</p> <ul> <li> <p>\f, formfeed, decimal 12</p> </li> <li> <p>\t, tab, decimal 9</p> </li> <li> <p>\n, newline, decimal 10</p> </li> <li> <p>\r, carriage return, decimal 13</p> </li> <li> <p>\v, vertical tab, decimal 11</p> </li> <li> <p>non-breaking space, decimal 160</p> </li> </ul> <p> <code>COMPRESS_WHITE_SPACE</code> also replaces multiple spaces with one space.</p> <p> <b>HTML_ENTITY_DECODE</b> </p> <p>Use this option to replace HTML-encoded characters with unencoded characters. <code>HTML_ENTITY_DECODE</code> performs the following operations:</p> <ul> <li> <p>Replaces <code>(ampersand)quot;</code> with <code>"</code> </p> </li> <li> <p>Replaces <code>(ampersand)nbsp;</code> with a non-breaking space, decimal 160</p> </li> <li> <p>Replaces <code>(ampersand)lt;</code> with a "less than" symbol</p> </li> <li> <p>Replaces <code>(ampersand)gt;</code> with <code>></code> </p> </li> <li> <p>Replaces characters that are represented in hexadecimal format, <code>(ampersand)#xhhhh;</code>, with the corresponding characters</p> </li> <li> <p>Replaces characters that are represented in decimal format, <code>(ampersand)#nnnn;</code>, with the corresponding characters</p> </li> </ul> <p> <b>LOWERCASE</b> </p> <p>Use this option to convert uppercase letters (A-Z) to lowercase (a-z).</p> <p> <b>URL_DECODE</b> </p> <p>Use this option to decode a URL-encoded value.</p> <p> <b>NONE</b> </p> <p>Specify <code>NONE</code> if you don't want to perform any text transformations.</p>
#[serde(rename = "TextTransformation")]
pub text_transformation: String,
}
/// <p>A summary of the rule groups you are subscribed to.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SubscribedRuleGroupSummary {
/// <p>A friendly name or description for the metrics for this <code>RuleGroup</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change the name of the metric after you create the <code>RuleGroup</code>.</p>
#[serde(rename = "MetricName")]
pub metric_name: String,
/// <p>A friendly name or description of the <code>RuleGroup</code>. You can't change the name of a <code>RuleGroup</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>A unique identifier for a <code>RuleGroup</code>.</p>
#[serde(rename = "RuleGroupId")]
pub rule_group_id: String,
}
/// <p>In a <a>GetSampledRequests</a> request, the <code>StartTime</code> and <code>EndTime</code> objects specify the time range for which you want AWS WAF to return a sample of web requests.</p> <p>In a <a>GetSampledRequests</a> response, the <code>StartTime</code> and <code>EndTime</code> objects specify the time range for which AWS WAF actually returned a sample of web requests. AWS WAF gets the specified number of requests from among the first 5,000 requests that your AWS resource receives during the specified time period. If your resource receives more than 5,000 requests during that period, AWS WAF stops sampling after the 5,000th request. In that case, <code>EndTime</code> is the time that AWS WAF received the 5,000th request. </p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TimeWindow {
/// <p>The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the requests that your AWS resource received. Specify the date and time in the following format: <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.</p>
#[serde(rename = "EndTime")]
pub end_time: f64,
/// <p>The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of the requests that your AWS resource received. Specify the date and time in the following format: <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.</p>
#[serde(rename = "StartTime")]
pub start_time: f64,
}
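// Illustrative usage sketch added for clarity; not part of the generated AWS
// API surface. It builds a `TimeWindow` for `GetSampledRequests` covering one
// hour. The f64 fields are treated as epoch seconds and the concrete values
// are placeholder assumptions.
#[cfg(test)]
mod time_window_usage_example {
    use super::TimeWindow;

    #[test]
    fn build_one_hour_window() {
        let end_time = 1_500_003_600.0;
        let window = TimeWindow {
            start_time: end_time - 3_600.0,
            end_time,
        };
        assert!(window.end_time > window.start_time);
    }
}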
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateByteMatchSetRequest {
/// <p>The <code>ByteMatchSetId</code> of the <a>ByteMatchSet</a> that you want to update. <code>ByteMatchSetId</code> is returned by <a>CreateByteMatchSet</a> and by <a>ListByteMatchSets</a>.</p>
#[serde(rename = "ByteMatchSetId")]
pub byte_match_set_id: String,
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p><p>An array of <code>ByteMatchSetUpdate</code> objects that you want to insert into or delete from a <a>ByteMatchSet</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>ByteMatchSetUpdate</a>: Contains <code>Action</code> and <code>ByteMatchTuple</code> </p> </li> <li> <p> <a>ByteMatchTuple</a>: Contains <code>FieldToMatch</code>, <code>PositionalConstraint</code>, <code>TargetString</code>, and <code>TextTransformation</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Updates")]
pub updates: Vec<ByteMatchSetUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateByteMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateByteMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateGeoMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>GeoMatchSetId</code> of the <a>GeoMatchSet</a> that you want to update. <code>GeoMatchSetId</code> is returned by <a>CreateGeoMatchSet</a> and by <a>ListGeoMatchSets</a>.</p>
#[serde(rename = "GeoMatchSetId")]
pub geo_match_set_id: String,
    /// <p><p>An array of <code>GeoMatchSetUpdate</code> objects that you want to insert into or delete from a <a>GeoMatchSet</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>GeoMatchSetUpdate</a>: Contains <code>Action</code> and <code>GeoMatchConstraint</code> </p> </li> <li> <p> <a>GeoMatchConstraint</a>: Contains <code>Type</code> and <code>Value</code> </p> <p>You can have only one <code>Type</code> and <code>Value</code> per <code>GeoMatchConstraint</code>. To add multiple countries, include multiple <code>GeoMatchSetUpdate</code> objects in your request.</p> </li> </ul></p>
#[serde(rename = "Updates")]
pub updates: Vec<GeoMatchSetUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateGeoMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateGeoMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateIPSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>IPSetId</code> of the <a>IPSet</a> that you want to update. <code>IPSetId</code> is returned by <a>CreateIPSet</a> and by <a>ListIPSets</a>.</p>
#[serde(rename = "IPSetId")]
pub ip_set_id: String,
/// <p>An array of <code>IPSetUpdate</code> objects that you want to insert into or delete from an <a>IPSet</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>IPSetUpdate</a>: Contains <code>Action</code> and <code>IPSetDescriptor</code> </p> </li> <li> <p> <a>IPSetDescriptor</a>: Contains <code>Type</code> and <code>Value</code> </p> </li> </ul> <p>You can insert a maximum of 1000 addresses in a single request.</p>
#[serde(rename = "Updates")]
pub updates: Vec<IPSetUpdate>,
}
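// Illustrative sketch added for clarity; not part of the generated AWS API
// surface. The documentation above caps a single `UpdateIPSet` request at
// 1000 addresses, so a larger change set has to be split across requests.
// This only slices the update list; in practice each submitted request also
// needs its own fresh change token (the single placeholder token here is for
// construction only). It assumes `IPSetUpdate` derives `Clone`, as the
// generated types in this file do.
#[cfg(test)]
mod update_ip_set_batching_example {
    use super::{IPSetUpdate, UpdateIPSetRequest};

    fn batch_requests(
        ip_set_id: &str,
        change_token: &str,
        updates: Vec<IPSetUpdate>,
    ) -> Vec<UpdateIPSetRequest> {
        updates
            .chunks(1000)
            .map(|chunk| UpdateIPSetRequest {
                change_token: change_token.to_string(),
                ip_set_id: ip_set_id.to_string(),
                updates: chunk.to_vec(),
            })
            .collect()
    }

    #[test]
    fn empty_update_list_produces_no_requests() {
        let requests = batch_requests("example-ip-set-id", "example-token", Vec::new());
        assert!(requests.is_empty());
    }
}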
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateIPSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateIPSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateRateBasedRuleRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The maximum number of requests, which have an identical value in the field specified by the <code>RateKey</code>, allowed in a five-minute period. If the number of requests exceeds the <code>RateLimit</code> and the other predicates specified in the rule are also met, AWS WAF triggers the action that is specified for this rule.</p>
#[serde(rename = "RateLimit")]
pub rate_limit: i64,
/// <p>The <code>RuleId</code> of the <code>RateBasedRule</code> that you want to update. <code>RuleId</code> is returned by <code>CreateRateBasedRule</code> and by <a>ListRateBasedRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
/// <p>An array of <code>RuleUpdate</code> objects that you want to insert into or delete from a <a>RateBasedRule</a>. </p>
#[serde(rename = "Updates")]
pub updates: Vec<RuleUpdate>,
}
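// Illustrative usage sketch added for clarity; not part of the generated AWS
// API surface. It only shows how the request struct is built when changing a
// `RateBasedRule`'s rate limit: the IDs and change token are placeholders,
// and the empty `Updates` list is a construction-time placeholder rather than
// a statement about what the service accepts.
#[cfg(test)]
mod update_rate_based_rule_usage_example {
    use super::UpdateRateBasedRuleRequest;

    #[test]
    fn build_rate_limit_update() {
        let request = UpdateRateBasedRuleRequest {
            change_token: "example-change-token".to_string(),
            rate_limit: 2000,
            rule_id: "example-rule-id".to_string(),
            updates: Vec::new(),
        };
        assert_eq!(request.rate_limit, 2000);
    }
}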
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateRateBasedRuleResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateRateBasedRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateRegexMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RegexMatchSetId</code> of the <a>RegexMatchSet</a> that you want to update. <code>RegexMatchSetId</code> is returned by <a>CreateRegexMatchSet</a> and by <a>ListRegexMatchSets</a>.</p>
#[serde(rename = "RegexMatchSetId")]
pub regex_match_set_id: String,
/// <p>An array of <code>RegexMatchSetUpdate</code> objects that you want to insert into or delete from a <a>RegexMatchSet</a>. For more information, see <a>RegexMatchTuple</a>.</p>
#[serde(rename = "Updates")]
pub updates: Vec<RegexMatchSetUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateRegexMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateRegexMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateRegexPatternSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RegexPatternSetId</code> of the <a>RegexPatternSet</a> that you want to update. <code>RegexPatternSetId</code> is returned by <a>CreateRegexPatternSet</a> and by <a>ListRegexPatternSets</a>.</p>
#[serde(rename = "RegexPatternSetId")]
pub regex_pattern_set_id: String,
/// <p>An array of <code>RegexPatternSetUpdate</code> objects that you want to insert into or delete from a <a>RegexPatternSet</a>.</p>
#[serde(rename = "Updates")]
pub updates: Vec<RegexPatternSetUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateRegexPatternSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateRegexPatternSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateRuleGroupRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RuleGroupId</code> of the <a>RuleGroup</a> that you want to update. <code>RuleGroupId</code> is returned by <a>CreateRuleGroup</a> and by <a>ListRuleGroups</a>.</p>
#[serde(rename = "RuleGroupId")]
pub rule_group_id: String,
/// <p>An array of <code>RuleGroupUpdate</code> objects that you want to insert into or delete from a <a>RuleGroup</a>.</p> <p>You can only insert <code>REGULAR</code> rules into a rule group.</p> <p> <code>ActivatedRule|OverrideAction</code> applies only when updating or adding a <code>RuleGroup</code> to a <code>WebACL</code>. In this case you do not use <code>ActivatedRule|Action</code>. For all other update requests, <code>ActivatedRule|Action</code> is used instead of <code>ActivatedRule|OverrideAction</code>.</p>
#[serde(rename = "Updates")]
pub updates: Vec<RuleGroupUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateRuleGroupResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateRuleGroup</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateRuleRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>RuleId</code> of the <code>Rule</code> that you want to update. <code>RuleId</code> is returned by <code>CreateRule</code> and by <a>ListRules</a>.</p>
#[serde(rename = "RuleId")]
pub rule_id: String,
/// <p><p>An array of <code>RuleUpdate</code> objects that you want to insert into or delete from a <a>Rule</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>RuleUpdate</a>: Contains <code>Action</code> and <code>Predicate</code> </p> </li> <li> <p> <a>Predicate</a>: Contains <code>DataId</code>, <code>Negated</code>, and <code>Type</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Updates")]
pub updates: Vec<RuleUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateRuleResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateRule</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateSizeConstraintSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>SizeConstraintSetId</code> of the <a>SizeConstraintSet</a> that you want to update. <code>SizeConstraintSetId</code> is returned by <a>CreateSizeConstraintSet</a> and by <a>ListSizeConstraintSets</a>.</p>
#[serde(rename = "SizeConstraintSetId")]
pub size_constraint_set_id: String,
/// <p><p>An array of <code>SizeConstraintSetUpdate</code> objects that you want to insert into or delete from a <a>SizeConstraintSet</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>SizeConstraintSetUpdate</a>: Contains <code>Action</code> and <code>SizeConstraint</code> </p> </li> <li> <p> <a>SizeConstraint</a>: Contains <code>FieldToMatch</code>, <code>TextTransformation</code>, <code>ComparisonOperator</code>, and <code>Size</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Updates")]
pub updates: Vec<SizeConstraintSetUpdate>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateSizeConstraintSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateSizeConstraintSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
/// <p>A request to update a <a>SqlInjectionMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateSqlInjectionMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>The <code>SqlInjectionMatchSetId</code> of the <code>SqlInjectionMatchSet</code> that you want to update. <code>SqlInjectionMatchSetId</code> is returned by <a>CreateSqlInjectionMatchSet</a> and by <a>ListSqlInjectionMatchSets</a>.</p>
#[serde(rename = "SqlInjectionMatchSetId")]
pub sql_injection_match_set_id: String,
/// <p><p>An array of <code>SqlInjectionMatchSetUpdate</code> objects that you want to insert into or delete from a <a>SqlInjectionMatchSet</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>SqlInjectionMatchSetUpdate</a>: Contains <code>Action</code> and <code>SqlInjectionMatchTuple</code> </p> </li> <li> <p> <a>SqlInjectionMatchTuple</a>: Contains <code>FieldToMatch</code> and <code>TextTransformation</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Updates")]
pub updates: Vec<SqlInjectionMatchSetUpdate>,
}
/// <p>The response to an <a>UpdateSqlInjectionMatchSet</a> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateSqlInjectionMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateSqlInjectionMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateWebACLRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p>A default action for the web ACL, either ALLOW or BLOCK. AWS WAF performs the default action if a request doesn't match the criteria in any of the rules in a web ACL.</p>
#[serde(rename = "DefaultAction")]
#[serde(skip_serializing_if = "Option::is_none")]
pub default_action: Option<WafAction>,
/// <p><p>An array of updates to make to the <a>WebACL</a>.</p> <p>An array of <code>WebACLUpdate</code> objects that you want to insert into or delete from a <a>WebACL</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>WebACLUpdate</a>: Contains <code>Action</code> and <code>ActivatedRule</code> </p> </li> <li> <p> <a>ActivatedRule</a>: Contains <code>Action</code>, <code>OverrideAction</code>, <code>Priority</code>, <code>RuleId</code>, and <code>Type</code>. <code>ActivatedRule|OverrideAction</code> applies only when updating or adding a <code>RuleGroup</code> to a <code>WebACL</code>. In this case, you do not use <code>ActivatedRule|Action</code>. For all other update requests, <code>ActivatedRule|Action</code> is used instead of <code>ActivatedRule|OverrideAction</code>. </p> </li> <li> <p> <a>WafAction</a>: Contains <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Updates")]
#[serde(skip_serializing_if = "Option::is_none")]
pub updates: Option<Vec<WebACLUpdate>>,
/// <p>The <code>WebACLId</code> of the <a>WebACL</a> that you want to update. <code>WebACLId</code> is returned by <a>CreateWebACL</a> and by <a>ListWebACLs</a>.</p>
#[serde(rename = "WebACLId")]
pub web_acl_id: String,
}
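// Illustrative usage sketch added for clarity; not part of the generated AWS
// API surface. It switches a `WebACL`'s default action to BLOCK without
// adding or removing rules, so `Updates` is left as `None`. The `WebACLId`
// and change token values are placeholders.
#[cfg(test)]
mod update_web_acl_usage_example {
    use super::{UpdateWebACLRequest, WafAction};

    #[test]
    fn build_default_action_only_update() {
        let request = UpdateWebACLRequest {
            change_token: "example-change-token".to_string(),
            default_action: Some(WafAction {
                type_: "BLOCK".to_string(),
            }),
            updates: None,
            web_acl_id: "example-web-acl-id".to_string(),
        };
        assert!(request.updates.is_none());
    }
}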
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateWebACLResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateWebACL</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
/// <p>A request to update an <a>XssMatchSet</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateXssMatchSetRequest {
/// <p>The value returned by the most recent call to <a>GetChangeToken</a>.</p>
#[serde(rename = "ChangeToken")]
pub change_token: String,
/// <p><p>An array of <code>XssMatchSetUpdate</code> objects that you want to insert into or delete from an <a>XssMatchSet</a>. For more information, see the applicable data types:</p> <ul> <li> <p> <a>XssMatchSetUpdate</a>: Contains <code>Action</code> and <code>XssMatchTuple</code> </p> </li> <li> <p> <a>XssMatchTuple</a>: Contains <code>FieldToMatch</code> and <code>TextTransformation</code> </p> </li> <li> <p> <a>FieldToMatch</a>: Contains <code>Data</code> and <code>Type</code> </p> </li> </ul></p>
#[serde(rename = "Updates")]
pub updates: Vec<XssMatchSetUpdate>,
/// <p>The <code>XssMatchSetId</code> of the <code>XssMatchSet</code> that you want to update. <code>XssMatchSetId</code> is returned by <a>CreateXssMatchSet</a> and by <a>ListXssMatchSets</a>.</p>
#[serde(rename = "XssMatchSetId")]
pub xss_match_set_id: String,
}
/// <p>The response to an <a>UpdateXssMatchSet</a> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateXssMatchSetResponse {
/// <p>The <code>ChangeToken</code> that you used to submit the <code>UpdateXssMatchSet</code> request. You can also use this value to query the status of the request. For more information, see <a>GetChangeTokenStatus</a>.</p>
#[serde(rename = "ChangeToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub change_token: Option<String>,
}
/// <p>For the action that is associated with a rule in a <code>WebACL</code>, specifies the action that you want AWS WAF to perform when a web request matches all of the conditions in a rule. For the default action in a <code>WebACL</code>, specifies the action that you want AWS WAF to take when a web request doesn't match all of the conditions in any of the rules in a <code>WebACL</code>. </p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct WafAction {
/// <p><p>Specifies how you want AWS WAF to respond to requests that match the settings in a <code>Rule</code>. Valid settings include the following:</p> <ul> <li> <p> <code>ALLOW</code>: AWS WAF allows requests</p> </li> <li> <p> <code>BLOCK</code>: AWS WAF blocks requests</p> </li> <li> <p> <code>COUNT</code>: AWS WAF increments a counter of the requests that match all of the conditions in the rule. AWS WAF then continues to inspect the web request based on the remaining rules in the web ACL. You can't specify <code>COUNT</code> for the default action for a <code>WebACL</code>.</p> </li> </ul></p>
#[serde(rename = "Type")]
pub type_: String,
}
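// Illustrative usage sketch added for clarity; not part of the generated AWS
// API surface. A `WafAction` is just a typed string; "ALLOW", "BLOCK", and
// "COUNT" are the values documented above, and COUNT is not valid as a
// `WebACL` default action.
#[cfg(test)]
mod waf_action_usage_example {
    use super::WafAction;

    #[test]
    fn build_block_action() {
        let action = WafAction {
            type_: "BLOCK".to_string(),
        };
        assert_eq!(action.type_, "BLOCK");
    }
}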
/// <p>The action to take if any rule within the <code>RuleGroup</code> matches a request. </p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct WafOverrideAction {
    /// <p> <code>COUNT</code> overrides the action specified by the individual rule within a <code>RuleGroup</code>. If set to <code>NONE</code>, the rule's action will take place.</p>
#[serde(rename = "Type")]
pub type_: String,
}
/// <p>Contains the <code>Rules</code> that identify the requests that you want to allow, block, or count. In a <code>WebACL</code>, you also specify a default action (<code>ALLOW</code> or <code>BLOCK</code>), and the action for each <code>Rule</code> that you add to a <code>WebACL</code>, for example, block requests from specified IP addresses or block requests from specified referrers. You also associate the <code>WebACL</code> with a CloudFront distribution to identify the requests that you want AWS WAF to filter. If you add more than one <code>Rule</code> to a <code>WebACL</code>, a request needs to match only one of the specifications to be allowed, blocked, or counted. For more information, see <a>UpdateWebACL</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct WebACL {
/// <p>The action to perform if none of the <code>Rules</code> contained in the <code>WebACL</code> match. The action is specified by the <a>WafAction</a> object.</p>
#[serde(rename = "DefaultAction")]
pub default_action: WafAction,
/// <p>A friendly name or description for the metrics for this <code>WebACL</code>. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. You can't change <code>MetricName</code> after you create the <code>WebACL</code>.</p>
#[serde(rename = "MetricName")]
#[serde(skip_serializing_if = "Option::is_none")]
pub metric_name: Option<String>,
/// <p>A friendly name or description of the <code>WebACL</code>. You can't change the name of a <code>WebACL</code> after you create it.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>An array that contains the action for each <code>Rule</code> in a <code>WebACL</code>, the priority of the <code>Rule</code>, and the ID of the <code>Rule</code>.</p>
#[serde(rename = "Rules")]
pub rules: Vec<ActivatedRule>,
    /// <p>The Amazon Resource Name (ARN) of the web ACL.</p>
#[serde(rename = "WebACLArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub web_acl_arn: Option<String>,
/// <p>A unique identifier for a <code>WebACL</code>. You use <code>WebACLId</code> to get information about a <code>WebACL</code> (see <a>GetWebACL</a>), update a <code>WebACL</code> (see <a>UpdateWebACL</a>), and delete a <code>WebACL</code> from AWS WAF (see <a>DeleteWebACL</a>).</p> <p> <code>WebACLId</code> is returned by <a>CreateWebACL</a> and by <a>ListWebACLs</a>.</p>
#[serde(rename = "WebACLId")]
pub web_acl_id: String,
}
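// Illustrative sketch added for clarity; not part of the generated AWS API
// surface. The default action is what AWS WAF applies when no rule in the
// `WebACL` matches; the helper below just inspects its `Type`. Field values
// are placeholders and the remaining fields fall back to `Default`.
#[cfg(test)]
mod web_acl_default_action_example {
    use super::{WafAction, WebACL};

    fn blocks_by_default(acl: &WebACL) -> bool {
        acl.default_action.type_ == "BLOCK"
    }

    #[test]
    fn default_block_is_detected() {
        let acl = WebACL {
            default_action: WafAction {
                type_: "BLOCK".to_string(),
            },
            web_acl_id: "example-web-acl-id".to_string(),
            ..Default::default()
        };
        assert!(blocks_by_default(&acl));
    }
}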
/// <p>Contains the identifier and the name or description of the <a>WebACL</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct WebACLSummary {
/// <p>A friendly name or description of the <a>WebACL</a>. You can't change the name of a <code>WebACL</code> after you create it.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>A unique identifier for a <code>WebACL</code>. You use <code>WebACLId</code> to get information about a <code>WebACL</code> (see <a>GetWebACL</a>), update a <code>WebACL</code> (see <a>UpdateWebACL</a>), and delete a <code>WebACL</code> from AWS WAF (see <a>DeleteWebACL</a>).</p> <p> <code>WebACLId</code> is returned by <a>CreateWebACL</a> and by <a>ListWebACLs</a>.</p>
#[serde(rename = "WebACLId")]
pub web_acl_id: String,
}
/// <p>Specifies whether to insert a <code>Rule</code> into or delete a <code>Rule</code> from a <code>WebACL</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct WebACLUpdate {
/// <p>Specifies whether to insert a <code>Rule</code> into or delete a <code>Rule</code> from a <code>WebACL</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>The <code>ActivatedRule</code> object in an <a>UpdateWebACL</a> request specifies a <code>Rule</code> that you want to insert or delete, the priority of the <code>Rule</code> in the <code>WebACL</code>, and the action that you want AWS WAF to take when a web request matches the <code>Rule</code> (<code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>).</p>
#[serde(rename = "ActivatedRule")]
pub activated_rule: ActivatedRule,
}
/// <p>A complex type that contains <code>XssMatchTuple</code> objects, which specify the parts of web requests that you want AWS WAF to inspect for cross-site scripting attacks and, if you want AWS WAF to inspect a header, the name of the header. If a <code>XssMatchSet</code> contains more than one <code>XssMatchTuple</code> object, a request needs to include cross-site scripting attacks in only one of the specified parts of the request to be considered a match.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct XssMatchSet {
/// <p>The name, if any, of the <code>XssMatchSet</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>A unique identifier for an <code>XssMatchSet</code>. You use <code>XssMatchSetId</code> to get information about an <code>XssMatchSet</code> (see <a>GetXssMatchSet</a>), update an <code>XssMatchSet</code> (see <a>UpdateXssMatchSet</a>), insert an <code>XssMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete an <code>XssMatchSet</code> from AWS WAF (see <a>DeleteXssMatchSet</a>).</p> <p> <code>XssMatchSetId</code> is returned by <a>CreateXssMatchSet</a> and by <a>ListXssMatchSets</a>.</p>
#[serde(rename = "XssMatchSetId")]
pub xss_match_set_id: String,
/// <p>Specifies the parts of web requests that you want to inspect for cross-site scripting attacks.</p>
#[serde(rename = "XssMatchTuples")]
pub xss_match_tuples: Vec<XssMatchTuple>,
}
/// <p>The <code>Id</code> and <code>Name</code> of an <code>XssMatchSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct XssMatchSetSummary {
/// <p>The name of the <code>XssMatchSet</code>, if any, specified by <code>Id</code>.</p>
#[serde(rename = "Name")]
pub name: String,
    /// <p>A unique identifier for an <code>XssMatchSet</code>. You use <code>XssMatchSetId</code> to get information about an <code>XssMatchSet</code> (see <a>GetXssMatchSet</a>), update an <code>XssMatchSet</code> (see <a>UpdateXssMatchSet</a>), insert an <code>XssMatchSet</code> into a <code>Rule</code> or delete one from a <code>Rule</code> (see <a>UpdateRule</a>), and delete an <code>XssMatchSet</code> from AWS WAF (see <a>DeleteXssMatchSet</a>).</p> <p> <code>XssMatchSetId</code> is returned by <a>CreateXssMatchSet</a> and by <a>ListXssMatchSets</a>.</p>
#[serde(rename = "XssMatchSetId")]
pub xss_match_set_id: String,
}
/// <p>Specifies the part of a web request that you want to inspect for cross-site scripting attacks and indicates whether you want to add the specification to an <a>XssMatchSet</a> or delete it from an <code>XssMatchSet</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct XssMatchSetUpdate {
/// <p>Specify <code>INSERT</code> to add an <a>XssMatchSetUpdate</a> to an <a>XssMatchSet</a>. Use <code>DELETE</code> to remove an <code>XssMatchSetUpdate</code> from an <code>XssMatchSet</code>.</p>
#[serde(rename = "Action")]
pub action: String,
/// <p>Specifies the part of a web request that you want AWS WAF to inspect for cross-site scripting attacks and, if you want AWS WAF to inspect a header, the name of the header.</p>
#[serde(rename = "XssMatchTuple")]
pub xss_match_tuple: XssMatchTuple,
}
/// <p>Specifies the part of a web request that you want AWS WAF to inspect for cross-site scripting attacks and, if you want AWS WAF to inspect a header, the name of the header.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct XssMatchTuple {
/// <p>Specifies where in a web request to look for cross-site scripting attacks.</p>
#[serde(rename = "FieldToMatch")]
pub field_to_match: FieldToMatch,
/// <p>Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. If you specify a transformation, AWS WAF performs the transformation on <code>FieldToMatch</code> before inspecting a request for a match.</p> <p>You can only specify a single type of TextTransformation.</p> <p> <b>CMD_LINE</b> </p> <p>When you're concerned that attackers are injecting an operating system command line command and using unusual formatting to disguise some or all of the command, use this option to perform the following transformations:</p> <ul> <li> <p>Delete the following characters: \ " ' ^</p> </li> <li> <p>Delete spaces before the following characters: / (</p> </li> <li> <p>Replace the following characters with a space: , ;</p> </li> <li> <p>Replace multiple spaces with one space</p> </li> <li> <p>Convert uppercase letters (A-Z) to lowercase (a-z)</p> </li> </ul> <p> <b>COMPRESS_WHITE_SPACE</b> </p> <p>Use this option to replace the following characters with a space character (decimal 32):</p> <ul> <li> <p>\f, formfeed, decimal 12</p> </li> <li> <p>\t, tab, decimal 9</p> </li> <li> <p>\n, newline, decimal 10</p> </li> <li> <p>\r, carriage return, decimal 13</p> </li> <li> <p>\v, vertical tab, decimal 11</p> </li> <li> <p>non-breaking space, decimal 160</p> </li> </ul> <p> <code>COMPRESS_WHITE_SPACE</code> also replaces multiple spaces with one space.</p> <p> <b>HTML_ENTITY_DECODE</b> </p> <p>Use this option to replace HTML-encoded characters with unencoded characters. <code>HTML_ENTITY_DECODE</code> performs the following operations:</p> <ul> <li> <p>Replaces <code>(ampersand)quot;</code> with <code>"</code> </p> </li> <li> <p>Replaces <code>(ampersand)nbsp;</code> with a non-breaking space, decimal 160</p> </li> <li> <p>Replaces <code>(ampersand)lt;</code> with a "less than" symbol</p> </li> <li> <p>Replaces <code>(ampersand)gt;</code> with <code>></code> </p> </li> <li> <p>Replaces characters that are represented in hexadecimal format, <code>(ampersand)#xhhhh;</code>, with the corresponding characters</p> </li> <li> <p>Replaces characters that are represented in decimal format, <code>(ampersand)#nnnn;</code>, with the corresponding characters</p> </li> </ul> <p> <b>LOWERCASE</b> </p> <p>Use this option to convert uppercase letters (A-Z) to lowercase (a-z).</p> <p> <b>URL_DECODE</b> </p> <p>Use this option to decode a URL-encoded value.</p> <p> <b>NONE</b> </p> <p>Specify <code>NONE</code> if you don't want to perform any text transformations.</p>
#[serde(rename = "TextTransformation")]
pub text_transformation: String,
}
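// Illustrative sketch added for clarity; not part of the generated AWS API
// surface. `TextTransformation` is carried as a plain `String`; the values
// below are the ones enumerated in the documentation above.
#[cfg(test)]
mod text_transformation_values_example {
    const DOCUMENTED_TRANSFORMATIONS: [&str; 6] = [
        "CMD_LINE",
        "COMPRESS_WHITE_SPACE",
        "HTML_ENTITY_DECODE",
        "LOWERCASE",
        "URL_DECODE",
        "NONE",
    ];

    #[test]
    fn none_is_a_documented_transformation() {
        assert!(DOCUMENTED_TRANSFORMATIONS.contains(&"NONE"));
    }
}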
/// Errors returned by CreateByteMatchSet
#[derive(Debug, PartialEq)]
pub enum CreateByteMatchSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateByteMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateByteMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateByteMatchSetError::WAFDisallowedName(
err.msg,
))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateByteMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(CreateByteMatchSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateByteMatchSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateByteMatchSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateByteMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateByteMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateByteMatchSetError {
fn description(&self) -> &str {
match *self {
CreateByteMatchSetError::WAFDisallowedName(ref cause) => cause,
CreateByteMatchSetError::WAFInternalError(ref cause) => cause,
CreateByteMatchSetError::WAFInvalidAccount(ref cause) => cause,
CreateByteMatchSetError::WAFInvalidParameter(ref cause) => cause,
CreateByteMatchSetError::WAFLimitsExceeded(ref cause) => cause,
CreateByteMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
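// Illustrative sketch added for clarity; not part of the generated AWS API
// surface. It shows a caller-side view of which `CreateByteMatchSet` failures
// are worth retrying: `WAFStaleData` means the change token was already used
// (fetch a new one and retry), and `WAFInternalError` is the documented
// "retry your request" case. The retry policy itself is an assumption, not
// service guidance beyond those docs.
#[cfg(test)]
mod create_byte_match_set_error_example {
    use super::CreateByteMatchSetError;

    fn is_retryable(err: &CreateByteMatchSetError) -> bool {
        match *err {
            CreateByteMatchSetError::WAFInternalError(_)
            | CreateByteMatchSetError::WAFStaleData(_) => true,
            _ => false,
        }
    }

    #[test]
    fn stale_data_is_retryable() {
        let err = CreateByteMatchSetError::WAFStaleData("change token already used".to_string());
        assert!(is_retryable(&err));
    }
}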
/// Errors returned by CreateGeoMatchSet
#[derive(Debug, PartialEq)]
pub enum CreateGeoMatchSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateGeoMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateGeoMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateGeoMatchSetError::WAFDisallowedName(err.msg))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateGeoMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(CreateGeoMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateGeoMatchSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateGeoMatchSetError::WAFLimitsExceeded(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateGeoMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateGeoMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateGeoMatchSetError {
fn description(&self) -> &str {
match *self {
CreateGeoMatchSetError::WAFDisallowedName(ref cause) => cause,
CreateGeoMatchSetError::WAFInternalError(ref cause) => cause,
CreateGeoMatchSetError::WAFInvalidAccount(ref cause) => cause,
CreateGeoMatchSetError::WAFInvalidParameter(ref cause) => cause,
CreateGeoMatchSetError::WAFLimitsExceeded(ref cause) => cause,
CreateGeoMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateIPSet
#[derive(Debug, PartialEq)]
pub enum CreateIPSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateIPSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateIPSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateIPSetError::WAFDisallowedName(err.msg))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateIPSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(CreateIPSetError::WAFInvalidAccount(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateIPSetError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateIPSetError::WAFLimitsExceeded(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateIPSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateIPSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateIPSetError {
fn description(&self) -> &str {
match *self {
CreateIPSetError::WAFDisallowedName(ref cause) => cause,
CreateIPSetError::WAFInternalError(ref cause) => cause,
CreateIPSetError::WAFInvalidAccount(ref cause) => cause,
CreateIPSetError::WAFInvalidParameter(ref cause) => cause,
CreateIPSetError::WAFLimitsExceeded(ref cause) => cause,
CreateIPSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateRateBasedRule
#[derive(Debug, PartialEq)]
pub enum CreateRateBasedRuleError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateRateBasedRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateRateBasedRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateRateBasedRuleError::WAFDisallowedName(
err.msg,
))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateRateBasedRuleError::WAFInternalError(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateRateBasedRuleError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateRateBasedRuleError::WAFLimitsExceeded(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateRateBasedRuleError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateRateBasedRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateRateBasedRuleError {
fn description(&self) -> &str {
match *self {
CreateRateBasedRuleError::WAFDisallowedName(ref cause) => cause,
CreateRateBasedRuleError::WAFInternalError(ref cause) => cause,
CreateRateBasedRuleError::WAFInvalidParameter(ref cause) => cause,
CreateRateBasedRuleError::WAFLimitsExceeded(ref cause) => cause,
CreateRateBasedRuleError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateRegexMatchSet
#[derive(Debug, PartialEq)]
pub enum CreateRegexMatchSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateRegexMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateRegexMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateRegexMatchSetError::WAFDisallowedName(
err.msg,
))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateRegexMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateRegexMatchSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateRegexMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateRegexMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateRegexMatchSetError {
fn description(&self) -> &str {
match *self {
CreateRegexMatchSetError::WAFDisallowedName(ref cause) => cause,
CreateRegexMatchSetError::WAFInternalError(ref cause) => cause,
CreateRegexMatchSetError::WAFLimitsExceeded(ref cause) => cause,
CreateRegexMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateRegexPatternSet
#[derive(Debug, PartialEq)]
pub enum CreateRegexPatternSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateRegexPatternSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateRegexPatternSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateRegexPatternSetError::WAFDisallowedName(
err.msg,
))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateRegexPatternSetError::WAFInternalError(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateRegexPatternSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateRegexPatternSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateRegexPatternSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateRegexPatternSetError {
fn description(&self) -> &str {
match *self {
CreateRegexPatternSetError::WAFDisallowedName(ref cause) => cause,
CreateRegexPatternSetError::WAFInternalError(ref cause) => cause,
CreateRegexPatternSetError::WAFLimitsExceeded(ref cause) => cause,
CreateRegexPatternSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateRule
#[derive(Debug, PartialEq)]
pub enum CreateRuleError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateRuleError::WAFDisallowedName(err.msg))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateRuleError::WAFInternalError(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateRuleError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateRuleError::WAFLimitsExceeded(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateRuleError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateRuleError {
fn description(&self) -> &str {
match *self {
CreateRuleError::WAFDisallowedName(ref cause) => cause,
CreateRuleError::WAFInternalError(ref cause) => cause,
CreateRuleError::WAFInvalidParameter(ref cause) => cause,
CreateRuleError::WAFLimitsExceeded(ref cause) => cause,
CreateRuleError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateRuleGroup
#[derive(Debug, PartialEq)]
pub enum CreateRuleGroupError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateRuleGroupError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateRuleGroupError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateRuleGroupError::WAFDisallowedName(err.msg))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateRuleGroupError::WAFInternalError(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateRuleGroupError::WAFLimitsExceeded(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateRuleGroupError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateRuleGroupError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateRuleGroupError {
fn description(&self) -> &str {
match *self {
CreateRuleGroupError::WAFDisallowedName(ref cause) => cause,
CreateRuleGroupError::WAFInternalError(ref cause) => cause,
CreateRuleGroupError::WAFLimitsExceeded(ref cause) => cause,
CreateRuleGroupError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateSizeConstraintSet
#[derive(Debug, PartialEq)]
pub enum CreateSizeConstraintSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateSizeConstraintSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateSizeConstraintSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateSizeConstraintSetError::WAFDisallowedName(
err.msg,
))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateSizeConstraintSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(CreateSizeConstraintSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateSizeConstraintSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateSizeConstraintSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateSizeConstraintSetError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateSizeConstraintSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateSizeConstraintSetError {
fn description(&self) -> &str {
match *self {
CreateSizeConstraintSetError::WAFDisallowedName(ref cause) => cause,
CreateSizeConstraintSetError::WAFInternalError(ref cause) => cause,
CreateSizeConstraintSetError::WAFInvalidAccount(ref cause) => cause,
CreateSizeConstraintSetError::WAFInvalidParameter(ref cause) => cause,
CreateSizeConstraintSetError::WAFLimitsExceeded(ref cause) => cause,
CreateSizeConstraintSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateSqlInjectionMatchSet
#[derive(Debug, PartialEq)]
pub enum CreateSqlInjectionMatchSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateSqlInjectionMatchSetError {
pub fn from_response(
res: BufferedHttpResponse,
) -> RusotoError<CreateSqlInjectionMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(
CreateSqlInjectionMatchSetError::WAFDisallowedName(err.msg),
)
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateSqlInjectionMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(
CreateSqlInjectionMatchSetError::WAFInvalidAccount(err.msg),
)
}
"WAFInvalidParameterException" => {
return RusotoError::Service(
CreateSqlInjectionMatchSetError::WAFInvalidParameter(err.msg),
)
}
"WAFLimitsExceededException" => {
return RusotoError::Service(
CreateSqlInjectionMatchSetError::WAFLimitsExceeded(err.msg),
)
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateSqlInjectionMatchSetError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateSqlInjectionMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateSqlInjectionMatchSetError {
fn description(&self) -> &str {
match *self {
CreateSqlInjectionMatchSetError::WAFDisallowedName(ref cause) => cause,
CreateSqlInjectionMatchSetError::WAFInternalError(ref cause) => cause,
CreateSqlInjectionMatchSetError::WAFInvalidAccount(ref cause) => cause,
CreateSqlInjectionMatchSetError::WAFInvalidParameter(ref cause) => cause,
CreateSqlInjectionMatchSetError::WAFLimitsExceeded(ref cause) => cause,
CreateSqlInjectionMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateWebACL
#[derive(Debug, PartialEq)]
pub enum CreateWebACLError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateWebACLError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateWebACLError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateWebACLError::WAFDisallowedName(err.msg))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateWebACLError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(CreateWebACLError::WAFInvalidAccount(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateWebACLError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateWebACLError::WAFLimitsExceeded(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateWebACLError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateWebACLError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateWebACLError {
fn description(&self) -> &str {
match *self {
CreateWebACLError::WAFDisallowedName(ref cause) => cause,
CreateWebACLError::WAFInternalError(ref cause) => cause,
CreateWebACLError::WAFInvalidAccount(ref cause) => cause,
CreateWebACLError::WAFInvalidParameter(ref cause) => cause,
CreateWebACLError::WAFLimitsExceeded(ref cause) => cause,
CreateWebACLError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by CreateXssMatchSet
#[derive(Debug, PartialEq)]
pub enum CreateXssMatchSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl CreateXssMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateXssMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(CreateXssMatchSetError::WAFDisallowedName(err.msg))
}
"WAFInternalErrorException" => {
return RusotoError::Service(CreateXssMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(CreateXssMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(CreateXssMatchSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(CreateXssMatchSetError::WAFLimitsExceeded(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(CreateXssMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for CreateXssMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateXssMatchSetError {
fn description(&self) -> &str {
match *self {
CreateXssMatchSetError::WAFDisallowedName(ref cause) => cause,
CreateXssMatchSetError::WAFInternalError(ref cause) => cause,
CreateXssMatchSetError::WAFInvalidAccount(ref cause) => cause,
CreateXssMatchSetError::WAFInvalidParameter(ref cause) => cause,
CreateXssMatchSetError::WAFLimitsExceeded(ref cause) => cause,
CreateXssMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteByteMatchSet
#[derive(Debug, PartialEq)]
pub enum DeleteByteMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteByteMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteByteMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteByteMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteByteMatchSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteByteMatchSetError::WAFNonEmptyEntity(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteByteMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteByteMatchSetError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteByteMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteByteMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteByteMatchSetError {
    fn description(&self) -> &str {
        match *self {
            DeleteByteMatchSetError::WAFInternalError(ref cause) => cause,
            DeleteByteMatchSetError::WAFInvalidAccount(ref cause) => cause,
            DeleteByteMatchSetError::WAFNonEmptyEntity(ref cause) => cause,
            DeleteByteMatchSetError::WAFNonexistentItem(ref cause) => cause,
            DeleteByteMatchSetError::WAFReferencedItem(ref cause) => cause,
            DeleteByteMatchSetError::WAFStaleData(ref cause) => cause,
        }
    }
}
/// Errors returned by DeleteGeoMatchSet
#[derive(Debug, PartialEq)]
pub enum DeleteGeoMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteGeoMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteGeoMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteGeoMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteGeoMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteGeoMatchSetError::WAFNonEmptyEntity(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteGeoMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteGeoMatchSetError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteGeoMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteGeoMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteGeoMatchSetError {
fn description(&self) -> &str {
match *self {
DeleteGeoMatchSetError::WAFInternalError(ref cause) => cause,
DeleteGeoMatchSetError::WAFInvalidAccount(ref cause) => cause,
DeleteGeoMatchSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteGeoMatchSetError::WAFNonexistentItem(ref cause) => cause,
DeleteGeoMatchSetError::WAFReferencedItem(ref cause) => cause,
DeleteGeoMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteIPSet
#[derive(Debug, PartialEq)]
pub enum DeleteIPSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteIPSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteIPSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteIPSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteIPSetError::WAFInvalidAccount(err.msg))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteIPSetError::WAFNonEmptyEntity(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteIPSetError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteIPSetError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteIPSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteIPSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteIPSetError {
fn description(&self) -> &str {
match *self {
DeleteIPSetError::WAFInternalError(ref cause) => cause,
DeleteIPSetError::WAFInvalidAccount(ref cause) => cause,
DeleteIPSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteIPSetError::WAFNonexistentItem(ref cause) => cause,
DeleteIPSetError::WAFReferencedItem(ref cause) => cause,
DeleteIPSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteLoggingConfiguration
#[derive(Debug, PartialEq)]
pub enum DeleteLoggingConfigurationError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteLoggingConfigurationError {
pub fn from_response(
res: BufferedHttpResponse,
) -> RusotoError<DeleteLoggingConfigurationError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteLoggingConfigurationError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(
DeleteLoggingConfigurationError::WAFNonexistentItem(err.msg),
)
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteLoggingConfigurationError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteLoggingConfigurationError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteLoggingConfigurationError {
fn description(&self) -> &str {
match *self {
DeleteLoggingConfigurationError::WAFInternalError(ref cause) => cause,
DeleteLoggingConfigurationError::WAFNonexistentItem(ref cause) => cause,
DeleteLoggingConfigurationError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeletePermissionPolicy
#[derive(Debug, PartialEq)]
pub enum DeletePermissionPolicyError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeletePermissionPolicyError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeletePermissionPolicyError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeletePermissionPolicyError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeletePermissionPolicyError::WAFNonexistentItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeletePermissionPolicyError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeletePermissionPolicyError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeletePermissionPolicyError {
fn description(&self) -> &str {
match *self {
DeletePermissionPolicyError::WAFInternalError(ref cause) => cause,
DeletePermissionPolicyError::WAFNonexistentItem(ref cause) => cause,
DeletePermissionPolicyError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteRateBasedRule
#[derive(Debug, PartialEq)]
pub enum DeleteRateBasedRuleError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteRateBasedRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteRateBasedRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteRateBasedRuleError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteRateBasedRuleError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteRateBasedRuleError::WAFNonEmptyEntity(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteRateBasedRuleError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteRateBasedRuleError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteRateBasedRuleError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteRateBasedRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteRateBasedRuleError {
fn description(&self) -> &str {
match *self {
DeleteRateBasedRuleError::WAFInternalError(ref cause) => cause,
DeleteRateBasedRuleError::WAFInvalidAccount(ref cause) => cause,
DeleteRateBasedRuleError::WAFNonEmptyEntity(ref cause) => cause,
DeleteRateBasedRuleError::WAFNonexistentItem(ref cause) => cause,
DeleteRateBasedRuleError::WAFReferencedItem(ref cause) => cause,
DeleteRateBasedRuleError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteRegexMatchSet
#[derive(Debug, PartialEq)]
pub enum DeleteRegexMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteRegexMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteRegexMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteRegexMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteRegexMatchSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteRegexMatchSetError::WAFNonEmptyEntity(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteRegexMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteRegexMatchSetError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteRegexMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteRegexMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteRegexMatchSetError {
fn description(&self) -> &str {
match *self {
DeleteRegexMatchSetError::WAFInternalError(ref cause) => cause,
DeleteRegexMatchSetError::WAFInvalidAccount(ref cause) => cause,
DeleteRegexMatchSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteRegexMatchSetError::WAFNonexistentItem(ref cause) => cause,
DeleteRegexMatchSetError::WAFReferencedItem(ref cause) => cause,
DeleteRegexMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteRegexPatternSet
#[derive(Debug, PartialEq)]
pub enum DeleteRegexPatternSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteRegexPatternSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteRegexPatternSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteRegexPatternSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteRegexPatternSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteRegexPatternSetError::WAFNonEmptyEntity(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteRegexPatternSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteRegexPatternSetError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteRegexPatternSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteRegexPatternSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteRegexPatternSetError {
fn description(&self) -> &str {
match *self {
DeleteRegexPatternSetError::WAFInternalError(ref cause) => cause,
DeleteRegexPatternSetError::WAFInvalidAccount(ref cause) => cause,
DeleteRegexPatternSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteRegexPatternSetError::WAFNonexistentItem(ref cause) => cause,
DeleteRegexPatternSetError::WAFReferencedItem(ref cause) => cause,
DeleteRegexPatternSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteRule
#[derive(Debug, PartialEq)]
pub enum DeleteRuleError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteRuleError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteRuleError::WAFInvalidAccount(err.msg))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteRuleError::WAFNonEmptyEntity(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteRuleError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteRuleError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteRuleError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteRuleError {
fn description(&self) -> &str {
match *self {
DeleteRuleError::WAFInternalError(ref cause) => cause,
DeleteRuleError::WAFInvalidAccount(ref cause) => cause,
DeleteRuleError::WAFNonEmptyEntity(ref cause) => cause,
DeleteRuleError::WAFNonexistentItem(ref cause) => cause,
DeleteRuleError::WAFReferencedItem(ref cause) => cause,
DeleteRuleError::WAFStaleData(ref cause) => cause,
}
}
}
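// Illustrative sketch (hypothetical helper, not generated code): `WAFStaleData`
// means the supplied change token was already used, so the operation can be retried
// once the caller fetches a fresh token inside `attempt`. Any other error is
// returned immediately; `max_tries` must be at least 1.
fn retry_delete_rule_on_stale_data<T, F>(
    mut attempt: F,
    max_tries: usize,
) -> Result<T, RusotoError<DeleteRuleError>>
where
    F: FnMut() -> Result<T, RusotoError<DeleteRuleError>>,
{
    let mut last_err = None;
    for _ in 0..max_tries {
        match attempt() {
            Ok(value) => return Ok(value),
            Err(RusotoError::Service(DeleteRuleError::WAFStaleData(msg))) => {
                // Loop again; `attempt` is expected to request a new change token.
                last_err = Some(RusotoError::Service(DeleteRuleError::WAFStaleData(msg)));
            }
            Err(other) => return Err(other),
        }
    }
    Err(last_err.expect("max_tries must be at least 1"))
}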
/// Errors returned by DeleteRuleGroup
#[derive(Debug, PartialEq)]
pub enum DeleteRuleGroupError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteRuleGroupError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteRuleGroupError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteRuleGroupError::WAFInternalError(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(DeleteRuleGroupError::WAFInvalidOperation(err.msg))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteRuleGroupError::WAFNonEmptyEntity(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteRuleGroupError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteRuleGroupError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteRuleGroupError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteRuleGroupError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteRuleGroupError {
fn description(&self) -> &str {
match *self {
DeleteRuleGroupError::WAFInternalError(ref cause) => cause,
DeleteRuleGroupError::WAFInvalidOperation(ref cause) => cause,
DeleteRuleGroupError::WAFNonEmptyEntity(ref cause) => cause,
DeleteRuleGroupError::WAFNonexistentItem(ref cause) => cause,
DeleteRuleGroupError::WAFReferencedItem(ref cause) => cause,
DeleteRuleGroupError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteSizeConstraintSet
#[derive(Debug, PartialEq)]
pub enum DeleteSizeConstraintSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteSizeConstraintSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteSizeConstraintSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteSizeConstraintSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteSizeConstraintSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteSizeConstraintSetError::WAFNonEmptyEntity(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteSizeConstraintSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteSizeConstraintSetError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteSizeConstraintSetError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteSizeConstraintSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteSizeConstraintSetError {
fn description(&self) -> &str {
match *self {
DeleteSizeConstraintSetError::WAFInternalError(ref cause) => cause,
DeleteSizeConstraintSetError::WAFInvalidAccount(ref cause) => cause,
DeleteSizeConstraintSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteSizeConstraintSetError::WAFNonexistentItem(ref cause) => cause,
DeleteSizeConstraintSetError::WAFReferencedItem(ref cause) => cause,
DeleteSizeConstraintSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteSqlInjectionMatchSet
#[derive(Debug, PartialEq)]
pub enum DeleteSqlInjectionMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteSqlInjectionMatchSetError {
pub fn from_response(
res: BufferedHttpResponse,
) -> RusotoError<DeleteSqlInjectionMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteSqlInjectionMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(
DeleteSqlInjectionMatchSetError::WAFInvalidAccount(err.msg),
)
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(
DeleteSqlInjectionMatchSetError::WAFNonEmptyEntity(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(
DeleteSqlInjectionMatchSetError::WAFNonexistentItem(err.msg),
)
}
"WAFReferencedItemException" => {
return RusotoError::Service(
DeleteSqlInjectionMatchSetError::WAFReferencedItem(err.msg),
)
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteSqlInjectionMatchSetError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteSqlInjectionMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteSqlInjectionMatchSetError {
fn description(&self) -> &str {
match *self {
DeleteSqlInjectionMatchSetError::WAFInternalError(ref cause) => cause,
DeleteSqlInjectionMatchSetError::WAFInvalidAccount(ref cause) => cause,
DeleteSqlInjectionMatchSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteSqlInjectionMatchSetError::WAFNonexistentItem(ref cause) => cause,
DeleteSqlInjectionMatchSetError::WAFReferencedItem(ref cause) => cause,
DeleteSqlInjectionMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by DeleteWebACL
#[derive(Debug, PartialEq)]
pub enum DeleteWebACLError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteWebACLError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteWebACLError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteWebACLError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteWebACLError::WAFInvalidAccount(err.msg))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteWebACLError::WAFNonEmptyEntity(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteWebACLError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteWebACLError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteWebACLError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteWebACLError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteWebACLError {
fn description(&self) -> &str {
match *self {
DeleteWebACLError::WAFInternalError(ref cause) => cause,
DeleteWebACLError::WAFInvalidAccount(ref cause) => cause,
DeleteWebACLError::WAFNonEmptyEntity(ref cause) => cause,
DeleteWebACLError::WAFNonexistentItem(ref cause) => cause,
DeleteWebACLError::WAFReferencedItem(ref cause) => cause,
DeleteWebACLError::WAFStaleData(ref cause) => cause,
}
}
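// Illustrative sketch (hypothetical names, not generated code): for an idempotent
// teardown, `WAFNonexistentItem` can be treated as "already deleted", while
// `WAFNonEmptyEntity` and `WAFReferencedItem` signal that the web ACL still holds,
// or is referenced by, rules that must be removed before the delete can succeed.
enum DeleteWebAclOutcome {
    Deleted,
    AlreadyGone,
    NeedsDetachFirst,
    Failed(RusotoError<DeleteWebACLError>),
}

fn classify_delete_web_acl(
    result: Result<(), RusotoError<DeleteWebACLError>>,
) -> DeleteWebAclOutcome {
    match result {
        Ok(()) => DeleteWebAclOutcome::Deleted,
        Err(RusotoError::Service(DeleteWebACLError::WAFNonexistentItem(_))) => {
            DeleteWebAclOutcome::AlreadyGone
        }
        Err(RusotoError::Service(DeleteWebACLError::WAFNonEmptyEntity(_)))
        | Err(RusotoError::Service(DeleteWebACLError::WAFReferencedItem(_))) => {
            // Remove the contained/referencing rules, then retry the delete.
            DeleteWebAclOutcome::NeedsDetachFirst
        }
        Err(other) => DeleteWebAclOutcome::Failed(other),
    }
}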
}
/// Errors returned by DeleteXssMatchSet
#[derive(Debug, PartialEq)]
pub enum DeleteXssMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because you tried to delete an object that isn't empty. For example:</p> <ul> <li> <p>You tried to delete a <code>WebACL</code> that still contains one or more <code>Rule</code> objects.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that still contains one or more <code>ByteMatchSet</code> objects or other predicates.</p> </li> <li> <p>You tried to delete a <code>ByteMatchSet</code> that contains one or more <code>ByteMatchTuple</code> objects.</p> </li> <li> <p>You tried to delete an <code>IPSet</code> that references one or more IP addresses.</p> </li> </ul></p>
WAFNonEmptyEntity(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl DeleteXssMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteXssMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(DeleteXssMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(DeleteXssMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFNonEmptyEntityException" => {
return RusotoError::Service(DeleteXssMatchSetError::WAFNonEmptyEntity(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(DeleteXssMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(DeleteXssMatchSetError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(DeleteXssMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for DeleteXssMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteXssMatchSetError {
fn description(&self) -> &str {
match *self {
DeleteXssMatchSetError::WAFInternalError(ref cause) => cause,
DeleteXssMatchSetError::WAFInvalidAccount(ref cause) => cause,
DeleteXssMatchSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteXssMatchSetError::WAFNonexistentItem(ref cause) => cause,
DeleteXssMatchSetError::WAFReferencedItem(ref cause) => cause,
DeleteXssMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by GetByteMatchSet
#[derive(Debug, PartialEq)]
pub enum GetByteMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetByteMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetByteMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetByteMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetByteMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetByteMatchSetError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetByteMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetByteMatchSetError {
fn description(&self) -> &str {
match *self {
GetByteMatchSetError::WAFInternalError(ref cause) => cause,
GetByteMatchSetError::WAFInvalidAccount(ref cause) => cause,
GetByteMatchSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetChangeToken
#[derive(Debug, PartialEq)]
pub enum GetChangeTokenError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
}
impl GetChangeTokenError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetChangeTokenError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetChangeTokenError::WAFInternalError(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetChangeTokenError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetChangeTokenError {
fn description(&self) -> &str {
match *self {
GetChangeTokenError::WAFInternalError(ref cause) => cause,
}
}
}
/// Errors returned by GetChangeTokenStatus
#[derive(Debug, PartialEq)]
pub enum GetChangeTokenStatusError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetChangeTokenStatusError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetChangeTokenStatusError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetChangeTokenStatusError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetChangeTokenStatusError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetChangeTokenStatusError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetChangeTokenStatusError {
fn description(&self) -> &str {
match *self {
GetChangeTokenStatusError::WAFInternalError(ref cause) => cause,
GetChangeTokenStatusError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetGeoMatchSet
#[derive(Debug, PartialEq)]
pub enum GetGeoMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetGeoMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetGeoMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetGeoMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetGeoMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetGeoMatchSetError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetGeoMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetGeoMatchSetError {
fn description(&self) -> &str {
match *self {
GetGeoMatchSetError::WAFInternalError(ref cause) => cause,
GetGeoMatchSetError::WAFInvalidAccount(ref cause) => cause,
GetGeoMatchSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetIPSet
#[derive(Debug, PartialEq)]
pub enum GetIPSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetIPSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetIPSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetIPSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetIPSetError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetIPSetError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetIPSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetIPSetError {
fn description(&self) -> &str {
match *self {
GetIPSetError::WAFInternalError(ref cause) => cause,
GetIPSetError::WAFInvalidAccount(ref cause) => cause,
GetIPSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetLoggingConfiguration
#[derive(Debug, PartialEq)]
pub enum GetLoggingConfigurationError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetLoggingConfigurationError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetLoggingConfigurationError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetLoggingConfigurationError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetLoggingConfigurationError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetLoggingConfigurationError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetLoggingConfigurationError {
fn description(&self) -> &str {
match *self {
GetLoggingConfigurationError::WAFInternalError(ref cause) => cause,
GetLoggingConfigurationError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetPermissionPolicy
#[derive(Debug, PartialEq)]
pub enum GetPermissionPolicyError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetPermissionPolicyError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetPermissionPolicyError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetPermissionPolicyError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetPermissionPolicyError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetPermissionPolicyError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetPermissionPolicyError {
fn description(&self) -> &str {
match *self {
GetPermissionPolicyError::WAFInternalError(ref cause) => cause,
GetPermissionPolicyError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetRateBasedRule
#[derive(Debug, PartialEq)]
pub enum GetRateBasedRuleError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetRateBasedRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetRateBasedRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetRateBasedRuleError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetRateBasedRuleError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetRateBasedRuleError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetRateBasedRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetRateBasedRuleError {
fn description(&self) -> &str {
match *self {
GetRateBasedRuleError::WAFInternalError(ref cause) => cause,
GetRateBasedRuleError::WAFInvalidAccount(ref cause) => cause,
GetRateBasedRuleError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetRateBasedRuleManagedKeys
#[derive(Debug, PartialEq)]
pub enum GetRateBasedRuleManagedKeysError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetRateBasedRuleManagedKeysError {
pub fn from_response(
res: BufferedHttpResponse,
) -> RusotoError<GetRateBasedRuleManagedKeysError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(
GetRateBasedRuleManagedKeysError::WAFInternalError(err.msg),
)
}
"WAFInvalidAccountException" => {
return RusotoError::Service(
GetRateBasedRuleManagedKeysError::WAFInvalidAccount(err.msg),
)
}
"WAFInvalidParameterException" => {
return RusotoError::Service(
GetRateBasedRuleManagedKeysError::WAFInvalidParameter(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(
GetRateBasedRuleManagedKeysError::WAFNonexistentItem(err.msg),
)
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetRateBasedRuleManagedKeysError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetRateBasedRuleManagedKeysError {
fn description(&self) -> &str {
match *self {
GetRateBasedRuleManagedKeysError::WAFInternalError(ref cause) => cause,
GetRateBasedRuleManagedKeysError::WAFInvalidAccount(ref cause) => cause,
GetRateBasedRuleManagedKeysError::WAFInvalidParameter(ref cause) => cause,
GetRateBasedRuleManagedKeysError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetRegexMatchSet
#[derive(Debug, PartialEq)]
pub enum GetRegexMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetRegexMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetRegexMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetRegexMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetRegexMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetRegexMatchSetError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetRegexMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetRegexMatchSetError {
fn description(&self) -> &str {
match *self {
GetRegexMatchSetError::WAFInternalError(ref cause) => cause,
GetRegexMatchSetError::WAFInvalidAccount(ref cause) => cause,
GetRegexMatchSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetRegexPatternSet
#[derive(Debug, PartialEq)]
pub enum GetRegexPatternSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetRegexPatternSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetRegexPatternSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetRegexPatternSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetRegexPatternSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetRegexPatternSetError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetRegexPatternSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetRegexPatternSetError {
fn description(&self) -> &str {
match *self {
GetRegexPatternSetError::WAFInternalError(ref cause) => cause,
GetRegexPatternSetError::WAFInvalidAccount(ref cause) => cause,
GetRegexPatternSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetRule
#[derive(Debug, PartialEq)]
pub enum GetRuleError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetRuleError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetRuleError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetRuleError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetRuleError {
fn description(&self) -> &str {
match *self {
GetRuleError::WAFInternalError(ref cause) => cause,
GetRuleError::WAFInvalidAccount(ref cause) => cause,
GetRuleError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetRuleGroup
#[derive(Debug, PartialEq)]
pub enum GetRuleGroupError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetRuleGroupError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetRuleGroupError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetRuleGroupError::WAFInternalError(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetRuleGroupError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetRuleGroupError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetRuleGroupError {
fn description(&self) -> &str {
match *self {
GetRuleGroupError::WAFInternalError(ref cause) => cause,
GetRuleGroupError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetSampledRequests
#[derive(Debug, PartialEq)]
pub enum GetSampledRequestsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetSampledRequestsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetSampledRequestsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetSampledRequestsError::WAFInternalError(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetSampledRequestsError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetSampledRequestsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetSampledRequestsError {
fn description(&self) -> &str {
match *self {
GetSampledRequestsError::WAFInternalError(ref cause) => cause,
GetSampledRequestsError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetSizeConstraintSet
#[derive(Debug, PartialEq)]
pub enum GetSizeConstraintSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetSizeConstraintSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetSizeConstraintSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetSizeConstraintSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetSizeConstraintSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetSizeConstraintSetError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetSizeConstraintSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetSizeConstraintSetError {
fn description(&self) -> &str {
match *self {
GetSizeConstraintSetError::WAFInternalError(ref cause) => cause,
GetSizeConstraintSetError::WAFInvalidAccount(ref cause) => cause,
GetSizeConstraintSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetSqlInjectionMatchSet
#[derive(Debug, PartialEq)]
pub enum GetSqlInjectionMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetSqlInjectionMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetSqlInjectionMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetSqlInjectionMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetSqlInjectionMatchSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetSqlInjectionMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetSqlInjectionMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetSqlInjectionMatchSetError {
fn description(&self) -> &str {
match *self {
GetSqlInjectionMatchSetError::WAFInternalError(ref cause) => cause,
GetSqlInjectionMatchSetError::WAFInvalidAccount(ref cause) => cause,
GetSqlInjectionMatchSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetWebACL
#[derive(Debug, PartialEq)]
pub enum GetWebACLError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetWebACLError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetWebACLError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetWebACLError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetWebACLError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetWebACLError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetWebACLError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetWebACLError {
fn description(&self) -> &str {
match *self {
GetWebACLError::WAFInternalError(ref cause) => cause,
GetWebACLError::WAFInvalidAccount(ref cause) => cause,
GetWebACLError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by GetXssMatchSet
#[derive(Debug, PartialEq)]
pub enum GetXssMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl GetXssMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GetXssMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(GetXssMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(GetXssMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(GetXssMatchSetError::WAFNonexistentItem(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for GetXssMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetXssMatchSetError {
fn description(&self) -> &str {
match *self {
GetXssMatchSetError::WAFInternalError(ref cause) => cause,
GetXssMatchSetError::WAFInvalidAccount(ref cause) => cause,
GetXssMatchSetError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by ListActivatedRulesInRuleGroup
#[derive(Debug, PartialEq)]
pub enum ListActivatedRulesInRuleGroupError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl ListActivatedRulesInRuleGroupError {
pub fn from_response(
res: BufferedHttpResponse,
) -> RusotoError<ListActivatedRulesInRuleGroupError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(
ListActivatedRulesInRuleGroupError::WAFInternalError(err.msg),
)
}
"WAFInvalidParameterException" => {
return RusotoError::Service(
ListActivatedRulesInRuleGroupError::WAFInvalidParameter(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(
ListActivatedRulesInRuleGroupError::WAFNonexistentItem(err.msg),
)
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListActivatedRulesInRuleGroupError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListActivatedRulesInRuleGroupError {
fn description(&self) -> &str {
match *self {
ListActivatedRulesInRuleGroupError::WAFInternalError(ref cause) => cause,
ListActivatedRulesInRuleGroupError::WAFInvalidParameter(ref cause) => cause,
ListActivatedRulesInRuleGroupError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by ListByteMatchSets
#[derive(Debug, PartialEq)]
pub enum ListByteMatchSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListByteMatchSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListByteMatchSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListByteMatchSetsError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListByteMatchSetsError::WAFInvalidAccount(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListByteMatchSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListByteMatchSetsError {
fn description(&self) -> &str {
match *self {
ListByteMatchSetsError::WAFInternalError(ref cause) => cause,
ListByteMatchSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListGeoMatchSets
#[derive(Debug, PartialEq)]
pub enum ListGeoMatchSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListGeoMatchSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListGeoMatchSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListGeoMatchSetsError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListGeoMatchSetsError::WAFInvalidAccount(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListGeoMatchSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListGeoMatchSetsError {
fn description(&self) -> &str {
match *self {
ListGeoMatchSetsError::WAFInternalError(ref cause) => cause,
ListGeoMatchSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListIPSets
#[derive(Debug, PartialEq)]
pub enum ListIPSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListIPSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListIPSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListIPSetsError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListIPSetsError::WAFInvalidAccount(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListIPSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListIPSetsError {
fn description(&self) -> &str {
match *self {
ListIPSetsError::WAFInternalError(ref cause) => cause,
ListIPSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListLoggingConfigurations
#[derive(Debug, PartialEq)]
pub enum ListLoggingConfigurationsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl ListLoggingConfigurationsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListLoggingConfigurationsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListLoggingConfigurationsError::WAFInternalError(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(
ListLoggingConfigurationsError::WAFInvalidParameter(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(
ListLoggingConfigurationsError::WAFNonexistentItem(err.msg),
)
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListLoggingConfigurationsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListLoggingConfigurationsError {
fn description(&self) -> &str {
match *self {
ListLoggingConfigurationsError::WAFInternalError(ref cause) => cause,
ListLoggingConfigurationsError::WAFInvalidParameter(ref cause) => cause,
ListLoggingConfigurationsError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by ListRateBasedRules
#[derive(Debug, PartialEq)]
pub enum ListRateBasedRulesError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListRateBasedRulesError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListRateBasedRulesError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListRateBasedRulesError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListRateBasedRulesError::WAFInvalidAccount(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListRateBasedRulesError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListRateBasedRulesError {
fn description(&self) -> &str {
match *self {
ListRateBasedRulesError::WAFInternalError(ref cause) => cause,
ListRateBasedRulesError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListRegexMatchSets
#[derive(Debug, PartialEq)]
pub enum ListRegexMatchSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListRegexMatchSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListRegexMatchSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListRegexMatchSetsError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListRegexMatchSetsError::WAFInvalidAccount(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListRegexMatchSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListRegexMatchSetsError {
fn description(&self) -> &str {
match *self {
ListRegexMatchSetsError::WAFInternalError(ref cause) => cause,
ListRegexMatchSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListRegexPatternSets
#[derive(Debug, PartialEq)]
pub enum ListRegexPatternSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListRegexPatternSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListRegexPatternSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListRegexPatternSetsError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListRegexPatternSetsError::WAFInvalidAccount(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListRegexPatternSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListRegexPatternSetsError {
fn description(&self) -> &str {
match *self {
ListRegexPatternSetsError::WAFInternalError(ref cause) => cause,
ListRegexPatternSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListRuleGroups
#[derive(Debug, PartialEq)]
pub enum ListRuleGroupsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
}
impl ListRuleGroupsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListRuleGroupsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListRuleGroupsError::WAFInternalError(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListRuleGroupsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListRuleGroupsError {
fn description(&self) -> &str {
match *self {
ListRuleGroupsError::WAFInternalError(ref cause) => cause,
}
}
}
/// Errors returned by ListRules
#[derive(Debug, PartialEq)]
pub enum ListRulesError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListRulesError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListRulesError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListRulesError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListRulesError::WAFInvalidAccount(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListRulesError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListRulesError {
fn description(&self) -> &str {
match *self {
ListRulesError::WAFInternalError(ref cause) => cause,
ListRulesError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListSizeConstraintSets
#[derive(Debug, PartialEq)]
pub enum ListSizeConstraintSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListSizeConstraintSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListSizeConstraintSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListSizeConstraintSetsError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListSizeConstraintSetsError::WAFInvalidAccount(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListSizeConstraintSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListSizeConstraintSetsError {
fn description(&self) -> &str {
match *self {
ListSizeConstraintSetsError::WAFInternalError(ref cause) => cause,
ListSizeConstraintSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListSqlInjectionMatchSets
#[derive(Debug, PartialEq)]
pub enum ListSqlInjectionMatchSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListSqlInjectionMatchSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListSqlInjectionMatchSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListSqlInjectionMatchSetsError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListSqlInjectionMatchSetsError::WAFInvalidAccount(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListSqlInjectionMatchSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListSqlInjectionMatchSetsError {
fn description(&self) -> &str {
match *self {
ListSqlInjectionMatchSetsError::WAFInternalError(ref cause) => cause,
ListSqlInjectionMatchSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListSubscribedRuleGroups
#[derive(Debug, PartialEq)]
pub enum ListSubscribedRuleGroupsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
}
impl ListSubscribedRuleGroupsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListSubscribedRuleGroupsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListSubscribedRuleGroupsError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(ListSubscribedRuleGroupsError::WAFNonexistentItem(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListSubscribedRuleGroupsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListSubscribedRuleGroupsError {
fn description(&self) -> &str {
match *self {
ListSubscribedRuleGroupsError::WAFInternalError(ref cause) => cause,
ListSubscribedRuleGroupsError::WAFNonexistentItem(ref cause) => cause,
}
}
}
/// Errors returned by ListWebACLs
#[derive(Debug, PartialEq)]
pub enum ListWebACLsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListWebACLsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListWebACLsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListWebACLsError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListWebACLsError::WAFInvalidAccount(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListWebACLsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListWebACLsError {
fn description(&self) -> &str {
match *self {
ListWebACLsError::WAFInternalError(ref cause) => cause,
ListWebACLsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by ListXssMatchSets
#[derive(Debug, PartialEq)]
pub enum ListXssMatchSetsError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
}
impl ListXssMatchSetsError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListXssMatchSetsError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(ListXssMatchSetsError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(ListXssMatchSetsError::WAFInvalidAccount(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for ListXssMatchSetsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListXssMatchSetsError {
fn description(&self) -> &str {
match *self {
ListXssMatchSetsError::WAFInternalError(ref cause) => cause,
ListXssMatchSetsError::WAFInvalidAccount(ref cause) => cause,
}
}
}
/// Errors returned by PutLoggingConfiguration
#[derive(Debug, PartialEq)]
pub enum PutLoggingConfigurationError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>AWS WAF is not able to access the service linked role. This can be caused by a previous <code>PutLoggingConfiguration</code> request, which can lock the service linked role for about 20 seconds. Please try your request again. The service linked role can also be locked by a previous <code>DeleteServiceLinkedRole</code> request, which can lock the role for 15 minutes or more. If you recently made a <code>DeleteServiceLinkedRole</code>, wait at least 15 minutes and try the request again. If you receive this same exception again, you will have to wait additional time until the role is unlocked.</p>
WAFServiceLinkedRoleError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl PutLoggingConfigurationError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<PutLoggingConfigurationError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(PutLoggingConfigurationError::WAFInternalError(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(PutLoggingConfigurationError::WAFNonexistentItem(
err.msg,
))
}
"WAFServiceLinkedRoleErrorException" => {
return RusotoError::Service(
PutLoggingConfigurationError::WAFServiceLinkedRoleError(err.msg),
)
}
"WAFStaleDataException" => {
return RusotoError::Service(PutLoggingConfigurationError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for PutLoggingConfigurationError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for PutLoggingConfigurationError {
fn description(&self) -> &str {
match *self {
PutLoggingConfigurationError::WAFInternalError(ref cause) => cause,
PutLoggingConfigurationError::WAFNonexistentItem(ref cause) => cause,
PutLoggingConfigurationError::WAFServiceLinkedRoleError(ref cause) => cause,
PutLoggingConfigurationError::WAFStaleData(ref cause) => cause,
}
}
}
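// --- Illustrative usage sketch (not part of the generated code; the helper name is an assumption) ---
// The WAFServiceLinkedRoleErrorException doc above notes the service linked role can be
// locked for a short period and the request should simply be retried later; stale change
// tokens and internal errors are likewise transient. A caller might separate those from
// the permanent failure mode like this.
#[allow(dead_code)]
fn put_logging_configuration_should_retry(err: &PutLoggingConfigurationError) -> bool {
    match err {
        PutLoggingConfigurationError::WAFServiceLinkedRoleError(_)
        | PutLoggingConfigurationError::WAFStaleData(_)
        | PutLoggingConfigurationError::WAFInternalError(_) => true,
        PutLoggingConfigurationError::WAFNonexistentItem(_) => false,
    }
}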
/// Errors returned by PutPermissionPolicy
#[derive(Debug, PartialEq)]
pub enum PutPermissionPolicyError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because the specified policy is not in the proper format. </p> <p>The policy is subject to the following restrictions:</p> <ul> <li> <p>You can attach only one policy with each <code>PutPermissionPolicy</code> request.</p> </li> <li> <p>The policy must include an <code>Effect</code>, <code>Action</code> and <code>Principal</code>. </p> </li> <li> <p> <code>Effect</code> must specify <code>Allow</code>.</p> </li> <li> <p>The <code>Action</code> in the policy must be <code>waf:UpdateWebACL</code>, <code>waf-regional:UpdateWebACL</code>, <code>waf:GetRuleGroup</code> and <code>waf-regional:GetRuleGroup</code> . Any extra or wildcard actions in the policy will be rejected.</p> </li> <li> <p>The policy cannot include a <code>Resource</code> parameter.</p> </li> <li> <p>The ARN in the request must be a valid WAF RuleGroup ARN and the RuleGroup must exist in the same region.</p> </li> <li> <p>The user making the request must be the owner of the RuleGroup.</p> </li> <li> <p>Your policy must be composed using IAM Policy version 2012-10-17.</p> </li> </ul></p>
WAFInvalidPermissionPolicy(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl PutPermissionPolicyError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<PutPermissionPolicyError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(PutPermissionPolicyError::WAFInternalError(
err.msg,
))
}
"WAFInvalidPermissionPolicyException" => {
return RusotoError::Service(
PutPermissionPolicyError::WAFInvalidPermissionPolicy(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(PutPermissionPolicyError::WAFNonexistentItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(PutPermissionPolicyError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for PutPermissionPolicyError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for PutPermissionPolicyError {
fn description(&self) -> &str {
match *self {
PutPermissionPolicyError::WAFInternalError(ref cause) => cause,
PutPermissionPolicyError::WAFInvalidPermissionPolicy(ref cause) => cause,
PutPermissionPolicyError::WAFNonexistentItem(ref cause) => cause,
PutPermissionPolicyError::WAFStaleData(ref cause) => cause,
}
}
}
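// Illustrative, non-generated sketch: the `WAFInvalidPermissionPolicy` doc
// comment above lists the restrictions a permission policy must satisfy
// (IAM policy version 2012-10-17, `Effect` of `Allow`, a `Principal`, only
// the listed WAF actions, and no `Resource` parameter). The account id in
// the principal below is a placeholder; which of the `waf:`/`waf-regional:`
// actions apply depends on the endpoint in use, so treat this as an
// assumption rather than a canonical policy.
#[allow(dead_code)]
fn example_rule_group_permission_policy() -> String {
    r#"{
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": { "AWS": "arn:aws:iam::111122223333:root" },
            "Action": [
                "waf:UpdateWebACL",
                "waf-regional:UpdateWebACL",
                "waf:GetRuleGroup",
                "waf-regional:GetRuleGroup"
            ]
        }]
    }"#
    .to_string()
}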
/// Errors returned by UpdateByteMatchSet
#[derive(Debug, PartialEq)]
pub enum UpdateByteMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateByteMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateByteMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFInvalidOperation(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateByteMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateByteMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateByteMatchSetError {
fn description(&self) -> &str {
match *self {
UpdateByteMatchSetError::WAFInternalError(ref cause) => cause,
UpdateByteMatchSetError::WAFInvalidAccount(ref cause) => cause,
UpdateByteMatchSetError::WAFInvalidOperation(ref cause) => cause,
UpdateByteMatchSetError::WAFInvalidParameter(ref cause) => cause,
UpdateByteMatchSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateByteMatchSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateByteMatchSetError::WAFNonexistentItem(ref cause) => cause,
UpdateByteMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateGeoMatchSet
#[derive(Debug, PartialEq)]
pub enum UpdateGeoMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateGeoMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateGeoMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFInvalidOperation(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFLimitsExceeded(err.msg))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateGeoMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateGeoMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateGeoMatchSetError {
fn description(&self) -> &str {
match *self {
UpdateGeoMatchSetError::WAFInternalError(ref cause) => cause,
UpdateGeoMatchSetError::WAFInvalidAccount(ref cause) => cause,
UpdateGeoMatchSetError::WAFInvalidOperation(ref cause) => cause,
UpdateGeoMatchSetError::WAFInvalidParameter(ref cause) => cause,
UpdateGeoMatchSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateGeoMatchSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateGeoMatchSetError::WAFNonexistentItem(ref cause) => cause,
UpdateGeoMatchSetError::WAFReferencedItem(ref cause) => cause,
UpdateGeoMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateIPSet
#[derive(Debug, PartialEq)]
pub enum UpdateIPSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateIPSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateIPSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateIPSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateIPSetError::WAFInvalidAccount(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateIPSetError::WAFInvalidOperation(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateIPSetError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateIPSetError::WAFLimitsExceeded(err.msg))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateIPSetError::WAFNonexistentContainer(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateIPSetError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(UpdateIPSetError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateIPSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateIPSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateIPSetError {
fn description(&self) -> &str {
match *self {
UpdateIPSetError::WAFInternalError(ref cause) => cause,
UpdateIPSetError::WAFInvalidAccount(ref cause) => cause,
UpdateIPSetError::WAFInvalidOperation(ref cause) => cause,
UpdateIPSetError::WAFInvalidParameter(ref cause) => cause,
UpdateIPSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateIPSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateIPSetError::WAFNonexistentItem(ref cause) => cause,
UpdateIPSetError::WAFReferencedItem(ref cause) => cause,
UpdateIPSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateRateBasedRule
#[derive(Debug, PartialEq)]
pub enum UpdateRateBasedRuleError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateRateBasedRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateRateBasedRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFInvalidOperation(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFLimitsExceeded(
err.msg,
))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateRateBasedRuleError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateRateBasedRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateRateBasedRuleError {
fn description(&self) -> &str {
match *self {
UpdateRateBasedRuleError::WAFInternalError(ref cause) => cause,
UpdateRateBasedRuleError::WAFInvalidAccount(ref cause) => cause,
UpdateRateBasedRuleError::WAFInvalidOperation(ref cause) => cause,
UpdateRateBasedRuleError::WAFInvalidParameter(ref cause) => cause,
UpdateRateBasedRuleError::WAFLimitsExceeded(ref cause) => cause,
UpdateRateBasedRuleError::WAFNonexistentContainer(ref cause) => cause,
UpdateRateBasedRuleError::WAFNonexistentItem(ref cause) => cause,
UpdateRateBasedRuleError::WAFReferencedItem(ref cause) => cause,
UpdateRateBasedRuleError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateRegexMatchSet
#[derive(Debug, PartialEq)]
pub enum UpdateRegexMatchSetError {
/// <p>The name specified is invalid.</p>
WAFDisallowedName(String),
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateRegexMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateRegexMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFDisallowedNameException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFDisallowedName(
err.msg,
))
}
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFInvalidOperation(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateRegexMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateRegexMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateRegexMatchSetError {
fn description(&self) -> &str {
match *self {
UpdateRegexMatchSetError::WAFDisallowedName(ref cause) => cause,
UpdateRegexMatchSetError::WAFInternalError(ref cause) => cause,
UpdateRegexMatchSetError::WAFInvalidAccount(ref cause) => cause,
UpdateRegexMatchSetError::WAFInvalidOperation(ref cause) => cause,
UpdateRegexMatchSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateRegexMatchSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateRegexMatchSetError::WAFNonexistentItem(ref cause) => cause,
UpdateRegexMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateRegexPatternSet
#[derive(Debug, PartialEq)]
pub enum UpdateRegexPatternSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p>The regular expression (regex) you specified in <code>RegexPatternString</code> is invalid.</p>
WAFInvalidRegexPattern(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateRegexPatternSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateRegexPatternSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateRegexPatternSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateRegexPatternSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateRegexPatternSetError::WAFInvalidOperation(
err.msg,
))
}
"WAFInvalidRegexPatternException" => {
return RusotoError::Service(
UpdateRegexPatternSetError::WAFInvalidRegexPattern(err.msg),
)
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateRegexPatternSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(
UpdateRegexPatternSetError::WAFNonexistentContainer(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateRegexPatternSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateRegexPatternSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateRegexPatternSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateRegexPatternSetError {
fn description(&self) -> &str {
match *self {
UpdateRegexPatternSetError::WAFInternalError(ref cause) => cause,
UpdateRegexPatternSetError::WAFInvalidAccount(ref cause) => cause,
UpdateRegexPatternSetError::WAFInvalidOperation(ref cause) => cause,
UpdateRegexPatternSetError::WAFInvalidRegexPattern(ref cause) => cause,
UpdateRegexPatternSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateRegexPatternSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateRegexPatternSetError::WAFNonexistentItem(ref cause) => cause,
UpdateRegexPatternSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateRule
#[derive(Debug, PartialEq)]
pub enum UpdateRuleError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateRuleError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateRuleError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateRuleError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateRuleError::WAFInvalidAccount(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateRuleError::WAFInvalidOperation(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateRuleError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateRuleError::WAFLimitsExceeded(err.msg))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateRuleError::WAFNonexistentContainer(err.msg))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateRuleError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(UpdateRuleError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateRuleError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateRuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateRuleError {
fn description(&self) -> &str {
match *self {
UpdateRuleError::WAFInternalError(ref cause) => cause,
UpdateRuleError::WAFInvalidAccount(ref cause) => cause,
UpdateRuleError::WAFInvalidOperation(ref cause) => cause,
UpdateRuleError::WAFInvalidParameter(ref cause) => cause,
UpdateRuleError::WAFLimitsExceeded(ref cause) => cause,
UpdateRuleError::WAFNonexistentContainer(ref cause) => cause,
UpdateRuleError::WAFNonexistentItem(ref cause) => cause,
UpdateRuleError::WAFReferencedItem(ref cause) => cause,
UpdateRuleError::WAFStaleData(ref cause) => cause,
}
}
}
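// Illustrative, non-generated sketch: a caller-side summary of the variants
// above. `WAFStaleData` means the change token was already used and the call
// can be retried with a fresh token, while `WAFReferencedItem` means an
// object involved in the update is still in use and must be detached first.
// The wording of the messages is an assumption, not text defined by this
// crate.
#[allow(dead_code)]
fn describe_update_rule_error(err: &UpdateRuleError) -> &'static str {
    match err {
        UpdateRuleError::WAFStaleData(_) => "change token already used; fetch a new one and retry",
        UpdateRuleError::WAFReferencedItem(_) => "a referenced object is still in use; detach it first",
        UpdateRuleError::WAFNonexistentItem(_) | UpdateRuleError::WAFNonexistentContainer(_) => {
            "a referenced object or container does not exist"
        }
        UpdateRuleError::WAFInternalError(_) => "transient service problem; retry the request",
        _ => "request rejected; inspect the error message for details",
    }
}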
/// Errors returned by UpdateRuleGroup
#[derive(Debug, PartialEq)]
pub enum UpdateRuleGroupError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateRuleGroupError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateRuleGroupError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFInternalError(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFInvalidOperation(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFLimitsExceeded(err.msg))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFNonexistentItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateRuleGroupError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateRuleGroupError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateRuleGroupError {
fn description(&self) -> &str {
match *self {
UpdateRuleGroupError::WAFInternalError(ref cause) => cause,
UpdateRuleGroupError::WAFInvalidOperation(ref cause) => cause,
UpdateRuleGroupError::WAFInvalidParameter(ref cause) => cause,
UpdateRuleGroupError::WAFLimitsExceeded(ref cause) => cause,
UpdateRuleGroupError::WAFNonexistentContainer(ref cause) => cause,
UpdateRuleGroupError::WAFNonexistentItem(ref cause) => cause,
UpdateRuleGroupError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateSizeConstraintSet
#[derive(Debug, PartialEq)]
pub enum UpdateSizeConstraintSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateSizeConstraintSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateSizeConstraintSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFInvalidAccount(
err.msg,
))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFInvalidOperation(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFLimitsExceeded(
err.msg,
))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(
UpdateSizeConstraintSetError::WAFNonexistentContainer(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFReferencedItemException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFReferencedItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateSizeConstraintSetError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateSizeConstraintSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateSizeConstraintSetError {
fn description(&self) -> &str {
match *self {
UpdateSizeConstraintSetError::WAFInternalError(ref cause) => cause,
UpdateSizeConstraintSetError::WAFInvalidAccount(ref cause) => cause,
UpdateSizeConstraintSetError::WAFInvalidOperation(ref cause) => cause,
UpdateSizeConstraintSetError::WAFInvalidParameter(ref cause) => cause,
UpdateSizeConstraintSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateSizeConstraintSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateSizeConstraintSetError::WAFNonexistentItem(ref cause) => cause,
UpdateSizeConstraintSetError::WAFReferencedItem(ref cause) => cause,
UpdateSizeConstraintSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateSqlInjectionMatchSet
#[derive(Debug, PartialEq)]
pub enum UpdateSqlInjectionMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateSqlInjectionMatchSetError {
pub fn from_response(
res: BufferedHttpResponse,
) -> RusotoError<UpdateSqlInjectionMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateSqlInjectionMatchSetError::WAFInternalError(
err.msg,
))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(
UpdateSqlInjectionMatchSetError::WAFInvalidAccount(err.msg),
)
}
"WAFInvalidOperationException" => {
return RusotoError::Service(
UpdateSqlInjectionMatchSetError::WAFInvalidOperation(err.msg),
)
}
"WAFInvalidParameterException" => {
return RusotoError::Service(
UpdateSqlInjectionMatchSetError::WAFInvalidParameter(err.msg),
)
}
"WAFLimitsExceededException" => {
return RusotoError::Service(
UpdateSqlInjectionMatchSetError::WAFLimitsExceeded(err.msg),
)
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(
UpdateSqlInjectionMatchSetError::WAFNonexistentContainer(err.msg),
)
}
"WAFNonexistentItemException" => {
return RusotoError::Service(
UpdateSqlInjectionMatchSetError::WAFNonexistentItem(err.msg),
)
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateSqlInjectionMatchSetError::WAFStaleData(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateSqlInjectionMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateSqlInjectionMatchSetError {
fn description(&self) -> &str {
match *self {
UpdateSqlInjectionMatchSetError::WAFInternalError(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFInvalidAccount(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFInvalidOperation(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFInvalidParameter(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFNonexistentItem(ref cause) => cause,
UpdateSqlInjectionMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Errors returned by UpdateWebACL
#[derive(Debug, PartialEq)]
pub enum UpdateWebACLError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p><p>The operation failed because you tried to delete an object that is still in use. For example:</p> <ul> <li> <p>You tried to delete a <code>ByteMatchSet</code> that is still referenced by a <code>Rule</code>.</p> </li> <li> <p>You tried to delete a <code>Rule</code> that is still referenced by a <code>WebACL</code>.</p> </li> </ul></p>
WAFReferencedItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
/// <p>The specified subscription does not exist.</p>
WAFSubscriptionNotFound(String),
}
impl UpdateWebACLError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateWebACLError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateWebACLError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateWebACLError::WAFInvalidAccount(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateWebACLError::WAFInvalidOperation(err.msg))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateWebACLError::WAFInvalidParameter(err.msg))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateWebACLError::WAFLimitsExceeded(err.msg))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateWebACLError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateWebACLError::WAFNonexistentItem(err.msg))
}
"WAFReferencedItemException" => {
return RusotoError::Service(UpdateWebACLError::WAFReferencedItem(err.msg))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateWebACLError::WAFStaleData(err.msg))
}
"WAFSubscriptionNotFoundException" => {
return RusotoError::Service(UpdateWebACLError::WAFSubscriptionNotFound(
err.msg,
))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateWebACLError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateWebACLError {
fn description(&self) -> &str {
match *self {
UpdateWebACLError::WAFInternalError(ref cause) => cause,
UpdateWebACLError::WAFInvalidAccount(ref cause) => cause,
UpdateWebACLError::WAFInvalidOperation(ref cause) => cause,
UpdateWebACLError::WAFInvalidParameter(ref cause) => cause,
UpdateWebACLError::WAFLimitsExceeded(ref cause) => cause,
UpdateWebACLError::WAFNonexistentContainer(ref cause) => cause,
UpdateWebACLError::WAFNonexistentItem(ref cause) => cause,
UpdateWebACLError::WAFReferencedItem(ref cause) => cause,
UpdateWebACLError::WAFStaleData(ref cause) => cause,
UpdateWebACLError::WAFSubscriptionNotFound(ref cause) => cause,
}
}
}
/// Errors returned by UpdateXssMatchSet
#[derive(Debug, PartialEq)]
pub enum UpdateXssMatchSetError {
/// <p>The operation failed because of a system problem, even though the request was valid. Retry your request.</p>
WAFInternalError(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using an invalid account identifier.</p>
WAFInvalidAccount(String),
/// <p><p>The operation failed because there was nothing to do. For example:</p> <ul> <li> <p>You tried to remove a <code>Rule</code> from a <code>WebACL</code>, but the <code>Rule</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to remove an IP address from an <code>IPSet</code>, but the IP address isn't in the specified <code>IPSet</code>.</p> </li> <li> <p>You tried to remove a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> isn't in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>Rule</code> to a <code>WebACL</code>, but the <code>Rule</code> already exists in the specified <code>WebACL</code>.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to a <code>ByteMatchSet</code>, but the <code>ByteMatchTuple</code> already exists in the specified <code>WebACL</code>.</p> </li> </ul></p>
WAFInvalidOperation(String),
/// <p><p>The operation failed because AWS WAF didn't recognize a parameter in the request. For example:</p> <ul> <li> <p>You specified an invalid parameter name.</p> </li> <li> <p>You specified an invalid value.</p> </li> <li> <p>You tried to update an object (<code>ByteMatchSet</code>, <code>IPSet</code>, <code>Rule</code>, or <code>WebACL</code>) using an action other than <code>INSERT</code> or <code>DELETE</code>.</p> </li> <li> <p>You tried to create a <code>WebACL</code> with a <code>DefaultAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to create a <code>RateBasedRule</code> with a <code>RateKey</code> value other than <code>IP</code>.</p> </li> <li> <p>You tried to update a <code>WebACL</code> with a <code>WafAction</code> <code>Type</code> other than <code>ALLOW</code>, <code>BLOCK</code>, or <code>COUNT</code>.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>FieldToMatch</code> <code>Type</code> other than HEADER, METHOD, QUERY_STRING, URI, or BODY.</p> </li> <li> <p>You tried to update a <code>ByteMatchSet</code> with a <code>Field</code> of <code>HEADER</code> but no value for <code>Data</code>.</p> </li> <li> <p>Your request references an ARN that is malformed, or corresponds to a resource with which a web ACL cannot be associated.</p> </li> </ul></p>
WAFInvalidParameter(String),
/// <p>The operation exceeds a resource limit, for example, the maximum number of <code>WebACL</code> objects that you can create for an AWS account. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/limits.html">Limits</a> in the <i>AWS WAF Developer Guide</i>.</p>
WAFLimitsExceeded(String),
/// <p><p>The operation failed because you tried to add an object to or delete an object from another object that doesn't exist. For example:</p> <ul> <li> <p>You tried to add a <code>Rule</code> to or delete a <code>Rule</code> from a <code>WebACL</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchSet</code> to or delete a <code>ByteMatchSet</code> from a <code>Rule</code> that doesn't exist.</p> </li> <li> <p>You tried to add an IP address to or delete an IP address from an <code>IPSet</code> that doesn't exist.</p> </li> <li> <p>You tried to add a <code>ByteMatchTuple</code> to or delete a <code>ByteMatchTuple</code> from a <code>ByteMatchSet</code> that doesn't exist.</p> </li> </ul></p>
WAFNonexistentContainer(String),
/// <p>The operation failed because the referenced object doesn't exist.</p>
WAFNonexistentItem(String),
/// <p>The operation failed because you tried to create, update, or delete an object by using a change token that has already been used.</p>
WAFStaleData(String),
}
impl UpdateXssMatchSetError {
pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateXssMatchSetError> {
if let Some(err) = proto::json::Error::parse(&res) {
match err.typ.as_str() {
"WAFInternalErrorException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFInternalError(err.msg))
}
"WAFInvalidAccountException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFInvalidAccount(err.msg))
}
"WAFInvalidOperationException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFInvalidOperation(
err.msg,
))
}
"WAFInvalidParameterException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFInvalidParameter(
err.msg,
))
}
"WAFLimitsExceededException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFLimitsExceeded(err.msg))
}
"WAFNonexistentContainerException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFNonexistentContainer(
err.msg,
))
}
"WAFNonexistentItemException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFNonexistentItem(
err.msg,
))
}
"WAFStaleDataException" => {
return RusotoError::Service(UpdateXssMatchSetError::WAFStaleData(err.msg))
}
"ValidationException" => return RusotoError::Validation(err.msg),
_ => {}
}
}
return RusotoError::Unknown(res);
}
}
impl fmt::Display for UpdateXssMatchSetError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateXssMatchSetError {
fn description(&self) -> &str {
match *self {
UpdateXssMatchSetError::WAFInternalError(ref cause) => cause,
UpdateXssMatchSetError::WAFInvalidAccount(ref cause) => cause,
UpdateXssMatchSetError::WAFInvalidOperation(ref cause) => cause,
UpdateXssMatchSetError::WAFInvalidParameter(ref cause) => cause,
UpdateXssMatchSetError::WAFLimitsExceeded(ref cause) => cause,
UpdateXssMatchSetError::WAFNonexistentContainer(ref cause) => cause,
UpdateXssMatchSetError::WAFNonexistentItem(ref cause) => cause,
UpdateXssMatchSetError::WAFStaleData(ref cause) => cause,
}
}
}
/// Trait representing the capabilities of the WAF API. WAF clients implement this trait.
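///
/// A minimal, hand-written usage sketch (not generated): it assumes the crate's
/// `WafClient` and the blocking `.sync()` adapter on `RusotoFuture`, and the request
/// field names are taken from the generated shapes, so treat them as illustrative.
///
/// ```ignore
/// use rusoto_core::Region;
/// use rusoto_waf::{CreateIPSetRequest, Waf, WafClient};
///
/// // WAF classic for CloudFront is a global service addressed through us-east-1.
/// let client = WafClient::new(Region::UsEast1);
///
/// // Every create/update/delete call needs a fresh change token.
/// let token = client
///     .get_change_token()
///     .sync()
///     .expect("GetChangeToken failed")
///     .change_token
///     .expect("response carried no change token");
///
/// let created = client
///     .create_ip_set(CreateIPSetRequest {
///         change_token: token,
///         name: "blocked-addresses".to_string(),
///     })
///     .sync()
///     .expect("CreateIPSet failed");
/// println!("created IPSet: {:?}", created.ip_set);
/// ```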
pub trait Waf {
/// <p>Creates a <code>ByteMatchSet</code>. You then use <a>UpdateByteMatchSet</a> to identify the part of a web request that you want AWS WAF to inspect, such as the values of the <code>User-Agent</code> header or the query string. For example, you can create a <code>ByteMatchSet</code> that matches any requests with <code>User-Agent</code> headers that contain the string <code>BadBot</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>ByteMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateByteMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateByteMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateByteMatchSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateByteMatchSet</a> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
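    ///
    /// A hand-written sketch of the create-then-populate workflow described above; `client`
    /// is a `WafClient` and the field names follow the generated request shape, so treat
    /// them as illustrative:
    ///
    /// ```ignore
    /// let token = client
    ///     .get_change_token()
    ///     .sync()
    ///     .expect("GetChangeToken failed")
    ///     .change_token
    ///     .unwrap();
    /// let set = client
    ///     .create_byte_match_set(CreateByteMatchSetRequest {
    ///         change_token: token,
    ///         name: "bad-bot-user-agent".to_string(),
    ///     })
    ///     .sync()
    ///     .expect("CreateByteMatchSet failed");
    /// // A follow-up UpdateByteMatchSet call (with a fresh change token) adds the
    /// // ByteMatchTuple filters that this set should match.
    /// ```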
fn create_byte_match_set(
&self,
input: CreateByteMatchSetRequest,
) -> RusotoFuture<CreateByteMatchSetResponse, CreateByteMatchSetError>;
/// <p>Creates an <a>GeoMatchSet</a>, which you use to specify which web requests you want to allow or block based on the country that the requests originate from. For example, if you're receiving a lot of requests from one or more countries and you want to block the requests, you can create an <code>GeoMatchSet</code> that contains those countries and then configure AWS WAF to block the requests. </p> <p>To create and configure a <code>GeoMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateGeoMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateGeoMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateGeoMatchSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateGeoMatchSetSet</code> request to specify the countries that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_geo_match_set(
&self,
input: CreateGeoMatchSetRequest,
) -> RusotoFuture<CreateGeoMatchSetResponse, CreateGeoMatchSetError>;
/// <p>Creates an <a>IPSet</a>, which you use to specify which web requests that you want to allow or block based on the IP addresses that the requests originate from. For example, if you're receiving a lot of requests from one or more individual IP addresses or one or more ranges of IP addresses and you want to block the requests, you can create an <code>IPSet</code> that contains those IP addresses and then configure AWS WAF to block the requests. </p> <p>To create and configure an <code>IPSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateIPSet</code> request.</p> </li> <li> <p>Submit a <code>CreateIPSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateIPSet</code> request to specify the IP addresses that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_ip_set(
&self,
input: CreateIPSetRequest,
) -> RusotoFuture<CreateIPSetResponse, CreateIPSetError>;
/// <p>Creates a <a>RateBasedRule</a>. The <code>RateBasedRule</code> contains a <code>RateLimit</code>, which specifies the maximum number of requests that AWS WAF allows from a specified IP address in a five-minute period. The <code>RateBasedRule</code> also contains the <code>IPSet</code> objects, <code>ByteMatchSet</code> objects, and other predicates that identify the requests that you want to count or block if these requests exceed the <code>RateLimit</code>.</p> <p>If you add more than one predicate to a <code>RateBasedRule</code>, a request not only must exceed the <code>RateLimit</code>, but it also must match all the specifications to be counted or blocked. For example, suppose you add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44/32</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that matches <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>You then add the <code>RateBasedRule</code> to a <code>WebACL</code> and specify that you want to block requests that meet the conditions in the rule. For a request to be blocked, it must come from the IP address 192.0.2.44 <i>and</i> the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code>. Further, requests that match these two conditions must be received at a rate of more than 15,000 requests every five minutes. If both conditions are met and the rate is exceeded, AWS WAF blocks the requests. If the rate drops below 15,000 for a five-minute period, AWS WAF no longer blocks the requests.</p> <p>As a second example, suppose you want to limit requests to a particular page on your site. To do this, you could add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>A <code>ByteMatchSet</code> with <code>FieldToMatch</code> of <code>URI</code> </p> </li> <li> <p>A <code>PositionalConstraint</code> of <code>STARTS_WITH</code> </p> </li> <li> <p>A <code>TargetString</code> of <code>login</code> </p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>By adding this <code>RateBasedRule</code> to a <code>WebACL</code>, you could limit requests to your login page without affecting the rest of your site.</p> <p>To create and configure a <code>RateBasedRule</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in the rule. For more information, see <a>CreateByteMatchSet</a>, <a>CreateIPSet</a>, and <a>CreateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRule</code> request.</p> </li> <li> <p>Submit a <code>CreateRateBasedRule</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRule</a> request.</p> </li> <li> <p>Submit an <code>UpdateRateBasedRule</code> request to specify the predicates that you want to include in the rule.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>RateBasedRule</code>. For more information, see <a>CreateWebACL</a>.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
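    ///
    /// A hand-written sketch; `client` is a `WafClient`, `token` comes from `GetChangeToken`,
    /// and the field names follow the generated request shape, so treat them as illustrative:
    ///
    /// ```ignore
    /// let rule = client
    ///     .create_rate_based_rule(CreateRateBasedRuleRequest {
    ///         change_token: token,
    ///         name: "login-rate-limit".to_string(),
    ///         metric_name: "LoginRateLimit".to_string(),
    ///         rate_key: "IP".to_string(), // IP is the only supported RateKey value
    ///         rate_limit: 15_000,
    ///     })
    ///     .sync()
    ///     .expect("CreateRateBasedRule failed");
    /// // Predicates are attached afterwards with UpdateRateBasedRule, and the rule is
    /// // activated by adding it to a WebACL with UpdateWebACL.
    /// ```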
fn create_rate_based_rule(
&self,
input: CreateRateBasedRuleRequest,
) -> RusotoFuture<CreateRateBasedRuleResponse, CreateRateBasedRuleError>;
/// <p>Creates a <a>RegexMatchSet</a>. You then use <a>UpdateRegexMatchSet</a> to identify the part of a web request that you want AWS WAF to inspect, such as the values of the <code>User-Agent</code> header or the query string. For example, you can create a <code>RegexMatchSet</code> that contains a <code>RegexMatchTuple</code> that looks for any requests with <code>User-Agent</code> headers that match a <code>RegexPatternSet</code> with pattern <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>RegexMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRegexMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateRegexMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexMatchSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateRegexMatchSet</a> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value, using a <code>RegexPatternSet</code>, that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_regex_match_set(
&self,
input: CreateRegexMatchSetRequest,
) -> RusotoFuture<CreateRegexMatchSetResponse, CreateRegexMatchSetError>;
/// <p>Creates a <code>RegexPatternSet</code>. You then use <a>UpdateRegexPatternSet</a> to specify the regular expression (regex) pattern that you want AWS WAF to search for, such as <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>RegexPatternSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRegexPatternSet</code> request.</p> </li> <li> <p>Submit a <code>CreateRegexPatternSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexPatternSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateRegexPatternSet</a> request to specify the string that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_regex_pattern_set(
&self,
input: CreateRegexPatternSetRequest,
) -> RusotoFuture<CreateRegexPatternSetResponse, CreateRegexPatternSetError>;
/// <p>Creates a <code>Rule</code>, which contains the <code>IPSet</code> objects, <code>ByteMatchSet</code> objects, and other predicates that identify the requests that you want to block. If you add more than one predicate to a <code>Rule</code>, a request must match all of the specifications to be allowed or blocked. For example, suppose that you add the following to a <code>Rule</code>:</p> <ul> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44/32</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that matches <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> </ul> <p>You then add the <code>Rule</code> to a <code>WebACL</code> and specify that you want to blocks requests that satisfy the <code>Rule</code>. For a request to be blocked, it must come from the IP address 192.0.2.44 <i>and</i> the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code>.</p> <p>To create and configure a <code>Rule</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in the <code>Rule</code>. For more information, see <a>CreateByteMatchSet</a>, <a>CreateIPSet</a>, and <a>CreateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRule</code> request.</p> </li> <li> <p>Submit a <code>CreateRule</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRule</a> request.</p> </li> <li> <p>Submit an <code>UpdateRule</code> request to specify the predicates that you want to include in the <code>Rule</code>.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>Rule</code>. For more information, see <a>CreateWebACL</a>.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
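    ///
    /// A hand-written sketch; `client` is a `WafClient`, `token` comes from `GetChangeToken`,
    /// and the field names follow the generated request shape, so treat them as illustrative:
    ///
    /// ```ignore
    /// let rule = client
    ///     .create_rule(CreateRuleRequest {
    ///         change_token: token,
    ///         name: "block-bad-bot".to_string(),
    ///         metric_name: "BlockBadBot".to_string(),
    ///     })
    ///     .sync()
    ///     .expect("CreateRule failed");
    /// // The IPSet / ByteMatchSet predicates are attached afterwards with UpdateRule,
    /// // each call using a fresh change token.
    /// ```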
fn create_rule(
&self,
input: CreateRuleRequest,
) -> RusotoFuture<CreateRuleResponse, CreateRuleError>;
/// <p>Creates a <code>RuleGroup</code>. A rule group is a collection of predefined rules that you add to a web ACL. You use <a>UpdateRuleGroup</a> to add rules to the rule group.</p> <p>Rule groups are subject to the following limits:</p> <ul> <li> <p>Three rule groups per account. You can request an increase to this limit by contacting customer support.</p> </li> <li> <p>One rule group per web ACL.</p> </li> <li> <p>Ten rules per rule group.</p> </li> </ul> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_rule_group(
&self,
input: CreateRuleGroupRequest,
) -> RusotoFuture<CreateRuleGroupResponse, CreateRuleGroupError>;
/// <p>Creates a <code>SizeConstraintSet</code>. You then use <a>UpdateSizeConstraintSet</a> to identify the part of a web request that you want AWS WAF to check for length, such as the length of the <code>User-Agent</code> header or the length of the query string. For example, you can create a <code>SizeConstraintSet</code> that matches any requests that have a query string that is longer than 100 bytes. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>SizeConstraintSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateSizeConstraintSet</code> request.</p> </li> <li> <p>Submit a <code>CreateSizeConstraintSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateSizeConstraintSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateSizeConstraintSet</a> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_size_constraint_set(
&self,
input: CreateSizeConstraintSetRequest,
) -> RusotoFuture<CreateSizeConstraintSetResponse, CreateSizeConstraintSetError>;
/// <p>Creates a <a>SqlInjectionMatchSet</a>, which you use to allow, block, or count requests that contain snippets of SQL code in a specified part of web requests. AWS WAF searches for character sequences that are likely to be malicious strings.</p> <p>To create and configure a <code>SqlInjectionMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateSqlInjectionMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateSqlInjectionMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateSqlInjectionMatchSet</a> request.</p> </li> <li> <p>Submit an <a>UpdateSqlInjectionMatchSet</a> request to specify the parts of web requests in which you want to allow, block, or count malicious SQL code.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_sql_injection_match_set(
&self,
input: CreateSqlInjectionMatchSetRequest,
) -> RusotoFuture<CreateSqlInjectionMatchSetResponse, CreateSqlInjectionMatchSetError>;
/// <p>Creates a <code>WebACL</code>, which contains the <code>Rules</code> that identify the CloudFront web requests that you want to allow, block, or count. AWS WAF evaluates <code>Rules</code> in order based on the value of <code>Priority</code> for each <code>Rule</code>.</p> <p>You also specify a default action, either <code>ALLOW</code> or <code>BLOCK</code>. If a web request doesn't match any of the <code>Rules</code> in a <code>WebACL</code>, AWS WAF responds to the request with the default action. </p> <p>To create and configure a <code>WebACL</code>, perform the following steps:</p> <ol> <li> <p>Create and update the <code>ByteMatchSet</code> objects and other predicates that you want to include in <code>Rules</code>. For more information, see <a>CreateByteMatchSet</a>, <a>UpdateByteMatchSet</a>, <a>CreateIPSet</a>, <a>UpdateIPSet</a>, <a>CreateSqlInjectionMatchSet</a>, and <a>UpdateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Create and update the <code>Rules</code> that you want to include in the <code>WebACL</code>. For more information, see <a>CreateRule</a> and <a>UpdateRule</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateWebACL</code> request.</p> </li> <li> <p>Submit a <code>CreateWebACL</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateWebACL</a> request.</p> </li> <li> <p>Submit an <a>UpdateWebACL</a> request to specify the <code>Rules</code> that you want to include in the <code>WebACL</code>, to specify the default action, and to associate the <code>WebACL</code> with a CloudFront distribution.</p> </li> </ol> <p>For more information about how to use the AWS WAF API, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
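    ///
    /// A hand-written sketch; `client` is a `WafClient`, `token` comes from `GetChangeToken`,
    /// and the `WafAction` shape and field names follow the generated types, so treat them
    /// as illustrative:
    ///
    /// ```ignore
    /// let acl = client
    ///     .create_web_acl(CreateWebACLRequest {
    ///         change_token: token,
    ///         name: "site-acl".to_string(),
    ///         metric_name: "SiteAcl".to_string(),
    ///         default_action: WafAction { type_: "ALLOW".to_string() },
    ///     })
    ///     .sync()
    ///     .expect("CreateWebACL failed");
    /// // Rules are then attached (with priorities and actions) through UpdateWebACL.
    /// ```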
fn create_web_acl(
&self,
input: CreateWebACLRequest,
) -> RusotoFuture<CreateWebACLResponse, CreateWebACLError>;
/// <p>Creates an <a>XssMatchSet</a>, which you use to allow, block, or count requests that contain cross-site scripting attacks in the specified part of web requests. AWS WAF searches for character sequences that are likely to be malicious strings.</p> <p>To create and configure an <code>XssMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateXssMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateXssMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateXssMatchSet</a> request.</p> </li> <li> <p>Submit an <a>UpdateXssMatchSet</a> request to specify the parts of web requests in which you want to allow, block, or count cross-site scripting attacks.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_xss_match_set(
&self,
input: CreateXssMatchSetRequest,
) -> RusotoFuture<CreateXssMatchSetResponse, CreateXssMatchSetError>;
/// <p><p>Permanently deletes a <a>ByteMatchSet</a>. You can't delete a <code>ByteMatchSet</code> if it's still used in any <code>Rules</code> or if it still includes any <a>ByteMatchTuple</a> objects (any filters).</p> <p>If you just want to remove a <code>ByteMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>ByteMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>ByteMatchSet</code> to remove filters, if any. For more information, see <a>UpdateByteMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteByteMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteByteMatchSet</code> request.</p> </li> </ol></p>
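    ///
    /// A hand-written sketch of the delete workflow; `client` is a `WafClient`, `set_id`
    /// identifies an already-emptied `ByteMatchSet`, and the field names follow the
    /// generated request shape, so treat them as illustrative:
    ///
    /// ```ignore
    /// let token = client
    ///     .get_change_token()
    ///     .sync()
    ///     .expect("GetChangeToken failed")
    ///     .change_token
    ///     .unwrap();
    /// client
    ///     .delete_byte_match_set(DeleteByteMatchSetRequest {
    ///         byte_match_set_id: set_id,
    ///         change_token: token,
    ///     })
    ///     .sync()
    ///     .expect("DeleteByteMatchSet failed");
    /// ```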
fn delete_byte_match_set(
&self,
input: DeleteByteMatchSetRequest,
) -> RusotoFuture<DeleteByteMatchSetResponse, DeleteByteMatchSetError>;
/// <p><p>Permanently deletes a <a>GeoMatchSet</a>. You can't delete a <code>GeoMatchSet</code> if it's still used in any <code>Rules</code> or if it still includes any countries.</p> <p>If you just want to remove a <code>GeoMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>GeoMatchSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>GeoMatchSet</code> to remove any countries. For more information, see <a>UpdateGeoMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteGeoMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteGeoMatchSet</code> request.</p> </li> </ol></p>
fn delete_geo_match_set(
&self,
input: DeleteGeoMatchSetRequest,
) -> RusotoFuture<DeleteGeoMatchSetResponse, DeleteGeoMatchSetError>;
/// <p><p>Permanently deletes an <a>IPSet</a>. You can't delete an <code>IPSet</code> if it's still used in any <code>Rules</code> or if it still includes any IP addresses.</p> <p>If you just want to remove an <code>IPSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete an <code>IPSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>IPSet</code> to remove IP address ranges, if any. For more information, see <a>UpdateIPSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteIPSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteIPSet</code> request.</p> </li> </ol></p>
fn delete_ip_set(
&self,
input: DeleteIPSetRequest,
) -> RusotoFuture<DeleteIPSetResponse, DeleteIPSetError>;
/// <p>Permanently deletes the <a>LoggingConfiguration</a> from the specified web ACL.</p>
fn delete_logging_configuration(
&self,
input: DeleteLoggingConfigurationRequest,
) -> RusotoFuture<DeleteLoggingConfigurationResponse, DeleteLoggingConfigurationError>;
/// <p>Permanently deletes an IAM policy from the specified RuleGroup.</p> <p>The user making the request must be the owner of the RuleGroup.</p>
fn delete_permission_policy(
&self,
input: DeletePermissionPolicyRequest,
) -> RusotoFuture<DeletePermissionPolicyResponse, DeletePermissionPolicyError>;
/// <p><p>Permanently deletes a <a>RateBasedRule</a>. You can't delete a rule if it's still used in any <code>WebACL</code> objects or if it still includes any predicates, such as <code>ByteMatchSet</code> objects.</p> <p>If you just want to remove a rule from a <code>WebACL</code>, use <a>UpdateWebACL</a>.</p> <p>To permanently delete a <code>RateBasedRule</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>RateBasedRule</code> to remove predicates, if any. For more information, see <a>UpdateRateBasedRule</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRateBasedRule</code> request.</p> </li> <li> <p>Submit a <code>DeleteRateBasedRule</code> request.</p> </li> </ol></p>
fn delete_rate_based_rule(
&self,
input: DeleteRateBasedRuleRequest,
) -> RusotoFuture<DeleteRateBasedRuleResponse, DeleteRateBasedRuleError>;
/// <p><p>Permanently deletes a <a>RegexMatchSet</a>. You can't delete a <code>RegexMatchSet</code> if it's still used in any <code>Rules</code> or if it still includes any <code>RegexMatchTuples</code> objects (any filters).</p> <p>If you just want to remove a <code>RegexMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>RegexMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>RegexMatchSet</code> to remove filters, if any. For more information, see <a>UpdateRegexMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRegexMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteRegexMatchSet</code> request.</p> </li> </ol></p>
fn delete_regex_match_set(
&self,
input: DeleteRegexMatchSetRequest,
) -> RusotoFuture<DeleteRegexMatchSetResponse, DeleteRegexMatchSetError>;
/// <p>Permanently deletes a <a>RegexPatternSet</a>. You can't delete a <code>RegexPatternSet</code> if it's still used in any <code>RegexMatchSet</code> or if the <code>RegexPatternSet</code> is not empty. </p>
fn delete_regex_pattern_set(
&self,
input: DeleteRegexPatternSetRequest,
) -> RusotoFuture<DeleteRegexPatternSetResponse, DeleteRegexPatternSetError>;
/// <p><p>Permanently deletes a <a>Rule</a>. You can't delete a <code>Rule</code> if it's still used in any <code>WebACL</code> objects or if it still includes any predicates, such as <code>ByteMatchSet</code> objects.</p> <p>If you just want to remove a <code>Rule</code> from a <code>WebACL</code>, use <a>UpdateWebACL</a>.</p> <p>To permanently delete a <code>Rule</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>Rule</code> to remove predicates, if any. For more information, see <a>UpdateRule</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRule</code> request.</p> </li> <li> <p>Submit a <code>DeleteRule</code> request.</p> </li> </ol></p>
fn delete_rule(
&self,
input: DeleteRuleRequest,
) -> RusotoFuture<DeleteRuleResponse, DeleteRuleError>;
/// <p><p>Permanently deletes a <a>RuleGroup</a>. You can't delete a <code>RuleGroup</code> if it's still used in any <code>WebACL</code> objects or if it still includes any rules.</p> <p>If you just want to remove a <code>RuleGroup</code> from a <code>WebACL</code>, use <a>UpdateWebACL</a>.</p> <p>To permanently delete a <code>RuleGroup</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>RuleGroup</code> to remove rules, if any. For more information, see <a>UpdateRuleGroup</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRuleGroup</code> request.</p> </li> <li> <p>Submit a <code>DeleteRuleGroup</code> request.</p> </li> </ol></p>
fn delete_rule_group(
&self,
input: DeleteRuleGroupRequest,
) -> RusotoFuture<DeleteRuleGroupResponse, DeleteRuleGroupError>;
/// <p><p>Permanently deletes a <a>SizeConstraintSet</a>. You can't delete a <code>SizeConstraintSet</code> if it's still used in any <code>Rules</code> or if it still includes any <a>SizeConstraint</a> objects (any filters).</p> <p>If you just want to remove a <code>SizeConstraintSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>SizeConstraintSet</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>SizeConstraintSet</code> to remove filters, if any. For more information, see <a>UpdateSizeConstraintSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteSizeConstraintSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteSizeConstraintSet</code> request.</p> </li> </ol></p>
fn delete_size_constraint_set(
&self,
input: DeleteSizeConstraintSetRequest,
) -> RusotoFuture<DeleteSizeConstraintSetResponse, DeleteSizeConstraintSetError>;
/// <p><p>Permanently deletes a <a>SqlInjectionMatchSet</a>. You can't delete a <code>SqlInjectionMatchSet</code> if it's still used in any <code>Rules</code> or if it still contains any <a>SqlInjectionMatchTuple</a> objects.</p> <p>If you just want to remove a <code>SqlInjectionMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>SqlInjectionMatchSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>SqlInjectionMatchSet</code> to remove filters, if any. For more information, see <a>UpdateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteSqlInjectionMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteSqlInjectionMatchSet</code> request.</p> </li> </ol></p>
fn delete_sql_injection_match_set(
&self,
input: DeleteSqlInjectionMatchSetRequest,
) -> RusotoFuture<DeleteSqlInjectionMatchSetResponse, DeleteSqlInjectionMatchSetError>;
/// <p><p>Permanently deletes a <a>WebACL</a>. You can't delete a <code>WebACL</code> if it still contains any <code>Rules</code>.</p> <p>To delete a <code>WebACL</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>WebACL</code> to remove <code>Rules</code>, if any. For more information, see <a>UpdateWebACL</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteWebACL</code> request.</p> </li> <li> <p>Submit a <code>DeleteWebACL</code> request.</p> </li> </ol></p>
fn delete_web_acl(
&self,
input: DeleteWebACLRequest,
) -> RusotoFuture<DeleteWebACLResponse, DeleteWebACLError>;
/// <p><p>Permanently deletes an <a>XssMatchSet</a>. You can't delete an <code>XssMatchSet</code> if it's still used in any <code>Rules</code> or if it still contains any <a>XssMatchTuple</a> objects.</p> <p>If you just want to remove an <code>XssMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete an <code>XssMatchSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>XssMatchSet</code> to remove filters, if any. For more information, see <a>UpdateXssMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteXssMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteXssMatchSet</code> request.</p> </li> </ol></p>
fn delete_xss_match_set(
&self,
input: DeleteXssMatchSetRequest,
) -> RusotoFuture<DeleteXssMatchSetResponse, DeleteXssMatchSetError>;
/// <p>Returns the <a>ByteMatchSet</a> specified by <code>ByteMatchSetId</code>.</p>
fn get_byte_match_set(
&self,
input: GetByteMatchSetRequest,
) -> RusotoFuture<GetByteMatchSetResponse, GetByteMatchSetError>;
/// <p>When you want to create, update, or delete AWS WAF objects, get a change token and include the change token in the create, update, or delete request. Change tokens ensure that your application doesn't submit conflicting requests to AWS WAF.</p> <p>Each create, update, or delete request must use a unique change token. If your application submits a <code>GetChangeToken</code> request and then submits a second <code>GetChangeToken</code> request before submitting a create, update, or delete request, the second <code>GetChangeToken</code> request returns the same value as the first <code>GetChangeToken</code> request.</p> <p>When you use a change token in a create, update, or delete request, the status of the change token changes to <code>PENDING</code>, which indicates that AWS WAF is propagating the change to all AWS WAF servers. Use <code>GetChangeTokenStatus</code> to determine the status of your change token.</p>
fn get_change_token(&self) -> RusotoFuture<GetChangeTokenResponse, GetChangeTokenError>;
/// <p><p>Returns the status of a <code>ChangeToken</code> that you got by calling <a>GetChangeToken</a>. <code>ChangeTokenStatus</code> is one of the following values:</p> <ul> <li> <p> <code>PROVISIONED</code>: You requested the change token by calling <code>GetChangeToken</code>, but you haven't used it yet in a call to create, update, or delete an AWS WAF object.</p> </li> <li> <p> <code>PENDING</code>: AWS WAF is propagating the create, update, or delete request to all AWS WAF servers.</p> </li> <li> <p> <code>IN_SYNC</code>: Propagation is complete.</p> </li> </ul></p>
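    ///
    /// A hand-written polling sketch; `client` is a `WafClient` and `token` is the change
    /// token used in an earlier mutating call (field names are illustrative):
    ///
    /// ```ignore
    /// let status = client
    ///     .get_change_token_status(GetChangeTokenStatusRequest {
    ///         change_token: token.clone(),
    ///     })
    ///     .sync()
    ///     .expect("GetChangeTokenStatus failed")
    ///     .change_token_status; // one of the ChangeTokenStatus values described above
    /// ```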
fn get_change_token_status(
&self,
input: GetChangeTokenStatusRequest,
) -> RusotoFuture<GetChangeTokenStatusResponse, GetChangeTokenStatusError>;
/// <p>Returns the <a>GeoMatchSet</a> that is specified by <code>GeoMatchSetId</code>.</p>
fn get_geo_match_set(
&self,
input: GetGeoMatchSetRequest,
) -> RusotoFuture<GetGeoMatchSetResponse, GetGeoMatchSetError>;
/// <p>Returns the <a>IPSet</a> that is specified by <code>IPSetId</code>.</p>
fn get_ip_set(&self, input: GetIPSetRequest) -> RusotoFuture<GetIPSetResponse, GetIPSetError>;
/// <p>Returns the <a>LoggingConfiguration</a> for the specified web ACL.</p>
fn get_logging_configuration(
&self,
input: GetLoggingConfigurationRequest,
) -> RusotoFuture<GetLoggingConfigurationResponse, GetLoggingConfigurationError>;
/// <p>Returns the IAM policy attached to the RuleGroup.</p>
fn get_permission_policy(
&self,
input: GetPermissionPolicyRequest,
) -> RusotoFuture<GetPermissionPolicyResponse, GetPermissionPolicyError>;
/// <p>Returns the <a>RateBasedRule</a> that is specified by the <code>RuleId</code> that you included in the <code>GetRateBasedRule</code> request.</p>
fn get_rate_based_rule(
&self,
input: GetRateBasedRuleRequest,
) -> RusotoFuture<GetRateBasedRuleResponse, GetRateBasedRuleError>;
/// <p>Returns an array of IP addresses currently being blocked by the <a>RateBasedRule</a> that is specified by the <code>RuleId</code>. The maximum number of managed keys that will be blocked is 10,000. If more than 10,000 addresses exceed the rate limit, the 10,000 addresses with the highest rates will be blocked.</p>
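    ///
    /// A hand-written sketch; `client` is a `WafClient` and `rule_id` identifies an existing
    /// `RateBasedRule` (field names follow the generated shapes and are illustrative):
    ///
    /// ```ignore
    /// let keys = client
    ///     .get_rate_based_rule_managed_keys(GetRateBasedRuleManagedKeysRequest {
    ///         rule_id: rule_id.clone(),
    ///         next_marker: None,
    ///     })
    ///     .sync()
    ///     .expect("GetRateBasedRuleManagedKeys failed");
    /// println!("currently throttled addresses: {:?}", keys.managed_keys);
    /// ```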
fn get_rate_based_rule_managed_keys(
&self,
input: GetRateBasedRuleManagedKeysRequest,
) -> RusotoFuture<GetRateBasedRuleManagedKeysResponse, GetRateBasedRuleManagedKeysError>;
/// <p>Returns the <a>RegexMatchSet</a> specified by <code>RegexMatchSetId</code>.</p>
fn get_regex_match_set(
&self,
input: GetRegexMatchSetRequest,
) -> RusotoFuture<GetRegexMatchSetResponse, GetRegexMatchSetError>;
/// <p>Returns the <a>RegexPatternSet</a> specified by <code>RegexPatternSetId</code>.</p>
fn get_regex_pattern_set(
&self,
input: GetRegexPatternSetRequest,
) -> RusotoFuture<GetRegexPatternSetResponse, GetRegexPatternSetError>;
/// <p>Returns the <a>Rule</a> that is specified by the <code>RuleId</code> that you included in the <code>GetRule</code> request.</p>
fn get_rule(&self, input: GetRuleRequest) -> RusotoFuture<GetRuleResponse, GetRuleError>;
/// <p>Returns the <a>RuleGroup</a> that is specified by the <code>RuleGroupId</code> that you included in the <code>GetRuleGroup</code> request.</p> <p>To view the rules in a rule group, use <a>ListActivatedRulesInRuleGroup</a>.</p>
fn get_rule_group(
&self,
input: GetRuleGroupRequest,
) -> RusotoFuture<GetRuleGroupResponse, GetRuleGroupError>;
/// <p>Gets detailed information about a specified number of requests--a sample--that AWS WAF randomly selects from among the first 5,000 requests that your AWS resource received during a time range that you choose. You can specify a sample size of up to 500 requests, and you can specify any time range in the previous three hours.</p> <p> <code>GetSampledRequests</code> returns a time range, which is usually the time range that you specified. However, if your resource (such as a CloudFront distribution) received 5,000 requests before the specified time range elapsed, <code>GetSampledRequests</code> returns an updated time range. This new time range indicates the actual period during which AWS WAF selected the requests in the sample.</p>
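    ///
    /// A hand-written sketch; `client` is a `WafClient`, `acl_id` / `rule_id` identify existing
    /// resources, and the epoch-second timestamps plus field names follow the generated shapes,
    /// so treat them as illustrative:
    ///
    /// ```ignore
    /// let sample = client
    ///     .get_sampled_requests(GetSampledRequestsRequest {
    ///         web_acl_id: acl_id.clone(),
    ///         rule_id: rule_id.clone(),
    ///         max_items: 100,
    ///         time_window: TimeWindow {
    ///             start_time: start_epoch_seconds, // within the previous three hours
    ///             end_time: end_epoch_seconds,
    ///         },
    ///     })
    ///     .sync()
    ///     .expect("GetSampledRequests failed");
    /// ```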
fn get_sampled_requests(
&self,
input: GetSampledRequestsRequest,
) -> RusotoFuture<GetSampledRequestsResponse, GetSampledRequestsError>;
/// <p>Returns the <a>SizeConstraintSet</a> specified by <code>SizeConstraintSetId</code>.</p>
fn get_size_constraint_set(
&self,
input: GetSizeConstraintSetRequest,
) -> RusotoFuture<GetSizeConstraintSetResponse, GetSizeConstraintSetError>;
/// <p>Returns the <a>SqlInjectionMatchSet</a> that is specified by <code>SqlInjectionMatchSetId</code>.</p>
fn get_sql_injection_match_set(
&self,
input: GetSqlInjectionMatchSetRequest,
) -> RusotoFuture<GetSqlInjectionMatchSetResponse, GetSqlInjectionMatchSetError>;
/// <p>Returns the <a>WebACL</a> that is specified by <code>WebACLId</code>.</p>
fn get_web_acl(
&self,
input: GetWebACLRequest,
) -> RusotoFuture<GetWebACLResponse, GetWebACLError>;
/// <p>Returns the <a>XssMatchSet</a> that is specified by <code>XssMatchSetId</code>.</p>
fn get_xss_match_set(
&self,
input: GetXssMatchSetRequest,
) -> RusotoFuture<GetXssMatchSetResponse, GetXssMatchSetError>;
/// <p>Returns an array of <a>ActivatedRule</a> objects.</p>
fn list_activated_rules_in_rule_group(
&self,
input: ListActivatedRulesInRuleGroupRequest,
) -> RusotoFuture<ListActivatedRulesInRuleGroupResponse, ListActivatedRulesInRuleGroupError>;
/// <p>Returns an array of <a>ByteMatchSetSummary</a> objects.</p>
fn list_byte_match_sets(
&self,
input: ListByteMatchSetsRequest,
) -> RusotoFuture<ListByteMatchSetsResponse, ListByteMatchSetsError>;
/// <p>Returns an array of <a>GeoMatchSetSummary</a> objects in the response.</p>
fn list_geo_match_sets(
&self,
input: ListGeoMatchSetsRequest,
) -> RusotoFuture<ListGeoMatchSetsResponse, ListGeoMatchSetsError>;
/// <p>Returns an array of <a>IPSetSummary</a> objects in the response.</p>
fn list_ip_sets(
&self,
input: ListIPSetsRequest,
) -> RusotoFuture<ListIPSetsResponse, ListIPSetsError>;
/// <p>Returns an array of <a>LoggingConfiguration</a> objects.</p>
fn list_logging_configurations(
&self,
input: ListLoggingConfigurationsRequest,
) -> RusotoFuture<ListLoggingConfigurationsResponse, ListLoggingConfigurationsError>;
/// <p>Returns an array of <a>RuleSummary</a> objects.</p>
fn list_rate_based_rules(
&self,
input: ListRateBasedRulesRequest,
) -> RusotoFuture<ListRateBasedRulesResponse, ListRateBasedRulesError>;
/// <p>Returns an array of <a>RegexMatchSetSummary</a> objects.</p>
fn list_regex_match_sets(
&self,
input: ListRegexMatchSetsRequest,
) -> RusotoFuture<ListRegexMatchSetsResponse, ListRegexMatchSetsError>;
/// <p>Returns an array of <a>RegexPatternSetSummary</a> objects.</p>
fn list_regex_pattern_sets(
&self,
input: ListRegexPatternSetsRequest,
) -> RusotoFuture<ListRegexPatternSetsResponse, ListRegexPatternSetsError>;
/// <p>Returns an array of <a>RuleGroup</a> objects.</p>
fn list_rule_groups(
&self,
input: ListRuleGroupsRequest,
) -> RusotoFuture<ListRuleGroupsResponse, ListRuleGroupsError>;
/// <p>Returns an array of <a>RuleSummary</a> objects.</p>
fn list_rules(
&self,
input: ListRulesRequest,
) -> RusotoFuture<ListRulesResponse, ListRulesError>;
/// <p>Returns an array of <a>SizeConstraintSetSummary</a> objects.</p>
fn list_size_constraint_sets(
&self,
input: ListSizeConstraintSetsRequest,
) -> RusotoFuture<ListSizeConstraintSetsResponse, ListSizeConstraintSetsError>;
/// <p>Returns an array of <a>SqlInjectionMatchSet</a> objects.</p>
fn list_sql_injection_match_sets(
&self,
input: ListSqlInjectionMatchSetsRequest,
) -> RusotoFuture<ListSqlInjectionMatchSetsResponse, ListSqlInjectionMatchSetsError>;
/// <p>Returns an array of <a>RuleGroup</a> objects that you are subscribed to.</p>
fn list_subscribed_rule_groups(
&self,
input: ListSubscribedRuleGroupsRequest,
) -> RusotoFuture<ListSubscribedRuleGroupsResponse, ListSubscribedRuleGroupsError>;
/// <p>Returns an array of <a>WebACLSummary</a> objects in the response.</p>
fn list_web_ac_ls(
&self,
input: ListWebACLsRequest,
) -> RusotoFuture<ListWebACLsResponse, ListWebACLsError>;
/// <p>Returns an array of <a>XssMatchSet</a> objects.</p>
fn list_xss_match_sets(
&self,
input: ListXssMatchSetsRequest,
) -> RusotoFuture<ListXssMatchSetsResponse, ListXssMatchSetsError>;
    /// <p>Associates a <a>LoggingConfiguration</a> with a specified web ACL.</p> <p>You can access information about all traffic that AWS WAF inspects using the following steps:</p> <ol> <li> <p>Create an Amazon Kinesis Data Firehose.</p> </li> <li> <p>Associate that firehose to your web ACL using a <code>PutLoggingConfiguration</code> request.</p> </li> </ol> <p>When you successfully enable logging using a <code>PutLoggingConfiguration</code> request, AWS WAF will create a service-linked role with the necessary permissions to write logs to the Amazon Kinesis Data Firehose. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/logging.html">Logging Web ACL Traffic Information</a> in the <i>AWS WAF Developer Guide</i>.</p>
fn put_logging_configuration(
&self,
input: PutLoggingConfigurationRequest,
) -> RusotoFuture<PutLoggingConfigurationResponse, PutLoggingConfigurationError>;
    /// <p>Attaches an IAM policy to the specified resource. The only supported use for this action is to share a RuleGroup across accounts.</p> <p>The <code>PutPermissionPolicy</code> is subject to the following restrictions:</p> <ul> <li> <p>You can attach only one policy with each <code>PutPermissionPolicy</code> request.</p> </li> <li> <p>The policy must include an <code>Effect</code>, <code>Action</code> and <code>Principal</code>. </p> </li> <li> <p> <code>Effect</code> must specify <code>Allow</code>.</p> </li> <li> <p>The <code>Action</code> in the policy must be <code>waf:UpdateWebACL</code>, <code>waf-regional:UpdateWebACL</code>, <code>waf:GetRuleGroup</code> and <code>waf-regional:GetRuleGroup</code>. Any extra or wildcard actions in the policy will be rejected.</p> </li> <li> <p>The policy cannot include a <code>Resource</code> parameter.</p> </li> <li> <p>The ARN in the request must be a valid WAF RuleGroup ARN and the RuleGroup must exist in the same region.</p> </li> <li> <p>The user making the request must be the owner of the RuleGroup.</p> </li> <li> <p>Your policy must be composed using IAM Policy version 2012-10-17.</p> </li> </ul> <p>For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html">IAM Policies</a>. </p> <p>An example of a valid policy parameter is shown in the Examples section below.</p>
fn put_permission_policy(
&self,
input: PutPermissionPolicyRequest,
) -> RusotoFuture<PutPermissionPolicyResponse, PutPermissionPolicyError>;
/// <p>Inserts or deletes <a>ByteMatchTuple</a> objects (filters) in a <a>ByteMatchSet</a>. For each <code>ByteMatchTuple</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>ByteMatchSetUpdate</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The part of a web request that you want AWS WAF to inspect, such as a query string or the value of the <code>User-Agent</code> header. </p> </li> <li> <p>The bytes (typically a string that corresponds with ASCII characters) that you want AWS WAF to look for. For more information, including how you specify the values for the AWS WAF API and the AWS CLI or SDKs, see <code>TargetString</code> in the <a>ByteMatchTuple</a> data type. </p> </li> <li> <p>Where to look, such as at the beginning or the end of a query string.</p> </li> <li> <p>Whether to perform any conversions on the request, such as converting it to lowercase, before inspecting it for the specified string.</p> </li> </ul> <p>For example, you can add a <code>ByteMatchSetUpdate</code> object that matches web requests in which <code>User-Agent</code> headers contain the string <code>BadBot</code>. You can then configure AWS WAF to block those requests.</p> <p>To create and configure a <code>ByteMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>ByteMatchSet.</code> For more information, see <a>CreateByteMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateByteMatchSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateByteMatchSet</code> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_byte_match_set(
&self,
input: UpdateByteMatchSetRequest,
) -> RusotoFuture<UpdateByteMatchSetResponse, UpdateByteMatchSetError>;
    /// <p>Inserts or deletes <a>GeoMatchConstraint</a> objects in a <code>GeoMatchSet</code>. For each <code>GeoMatchConstraint</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>GeoMatchConstraint</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The <code>Type</code>. The only valid value for <code>Type</code> is <code>Country</code>.</p> </li> <li> <p>The <code>Value</code>, which is a two-character code for the country to add to the <code>GeoMatchConstraint</code> object. Valid codes are listed in <a>GeoMatchConstraint$Value</a>.</p> </li> </ul> <p>To create and configure a <code>GeoMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateGeoMatchSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateGeoMatchSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateGeoMatchSet</code> request to specify the country that you want AWS WAF to watch for.</p> </li> </ol> <p>When you update a <code>GeoMatchSet</code>, you specify the country that you want to add and/or the country that you want to delete. If you want to change a country, you delete the existing country and add the new one.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_geo_match_set(
&self,
input: UpdateGeoMatchSetRequest,
) -> RusotoFuture<UpdateGeoMatchSetResponse, UpdateGeoMatchSetError>;
/// <p>Inserts or deletes <a>IPSetDescriptor</a> objects in an <code>IPSet</code>. For each <code>IPSetDescriptor</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change an <code>IPSetDescriptor</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The IP address version, <code>IPv4</code> or <code>IPv6</code>. </p> </li> <li> <p>The IP address in CIDR notation, for example, <code>192.0.2.0/24</code> (for the range of IP addresses from <code>192.0.2.0</code> to <code>192.0.2.255</code>) or <code>192.0.2.44/32</code> (for the individual IP address <code>192.0.2.44</code>). </p> </li> </ul> <p>AWS WAF supports IPv4 address ranges: /8 and any range between /16 through /32. AWS WAF supports IPv6 address ranges: /16, /24, /32, /48, /56, /64, and /128. For more information about CIDR notation, see the Wikipedia entry <a href="https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing">Classless Inter-Domain Routing</a>.</p> <p>IPv6 addresses can be represented using any of the following formats:</p> <ul> <li> <p>1111:0000:0000:0000:0000:0000:0000:0111/128</p> </li> <li> <p>1111:0:0:0:0:0:0:0111/128</p> </li> <li> <p>1111::0111/128</p> </li> <li> <p>1111::111/128</p> </li> </ul> <p>You use an <code>IPSet</code> to specify which web requests you want to allow or block based on the IP addresses that the requests originated from. For example, if you're receiving a lot of requests from one or a small number of IP addresses and you want to block the requests, you can create an <code>IPSet</code> that specifies those IP addresses, and then configure AWS WAF to block the requests. </p> <p>To create and configure an <code>IPSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateIPSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateIPSet</code> request to specify the IP addresses that you want AWS WAF to watch for.</p> </li> </ol> <p>When you update an <code>IPSet</code>, you specify the IP addresses that you want to add and/or the IP addresses that you want to delete. If you want to change an IP address, you delete the existing IP address and add the new one.</p> <p>You can insert a maximum of 1000 addresses in a single request.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_ip_set(
&self,
input: UpdateIPSetRequest,
) -> RusotoFuture<UpdateIPSetResponse, UpdateIPSetError>;
    /// <p>Inserts or deletes <a>Predicate</a> objects in a rule and updates the <code>RateLimit</code> in the rule. </p> <p>Each <code>Predicate</code> object identifies a predicate, such as a <a>ByteMatchSet</a> or an <a>IPSet</a>, that specifies the web requests that you want to block or count. The <code>RateLimit</code> specifies the number of requests every five minutes that triggers the rule.</p> <p>If you add more than one predicate to a <code>RateBasedRule</code>, a request must match all the predicates and exceed the <code>RateLimit</code> to be counted or blocked. For example, suppose you add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44/32</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that matches <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>You then add the <code>RateBasedRule</code> to a <code>WebACL</code> and specify that you want to block requests that satisfy the rule. For a request to be blocked, it must come from the IP address 192.0.2.44 <i>and</i> the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code>. Further, requests that match these two conditions must be received at a rate of more than 15,000 every five minutes. If the rate drops below this limit, AWS WAF no longer blocks the requests.</p> <p>As a second example, suppose you want to limit requests to a particular page on your site. To do this, you could add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>A <code>ByteMatchSet</code> with <code>FieldToMatch</code> of <code>URI</code> </p> </li> <li> <p>A <code>PositionalConstraint</code> of <code>STARTS_WITH</code> </p> </li> <li> <p>A <code>TargetString</code> of <code>login</code> </p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>By adding this <code>RateBasedRule</code> to a <code>WebACL</code>, you could limit requests to your login page without affecting the rest of your site.</p>
fn update_rate_based_rule(
&self,
input: UpdateRateBasedRuleRequest,
) -> RusotoFuture<UpdateRateBasedRuleResponse, UpdateRateBasedRuleError>;
    /// <p>Inserts or deletes <a>RegexMatchTuple</a> objects (filters) in a <a>RegexMatchSet</a>. For each <code>RegexMatchSetUpdate</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>RegexMatchSetUpdate</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The part of a web request that you want AWS WAF to inspect, such as a query string or the value of the <code>User-Agent</code> header. </p> </li> <li> <p>The identifier of the pattern (a regular expression) that you want AWS WAF to look for. For more information, see <a>RegexPatternSet</a>. </p> </li> <li> <p>Whether to perform any conversions on the request, such as converting it to lowercase, before inspecting it for the specified string.</p> </li> </ul> <p> For example, you can create a <code>RegexPatternSet</code> that matches any requests with <code>User-Agent</code> headers that contain the string <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>RegexMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>RegexMatchSet.</code> For more information, see <a>CreateRegexMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexMatchSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateRegexMatchSet</code> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the identifier of the <code>RegexPatternSet</code> that contains the regular expression patterns you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_regex_match_set(
&self,
input: UpdateRegexMatchSetRequest,
) -> RusotoFuture<UpdateRegexMatchSetResponse, UpdateRegexMatchSetError>;
/// <p>Inserts or deletes <code>RegexPatternString</code> objects in a <a>RegexPatternSet</a>. For each <code>RegexPatternString</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the <code>RegexPatternString</code>.</p> </li> <li> <p>The regular expression pattern that you want to insert or delete. For more information, see <a>RegexPatternSet</a>. </p> </li> </ul> <p> For example, you can create a <code>RegexPatternString</code> such as <code>B[a@]dB[o0]t</code>. AWS WAF will match this <code>RegexPatternString</code> to:</p> <ul> <li> <p>BadBot</p> </li> <li> <p>BadB0t</p> </li> <li> <p>B@dBot</p> </li> <li> <p>B@dB0t</p> </li> </ul> <p>To create and configure a <code>RegexPatternSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>RegexPatternSet.</code> For more information, see <a>CreateRegexPatternSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexPatternSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateRegexPatternSet</code> request to specify the regular expression pattern that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_regex_pattern_set(
&self,
input: UpdateRegexPatternSetRequest,
) -> RusotoFuture<UpdateRegexPatternSetResponse, UpdateRegexPatternSetError>;
/// <p>Inserts or deletes <a>Predicate</a> objects in a <code>Rule</code>. Each <code>Predicate</code> object identifies a predicate, such as a <a>ByteMatchSet</a> or an <a>IPSet</a>, that specifies the web requests that you want to allow, block, or count. If you add more than one predicate to a <code>Rule</code>, a request must match all of the specifications to be allowed, blocked, or counted. For example, suppose that you add the following to a <code>Rule</code>: </p> <ul> <li> <p>A <code>ByteMatchSet</code> that matches the value <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44</code> </p> </li> </ul> <p>You then add the <code>Rule</code> to a <code>WebACL</code> and specify that you want to block requests that satisfy the <code>Rule</code>. For a request to be blocked, the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code> <i>and</i> the request must originate from the IP address 192.0.2.44.</p> <p>To create and configure a <code>Rule</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in the <code>Rule</code>.</p> </li> <li> <p>Create the <code>Rule</code>. See <a>CreateRule</a>.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRule</a> request.</p> </li> <li> <p>Submit an <code>UpdateRule</code> request to add predicates to the <code>Rule</code>.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>Rule</code>. See <a>CreateWebACL</a>.</p> </li> </ol> <p>If you want to replace one <code>ByteMatchSet</code> or <code>IPSet</code> with another, you delete the existing one and add the new one.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_rule(
&self,
input: UpdateRuleRequest,
) -> RusotoFuture<UpdateRuleResponse, UpdateRuleError>;
/// <p>Inserts or deletes <a>ActivatedRule</a> objects in a <code>RuleGroup</code>.</p> <p>You can only insert <code>REGULAR</code> rules into a rule group.</p> <p>You can have a maximum of ten rules per rule group.</p> <p>To create and configure a <code>RuleGroup</code>, perform the following steps:</p> <ol> <li> <p>Create and update the <code>Rules</code> that you want to include in the <code>RuleGroup</code>. See <a>CreateRule</a>.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRuleGroup</a> request.</p> </li> <li> <p>Submit an <code>UpdateRuleGroup</code> request to add <code>Rules</code> to the <code>RuleGroup</code>.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>RuleGroup</code>. See <a>CreateWebACL</a>.</p> </li> </ol> <p>If you want to replace one <code>Rule</code> with another, you delete the existing one and add the new one.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_rule_group(
&self,
input: UpdateRuleGroupRequest,
) -> RusotoFuture<UpdateRuleGroupResponse, UpdateRuleGroupError>;
/// <p>Inserts or deletes <a>SizeConstraint</a> objects (filters) in a <a>SizeConstraintSet</a>. For each <code>SizeConstraint</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>SizeConstraintSetUpdate</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The part of a web request that you want AWS WAF to evaluate, such as the length of a query string or the length of the <code>User-Agent</code> header.</p> </li> <li> <p>Whether to perform any transformations on the request, such as converting it to lowercase, before checking its length. Note that transformations of the request body are not supported because the AWS resource forwards only the first <code>8192</code> bytes of your request to AWS WAF.</p> <p>You can only specify a single type of TextTransformation.</p> </li> <li> <p>A <code>ComparisonOperator</code> used for evaluating the selected part of the request against the specified <code>Size</code>, such as equals, greater than, less than, and so on.</p> </li> <li> <p>The length, in bytes, that you want AWS WAF to watch for in selected part of the request. The length is computed after applying the transformation.</p> </li> </ul> <p>For example, you can add a <code>SizeConstraintSetUpdate</code> object that matches web requests in which the length of the <code>User-Agent</code> header is greater than 100 bytes. You can then configure AWS WAF to block those requests.</p> <p>To create and configure a <code>SizeConstraintSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>SizeConstraintSet.</code> For more information, see <a>CreateSizeConstraintSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateSizeConstraintSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateSizeConstraintSet</code> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_size_constraint_set(
&self,
input: UpdateSizeConstraintSetRequest,
) -> RusotoFuture<UpdateSizeConstraintSetResponse, UpdateSizeConstraintSetError>;
/// <p>Inserts or deletes <a>SqlInjectionMatchTuple</a> objects (filters) in a <a>SqlInjectionMatchSet</a>. For each <code>SqlInjectionMatchTuple</code> object, you specify the following values:</p> <ul> <li> <p> <code>Action</code>: Whether to insert the object into or delete the object from the array. To change a <code>SqlInjectionMatchTuple</code>, you delete the existing object and add a new one.</p> </li> <li> <p> <code>FieldToMatch</code>: The part of web requests that you want AWS WAF to inspect and, if you want AWS WAF to inspect a header or custom query parameter, the name of the header or parameter.</p> </li> <li> <p> <code>TextTransformation</code>: Which text transformation, if any, to perform on the web request before inspecting the request for snippets of malicious SQL code.</p> <p>You can only specify a single type of TextTransformation.</p> </li> </ul> <p>You use <code>SqlInjectionMatchSet</code> objects to specify which CloudFront requests that you want to allow, block, or count. For example, if you're receiving requests that contain snippets of SQL code in the query string and you want to block the requests, you can create a <code>SqlInjectionMatchSet</code> with the applicable settings, and then configure AWS WAF to block the requests. </p> <p>To create and configure a <code>SqlInjectionMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateSqlInjectionMatchSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateSqlInjectionMatchSet</code> request to specify the parts of web requests that you want AWS WAF to inspect for snippets of SQL code.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_sql_injection_match_set(
&self,
input: UpdateSqlInjectionMatchSetRequest,
) -> RusotoFuture<UpdateSqlInjectionMatchSetResponse, UpdateSqlInjectionMatchSetError>;
    /// <p>Inserts or deletes <a>ActivatedRule</a> objects in a <code>WebACL</code>. Each <code>Rule</code> identifies web requests that you want to allow, block, or count. When you update a <code>WebACL</code>, you specify the following values:</p> <ul> <li> <p>A default action for the <code>WebACL</code>, either <code>ALLOW</code> or <code>BLOCK</code>. AWS WAF performs the default action if a request doesn't match the criteria in any of the <code>Rules</code> in a <code>WebACL</code>.</p> </li> <li> <p>The <code>Rules</code> that you want to add or delete. If you want to replace one <code>Rule</code> with another, you delete the existing <code>Rule</code> and add the new one.</p> </li> <li> <p>For each <code>Rule</code>, whether you want AWS WAF to allow requests, block requests, or count requests that match the conditions in the <code>Rule</code>.</p> </li> <li> <p>The order in which you want AWS WAF to evaluate the <code>Rules</code> in a <code>WebACL</code>. If you add more than one <code>Rule</code> to a <code>WebACL</code>, AWS WAF evaluates each request against the <code>Rules</code> in order based on the value of <code>Priority</code>. (The <code>Rule</code> that has the lowest value for <code>Priority</code> is evaluated first.) When a web request matches all the predicates (such as <code>ByteMatchSets</code> and <code>IPSets</code>) in a <code>Rule</code>, AWS WAF immediately takes the corresponding action, allow or block, and doesn't evaluate the request against the remaining <code>Rules</code> in the <code>WebACL</code>, if any. </p> </li> </ul> <p>To create and configure a <code>WebACL</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in <code>Rules</code>. For more information, see <a>CreateByteMatchSet</a>, <a>UpdateByteMatchSet</a>, <a>CreateIPSet</a>, <a>UpdateIPSet</a>, <a>CreateSqlInjectionMatchSet</a>, and <a>UpdateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Create and update the <code>Rules</code> that you want to include in the <code>WebACL</code>. For more information, see <a>CreateRule</a> and <a>UpdateRule</a>.</p> </li> <li> <p>Create a <code>WebACL</code>. See <a>CreateWebACL</a>.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateWebACL</a> request.</p> </li> <li> <p>Submit an <code>UpdateWebACL</code> request to specify the <code>Rules</code> that you want to include in the <code>WebACL</code>, to specify the default action, and to associate the <code>WebACL</code> with a CloudFront distribution. </p> <p>The <code>ActivatedRule</code> can be a rule group. If you specify a rule group as your <code>ActivatedRule</code>, you can exclude specific rules from that rule group.</p> <p>If you already have a rule group associated with a web ACL and want to submit an <code>UpdateWebACL</code> request to exclude certain rules from that rule group, you must first remove the rule group from the web ACL, then re-insert it, specifying the excluded rules. For details, see <a>ActivatedRule$ExcludedRules</a>. </p> </li> </ol> <p>Be aware that if you try to add a RATE_BASED rule to a web ACL without setting the rule type when first creating the rule, the <a>UpdateWebACL</a> request will fail because the request tries to add a REGULAR rule (the default rule type) with the specified ID, which does not exist.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_web_acl(
&self,
input: UpdateWebACLRequest,
) -> RusotoFuture<UpdateWebACLResponse, UpdateWebACLError>;
/// <p>Inserts or deletes <a>XssMatchTuple</a> objects (filters) in an <a>XssMatchSet</a>. For each <code>XssMatchTuple</code> object, you specify the following values:</p> <ul> <li> <p> <code>Action</code>: Whether to insert the object into or delete the object from the array. To change an <code>XssMatchTuple</code>, you delete the existing object and add a new one.</p> </li> <li> <p> <code>FieldToMatch</code>: The part of web requests that you want AWS WAF to inspect and, if you want AWS WAF to inspect a header or custom query parameter, the name of the header or parameter.</p> </li> <li> <p> <code>TextTransformation</code>: Which text transformation, if any, to perform on the web request before inspecting the request for cross-site scripting attacks.</p> <p>You can only specify a single type of TextTransformation.</p> </li> </ul> <p>You use <code>XssMatchSet</code> objects to specify which CloudFront requests that you want to allow, block, or count. For example, if you're receiving requests that contain cross-site scripting attacks in the request body and you want to block the requests, you can create an <code>XssMatchSet</code> with the applicable settings, and then configure AWS WAF to block the requests. </p> <p>To create and configure an <code>XssMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateXssMatchSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateXssMatchSet</code> request to specify the parts of web requests that you want AWS WAF to inspect for cross-site scripting attacks.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_xss_match_set(
&self,
input: UpdateXssMatchSetRequest,
) -> RusotoFuture<UpdateXssMatchSetResponse, UpdateXssMatchSetError>;
}
/// A client for the WAF API.
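///
/// Illustrative usage sketch (an editorial addition, not part of the generated
/// AWS documentation). It assumes the usual rusoto conventions: the crate is
/// consumed as `rusoto_waf`, credentials come from the default provider chain,
/// `RusotoFuture::sync()` is used to drive the request to completion, and the
/// `limit` request field and `web_ac_ls` response field follow the crate's
/// standard name mapping. Adjust these details to your own setup.
///
/// ```rust,ignore
/// use rusoto_core::Region;
/// use rusoto_waf::{ListWebACLsRequest, Waf, WafClient};
///
/// // WAF for CloudFront is a global service; us-east-1 is the conventional region.
/// let client = WafClient::new(Region::UsEast1);
///
/// // List up to ten web ACLs in the account.
/// let request = ListWebACLsRequest {
///     limit: Some(10),
///     ..Default::default()
/// };
///
/// match client.list_web_ac_ls(request).sync() {
///     Ok(response) => println!("web ACLs: {:?}", response.web_ac_ls),
///     Err(error) => eprintln!("failed to list web ACLs: {:?}", error),
/// }
/// ```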
#[derive(Clone)]
pub struct WafClient {
client: Client,
region: region::Region,
}
impl WafClient {
/// Creates a client backed by the default tokio event loop.
///
/// The client will use the default credentials provider and tls client.
pub fn new(region: region::Region) -> WafClient {
WafClient {
client: Client::shared(),
region: region,
}
}
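    /// Creates a client with a custom request dispatcher and credentials provider.
    ///
    /// This is useful for supplying a mock dispatcher in tests or a
    /// non-default credentials source (for example, static credentials).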
pub fn new_with<P, D>(
request_dispatcher: D,
credentials_provider: P,
region: region::Region,
) -> WafClient
where
P: ProvideAwsCredentials + Send + Sync + 'static,
P::Future: Send,
D: DispatchSignedRequest + Send + Sync + 'static,
D::Future: Send,
{
WafClient {
client: Client::new_with(credentials_provider, request_dispatcher),
region: region,
}
}
}
impl Waf for WafClient {
/// <p>Creates a <code>ByteMatchSet</code>. You then use <a>UpdateByteMatchSet</a> to identify the part of a web request that you want AWS WAF to inspect, such as the values of the <code>User-Agent</code> header or the query string. For example, you can create a <code>ByteMatchSet</code> that matches any requests with <code>User-Agent</code> headers that contain the string <code>BadBot</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>ByteMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateByteMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateByteMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateByteMatchSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateByteMatchSet</a> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_byte_match_set(
&self,
input: CreateByteMatchSetRequest,
) -> RusotoFuture<CreateByteMatchSetResponse, CreateByteMatchSetError> {
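        // Every operation in this client follows the same shape: build a signed
        // JSON POST to the "waf" service, name the operation in the
        // `x-amz-target` header, serialize the input with serde_json, then
        // either deserialize the successful payload or convert the response
        // into the operation's typed error.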
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateByteMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateByteMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateByteMatchSetError::from_response(response))),
)
}
})
}
    /// <p>Creates a <a>GeoMatchSet</a>, which you use to specify which web requests you want to allow or block based on the country that the requests originate from. For example, if you're receiving a lot of requests from one or more countries and you want to block the requests, you can create a <code>GeoMatchSet</code> that contains those countries and then configure AWS WAF to block the requests. </p> <p>To create and configure a <code>GeoMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateGeoMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateGeoMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateGeoMatchSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateGeoMatchSet</code> request to specify the countries that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_geo_match_set(
&self,
input: CreateGeoMatchSetRequest,
) -> RusotoFuture<CreateGeoMatchSetResponse, CreateGeoMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateGeoMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateGeoMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateGeoMatchSetError::from_response(response))),
)
}
})
}
/// <p>Creates an <a>IPSet</a>, which you use to specify which web requests that you want to allow or block based on the IP addresses that the requests originate from. For example, if you're receiving a lot of requests from one or more individual IP addresses or one or more ranges of IP addresses and you want to block the requests, you can create an <code>IPSet</code> that contains those IP addresses and then configure AWS WAF to block the requests. </p> <p>To create and configure an <code>IPSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateIPSet</code> request.</p> </li> <li> <p>Submit a <code>CreateIPSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateIPSet</code> request to specify the IP addresses that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_ip_set(
&self,
input: CreateIPSetRequest,
) -> RusotoFuture<CreateIPSetResponse, CreateIPSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateIPSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateIPSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateIPSetError::from_response(response))),
)
}
})
}
/// <p>Creates a <a>RateBasedRule</a>. The <code>RateBasedRule</code> contains a <code>RateLimit</code>, which specifies the maximum number of requests that AWS WAF allows from a specified IP address in a five-minute period. The <code>RateBasedRule</code> also contains the <code>IPSet</code> objects, <code>ByteMatchSet</code> objects, and other predicates that identify the requests that you want to count or block if these requests exceed the <code>RateLimit</code>.</p> <p>If you add more than one predicate to a <code>RateBasedRule</code>, a request not only must exceed the <code>RateLimit</code>, but it also must match all the specifications to be counted or blocked. For example, suppose you add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44/32</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that matches <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>You then add the <code>RateBasedRule</code> to a <code>WebACL</code> and specify that you want to block requests that meet the conditions in the rule. For a request to be blocked, it must come from the IP address 192.0.2.44 <i>and</i> the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code>. Further, requests that match these two conditions must be received at a rate of more than 15,000 requests every five minutes. If both conditions are met and the rate is exceeded, AWS WAF blocks the requests. If the rate drops below 15,000 for a five-minute period, AWS WAF no longer blocks the requests.</p> <p>As a second example, suppose you want to limit requests to a particular page on your site. To do this, you could add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>A <code>ByteMatchSet</code> with <code>FieldToMatch</code> of <code>URI</code> </p> </li> <li> <p>A <code>PositionalConstraint</code> of <code>STARTS_WITH</code> </p> </li> <li> <p>A <code>TargetString</code> of <code>login</code> </p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>By adding this <code>RateBasedRule</code> to a <code>WebACL</code>, you could limit requests to your login page without affecting the rest of your site.</p> <p>To create and configure a <code>RateBasedRule</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in the rule. For more information, see <a>CreateByteMatchSet</a>, <a>CreateIPSet</a>, and <a>CreateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRule</code> request.</p> </li> <li> <p>Submit a <code>CreateRateBasedRule</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRule</a> request.</p> </li> <li> <p>Submit an <code>UpdateRateBasedRule</code> request to specify the predicates that you want to include in the rule.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>RateBasedRule</code>. For more information, see <a>CreateWebACL</a>.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_rate_based_rule(
&self,
input: CreateRateBasedRuleRequest,
) -> RusotoFuture<CreateRateBasedRuleResponse, CreateRateBasedRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateRateBasedRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateRateBasedRuleResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(CreateRateBasedRuleError::from_response(response))
}),
)
}
})
}
/// <p>Creates a <a>RegexMatchSet</a>. You then use <a>UpdateRegexMatchSet</a> to identify the part of a web request that you want AWS WAF to inspect, such as the values of the <code>User-Agent</code> header or the query string. For example, you can create a <code>RegexMatchSet</code> that contains a <code>RegexMatchTuple</code> that looks for any requests with <code>User-Agent</code> headers that match a <code>RegexPatternSet</code> with pattern <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>RegexMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRegexMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateRegexMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexMatchSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateRegexMatchSet</a> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value, using a <code>RegexPatternSet</code>, that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_regex_match_set(
&self,
input: CreateRegexMatchSetRequest,
) -> RusotoFuture<CreateRegexMatchSetResponse, CreateRegexMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateRegexMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateRegexMatchSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(CreateRegexMatchSetError::from_response(response))
}),
)
}
})
}
/// <p>Creates a <code>RegexPatternSet</code>. You then use <a>UpdateRegexPatternSet</a> to specify the regular expression (regex) pattern that you want AWS WAF to search for, such as <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>RegexPatternSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRegexPatternSet</code> request.</p> </li> <li> <p>Submit a <code>CreateRegexPatternSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexPatternSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateRegexPatternSet</a> request to specify the string that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_regex_pattern_set(
&self,
input: CreateRegexPatternSetRequest,
) -> RusotoFuture<CreateRegexPatternSetResponse, CreateRegexPatternSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateRegexPatternSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateRegexPatternSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(CreateRegexPatternSetError::from_response(response))
}),
)
}
})
}
    /// <p>Creates a <code>Rule</code>, which contains the <code>IPSet</code> objects, <code>ByteMatchSet</code> objects, and other predicates that identify the requests that you want to block. If you add more than one predicate to a <code>Rule</code>, a request must match all of the specifications to be allowed or blocked. For example, suppose that you add the following to a <code>Rule</code>:</p> <ul> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44/32</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that matches <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> </ul> <p>You then add the <code>Rule</code> to a <code>WebACL</code> and specify that you want to block requests that satisfy the <code>Rule</code>. For a request to be blocked, it must come from the IP address 192.0.2.44 <i>and</i> the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code>.</p> <p>To create and configure a <code>Rule</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in the <code>Rule</code>. For more information, see <a>CreateByteMatchSet</a>, <a>CreateIPSet</a>, and <a>CreateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateRule</code> request.</p> </li> <li> <p>Submit a <code>CreateRule</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRule</a> request.</p> </li> <li> <p>Submit an <code>UpdateRule</code> request to specify the predicates that you want to include in the <code>Rule</code>.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>Rule</code>. For more information, see <a>CreateWebACL</a>.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_rule(
&self,
input: CreateRuleRequest,
) -> RusotoFuture<CreateRuleResponse, CreateRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateRuleResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateRuleError::from_response(response))),
)
}
})
}
/// <p>Creates a <code>RuleGroup</code>. A rule group is a collection of predefined rules that you add to a web ACL. You use <a>UpdateRuleGroup</a> to add rules to the rule group.</p> <p>Rule groups are subject to the following limits:</p> <ul> <li> <p>Three rule groups per account. You can request an increase to this limit by contacting customer support.</p> </li> <li> <p>One rule group per web ACL.</p> </li> <li> <p>Ten rules per rule group.</p> </li> </ul> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_rule_group(
&self,
input: CreateRuleGroupRequest,
) -> RusotoFuture<CreateRuleGroupResponse, CreateRuleGroupError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateRuleGroup");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateRuleGroupResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateRuleGroupError::from_response(response))),
)
}
})
}
/// <p>Creates a <code>SizeConstraintSet</code>. You then use <a>UpdateSizeConstraintSet</a> to identify the part of a web request that you want AWS WAF to check for length, such as the length of the <code>User-Agent</code> header or the length of the query string. For example, you can create a <code>SizeConstraintSet</code> that matches any requests that have a query string that is longer than 100 bytes. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>SizeConstraintSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateSizeConstraintSet</code> request.</p> </li> <li> <p>Submit a <code>CreateSizeConstraintSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateSizeConstraintSet</code> request.</p> </li> <li> <p>Submit an <a>UpdateSizeConstraintSet</a> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_size_constraint_set(
&self,
input: CreateSizeConstraintSetRequest,
) -> RusotoFuture<CreateSizeConstraintSetResponse, CreateSizeConstraintSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateSizeConstraintSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateSizeConstraintSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(CreateSizeConstraintSetError::from_response(response))
}))
}
})
}
/// <p>Creates a <a>SqlInjectionMatchSet</a>, which you use to allow, block, or count requests that contain snippets of SQL code in a specified part of web requests. AWS WAF searches for character sequences that are likely to be malicious strings.</p> <p>To create and configure a <code>SqlInjectionMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateSqlInjectionMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateSqlInjectionMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateSqlInjectionMatchSet</a> request.</p> </li> <li> <p>Submit an <a>UpdateSqlInjectionMatchSet</a> request to specify the parts of web requests in which you want to allow, block, or count malicious SQL code.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_sql_injection_match_set(
&self,
input: CreateSqlInjectionMatchSetRequest,
) -> RusotoFuture<CreateSqlInjectionMatchSetResponse, CreateSqlInjectionMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateSqlInjectionMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateSqlInjectionMatchSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(CreateSqlInjectionMatchSetError::from_response(response))
}))
}
})
}
/// <p>Creates a <code>WebACL</code>, which contains the <code>Rules</code> that identify the CloudFront web requests that you want to allow, block, or count. AWS WAF evaluates <code>Rules</code> in order based on the value of <code>Priority</code> for each <code>Rule</code>.</p> <p>You also specify a default action, either <code>ALLOW</code> or <code>BLOCK</code>. If a web request doesn't match any of the <code>Rules</code> in a <code>WebACL</code>, AWS WAF responds to the request with the default action. </p> <p>To create and configure a <code>WebACL</code>, perform the following steps:</p> <ol> <li> <p>Create and update the <code>ByteMatchSet</code> objects and other predicates that you want to include in <code>Rules</code>. For more information, see <a>CreateByteMatchSet</a>, <a>UpdateByteMatchSet</a>, <a>CreateIPSet</a>, <a>UpdateIPSet</a>, <a>CreateSqlInjectionMatchSet</a>, and <a>UpdateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Create and update the <code>Rules</code> that you want to include in the <code>WebACL</code>. For more information, see <a>CreateRule</a> and <a>UpdateRule</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateWebACL</code> request.</p> </li> <li> <p>Submit a <code>CreateWebACL</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateWebACL</a> request.</p> </li> <li> <p>Submit an <a>UpdateWebACL</a> request to specify the <code>Rules</code> that you want to include in the <code>WebACL</code>, to specify the default action, and to associate the <code>WebACL</code> with a CloudFront distribution.</p> </li> </ol> <p>For more information about how to use the AWS WAF API, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_web_acl(
&self,
input: CreateWebACLRequest,
) -> RusotoFuture<CreateWebACLResponse, CreateWebACLError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateWebACL");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateWebACLResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateWebACLError::from_response(response))),
)
}
})
}
/// <p>Creates an <a>XssMatchSet</a>, which you use to allow, block, or count requests that contain cross-site scripting attacks in the specified part of web requests. AWS WAF searches for character sequences that are likely to be malicious strings.</p> <p>To create and configure an <code>XssMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>CreateXssMatchSet</code> request.</p> </li> <li> <p>Submit a <code>CreateXssMatchSet</code> request.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateXssMatchSet</a> request.</p> </li> <li> <p>Submit an <a>UpdateXssMatchSet</a> request to specify the parts of web requests in which you want to allow, block, or count cross-site scripting attacks.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn create_xss_match_set(
&self,
input: CreateXssMatchSetRequest,
) -> RusotoFuture<CreateXssMatchSetResponse, CreateXssMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.CreateXssMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<CreateXssMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateXssMatchSetError::from_response(response))),
)
}
})
}
/// <p><p>Permanently deletes a <a>ByteMatchSet</a>. You can't delete a <code>ByteMatchSet</code> if it's still used in any <code>Rules</code> or if it still includes any <a>ByteMatchTuple</a> objects (any filters).</p> <p>If you just want to remove a <code>ByteMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>ByteMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>ByteMatchSet</code> to remove filters, if any. For more information, see <a>UpdateByteMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteByteMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteByteMatchSet</code> request.</p> </li> </ol></p>
fn delete_byte_match_set(
&self,
input: DeleteByteMatchSetRequest,
) -> RusotoFuture<DeleteByteMatchSetResponse, DeleteByteMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteByteMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteByteMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteByteMatchSetError::from_response(response))),
)
}
})
}
/// <p><p>Permanently deletes a <a>GeoMatchSet</a>. You can't delete a <code>GeoMatchSet</code> if it's still used in any <code>Rules</code> or if it still includes any countries.</p> <p>If you just want to remove a <code>GeoMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>GeoMatchSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>GeoMatchSet</code> to remove any countries. For more information, see <a>UpdateGeoMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteGeoMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteGeoMatchSet</code> request.</p> </li> </ol></p>
fn delete_geo_match_set(
&self,
input: DeleteGeoMatchSetRequest,
) -> RusotoFuture<DeleteGeoMatchSetResponse, DeleteGeoMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteGeoMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteGeoMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteGeoMatchSetError::from_response(response))),
)
}
})
}
/// <p><p>Permanently deletes an <a>IPSet</a>. You can't delete an <code>IPSet</code> if it's still used in any <code>Rules</code> or if it still includes any IP addresses.</p> <p>If you just want to remove an <code>IPSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete an <code>IPSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>IPSet</code> to remove IP address ranges, if any. For more information, see <a>UpdateIPSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteIPSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteIPSet</code> request.</p> </li> </ol></p>
fn delete_ip_set(
&self,
input: DeleteIPSetRequest,
) -> RusotoFuture<DeleteIPSetResponse, DeleteIPSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteIPSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteIPSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteIPSetError::from_response(response))),
)
}
})
}
/// <p>Permanently deletes the <a>LoggingConfiguration</a> from the specified web ACL.</p>
fn delete_logging_configuration(
&self,
input: DeleteLoggingConfigurationRequest,
) -> RusotoFuture<DeleteLoggingConfigurationResponse, DeleteLoggingConfigurationError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteLoggingConfiguration");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteLoggingConfigurationResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(DeleteLoggingConfigurationError::from_response(response))
}))
}
})
}
/// <p>Permanently deletes an IAM policy from the specified RuleGroup.</p> <p>The user making the request must be the owner of the RuleGroup.</p>
fn delete_permission_policy(
&self,
input: DeletePermissionPolicyRequest,
) -> RusotoFuture<DeletePermissionPolicyResponse, DeletePermissionPolicyError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeletePermissionPolicy");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeletePermissionPolicyResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(DeletePermissionPolicyError::from_response(response))
}),
)
}
})
}
/// <p><p>Permanently deletes a <a>RateBasedRule</a>. You can't delete a rule if it's still used in any <code>WebACL</code> objects or if it still includes any predicates, such as <code>ByteMatchSet</code> objects.</p> <p>If you just want to remove a rule from a <code>WebACL</code>, use <a>UpdateWebACL</a>.</p> <p>To permanently delete a <code>RateBasedRule</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>RateBasedRule</code> to remove predicates, if any. For more information, see <a>UpdateRateBasedRule</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRateBasedRule</code> request.</p> </li> <li> <p>Submit a <code>DeleteRateBasedRule</code> request.</p> </li> </ol></p>
fn delete_rate_based_rule(
&self,
input: DeleteRateBasedRuleRequest,
) -> RusotoFuture<DeleteRateBasedRuleResponse, DeleteRateBasedRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteRateBasedRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteRateBasedRuleResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(DeleteRateBasedRuleError::from_response(response))
}),
)
}
})
}
/// <p><p>Permanently deletes a <a>RegexMatchSet</a>. You can't delete a <code>RegexMatchSet</code> if it's still used in any <code>Rules</code> or if it still includes any <code>RegexMatchTuples</code> objects (any filters).</p> <p>If you just want to remove a <code>RegexMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>RegexMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>RegexMatchSet</code> to remove filters, if any. For more information, see <a>UpdateRegexMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRegexMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteRegexMatchSet</code> request.</p> </li> </ol></p>
fn delete_regex_match_set(
&self,
input: DeleteRegexMatchSetRequest,
) -> RusotoFuture<DeleteRegexMatchSetResponse, DeleteRegexMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteRegexMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteRegexMatchSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(DeleteRegexMatchSetError::from_response(response))
}),
)
}
})
}
/// <p>Permanently deletes a <a>RegexPatternSet</a>. You can't delete a <code>RegexPatternSet</code> if it's still used in any <code>RegexMatchSet</code> or if the <code>RegexPatternSet</code> is not empty. </p>
fn delete_regex_pattern_set(
&self,
input: DeleteRegexPatternSetRequest,
) -> RusotoFuture<DeleteRegexPatternSetResponse, DeleteRegexPatternSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteRegexPatternSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteRegexPatternSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(DeleteRegexPatternSetError::from_response(response))
}),
)
}
})
}
/// <p><p>Permanently deletes a <a>Rule</a>. You can't delete a <code>Rule</code> if it's still used in any <code>WebACL</code> objects or if it still includes any predicates, such as <code>ByteMatchSet</code> objects.</p> <p>If you just want to remove a <code>Rule</code> from a <code>WebACL</code>, use <a>UpdateWebACL</a>.</p> <p>To permanently delete a <code>Rule</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>Rule</code> to remove predicates, if any. For more information, see <a>UpdateRule</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRule</code> request.</p> </li> <li> <p>Submit a <code>DeleteRule</code> request.</p> </li> </ol></p>
fn delete_rule(
&self,
input: DeleteRuleRequest,
) -> RusotoFuture<DeleteRuleResponse, DeleteRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteRuleResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteRuleError::from_response(response))),
)
}
})
}
/// <p><p>Permanently deletes a <a>RuleGroup</a>. You can't delete a <code>RuleGroup</code> if it's still used in any <code>WebACL</code> objects or if it still includes any rules.</p> <p>If you just want to remove a <code>RuleGroup</code> from a <code>WebACL</code>, use <a>UpdateWebACL</a>.</p> <p>To permanently delete a <code>RuleGroup</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>RuleGroup</code> to remove rules, if any. For more information, see <a>UpdateRuleGroup</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteRuleGroup</code> request.</p> </li> <li> <p>Submit a <code>DeleteRuleGroup</code> request.</p> </li> </ol></p>
fn delete_rule_group(
&self,
input: DeleteRuleGroupRequest,
) -> RusotoFuture<DeleteRuleGroupResponse, DeleteRuleGroupError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteRuleGroup");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteRuleGroupResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteRuleGroupError::from_response(response))),
)
}
})
}
/// <p><p>Permanently deletes a <a>SizeConstraintSet</a>. You can't delete a <code>SizeConstraintSet</code> if it's still used in any <code>Rules</code> or if it still includes any <a>SizeConstraint</a> objects (any filters).</p> <p>If you just want to remove a <code>SizeConstraintSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>SizeConstraintSet</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>SizeConstraintSet</code> to remove filters, if any. For more information, see <a>UpdateSizeConstraintSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteSizeConstraintSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteSizeConstraintSet</code> request.</p> </li> </ol></p>
fn delete_size_constraint_set(
&self,
input: DeleteSizeConstraintSetRequest,
) -> RusotoFuture<DeleteSizeConstraintSetResponse, DeleteSizeConstraintSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteSizeConstraintSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteSizeConstraintSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(DeleteSizeConstraintSetError::from_response(response))
}))
}
})
}
/// <p><p>Permanently deletes a <a>SqlInjectionMatchSet</a>. You can't delete a <code>SqlInjectionMatchSet</code> if it's still used in any <code>Rules</code> or if it still contains any <a>SqlInjectionMatchTuple</a> objects.</p> <p>If you just want to remove a <code>SqlInjectionMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete a <code>SqlInjectionMatchSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>SqlInjectionMatchSet</code> to remove filters, if any. For more information, see <a>UpdateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteSqlInjectionMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteSqlInjectionMatchSet</code> request.</p> </li> </ol></p>
fn delete_sql_injection_match_set(
&self,
input: DeleteSqlInjectionMatchSetRequest,
) -> RusotoFuture<DeleteSqlInjectionMatchSetResponse, DeleteSqlInjectionMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteSqlInjectionMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteSqlInjectionMatchSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(DeleteSqlInjectionMatchSetError::from_response(response))
}))
}
})
}
/// <p><p>Permanently deletes a <a>WebACL</a>. You can't delete a <code>WebACL</code> if it still contains any <code>Rules</code>.</p> <p>To delete a <code>WebACL</code>, perform the following steps:</p> <ol> <li> <p>Update the <code>WebACL</code> to remove <code>Rules</code>, if any. For more information, see <a>UpdateWebACL</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteWebACL</code> request.</p> </li> <li> <p>Submit a <code>DeleteWebACL</code> request.</p> </li> </ol></p>
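    ///
    /// A rough sketch of the final delete step (not part of the generated documentation), assuming
    /// the web ACL has already been emptied of rules and that `DeleteWebACLRequest` exposes
    /// `change_token` and `web_acl_id`; the ID is a placeholder.
    ///
    /// ```rust,ignore
    /// use rusoto_core::Region;
    /// use rusoto_waf::{DeleteWebACLRequest, Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// let token = client
    ///     .get_change_token()
    ///     .sync()
    ///     .expect("GetChangeToken failed")
    ///     .change_token
    ///     .unwrap();
    ///
    /// client
    ///     .delete_web_acl(DeleteWebACLRequest {
    ///         change_token: token,
    ///         web_acl_id: "example-web-acl-id".to_string(),
    ///     })
    ///     .sync()
    ///     .expect("DeleteWebACL failed");
    /// ```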
fn delete_web_acl(
&self,
input: DeleteWebACLRequest,
) -> RusotoFuture<DeleteWebACLResponse, DeleteWebACLError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteWebACL");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteWebACLResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteWebACLError::from_response(response))),
)
}
})
}
/// <p><p>Permanently deletes an <a>XssMatchSet</a>. You can't delete an <code>XssMatchSet</code> if it's still used in any <code>Rules</code> or if it still contains any <a>XssMatchTuple</a> objects.</p> <p>If you just want to remove an <code>XssMatchSet</code> from a <code>Rule</code>, use <a>UpdateRule</a>.</p> <p>To permanently delete an <code>XssMatchSet</code> from AWS WAF, perform the following steps:</p> <ol> <li> <p>Update the <code>XssMatchSet</code> to remove filters, if any. For more information, see <a>UpdateXssMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of a <code>DeleteXssMatchSet</code> request.</p> </li> <li> <p>Submit a <code>DeleteXssMatchSet</code> request.</p> </li> </ol></p>
fn delete_xss_match_set(
&self,
input: DeleteXssMatchSetRequest,
) -> RusotoFuture<DeleteXssMatchSetResponse, DeleteXssMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.DeleteXssMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<DeleteXssMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteXssMatchSetError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>ByteMatchSet</a> specified by <code>ByteMatchSetId</code>.</p>
fn get_byte_match_set(
&self,
input: GetByteMatchSetRequest,
) -> RusotoFuture<GetByteMatchSetResponse, GetByteMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetByteMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetByteMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetByteMatchSetError::from_response(response))),
)
}
})
}
/// <p>When you want to create, update, or delete AWS WAF objects, get a change token and include the change token in the create, update, or delete request. Change tokens ensure that your application doesn't submit conflicting requests to AWS WAF.</p> <p>Each create, update, or delete request must use a unique change token. If your application submits a <code>GetChangeToken</code> request and then submits a second <code>GetChangeToken</code> request before submitting a create, update, or delete request, the second <code>GetChangeToken</code> request returns the same value as the first <code>GetChangeToken</code> request.</p> <p>When you use a change token in a create, update, or delete request, the status of the change token changes to <code>PENDING</code>, which indicates that AWS WAF is propagating the change to all AWS WAF servers. Use <code>GetChangeTokenStatus</code> to determine the status of your change token.</p>
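    ///
    /// A minimal usage sketch (not part of the generated documentation), assuming the `Waf` trait
    /// and `WafClient` from this crate and the blocking `RusotoFuture::sync` helper; it is marked
    /// `ignore` because it needs live AWS credentials.
    ///
    /// ```rust,ignore
    /// use rusoto_core::Region;
    /// use rusoto_waf::{Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// match client.get_change_token().sync() {
    ///     Ok(output) => println!("change token: {:?}", output.change_token),
    ///     Err(error) => eprintln!("GetChangeToken failed: {}", error),
    /// }
    /// ```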
fn get_change_token(&self) -> RusotoFuture<GetChangeTokenResponse, GetChangeTokenError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetChangeToken");
request.set_payload(Some(bytes::Bytes::from_static(b"{}")));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetChangeTokenResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetChangeTokenError::from_response(response))),
)
}
})
}
/// <p><p>Returns the status of a <code>ChangeToken</code> that you got by calling <a>GetChangeToken</a>. <code>ChangeTokenStatus</code> is one of the following values:</p> <ul> <li> <p> <code>PROVISIONED</code>: You requested the change token by calling <code>GetChangeToken</code>, but you haven't used it yet in a call to create, update, or delete an AWS WAF object.</p> </li> <li> <p> <code>PENDING</code>: AWS WAF is propagating the create, update, or delete request to all AWS WAF servers.</p> </li> <li> <p> <code>IN_SYNC</code>: Propagation is complete.</p> </li> </ul></p>
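    ///
    /// A polling sketch (not part of the generated documentation), assuming
    /// `GetChangeTokenStatusRequest` has a `change_token` field and the response exposes
    /// `change_token_status`; the token value is a placeholder, and the loop simply keeps waiting
    /// while the status is still <code>PENDING</code>.
    ///
    /// ```rust,ignore
    /// use std::{thread, time::Duration};
    ///
    /// use rusoto_core::Region;
    /// use rusoto_waf::{GetChangeTokenStatusRequest, Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// let token = "example-change-token".to_string();
    /// loop {
    ///     let status = client
    ///         .get_change_token_status(GetChangeTokenStatusRequest {
    ///             change_token: token.clone(),
    ///         })
    ///         .sync()
    ///         .expect("GetChangeTokenStatus failed")
    ///         .change_token_status;
    ///     match status.as_ref().map(String::as_str) {
    ///         // Still propagating to the AWS WAF servers; wait and ask again.
    ///         Some("PENDING") => thread::sleep(Duration::from_secs(5)),
    ///         other => {
    ///             println!("final status: {:?}", other);
    ///             break;
    ///         }
    ///     }
    /// }
    /// ```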
fn get_change_token_status(
&self,
input: GetChangeTokenStatusRequest,
) -> RusotoFuture<GetChangeTokenStatusResponse, GetChangeTokenStatusError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetChangeTokenStatus");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetChangeTokenStatusResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(GetChangeTokenStatusError::from_response(response))
}),
)
}
})
}
/// <p>Returns the <a>GeoMatchSet</a> that is specified by <code>GeoMatchSetId</code>.</p>
fn get_geo_match_set(
&self,
input: GetGeoMatchSetRequest,
) -> RusotoFuture<GetGeoMatchSetResponse, GetGeoMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetGeoMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetGeoMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetGeoMatchSetError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>IPSet</a> that is specified by <code>IPSetId</code>.</p>
fn get_ip_set(&self, input: GetIPSetRequest) -> RusotoFuture<GetIPSetResponse, GetIPSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetIPSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetIPSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetIPSetError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>LoggingConfiguration</a> for the specified web ACL.</p>
fn get_logging_configuration(
&self,
input: GetLoggingConfigurationRequest,
) -> RusotoFuture<GetLoggingConfigurationResponse, GetLoggingConfigurationError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetLoggingConfiguration");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetLoggingConfigurationResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(GetLoggingConfigurationError::from_response(response))
}))
}
})
}
/// <p>Returns the IAM policy attached to the RuleGroup.</p>
fn get_permission_policy(
&self,
input: GetPermissionPolicyRequest,
) -> RusotoFuture<GetPermissionPolicyResponse, GetPermissionPolicyError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetPermissionPolicy");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetPermissionPolicyResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(GetPermissionPolicyError::from_response(response))
}),
)
}
})
}
/// <p>Returns the <a>RateBasedRule</a> that is specified by the <code>RuleId</code> that you included in the <code>GetRateBasedRule</code> request.</p>
fn get_rate_based_rule(
&self,
input: GetRateBasedRuleRequest,
) -> RusotoFuture<GetRateBasedRuleResponse, GetRateBasedRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetRateBasedRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetRateBasedRuleResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetRateBasedRuleError::from_response(response))),
)
}
})
}
/// <p>Returns an array of IP addresses currently being blocked by the <a>RateBasedRule</a> that is specified by the <code>RuleId</code>. The maximum number of managed keys that will be blocked is 10,000. If more than 10,000 addresses exceed the rate limit, the 10,000 addresses with the highest rates will be blocked.</p>
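    ///
    /// A paging sketch (not part of the generated documentation), assuming the request exposes
    /// `rule_id` and `next_marker` fields and the response exposes `managed_keys` and
    /// `next_marker`; the rule ID is a placeholder.
    ///
    /// ```rust,ignore
    /// use rusoto_core::Region;
    /// use rusoto_waf::{GetRateBasedRuleManagedKeysRequest, Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// let mut marker: Option<String> = None;
    /// loop {
    ///     let page = client
    ///         .get_rate_based_rule_managed_keys(GetRateBasedRuleManagedKeysRequest {
    ///             rule_id: "example-rate-based-rule-id".to_string(),
    ///             next_marker: marker.clone(),
    ///         })
    ///         .sync()
    ///         .expect("GetRateBasedRuleManagedKeys failed");
    ///     for key in page.managed_keys.unwrap_or_default() {
    ///         println!("blocked address: {}", key);
    ///     }
    ///     marker = page.next_marker;
    ///     if marker.is_none() {
    ///         break;
    ///     }
    /// }
    /// ```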
fn get_rate_based_rule_managed_keys(
&self,
input: GetRateBasedRuleManagedKeysRequest,
) -> RusotoFuture<GetRateBasedRuleManagedKeysResponse, GetRateBasedRuleManagedKeysError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header(
"x-amz-target",
"AWSWAF_20150824.GetRateBasedRuleManagedKeys",
);
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetRateBasedRuleManagedKeysResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(GetRateBasedRuleManagedKeysError::from_response(response))
}))
}
})
}
/// <p>Returns the <a>RegexMatchSet</a> specified by <code>RegexMatchSetId</code>.</p>
fn get_regex_match_set(
&self,
input: GetRegexMatchSetRequest,
) -> RusotoFuture<GetRegexMatchSetResponse, GetRegexMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetRegexMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetRegexMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetRegexMatchSetError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>RegexPatternSet</a> specified by <code>RegexPatternSetId</code>.</p>
fn get_regex_pattern_set(
&self,
input: GetRegexPatternSetRequest,
) -> RusotoFuture<GetRegexPatternSetResponse, GetRegexPatternSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetRegexPatternSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetRegexPatternSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetRegexPatternSetError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>Rule</a> that is specified by the <code>RuleId</code> that you included in the <code>GetRule</code> request.</p>
fn get_rule(&self, input: GetRuleRequest) -> RusotoFuture<GetRuleResponse, GetRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response).deserialize::<GetRuleResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetRuleError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>RuleGroup</a> that is specified by the <code>RuleGroupId</code> that you included in the <code>GetRuleGroup</code> request.</p> <p>To view the rules in a rule group, use <a>ListActivatedRulesInRuleGroup</a>.</p>
fn get_rule_group(
&self,
input: GetRuleGroupRequest,
) -> RusotoFuture<GetRuleGroupResponse, GetRuleGroupError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetRuleGroup");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetRuleGroupResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetRuleGroupError::from_response(response))),
)
}
})
}
/// <p>Gets detailed information about a specified number of requests--a sample--that AWS WAF randomly selects from among the first 5,000 requests that your AWS resource received during a time range that you choose. You can specify a sample size of up to 500 requests, and you can specify any time range in the previous three hours.</p> <p> <code>GetSampledRequests</code> returns a time range, which is usually the time range that you specified. However, if your resource (such as a CloudFront distribution) received 5,000 requests before the specified time range elapsed, <code>GetSampledRequests</code> returns an updated time range. This new time range indicates the actual period during which AWS WAF selected the requests in the sample.</p>
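    ///
    /// A rough sketch (not part of the generated documentation), assuming the request exposes
    /// `web_acl_id`, `rule_id`, `max_items`, and a `time_window` whose `start_time`/`end_time` are
    /// epoch-second floating-point values; the IDs and the time range are placeholders.
    ///
    /// ```rust,ignore
    /// use rusoto_core::Region;
    /// use rusoto_waf::{GetSampledRequestsRequest, TimeWindow, Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// let sample = client
    ///     .get_sampled_requests(GetSampledRequestsRequest {
    ///         web_acl_id: "example-web-acl-id".to_string(),
    ///         rule_id: "example-rule-id".to_string(),
    ///         max_items: 100,
    ///         // Any window inside the previous three hours, expressed as epoch seconds.
    ///         time_window: TimeWindow {
    ///             start_time: 1_546_300_800.0,
    ///             end_time: 1_546_301_400.0,
    ///         },
    ///     })
    ///     .sync()
    ///     .expect("GetSampledRequests failed");
    /// println!("{:?}", sample);
    /// ```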
fn get_sampled_requests(
&self,
input: GetSampledRequestsRequest,
) -> RusotoFuture<GetSampledRequestsResponse, GetSampledRequestsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetSampledRequests");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetSampledRequestsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetSampledRequestsError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>SizeConstraintSet</a> specified by <code>SizeConstraintSetId</code>.</p>
fn get_size_constraint_set(
&self,
input: GetSizeConstraintSetRequest,
) -> RusotoFuture<GetSizeConstraintSetResponse, GetSizeConstraintSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetSizeConstraintSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetSizeConstraintSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(GetSizeConstraintSetError::from_response(response))
}),
)
}
})
}
/// <p>Returns the <a>SqlInjectionMatchSet</a> that is specified by <code>SqlInjectionMatchSetId</code>.</p>
fn get_sql_injection_match_set(
&self,
input: GetSqlInjectionMatchSetRequest,
) -> RusotoFuture<GetSqlInjectionMatchSetResponse, GetSqlInjectionMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetSqlInjectionMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetSqlInjectionMatchSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(GetSqlInjectionMatchSetError::from_response(response))
}))
}
})
}
/// <p>Returns the <a>WebACL</a> that is specified by <code>WebACLId</code>.</p>
fn get_web_acl(
&self,
input: GetWebACLRequest,
) -> RusotoFuture<GetWebACLResponse, GetWebACLError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetWebACL");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetWebACLResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetWebACLError::from_response(response))),
)
}
})
}
/// <p>Returns the <a>XssMatchSet</a> that is specified by <code>XssMatchSetId</code>.</p>
fn get_xss_match_set(
&self,
input: GetXssMatchSetRequest,
) -> RusotoFuture<GetXssMatchSetResponse, GetXssMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.GetXssMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<GetXssMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetXssMatchSetError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>ActivatedRule</a> objects.</p>
fn list_activated_rules_in_rule_group(
&self,
input: ListActivatedRulesInRuleGroupRequest,
) -> RusotoFuture<ListActivatedRulesInRuleGroupResponse, ListActivatedRulesInRuleGroupError>
{
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header(
"x-amz-target",
"AWSWAF_20150824.ListActivatedRulesInRuleGroup",
);
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListActivatedRulesInRuleGroupResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(ListActivatedRulesInRuleGroupError::from_response(response))
}))
}
})
}
/// <p>Returns an array of <a>ByteMatchSetSummary</a> objects.</p>
fn list_byte_match_sets(
&self,
input: ListByteMatchSetsRequest,
) -> RusotoFuture<ListByteMatchSetsResponse, ListByteMatchSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListByteMatchSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListByteMatchSetsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListByteMatchSetsError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>GeoMatchSetSummary</a> objects in the response.</p>
fn list_geo_match_sets(
&self,
input: ListGeoMatchSetsRequest,
) -> RusotoFuture<ListGeoMatchSetsResponse, ListGeoMatchSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListGeoMatchSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListGeoMatchSetsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListGeoMatchSetsError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>IPSetSummary</a> objects in the response.</p>
fn list_ip_sets(
&self,
input: ListIPSetsRequest,
) -> RusotoFuture<ListIPSetsResponse, ListIPSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListIPSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListIPSetsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListIPSetsError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>LoggingConfiguration</a> objects.</p>
fn list_logging_configurations(
&self,
input: ListLoggingConfigurationsRequest,
) -> RusotoFuture<ListLoggingConfigurationsResponse, ListLoggingConfigurationsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListLoggingConfigurations");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListLoggingConfigurationsResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(ListLoggingConfigurationsError::from_response(response))
}))
}
})
}
/// <p>Returns an array of <a>RuleSummary</a> objects.</p>
fn list_rate_based_rules(
&self,
input: ListRateBasedRulesRequest,
) -> RusotoFuture<ListRateBasedRulesResponse, ListRateBasedRulesError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListRateBasedRules");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListRateBasedRulesResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListRateBasedRulesError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>RegexMatchSetSummary</a> objects.</p>
fn list_regex_match_sets(
&self,
input: ListRegexMatchSetsRequest,
) -> RusotoFuture<ListRegexMatchSetsResponse, ListRegexMatchSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListRegexMatchSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListRegexMatchSetsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListRegexMatchSetsError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>RegexPatternSetSummary</a> objects.</p>
fn list_regex_pattern_sets(
&self,
input: ListRegexPatternSetsRequest,
) -> RusotoFuture<ListRegexPatternSetsResponse, ListRegexPatternSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListRegexPatternSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListRegexPatternSetsResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(ListRegexPatternSetsError::from_response(response))
}),
)
}
})
}
/// <p>Returns an array of <a>RuleGroup</a> objects.</p>
fn list_rule_groups(
&self,
input: ListRuleGroupsRequest,
) -> RusotoFuture<ListRuleGroupsResponse, ListRuleGroupsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListRuleGroups");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListRuleGroupsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListRuleGroupsError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>RuleSummary</a> objects.</p>
fn list_rules(
&self,
input: ListRulesRequest,
) -> RusotoFuture<ListRulesResponse, ListRulesError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListRules");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListRulesResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListRulesError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>SizeConstraintSetSummary</a> objects.</p>
fn list_size_constraint_sets(
&self,
input: ListSizeConstraintSetsRequest,
) -> RusotoFuture<ListSizeConstraintSetsResponse, ListSizeConstraintSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListSizeConstraintSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListSizeConstraintSetsResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(ListSizeConstraintSetsError::from_response(response))
}),
)
}
})
}
/// <p>Returns an array of <a>SqlInjectionMatchSet</a> objects.</p>
fn list_sql_injection_match_sets(
&self,
input: ListSqlInjectionMatchSetsRequest,
) -> RusotoFuture<ListSqlInjectionMatchSetsResponse, ListSqlInjectionMatchSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListSqlInjectionMatchSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListSqlInjectionMatchSetsResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(ListSqlInjectionMatchSetsError::from_response(response))
}))
}
})
}
/// <p>Returns an array of <a>RuleGroup</a> objects that you are subscribed to.</p>
fn list_subscribed_rule_groups(
&self,
input: ListSubscribedRuleGroupsRequest,
) -> RusotoFuture<ListSubscribedRuleGroupsResponse, ListSubscribedRuleGroupsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListSubscribedRuleGroups");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListSubscribedRuleGroupsResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(ListSubscribedRuleGroupsError::from_response(response))
}))
}
})
}
/// <p>Returns an array of <a>WebACLSummary</a> objects in the response.</p>
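    ///
    /// A minimal listing sketch (not part of the generated documentation), assuming
    /// `ListWebACLsRequest` derives `Default` and exposes an optional `limit` field. The response
    /// carries the summaries plus a marker for fetching the next page.
    ///
    /// ```rust,ignore
    /// use rusoto_core::Region;
    /// use rusoto_waf::{ListWebACLsRequest, Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// let listing = client
    ///     .list_web_ac_ls(ListWebACLsRequest {
    ///         limit: Some(10),
    ///         ..Default::default()
    ///     })
    ///     .sync()
    ///     .expect("ListWebACLs failed");
    /// println!("{:?}", listing);
    /// ```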
fn list_web_ac_ls(
&self,
input: ListWebACLsRequest,
) -> RusotoFuture<ListWebACLsResponse, ListWebACLsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListWebACLs");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListWebACLsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListWebACLsError::from_response(response))),
)
}
})
}
/// <p>Returns an array of <a>XssMatchSet</a> objects.</p>
fn list_xss_match_sets(
&self,
input: ListXssMatchSetsRequest,
) -> RusotoFuture<ListXssMatchSetsResponse, ListXssMatchSetsError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.ListXssMatchSets");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<ListXssMatchSetsResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListXssMatchSetsError::from_response(response))),
)
}
})
}
    /// <p>Associates a <a>LoggingConfiguration</a> with a specified web ACL.</p> <p>You can access information about all traffic that AWS WAF inspects using the following steps:</p> <ol> <li> <p>Create an Amazon Kinesis Data Firehose delivery stream.</p> </li> <li> <p>Associate that delivery stream with your web ACL using a <code>PutLoggingConfiguration</code> request.</p> </li> </ol> <p>When you successfully enable logging using a <code>PutLoggingConfiguration</code> request, AWS WAF will create a service-linked role with the necessary permissions to write logs to the Amazon Kinesis Data Firehose delivery stream. For more information, see <a href="http://docs.aws.amazon.com/waf/latest/developerguide/logging.html">Logging Web ACL Traffic Information</a> in the <i>AWS WAF Developer Guide</i>.</p>
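    ///
    /// A rough sketch (not part of the generated documentation), assuming `LoggingConfiguration`
    /// exposes `resource_arn`, `log_destination_configs`, and optional `redacted_fields`; both ARNs
    /// are placeholders, and the Kinesis Data Firehose delivery stream must already exist.
    ///
    /// ```rust,ignore
    /// use rusoto_core::Region;
    /// use rusoto_waf::{LoggingConfiguration, PutLoggingConfigurationRequest, Waf, WafClient};
    ///
    /// let client = WafClient::new(Region::UsEast1);
    /// let result = client
    ///     .put_logging_configuration(PutLoggingConfigurationRequest {
    ///         logging_configuration: LoggingConfiguration {
    ///             resource_arn: "arn:aws:waf::123456789012:webacl/example-id".to_string(),
    ///             log_destination_configs: vec![
    ///                 "arn:aws:firehose:us-east-1:123456789012:deliverystream/aws-waf-logs-example"
    ///                     .to_string(),
    ///             ],
    ///             redacted_fields: None,
    ///         },
    ///     })
    ///     .sync()
    ///     .expect("PutLoggingConfiguration failed");
    /// println!("{:?}", result);
    /// ```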
fn put_logging_configuration(
&self,
input: PutLoggingConfigurationRequest,
) -> RusotoFuture<PutLoggingConfigurationResponse, PutLoggingConfigurationError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.PutLoggingConfiguration");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<PutLoggingConfigurationResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(PutLoggingConfigurationError::from_response(response))
}))
}
})
}
    /// <p>Attaches an IAM policy to the specified resource. The only supported use for this action is to share a RuleGroup across accounts.</p> <p>The <code>PutPermissionPolicy</code> is subject to the following restrictions:</p> <ul> <li> <p>You can attach only one policy with each <code>PutPermissionPolicy</code> request.</p> </li> <li> <p>The policy must include an <code>Effect</code>, <code>Action</code> and <code>Principal</code>. </p> </li> <li> <p> <code>Effect</code> must specify <code>Allow</code>.</p> </li> <li> <p>The <code>Action</code> in the policy must be <code>waf:UpdateWebACL</code>, <code>waf-regional:UpdateWebACL</code>, <code>waf:GetRuleGroup</code> and <code>waf-regional:GetRuleGroup</code>. Any extra or wildcard actions in the policy will be rejected.</p> </li> <li> <p>The policy cannot include a <code>Resource</code> parameter.</p> </li> <li> <p>The ARN in the request must be a valid WAF RuleGroup ARN and the RuleGroup must exist in the same region.</p> </li> <li> <p>The user making the request must be the owner of the RuleGroup.</p> </li> <li> <p>Your policy must be composed using IAM Policy version 2012-10-17.</p> </li> </ul> <p>For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html">IAM Policies</a>. </p> <p>An example of a valid policy parameter is shown in the Examples section below.</p>
fn put_permission_policy(
&self,
input: PutPermissionPolicyRequest,
) -> RusotoFuture<PutPermissionPolicyResponse, PutPermissionPolicyError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.PutPermissionPolicy");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<PutPermissionPolicyResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(PutPermissionPolicyError::from_response(response))
}),
)
}
})
}
/// <p>Inserts or deletes <a>ByteMatchTuple</a> objects (filters) in a <a>ByteMatchSet</a>. For each <code>ByteMatchTuple</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>ByteMatchSetUpdate</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The part of a web request that you want AWS WAF to inspect, such as a query string or the value of the <code>User-Agent</code> header. </p> </li> <li> <p>The bytes (typically a string that corresponds with ASCII characters) that you want AWS WAF to look for. For more information, including how you specify the values for the AWS WAF API and the AWS CLI or SDKs, see <code>TargetString</code> in the <a>ByteMatchTuple</a> data type. </p> </li> <li> <p>Where to look, such as at the beginning or the end of a query string.</p> </li> <li> <p>Whether to perform any conversions on the request, such as converting it to lowercase, before inspecting it for the specified string.</p> </li> </ul> <p>For example, you can add a <code>ByteMatchSetUpdate</code> object that matches web requests in which <code>User-Agent</code> headers contain the string <code>BadBot</code>. You can then configure AWS WAF to block those requests.</p> <p>To create and configure a <code>ByteMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>ByteMatchSet.</code> For more information, see <a>CreateByteMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateByteMatchSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateByteMatchSet</code> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_byte_match_set(
&self,
input: UpdateByteMatchSetRequest,
) -> RusotoFuture<UpdateByteMatchSetResponse, UpdateByteMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateByteMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateByteMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateByteMatchSetError::from_response(response))),
)
}
})
}
    /// <p>Inserts or deletes <a>GeoMatchConstraint</a> objects in a <code>GeoMatchSet</code>. For each <code>GeoMatchConstraint</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>GeoMatchConstraint</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The <code>Type</code>. The only valid value for <code>Type</code> is <code>Country</code>.</p> </li> <li> <p>The <code>Value</code>, which is a two-character code for the country to add to the <code>GeoMatchConstraint</code> object. Valid codes are listed in <a>GeoMatchConstraint$Value</a>.</p> </li> </ul> <p>To create and configure a <code>GeoMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateGeoMatchSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateGeoMatchSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateGeoMatchSet</code> request to specify the country that you want AWS WAF to watch for.</p> </li> </ol> <p>When you update a <code>GeoMatchSet</code>, you specify the country that you want to add and/or the country that you want to delete. If you want to change a country, you delete the existing country and add the new one.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_geo_match_set(
&self,
input: UpdateGeoMatchSetRequest,
) -> RusotoFuture<UpdateGeoMatchSetResponse, UpdateGeoMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateGeoMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateGeoMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateGeoMatchSetError::from_response(response))),
)
}
})
}
/// <p>Inserts or deletes <a>IPSetDescriptor</a> objects in an <code>IPSet</code>. For each <code>IPSetDescriptor</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change an <code>IPSetDescriptor</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The IP address version, <code>IPv4</code> or <code>IPv6</code>. </p> </li> <li> <p>The IP address in CIDR notation, for example, <code>192.0.2.0/24</code> (for the range of IP addresses from <code>192.0.2.0</code> to <code>192.0.2.255</code>) or <code>192.0.2.44/32</code> (for the individual IP address <code>192.0.2.44</code>). </p> </li> </ul> <p>AWS WAF supports IPv4 address ranges: /8 and any range between /16 through /32. AWS WAF supports IPv6 address ranges: /16, /24, /32, /48, /56, /64, and /128. For more information about CIDR notation, see the Wikipedia entry <a href="https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing">Classless Inter-Domain Routing</a>.</p> <p>IPv6 addresses can be represented using any of the following formats:</p> <ul> <li> <p>1111:0000:0000:0000:0000:0000:0000:0111/128</p> </li> <li> <p>1111:0:0:0:0:0:0:0111/128</p> </li> <li> <p>1111::0111/128</p> </li> <li> <p>1111::111/128</p> </li> </ul> <p>You use an <code>IPSet</code> to specify which web requests you want to allow or block based on the IP addresses that the requests originated from. For example, if you're receiving a lot of requests from one or a small number of IP addresses and you want to block the requests, you can create an <code>IPSet</code> that specifies those IP addresses, and then configure AWS WAF to block the requests. </p> <p>To create and configure an <code>IPSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateIPSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateIPSet</code> request to specify the IP addresses that you want AWS WAF to watch for.</p> </li> </ol> <p>When you update an <code>IPSet</code>, you specify the IP addresses that you want to add and/or the IP addresses that you want to delete. If you want to change an IP address, you delete the existing IP address and add the new one.</p> <p>You can insert a maximum of 1000 addresses in a single request.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_ip_set(
&self,
input: UpdateIPSetRequest,
) -> RusotoFuture<UpdateIPSetResponse, UpdateIPSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateIPSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateIPSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateIPSetError::from_response(response))),
)
}
})
}
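    // The change-token flow described above, as a hedged comment-only sketch.
    // The request and descriptor type names are assumptions based on the AWS WAF
    // data types referenced in the docs; verify against the generated types in
    // this crate before use.
    //
    //   let token = client.get_change_token(GetChangeTokenRequest {}).sync()?
    //       .change_token.expect("change token");
    //   client.update_ip_set(UpdateIPSetRequest {
    //       change_token: token,
    //       ip_set_id: "example-ip-set-id".to_string(),
    //       updates: vec![IPSetUpdate {
    //           action: "INSERT".to_string(),
    //           ip_set_descriptor: IPSetDescriptor {
    //               type_: "IPV4".to_string(),
    //               value: "192.0.2.44/32".to_string(),
    //           },
    //       }],
    //   }).sync()?;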
    /// <p>Inserts or deletes <a>Predicate</a> objects in a rule and updates the <code>RateLimit</code> in the rule. </p> <p>Each <code>Predicate</code> object identifies a predicate, such as a <a>ByteMatchSet</a> or an <a>IPSet</a>, that specifies the web requests that you want to block or count. The <code>RateLimit</code> specifies the number of requests every five minutes that triggers the rule.</p> <p>If you add more than one predicate to a <code>RateBasedRule</code>, a request must match all the predicates and exceed the <code>RateLimit</code> to be counted or blocked. For example, suppose you add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44/32</code> </p> </li> <li> <p>A <code>ByteMatchSet</code> that matches <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>You then add the <code>RateBasedRule</code> to a <code>WebACL</code> and specify that you want to block requests that satisfy the rule. For a request to be blocked, it must come from the IP address 192.0.2.44 <i>and</i> the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code>. Further, requests that match these two conditions must be received at a rate of more than 15,000 every five minutes. If the rate drops below this limit, AWS WAF no longer blocks the requests.</p> <p>As a second example, suppose you want to limit requests to a particular page on your site. To do this, you could add the following to a <code>RateBasedRule</code>:</p> <ul> <li> <p>A <code>ByteMatchSet</code> with <code>FieldToMatch</code> of <code>URI</code> </p> </li> <li> <p>A <code>PositionalConstraint</code> of <code>STARTS_WITH</code> </p> </li> <li> <p>A <code>TargetString</code> of <code>login</code> </p> </li> </ul> <p>Further, you specify a <code>RateLimit</code> of 15,000.</p> <p>By adding this <code>RateBasedRule</code> to a <code>WebACL</code>, you could limit requests to your login page without affecting the rest of your site.</p>
fn update_rate_based_rule(
&self,
input: UpdateRateBasedRuleRequest,
) -> RusotoFuture<UpdateRateBasedRuleResponse, UpdateRateBasedRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateRateBasedRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateRateBasedRuleResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(UpdateRateBasedRuleError::from_response(response))
}),
)
}
})
}
    /// <p>Inserts or deletes <a>RegexMatchTuple</a> objects (filters) in a <a>RegexMatchSet</a>. For each <code>RegexMatchSetUpdate</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>RegexMatchSetUpdate</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The part of a web request that you want AWS WAF to inspect, such as a query string or the value of the <code>User-Agent</code> header. </p> </li> <li> <p>The identifier of the pattern (a regular expression) that you want AWS WAF to look for. For more information, see <a>RegexPatternSet</a>. </p> </li> <li> <p>Whether to perform any conversions on the request, such as converting it to lowercase, before inspecting it for the specified string.</p> </li> </ul> <p> For example, you can create a <code>RegexPatternSet</code> that matches any requests with <code>User-Agent</code> headers that contain the string <code>B[a@]dB[o0]t</code>. You can then configure AWS WAF to reject those requests.</p> <p>To create and configure a <code>RegexMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>RegexMatchSet.</code> For more information, see <a>CreateRegexMatchSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexMatchSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateRegexMatchSet</code> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the identifier of the <code>RegexPatternSet</code> that contains the regular expression patterns you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_regex_match_set(
&self,
input: UpdateRegexMatchSetRequest,
) -> RusotoFuture<UpdateRegexMatchSetResponse, UpdateRegexMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateRegexMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateRegexMatchSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(UpdateRegexMatchSetError::from_response(response))
}),
)
}
})
}
/// <p>Inserts or deletes <code>RegexPatternString</code> objects in a <a>RegexPatternSet</a>. For each <code>RegexPatternString</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the <code>RegexPatternString</code>.</p> </li> <li> <p>The regular expression pattern that you want to insert or delete. For more information, see <a>RegexPatternSet</a>. </p> </li> </ul> <p> For example, you can create a <code>RegexPatternString</code> such as <code>B[a@]dB[o0]t</code>. AWS WAF will match this <code>RegexPatternString</code> to:</p> <ul> <li> <p>BadBot</p> </li> <li> <p>BadB0t</p> </li> <li> <p>B@dBot</p> </li> <li> <p>B@dB0t</p> </li> </ul> <p>To create and configure a <code>RegexPatternSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>RegexPatternSet.</code> For more information, see <a>CreateRegexPatternSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateRegexPatternSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateRegexPatternSet</code> request to specify the regular expression pattern that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_regex_pattern_set(
&self,
input: UpdateRegexPatternSetRequest,
) -> RusotoFuture<UpdateRegexPatternSetResponse, UpdateRegexPatternSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateRegexPatternSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateRegexPatternSetResponse, _>()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(UpdateRegexPatternSetError::from_response(response))
}),
)
}
})
}
/// <p>Inserts or deletes <a>Predicate</a> objects in a <code>Rule</code>. Each <code>Predicate</code> object identifies a predicate, such as a <a>ByteMatchSet</a> or an <a>IPSet</a>, that specifies the web requests that you want to allow, block, or count. If you add more than one predicate to a <code>Rule</code>, a request must match all of the specifications to be allowed, blocked, or counted. For example, suppose that you add the following to a <code>Rule</code>: </p> <ul> <li> <p>A <code>ByteMatchSet</code> that matches the value <code>BadBot</code> in the <code>User-Agent</code> header</p> </li> <li> <p>An <code>IPSet</code> that matches the IP address <code>192.0.2.44</code> </p> </li> </ul> <p>You then add the <code>Rule</code> to a <code>WebACL</code> and specify that you want to block requests that satisfy the <code>Rule</code>. For a request to be blocked, the <code>User-Agent</code> header in the request must contain the value <code>BadBot</code> <i>and</i> the request must originate from the IP address 192.0.2.44.</p> <p>To create and configure a <code>Rule</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in the <code>Rule</code>.</p> </li> <li> <p>Create the <code>Rule</code>. See <a>CreateRule</a>.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRule</a> request.</p> </li> <li> <p>Submit an <code>UpdateRule</code> request to add predicates to the <code>Rule</code>.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>Rule</code>. See <a>CreateWebACL</a>.</p> </li> </ol> <p>If you want to replace one <code>ByteMatchSet</code> or <code>IPSet</code> with another, you delete the existing one and add the new one.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_rule(
&self,
input: UpdateRuleRequest,
) -> RusotoFuture<UpdateRuleResponse, UpdateRuleError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateRule");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateRuleResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateRuleError::from_response(response))),
)
}
})
}
/// <p>Inserts or deletes <a>ActivatedRule</a> objects in a <code>RuleGroup</code>.</p> <p>You can only insert <code>REGULAR</code> rules into a rule group.</p> <p>You can have a maximum of ten rules per rule group.</p> <p>To create and configure a <code>RuleGroup</code>, perform the following steps:</p> <ol> <li> <p>Create and update the <code>Rules</code> that you want to include in the <code>RuleGroup</code>. See <a>CreateRule</a>.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateRuleGroup</a> request.</p> </li> <li> <p>Submit an <code>UpdateRuleGroup</code> request to add <code>Rules</code> to the <code>RuleGroup</code>.</p> </li> <li> <p>Create and update a <code>WebACL</code> that contains the <code>RuleGroup</code>. See <a>CreateWebACL</a>.</p> </li> </ol> <p>If you want to replace one <code>Rule</code> with another, you delete the existing one and add the new one.</p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_rule_group(
&self,
input: UpdateRuleGroupRequest,
) -> RusotoFuture<UpdateRuleGroupResponse, UpdateRuleGroupError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateRuleGroup");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateRuleGroupResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateRuleGroupError::from_response(response))),
)
}
})
}
/// <p>Inserts or deletes <a>SizeConstraint</a> objects (filters) in a <a>SizeConstraintSet</a>. For each <code>SizeConstraint</code> object, you specify the following values: </p> <ul> <li> <p>Whether to insert or delete the object from the array. If you want to change a <code>SizeConstraintSetUpdate</code> object, you delete the existing object and add a new one.</p> </li> <li> <p>The part of a web request that you want AWS WAF to evaluate, such as the length of a query string or the length of the <code>User-Agent</code> header.</p> </li> <li> <p>Whether to perform any transformations on the request, such as converting it to lowercase, before checking its length. Note that transformations of the request body are not supported because the AWS resource forwards only the first <code>8192</code> bytes of your request to AWS WAF.</p> <p>You can only specify a single type of TextTransformation.</p> </li> <li> <p>A <code>ComparisonOperator</code> used for evaluating the selected part of the request against the specified <code>Size</code>, such as equals, greater than, less than, and so on.</p> </li> <li> <p>The length, in bytes, that you want AWS WAF to watch for in selected part of the request. The length is computed after applying the transformation.</p> </li> </ul> <p>For example, you can add a <code>SizeConstraintSetUpdate</code> object that matches web requests in which the length of the <code>User-Agent</code> header is greater than 100 bytes. You can then configure AWS WAF to block those requests.</p> <p>To create and configure a <code>SizeConstraintSet</code>, perform the following steps:</p> <ol> <li> <p>Create a <code>SizeConstraintSet.</code> For more information, see <a>CreateSizeConstraintSet</a>.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <code>UpdateSizeConstraintSet</code> request.</p> </li> <li> <p>Submit an <code>UpdateSizeConstraintSet</code> request to specify the part of the request that you want AWS WAF to inspect (for example, the header or the URI) and the value that you want AWS WAF to watch for.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_size_constraint_set(
&self,
input: UpdateSizeConstraintSetRequest,
) -> RusotoFuture<UpdateSizeConstraintSetResponse, UpdateSizeConstraintSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateSizeConstraintSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateSizeConstraintSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(UpdateSizeConstraintSetError::from_response(response))
}))
}
})
}
/// <p>Inserts or deletes <a>SqlInjectionMatchTuple</a> objects (filters) in a <a>SqlInjectionMatchSet</a>. For each <code>SqlInjectionMatchTuple</code> object, you specify the following values:</p> <ul> <li> <p> <code>Action</code>: Whether to insert the object into or delete the object from the array. To change a <code>SqlInjectionMatchTuple</code>, you delete the existing object and add a new one.</p> </li> <li> <p> <code>FieldToMatch</code>: The part of web requests that you want AWS WAF to inspect and, if you want AWS WAF to inspect a header or custom query parameter, the name of the header or parameter.</p> </li> <li> <p> <code>TextTransformation</code>: Which text transformation, if any, to perform on the web request before inspecting the request for snippets of malicious SQL code.</p> <p>You can only specify a single type of TextTransformation.</p> </li> </ul> <p>You use <code>SqlInjectionMatchSet</code> objects to specify which CloudFront requests that you want to allow, block, or count. For example, if you're receiving requests that contain snippets of SQL code in the query string and you want to block the requests, you can create a <code>SqlInjectionMatchSet</code> with the applicable settings, and then configure AWS WAF to block the requests. </p> <p>To create and configure a <code>SqlInjectionMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateSqlInjectionMatchSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateSqlInjectionMatchSet</code> request to specify the parts of web requests that you want AWS WAF to inspect for snippets of SQL code.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_sql_injection_match_set(
&self,
input: UpdateSqlInjectionMatchSetRequest,
) -> RusotoFuture<UpdateSqlInjectionMatchSetResponse, UpdateSqlInjectionMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateSqlInjectionMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateSqlInjectionMatchSetResponse, _>()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(UpdateSqlInjectionMatchSetError::from_response(response))
}))
}
})
}
    /// <p>Inserts or deletes <a>ActivatedRule</a> objects in a <code>WebACL</code>. Each <code>Rule</code> identifies web requests that you want to allow, block, or count. When you update a <code>WebACL</code>, you specify the following values:</p> <ul> <li> <p>A default action for the <code>WebACL</code>, either <code>ALLOW</code> or <code>BLOCK</code>. AWS WAF performs the default action if a request doesn't match the criteria in any of the <code>Rules</code> in a <code>WebACL</code>.</p> </li> <li> <p>The <code>Rules</code> that you want to add or delete. If you want to replace one <code>Rule</code> with another, you delete the existing <code>Rule</code> and add the new one.</p> </li> <li> <p>For each <code>Rule</code>, whether you want AWS WAF to allow requests, block requests, or count requests that match the conditions in the <code>Rule</code>.</p> </li> <li> <p>The order in which you want AWS WAF to evaluate the <code>Rules</code> in a <code>WebACL</code>. If you add more than one <code>Rule</code> to a <code>WebACL</code>, AWS WAF evaluates each request against the <code>Rules</code> in order based on the value of <code>Priority</code>. (The <code>Rule</code> that has the lowest value for <code>Priority</code> is evaluated first.) When a web request matches all the predicates (such as <code>ByteMatchSets</code> and <code>IPSets</code>) in a <code>Rule</code>, AWS WAF immediately takes the corresponding action, allow or block, and doesn't evaluate the request against the remaining <code>Rules</code> in the <code>WebACL</code>, if any. </p> </li> </ul> <p>To create and configure a <code>WebACL</code>, perform the following steps:</p> <ol> <li> <p>Create and update the predicates that you want to include in <code>Rules</code>. For more information, see <a>CreateByteMatchSet</a>, <a>UpdateByteMatchSet</a>, <a>CreateIPSet</a>, <a>UpdateIPSet</a>, <a>CreateSqlInjectionMatchSet</a>, and <a>UpdateSqlInjectionMatchSet</a>.</p> </li> <li> <p>Create and update the <code>Rules</code> that you want to include in the <code>WebACL</code>. For more information, see <a>CreateRule</a> and <a>UpdateRule</a>.</p> </li> <li> <p>Create a <code>WebACL</code>. See <a>CreateWebACL</a>.</p> </li> <li> <p>Use <code>GetChangeToken</code> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateWebACL</a> request.</p> </li> <li> <p>Submit an <code>UpdateWebACL</code> request to specify the <code>Rules</code> that you want to include in the <code>WebACL</code>, to specify the default action, and to associate the <code>WebACL</code> with a CloudFront distribution. </p> <p>The <code>ActivatedRule</code> can be a rule group. If you specify a rule group as your <code>ActivatedRule</code>, you can exclude specific rules from that rule group.</p> <p>If you already have a rule group associated with a web ACL and want to submit an <code>UpdateWebACL</code> request to exclude certain rules from that rule group, you must first remove the rule group from the web ACL, then re-insert it, specifying the excluded rules. For details, see <a>ActivatedRule$ExcludedRules</a>. </p> </li> </ol> <p>Be aware that if you try to add a RATE_BASED rule to a web ACL without setting the rule type when first creating the rule, the <a>UpdateWebACL</a> request will fail because the request tries to add a REGULAR rule (the default rule type) with the specified ID, which does not exist. </p> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_web_acl(
&self,
input: UpdateWebACLRequest,
) -> RusotoFuture<UpdateWebACLResponse, UpdateWebACLError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateWebACL");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateWebACLResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateWebACLError::from_response(response))),
)
}
})
}
/// <p>Inserts or deletes <a>XssMatchTuple</a> objects (filters) in an <a>XssMatchSet</a>. For each <code>XssMatchTuple</code> object, you specify the following values:</p> <ul> <li> <p> <code>Action</code>: Whether to insert the object into or delete the object from the array. To change an <code>XssMatchTuple</code>, you delete the existing object and add a new one.</p> </li> <li> <p> <code>FieldToMatch</code>: The part of web requests that you want AWS WAF to inspect and, if you want AWS WAF to inspect a header or custom query parameter, the name of the header or parameter.</p> </li> <li> <p> <code>TextTransformation</code>: Which text transformation, if any, to perform on the web request before inspecting the request for cross-site scripting attacks.</p> <p>You can only specify a single type of TextTransformation.</p> </li> </ul> <p>You use <code>XssMatchSet</code> objects to specify which CloudFront requests that you want to allow, block, or count. For example, if you're receiving requests that contain cross-site scripting attacks in the request body and you want to block the requests, you can create an <code>XssMatchSet</code> with the applicable settings, and then configure AWS WAF to block the requests. </p> <p>To create and configure an <code>XssMatchSet</code>, perform the following steps:</p> <ol> <li> <p>Submit a <a>CreateXssMatchSet</a> request.</p> </li> <li> <p>Use <a>GetChangeToken</a> to get the change token that you provide in the <code>ChangeToken</code> parameter of an <a>UpdateIPSet</a> request.</p> </li> <li> <p>Submit an <code>UpdateXssMatchSet</code> request to specify the parts of web requests that you want AWS WAF to inspect for cross-site scripting attacks.</p> </li> </ol> <p>For more information about how to use the AWS WAF API to allow or block HTTP requests, see the <a href="http://docs.aws.amazon.com/waf/latest/developerguide/">AWS WAF Developer Guide</a>.</p>
fn update_xss_match_set(
&self,
input: UpdateXssMatchSetRequest,
) -> RusotoFuture<UpdateXssMatchSetResponse, UpdateXssMatchSetError> {
let mut request = SignedRequest::new("POST", "waf", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSWAF_20150824.UpdateXssMatchSet");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().and_then(|response| {
proto::json::ResponsePayload::new(&response)
.deserialize::<UpdateXssMatchSetResponse, _>()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateXssMatchSetError::from_response(response))),
)
}
})
}
}
| {
match *self {
DeleteByteMatchSetError::WAFInternalError(ref cause) => cause,
DeleteByteMatchSetError::WAFInvalidAccount(ref cause) => cause,
DeleteByteMatchSetError::WAFNonEmptyEntity(ref cause) => cause,
DeleteByteMatchSetError::WAFNonexistentItem(ref cause) => cause,
DeleteByteMatchSetError::WAFReferencedItem(ref cause) => cause,
DeleteByteMatchSetError::WAFStaleData(ref cause) => cause,
}
} |
Grid.test.tsx | import { createPassThroughPropsTest } from '@shared/tests/utils';
import { mount } from 'enzyme';
import { Grid } from '@ui5/webcomponents-react/lib/Grid';
import React from 'react';
import { GridPosition } from '@ui5/webcomponents-react/lib/GridPosition';
const GridContent = () => (
<>
<div
style={{
height: '6rem',
width: '100%',
backgroundColor: '#A9EAFF'
}}
/>
<div
style={{
height: '6rem',
width: '100%',
backgroundColor: '#A9EAFF'
}}
/>
<div
style={{
height: '6rem',
width: '100%',
backgroundColor: '#A9EAFF'
}}
/>
<div
style={{ | height: '6rem',
width: '100%',
backgroundColor: '#A9EAFF'
}}
/>
</>
);
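// GridContent is a shared fixture: four identical fixed-height blocks that the
// snapshot tests below arrange with different Grid props.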
describe('Grid', () => {
test('Renders Children', () => {
const wrapper = mount(
<Grid>
<GridContent />
</Grid>
);
expect(wrapper.render()).toMatchSnapshot();
});
test('Custom Class Names and Styling', () => {
const wrapper = mount(
<Grid style={{ backgroundColor: 'purple' }} className="customClassName" hSpacing={0}>
<GridContent />
</Grid>
);
expect(wrapper.render()).toMatchSnapshot();
});
test('Renders Children with custom layout data', () => {
const wrapper = mount(
<Grid>
<div
style={{
height: '6rem',
width: '100%',
backgroundColor: '#A9EAFF'
}}
data-layout-span="XL12 L12 M12 S12"
/>
<div
style={{
height: '6rem',
width: '100%',
backgroundColor: '#A9EAFF'
}}
data-layout-indent="XL1 L1 M1 S1"
/>
</Grid>
);
expect(wrapper.render()).toMatchSnapshot();
});
test('Grid Position Center', () => {
const wrapper = mount(
<Grid style={{ width: '60%' }} position={GridPosition.Center}>
<GridContent />
</Grid>
);
expect(wrapper.render()).toMatchSnapshot();
});
test('Grid Position Right', () => {
const wrapper = mount(
<Grid style={{ width: '60%' }} position={GridPosition.Right}>
<GridContent />
</Grid>
);
expect(wrapper.render()).toMatchSnapshot();
});
createPassThroughPropsTest(Grid);
}); | |
box_predictor.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Box predictor for object detectors.
Box predictors are classes that take a high level
image feature map as input and produce two predictions,
(1) a tensor encoding box locations, and
(2) a tensor encoding classes for each box.
These components are passed directly to loss functions
in our detection models.
These modules are separated from the main model since the same
few box predictor architectures are shared across many models.
"""
from abc import abstractmethod
import tensorflow as tf
from app.object_detection.utils import ops
from app.object_detection.utils import shape_utils
from app.object_detection.utils import static_shape
slim = tf.contrib.slim
BOX_ENCODINGS = 'box_encodings'
CLASS_PREDICTIONS_WITH_BACKGROUND = 'class_predictions_with_background'
MASK_PREDICTIONS = 'mask_predictions'
class BoxPredictor(object):
"""BoxPredictor."""
def __init__(self, is_training, num_classes):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
"""
self._is_training = is_training
self._num_classes = num_classes
@property
def num_classes(self):
return self._num_classes
def predict(self, image_features, num_predictions_per_location, scope,
**params):
"""Computes encoded object locations and corresponding confidences.
Takes a high level image feature map as input and produce two predictions,
(1) a tensor encoding box locations, and
(2) a tensor encoding class scores for each corresponding box.
In this interface, we only assume that two tensors are returned as output
and do not assume anything about their shapes.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
scope: Variable and Op scope name.
**params: Additional keyword arguments for specific implementations of
BoxPredictor.
Returns:
A dictionary containing at least the following tensors.
box_encodings: A float tensor of shape
[batch_size, num_anchors, q, code_size] representing the location of
the objects, where q is 1 or the number of classes.
class_predictions_with_background: A float tensor of shape
[batch_size, num_anchors, num_classes + 1] representing the class
predictions for the proposals.
"""
with tf.variable_scope(scope):
return self._predict(image_features, num_predictions_per_location,
**params)
# TODO: num_predictions_per_location could be moved to constructor.
# This is currently only used by ConvolutionalBoxPredictor.
@abstractmethod
def _predict(self, image_features, num_predictions_per_location, **params):
"""Implementations must override this method.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
**params: Additional keyword arguments for specific implementations of
BoxPredictor.
Returns:
A dictionary containing at least the following tensors.
box_encodings: A float tensor of shape
[batch_size, num_anchors, q, code_size] representing the location of
the objects, where q is 1 or the number of classes.
class_predictions_with_background: A float tensor of shape
[batch_size, num_anchors, num_classes + 1] representing the class
predictions for the proposals.
"""
pass
class RfcnBoxPredictor(BoxPredictor):
"""RFCN Box Predictor.
  Applies a position sensitive ROI pooling on position sensitive feature maps to
predict classes and refined locations. See https://arxiv.org/abs/1605.06409
for details.
This is used for the second stage of the RFCN meta architecture. Notice that
locations are *not* shared across classes, thus for each anchor, a separate
prediction is made for each class.
"""
def __init__(self,
is_training,
num_classes,
conv_hyperparams,
num_spatial_bins,
depth,
crop_size,
box_code_size):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
      conv_hyperparams: Slim arg_scope with hyperparameters for convolutional
layers.
num_spatial_bins: A list of two integers `[spatial_bins_y,
spatial_bins_x]`.
depth: Target depth to reduce the input feature maps to.
crop_size: A list of two integers `[crop_height, crop_width]`.
box_code_size: Size of encoding for each box.
"""
super(RfcnBoxPredictor, self).__init__(is_training, num_classes)
self._conv_hyperparams = conv_hyperparams
self._num_spatial_bins = num_spatial_bins
self._depth = depth
self._crop_size = crop_size
self._box_code_size = box_code_size
@property
def num_classes(self):
return self._num_classes
def _predict(self, image_features, num_predictions_per_location,
proposal_boxes):
"""Computes encoded object locations and corresponding confidences.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
Currently, this must be set to 1, or an error will be raised.
proposal_boxes: A float tensor of shape [batch_size, num_proposals,
box_code_size].
Returns:
box_encodings: A float tensor of shape
[batch_size, 1, num_classes, code_size] representing the
location of the objects.
class_predictions_with_background: A float tensor of shape
[batch_size, 1, num_classes + 1] representing the class
predictions for the proposals.
Raises:
ValueError: if num_predictions_per_location is not 1.
"""
if num_predictions_per_location != 1:
raise ValueError('Currently RfcnBoxPredictor only supports '
'predicting a single box per class per location.')
batch_size = tf.shape(proposal_boxes)[0]
num_boxes = tf.shape(proposal_boxes)[1]
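    # Descriptive note: the helper below maps every proposal box to the index of
    # the image it came from, yielding a flat [batch_size * num_boxes] tensor that
    # is later passed as `box_ind` to the position-sensitive cropping op.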
def get_box_indices(proposals):
proposals_shape = proposals.get_shape().as_list()
if any(dim is None for dim in proposals_shape):
proposals_shape = tf.shape(proposals)
ones_mat = tf.ones(proposals_shape[:2], dtype=tf.int32)
multiplier = tf.expand_dims(
tf.range(start=0, limit=proposals_shape[0]), 1)
return tf.reshape(ones_mat * multiplier, [-1])
net = image_features
with slim.arg_scope(self._conv_hyperparams):
net = slim.conv2d(net, self._depth, [1, 1], scope='reduce_depth')
# Location predictions.
location_feature_map_depth = (self._num_spatial_bins[0] *
self._num_spatial_bins[1] *
self.num_classes *
self._box_code_size)
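      # For example, with num_spatial_bins=[3, 3], 90 classes and box_code_size=4,
      # this works out to 3 * 3 * 90 * 4 = 3240 channels in the location map.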
location_feature_map = slim.conv2d(net, location_feature_map_depth,
[1, 1], activation_fn=None,
scope='refined_locations')
box_encodings = ops.position_sensitive_crop_regions(
location_feature_map,
boxes=tf.reshape(proposal_boxes, [-1, self._box_code_size]),
box_ind=get_box_indices(proposal_boxes),
crop_size=self._crop_size,
num_spatial_bins=self._num_spatial_bins,
global_pool=True)
box_encodings = tf.squeeze(box_encodings, squeeze_dims=[1, 2])
box_encodings = tf.reshape(box_encodings,
[batch_size * num_boxes, 1, self.num_classes,
self._box_code_size])
# Class predictions.
total_classes = self.num_classes + 1 # Account for background class.
class_feature_map_depth = (self._num_spatial_bins[0] *
self._num_spatial_bins[1] *
total_classes)
class_feature_map = slim.conv2d(net, class_feature_map_depth, [1, 1],
activation_fn=None,
scope='class_predictions')
class_predictions_with_background = ops.position_sensitive_crop_regions(
class_feature_map,
boxes=tf.reshape(proposal_boxes, [-1, self._box_code_size]),
box_ind=get_box_indices(proposal_boxes),
crop_size=self._crop_size,
num_spatial_bins=self._num_spatial_bins,
global_pool=True)
class_predictions_with_background = tf.squeeze(
class_predictions_with_background, squeeze_dims=[1, 2])
class_predictions_with_background = tf.reshape(
class_predictions_with_background,
[batch_size * num_boxes, 1, total_classes])
return {BOX_ENCODINGS: box_encodings,
CLASS_PREDICTIONS_WITH_BACKGROUND:
class_predictions_with_background}
class MaskRCNNBoxPredictor(BoxPredictor):
"""Mask R-CNN Box Predictor.
See Mask R-CNN: He, K., Gkioxari, G., Dollar, P., & Girshick, R. (2017).
Mask R-CNN. arXiv preprint arXiv:1703.06870.
This is used for the second stage of the Mask R-CNN detector where proposals
cropped from an image are arranged along the batch dimension of the input
image_features tensor. Notice that locations are *not* shared across classes,
thus for each anchor, a separate prediction is made for each class.
In addition to predicting boxes and classes, optionally this class allows
predicting masks and/or keypoints inside detection boxes.
Currently this box predictor makes per-class predictions; that is, each
anchor makes a separate box prediction for each class.
"""
def __init__(self,
is_training,
num_classes,
fc_hyperparams,
use_dropout,
dropout_keep_prob,
box_code_size,
conv_hyperparams=None,
predict_instance_masks=False,
mask_height=14,
mask_width=14,
mask_prediction_conv_depth=256,
predict_keypoints=False):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
fc_hyperparams: Slim arg_scope with hyperparameters for fully
connected ops.
use_dropout: Option to use dropout or not. Note that a single dropout
op is applied here prior to both box and class predictions, which stands
in contrast to the ConvolutionalBoxPredictor below.
dropout_keep_prob: Keep probability for dropout.
This is only used if use_dropout is True.
box_code_size: Size of encoding for each box.
conv_hyperparams: Slim arg_scope with hyperparameters for convolution
ops.
predict_instance_masks: Whether to predict object masks inside detection
boxes.
mask_height: Desired output mask height. The default value is 14.
mask_width: Desired output mask width. The default value is 14.
mask_prediction_conv_depth: The depth for the first conv2d_transpose op
        applied to the image_features in the mask prediction branch.
      predict_keypoints: Whether to predict keypoints inside detection boxes.
Raises:
      ValueError: If predict_keypoints is true, or if predict_instance_masks is
        true but conv_hyperparams is not provided.
"""
super(MaskRCNNBoxPredictor, self).__init__(is_training, num_classes)
self._fc_hyperparams = fc_hyperparams
self._use_dropout = use_dropout
self._box_code_size = box_code_size
self._dropout_keep_prob = dropout_keep_prob
self._conv_hyperparams = conv_hyperparams
self._predict_instance_masks = predict_instance_masks
self._mask_height = mask_height
self._mask_width = mask_width
self._mask_prediction_conv_depth = mask_prediction_conv_depth
self._predict_keypoints = predict_keypoints
if self._predict_keypoints:
raise ValueError('Keypoint prediction is unimplemented.')
if ((self._predict_instance_masks or self._predict_keypoints) and
self._conv_hyperparams is None):
raise ValueError('`conv_hyperparams` must be provided when predicting '
'masks.')
@property
def num_classes(self):
|
def _predict(self, image_features, num_predictions_per_location):
"""Computes encoded object locations and corresponding confidences.
Flattens image_features and applies fully connected ops (with no
non-linearity) to predict box encodings and class predictions. In this
setting, anchors are not spatially arranged in any way and are assumed to
have been folded into the batch dimension. Thus we output 1 for the
anchors dimension.
Also optionally predicts instance masks.
The mask prediction head is based on the Mask RCNN paper with the following
modifications: We replace the deconvolution layer with a bilinear resize
and a convolution.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
Currently, this must be set to 1, or an error will be raised.
Returns:
A dictionary containing the following tensors.
box_encodings: A float tensor of shape
[batch_size, 1, num_classes, code_size] representing the
location of the objects.
class_predictions_with_background: A float tensor of shape
[batch_size, 1, num_classes + 1] representing the class
predictions for the proposals.
If predict_masks is True the dictionary also contains:
instance_masks: A float tensor of shape
[batch_size, 1, num_classes, image_height, image_width]
If predict_keypoints is True the dictionary also contains:
keypoints: [batch_size, 1, num_keypoints, 2]
Raises:
ValueError: if num_predictions_per_location is not 1.
"""
if num_predictions_per_location != 1:
raise ValueError('Currently FullyConnectedBoxPredictor only supports '
'predicting a single box per class per location.')
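    # Proposals were already folded into the batch dimension (see the class
    # docstring), so averaging over height and width leaves one feature vector
    # per proposal before the fully connected predictors below.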
spatial_averaged_image_features = tf.reduce_mean(image_features, [1, 2],
keep_dims=True,
name='AvgPool')
flattened_image_features = slim.flatten(spatial_averaged_image_features)
if self._use_dropout:
flattened_image_features = slim.dropout(flattened_image_features,
keep_prob=self._dropout_keep_prob,
is_training=self._is_training)
with slim.arg_scope(self._fc_hyperparams):
box_encodings = slim.fully_connected(
flattened_image_features,
self._num_classes * self._box_code_size,
activation_fn=None,
scope='BoxEncodingPredictor')
class_predictions_with_background = slim.fully_connected(
flattened_image_features,
self._num_classes + 1,
activation_fn=None,
scope='ClassPredictor')
box_encodings = tf.reshape(
box_encodings, [-1, 1, self._num_classes, self._box_code_size])
class_predictions_with_background = tf.reshape(
class_predictions_with_background, [-1, 1, self._num_classes + 1])
predictions_dict = {
BOX_ENCODINGS: box_encodings,
CLASS_PREDICTIONS_WITH_BACKGROUND: class_predictions_with_background
}
if self._predict_instance_masks:
with slim.arg_scope(self._conv_hyperparams):
upsampled_features = tf.image.resize_bilinear(
image_features,
[self._mask_height, self._mask_width],
align_corners=True)
upsampled_features = slim.conv2d(
upsampled_features,
num_outputs=self._mask_prediction_conv_depth,
kernel_size=[2, 2])
mask_predictions = slim.conv2d(upsampled_features,
num_outputs=self.num_classes,
activation_fn=None,
kernel_size=[3, 3])
instance_masks = tf.expand_dims(tf.transpose(mask_predictions,
perm=[0, 3, 1, 2]),
axis=1,
name='MaskPredictor')
predictions_dict[MASK_PREDICTIONS] = instance_masks
return predictions_dict
class ConvolutionalBoxPredictor(BoxPredictor):
"""Convolutional Box Predictor.
Optionally add an intermediate 1x1 convolutional layer after features and
predict in parallel branches box_encodings and
class_predictions_with_background.
Currently this box predictor assumes that predictions are "shared" across
classes --- that is each anchor makes box predictions which do not depend
on class.
"""
def __init__(self,
is_training,
num_classes,
conv_hyperparams,
min_depth,
max_depth,
num_layers_before_predictor,
use_dropout,
dropout_keep_prob,
kernel_size,
box_code_size,
apply_sigmoid_to_scores=False,
class_prediction_bias_init=0.0):
"""Constructor.
Args:
is_training: Indicates whether the BoxPredictor is in training mode.
num_classes: number of classes. Note that num_classes *does not*
include the background category, so if groundtruth labels take values
in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
assigned classification targets can range from {0,... K}).
conv_hyperparams: Slim arg_scope with hyperparameters for convolution ops.
      min_depth: Minimum feature depth prior to predicting box encodings
and class predictions.
max_depth: Maximum feature depth prior to predicting box encodings
and class predictions. If max_depth is set to 0, no additional
feature map will be inserted before location and class predictions.
num_layers_before_predictor: Number of the additional conv layers before
the predictor.
use_dropout: Option to use dropout for class prediction or not.
dropout_keep_prob: Keep probability for dropout.
This is only used if use_dropout is True.
kernel_size: Size of final convolution kernel. If the
spatial resolution of the feature map is smaller than the kernel size,
then the kernel size is automatically set to be
min(feature_width, feature_height).
box_code_size: Size of encoding for each box.
apply_sigmoid_to_scores: if True, apply the sigmoid on the output
class_predictions.
class_prediction_bias_init: constant value to initialize bias of the last
conv2d layer before class prediction.
Raises:
ValueError: if min_depth > max_depth.
"""
super(ConvolutionalBoxPredictor, self).__init__(is_training, num_classes)
if min_depth > max_depth:
raise ValueError('min_depth should be less than or equal to max_depth')
self._conv_hyperparams = conv_hyperparams
self._min_depth = min_depth
self._max_depth = max_depth
self._num_layers_before_predictor = num_layers_before_predictor
self._use_dropout = use_dropout
self._kernel_size = kernel_size
self._box_code_size = box_code_size
self._dropout_keep_prob = dropout_keep_prob
self._apply_sigmoid_to_scores = apply_sigmoid_to_scores
self._class_prediction_bias_init = class_prediction_bias_init
def _predict(self, image_features, num_predictions_per_location):
"""Computes encoded object locations and corresponding confidences.
Args:
image_features: A float tensor of shape [batch_size, height, width,
channels] containing features for a batch of images.
num_predictions_per_location: an integer representing the number of box
predictions to be made per spatial location in the feature map.
Returns:
A dictionary containing the following tensors.
box_encodings: A float tensor of shape [batch_size, num_anchors, 1,
code_size] representing the location of the objects, where
num_anchors = feat_height * feat_width * num_predictions_per_location
class_predictions_with_background: A float tensor of shape
[batch_size, num_anchors, num_classes + 1] representing the class
predictions for the proposals.
"""
# Add a slot for the background class.
num_class_slots = self.num_classes + 1
net = image_features
with slim.arg_scope(self._conv_hyperparams), \
slim.arg_scope([slim.dropout], is_training=self._is_training):
# Add additional conv layers before the class predictor.
features_depth = static_shape.get_depth(image_features.get_shape())
depth = max(min(features_depth, self._max_depth), self._min_depth)
tf.logging.info('depth of additional conv before box predictor: {}'.
format(depth))
if depth > 0 and self._num_layers_before_predictor > 0:
for i in range(self._num_layers_before_predictor):
net = slim.conv2d(
net, depth, [1, 1], scope='Conv2d_%d_1x1_%d' % (i, depth))
with slim.arg_scope([slim.conv2d], activation_fn=None,
normalizer_fn=None, normalizer_params=None):
box_encodings = slim.conv2d(
net, num_predictions_per_location * self._box_code_size,
[self._kernel_size, self._kernel_size],
scope='BoxEncodingPredictor')
if self._use_dropout:
net = slim.dropout(net, keep_prob=self._dropout_keep_prob)
class_predictions_with_background = slim.conv2d(
net, num_predictions_per_location * num_class_slots,
[self._kernel_size, self._kernel_size], scope='ClassPredictor',
biases_initializer=tf.constant_initializer(
self._class_prediction_bias_init))
if self._apply_sigmoid_to_scores:
class_predictions_with_background = tf.sigmoid(
class_predictions_with_background)
combined_feature_map_shape = shape_utils.combined_static_and_dynamic_shape(
image_features)
box_encodings = tf.reshape(
box_encodings, tf.stack([combined_feature_map_shape[0],
combined_feature_map_shape[1] *
combined_feature_map_shape[2] *
num_predictions_per_location,
1, self._box_code_size]))
class_predictions_with_background = tf.reshape(
class_predictions_with_background,
tf.stack([combined_feature_map_shape[0],
combined_feature_map_shape[1] *
combined_feature_map_shape[2] *
num_predictions_per_location,
num_class_slots]))
return {BOX_ENCODINGS: box_encodings,
CLASS_PREDICTIONS_WITH_BACKGROUND:
class_predictions_with_background}
| return self._num_classes |
firewallrule.py | # Copyright 2016-2017 FUJITSU LIMITED
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import logging
from cliff import columns as cliff_columns
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from osc_lib.utils import columns as column_util
from neutronclient._i18n import _
from neutronclient.common import utils as nc_utils
from neutronclient.osc import utils as osc_utils
from neutronclient.osc.v2.fwaas import constants as const
LOG = logging.getLogger(__name__)
_attr_map = (
('id', 'ID', column_util.LIST_BOTH),
('name', 'Name', column_util.LIST_BOTH),
('enabled', 'Enabled', column_util.LIST_BOTH),
('summary', 'Summary', column_util.LIST_SHORT_ONLY),
('description', 'Description', column_util.LIST_LONG_ONLY),
('ip_version', 'IP Version', column_util.LIST_LONG_ONLY),
('action', 'Action', column_util.LIST_LONG_ONLY),
('protocol', 'Protocol', column_util.LIST_LONG_ONLY),
('source_ip_address', 'Source IP Address', column_util.LIST_LONG_ONLY),
('source_port', 'Source Port', column_util.LIST_LONG_ONLY),
('destination_ip_address', 'Destination IP Address',
column_util.LIST_LONG_ONLY),
('destination_port', 'Destination Port', column_util.LIST_LONG_ONLY),
('shared', 'Shared', column_util.LIST_LONG_ONLY),
('tenant_id', 'Project', column_util.LIST_LONG_ONLY),
('source_firewall_group_id', 'Source Firewall Group ID',
column_util.LIST_LONG_ONLY),
('destination_firewall_group_id', 'Destination Firewall Group ID',
column_util.LIST_LONG_ONLY),
)
def _get_common_parser(parser):
parser.add_argument(
'--name',
metavar='<name>',
help=_('Name of the firewall rule'))
parser.add_argument(
'--description',
metavar='<description>',
help=_('Description of the firewall rule'))
parser.add_argument(
'--protocol',
choices=['tcp', 'udp', 'icmp', 'any'],
type=nc_utils.convert_to_lowercase,
help=_('Protocol for the firewall rule'))
parser.add_argument(
'--action',
choices=['allow', 'deny', 'reject'],
type=nc_utils.convert_to_lowercase,
help=_('Action for the firewall rule'))
parser.add_argument(
'--ip-version',
metavar='<ip-version>',
choices=['4', '6'],
help=_('Set IP version 4 or 6 (default is 4)'))
src_ip_group = parser.add_mutually_exclusive_group()
src_ip_group.add_argument(
'--source-ip-address',
metavar='<source-ip-address>',
help=_('Source IP address or subnet'))
src_ip_group.add_argument(
'--no-source-ip-address',
action='store_true',
help=_('Detach source IP address'))
dst_ip_group = parser.add_mutually_exclusive_group()
dst_ip_group.add_argument(
'--destination-ip-address',
metavar='<destination-ip-address>',
help=_('Destination IP address or subnet'))
dst_ip_group.add_argument(
'--no-destination-ip-address',
action='store_true',
help=_('Detach destination IP address'))
src_port_group = parser.add_mutually_exclusive_group()
src_port_group.add_argument(
'--source-port',
metavar='<source-port>',
        help=_('Source port number or range '
'(integer in [1, 65535] or range like 123:456)'))
src_port_group.add_argument(
'--no-source-port',
action='store_true',
help=_('Detach source port number or range'))
dst_port_group = parser.add_mutually_exclusive_group()
dst_port_group.add_argument(
'--destination-port',
metavar='<destination-port>',
        help=_('Destination port number or range '
'(integer in [1, 65535] or range like 123:456)'))
dst_port_group.add_argument(
'--no-destination-port',
action='store_true',
help=_('Detach destination port number or range'))
shared_group = parser.add_mutually_exclusive_group()
shared_group.add_argument(
'--public',
action='store_true',
        help=_('Make the firewall rule public, which allows it to be '
'used in all projects (as opposed to the default, '
'which is to restrict its use to the current project). '
'This option is deprecated and would be removed in R Release'))
shared_group.add_argument(
'--private',
action='store_true',
help=_(
'Restrict use of the firewall rule to the current project.'
'This option is deprecated and would be removed in R release.'))
shared_group.add_argument(
'--share',
action='store_true',
help=_('Share the firewall rule to be used in all projects '
'(by default, it is restricted to be used by the '
'current project).'))
shared_group.add_argument(
'--no-share',
action='store_true',
help=_('Restrict use of the firewall rule to the current project'))
enable_group = parser.add_mutually_exclusive_group()
enable_group.add_argument(
'--enable-rule',
action='store_true',
help=_('Enable this rule (default is enabled)'))
enable_group.add_argument(
'--disable-rule',
action='store_true',
help=_('Disable this rule'))
src_fwg_group = parser.add_mutually_exclusive_group()
src_fwg_group.add_argument(
'--source-firewall-group',
metavar='<source-firewall-group>',
help=_('Source firewall group (name or ID)'))
src_fwg_group.add_argument(
'--no-source-firewall-group',
action='store_true',
        help=_('No associated source firewall group'))
dst_fwg_group = parser.add_mutually_exclusive_group()
dst_fwg_group.add_argument(
'--destination-firewall-group',
metavar='<destination-firewall-group>',
help=_('Destination firewall group (name or ID)'))
dst_fwg_group.add_argument(
'--no-destination-firewall-group',
action='store_true',
help=_('No associated destination firewall group'))
return parser
def _get_common_attrs(client_manager, parsed_args, is_create=True):
attrs = {}
client = client_manager.neutronclient
if is_create:
if 'project' in parsed_args and parsed_args.project is not None:
attrs['tenant_id'] = osc_utils.find_project(
client_manager.identity,
parsed_args.project,
parsed_args.project_domain,
).id
if parsed_args.name:
attrs['name'] = str(parsed_args.name)
if parsed_args.description:
attrs['description'] = str(parsed_args.description)
if parsed_args.protocol:
protocol = parsed_args.protocol
attrs['protocol'] = None if protocol == 'any' else protocol
if parsed_args.action:
attrs['action'] = parsed_args.action
if parsed_args.ip_version:
attrs['ip_version'] = str(parsed_args.ip_version)
if parsed_args.source_port:
attrs['source_port'] = parsed_args.source_port
if parsed_args.no_source_port:
attrs['source_port'] = None
if parsed_args.source_ip_address:
attrs['source_ip_address'] = parsed_args.source_ip_address
if parsed_args.no_source_ip_address:
attrs['source_ip_address'] = None
if parsed_args.destination_port:
attrs['destination_port'] = str(parsed_args.destination_port)
if parsed_args.no_destination_port:
attrs['destination_port'] = None
if parsed_args.destination_ip_address:
attrs['destination_ip_address'] = str(
parsed_args.destination_ip_address)
if parsed_args.no_destination_ip_address:
attrs['destination_ip_address'] = None
if parsed_args.enable_rule:
attrs['enabled'] = True
if parsed_args.disable_rule:
attrs['enabled'] = False
if parsed_args.share or parsed_args.public:
attrs['shared'] = True
if parsed_args.no_share or parsed_args.private:
attrs['shared'] = False
if parsed_args.source_firewall_group:
attrs['source_firewall_group_id'] = client.find_resource(
const.FWG, parsed_args.source_firewall_group,
cmd_resource=const.CMD_FWG)['id']
if parsed_args.no_source_firewall_group:
attrs['source_firewall_group_id'] = None
if parsed_args.destination_firewall_group:
attrs['destination_firewall_group_id'] = client.find_resource(
const.FWG, parsed_args.destination_firewall_group,
cmd_resource=const.CMD_FWG)['id']
if parsed_args.no_destination_firewall_group:
attrs['destination_firewall_group_id'] = None
return attrs
class ProtocolColumn(cliff_columns.FormattableColumn):
def human_readable(self):
return self._value if self._value else 'any'
_formatters = {'protocol': ProtocolColumn}
class CreateFirewallRule(command.ShowOne):
_description = _("Create a new firewall rule")
def get_parser(self, prog_name):
parser = super(CreateFirewallRule, self).get_parser(prog_name)
_get_common_parser(parser)
osc_utils.add_project_owner_option_to_parser(parser)
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
attrs = _get_common_attrs(self.app.client_manager, parsed_args)
obj = client.create_fwaas_firewall_rule(
{const.FWR: attrs})[const.FWR]
columns, display_columns = column_util.get_columns(obj, _attr_map)
data = utils.get_dict_properties(obj, columns, formatters=_formatters)
return display_columns, data
class DeleteFirewallRule(command.Command):
_description = _("Delete firewall rule(s)")
def | (self, prog_name):
parser = super(DeleteFirewallRule, self).get_parser(prog_name)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
nargs='+',
help=_('Firewall rule(s) to delete (name or ID)'))
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
result = 0
for fwr in parsed_args.firewall_rule:
try:
fwr_id = client.find_resource(
const.FWR, fwr, cmd_resource=const.CMD_FWR)['id']
client.delete_fwaas_firewall_rule(fwr_id)
except Exception as e:
result += 1
LOG.error(_("Failed to delete Firewall rule with "
"name or ID '%(firewall_rule)s': %(e)s"),
{const.FWR: fwr, 'e': e})
if result > 0:
total = len(parsed_args.firewall_rule)
msg = (_("%(result)s of %(total)s firewall rule(s) failed "
"to delete.") % {'result': result, 'total': total})
raise exceptions.CommandError(msg)
class ListFirewallRule(command.Lister):
_description = _("List firewall rules that belong to a given tenant")
def get_parser(self, prog_name):
parser = super(ListFirewallRule, self).get_parser(prog_name)
parser.add_argument(
'--long',
action='store_true',
default=False,
help=_("List additional fields in output")
)
return parser
def extend_list(self, data, parsed_args):
ext_data = copy.deepcopy(data)
for d in ext_data:
protocol = d['protocol'].upper() if d['protocol'] else 'ANY'
src_ip = 'none specified'
dst_ip = 'none specified'
src_port = '(none specified)'
dst_port = '(none specified)'
if 'source_ip_address' in d and d['source_ip_address']:
src_ip = str(d['source_ip_address']).lower()
if 'source_port' in d and d['source_port']:
src_port = '(' + str(d['source_port']).lower() + ')'
if 'destination_ip_address' in d and d['destination_ip_address']:
dst_ip = str(d['destination_ip_address']).lower()
if 'destination_port' in d and d['destination_port']:
dst_port = '(' + str(d['destination_port']).lower() + ')'
action = d['action'] if d.get('action') else 'no-action'
src = 'source(port): ' + src_ip + src_port
dst = 'dest(port): ' + dst_ip + dst_port
d['summary'] = ',\n '.join([protocol, src, dst, action])
return ext_data
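    # Illustrative rendering of the summary built above (values are made up):
    # a rule with protocol='tcp', source_ip_address='10.0.0.0/24',
    # source_port='80', action='allow' and no destination fields becomes
    #   "TCP,
    #    source(port): 10.0.0.0/24(80),
    #    dest(port): none specified(none specified),
    #    allow"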
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
obj = client.list_fwaas_firewall_rules()[const.FWRS]
obj_extend = self.extend_list(obj, parsed_args)
headers, columns = column_util.get_column_definitions(
_attr_map, long_listing=parsed_args.long)
return (headers, (utils.get_dict_properties(
s, columns, formatters=_formatters) for s in obj_extend))
class SetFirewallRule(command.Command):
_description = _("Set firewall rule properties")
def get_parser(self, prog_name):
parser = super(SetFirewallRule, self).get_parser(prog_name)
_get_common_parser(parser)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
help=_('Firewall rule to set (name or ID)'))
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
attrs = _get_common_attrs(self.app.client_manager,
parsed_args, is_create=False)
fwr_id = client.find_resource(
const.FWR, parsed_args.firewall_rule,
cmd_resource=const.CMD_FWR)['id']
try:
client.update_fwaas_firewall_rule(fwr_id, {const.FWR: attrs})
except Exception as e:
msg = (_("Failed to set firewall rule '%(rule)s': %(e)s")
% {'rule': parsed_args.firewall_rule, 'e': e})
raise exceptions.CommandError(msg)
class ShowFirewallRule(command.ShowOne):
_description = _("Display firewall rule details")
def get_parser(self, prog_name):
parser = super(ShowFirewallRule, self).get_parser(prog_name)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
help=_('Firewall rule to display (name or ID)'))
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
fwr_id = client.find_resource(
const.FWR, parsed_args.firewall_rule,
cmd_resource=const.CMD_FWR)['id']
obj = client.show_fwaas_firewall_rule(fwr_id)[const.FWR]
columns, display_columns = column_util.get_columns(obj, _attr_map)
data = utils.get_dict_properties(obj, columns, formatters=_formatters)
return (display_columns, data)
class UnsetFirewallRule(command.Command):
_description = _("Unset firewall rule properties")
def get_parser(self, prog_name):
parser = super(UnsetFirewallRule, self).get_parser(prog_name)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
help=_('Firewall rule to unset (name or ID)'))
parser.add_argument(
'--source-ip-address',
action='store_true',
help=_('Source IP address or subnet'))
parser.add_argument(
'--destination-ip-address',
action='store_true',
help=_('Destination IP address or subnet'))
parser.add_argument(
'--source-port',
action='store_true',
            help=_('Source port number or range '
'(integer in [1, 65535] or range like 123:456)'))
parser.add_argument(
'--destination-port',
action='store_true',
            help=_('Destination port number or range '
'(integer in [1, 65535] or range like 123:456)'))
parser.add_argument(
'--share',
action='store_true',
help=_('Restrict use of the firewall rule to the current project'))
parser.add_argument(
'--public',
action='store_true',
help=_('Restrict use of the firewall rule to the current project. '
'This option is deprecated and would be removed in '
'R Release.'))
parser.add_argument(
'--enable-rule',
action='store_true',
help=_('Disable this rule'))
parser.add_argument(
'--source-firewall-group',
action='store_true',
help=_('Source firewall group (name or ID)'))
parser.add_argument(
'--destination-firewall-group',
action='store_true',
help=_('Destination firewall group (name or ID)'))
return parser
def _get_attrs(self, client_manager, parsed_args):
attrs = {}
if parsed_args.source_ip_address:
attrs['source_ip_address'] = None
if parsed_args.source_port:
attrs['source_port'] = None
if parsed_args.destination_ip_address:
attrs['destination_ip_address'] = None
if parsed_args.destination_port:
attrs['destination_port'] = None
if parsed_args.share or parsed_args.public:
attrs['shared'] = False
if parsed_args.enable_rule:
attrs['enabled'] = False
if parsed_args.source_firewall_group:
attrs['source_firewall_group_id'] = None
        if parsed_args.destination_firewall_group:
attrs['destination_firewall_group_id'] = None
return attrs
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
attrs = self._get_attrs(self.app.client_manager, parsed_args)
fwr_id = client.find_resource(
const.FWR, parsed_args.firewall_rule,
cmd_resource=const.CMD_FWR)['id']
try:
client.update_fwaas_firewall_rule(fwr_id, {const.FWR: attrs})
except Exception as e:
msg = (_("Failed to unset firewall rule '%(rule)s': %(e)s")
% {'rule': parsed_args.firewall_rule, 'e': e})
raise exceptions.CommandError(msg)
| get_parser |
baseline_classifier.py | from utils import constant
from sklearn import svm
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from xgboost import XGBClassifier
def get_classifier(ty="LR", c=1.0, max_depth=5, n_estimators=300, gamma=0):
| if(ty=="LR"):
classifier = LogisticRegression(solver='lbfgs',multi_class='multinomial', C=c)
elif(ty=="SVM"):
classifier = SVC(kernel='linear')
elif(ty=="XGB"):
classifier = XGBClassifier(max_depth=max_depth, n_estimators=n_estimators, gamma=gamma, n_jobs=4, tree_method="gpu_hist") ## change later ##
return classifier |
|
__init__.py | import datetime
import logging
import math
import voluptuous as vol
from esphome import automation
import esphome.config_validation as cv
from esphome.const import CONF_CRON, CONF_DAYS_OF_MONTH, CONF_DAYS_OF_WEEK, CONF_HOURS, \
CONF_MINUTES, CONF_MONTHS, CONF_ON_TIME, CONF_SECONDS, CONF_TIMEZONE, CONF_TRIGGER_ID
from esphome.core import CORE
from esphome.cpp_generator import Pvariable, add
from esphome.cpp_types import App, Component, NoArg, Trigger, esphome_ns
from esphome.py_compat import string_types
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
})
time_ns = esphome_ns.namespace('time')
RealTimeClockComponent = time_ns.class_('RealTimeClockComponent', Component)
CronTrigger = time_ns.class_('CronTrigger', Trigger.template(NoArg), Component)
ESPTime = time_ns.struct('ESPTime')
def _tz_timedelta(td):
offset_hour = int(td.total_seconds() / (60 * 60))
offset_minute = int(abs(td.total_seconds() / 60)) % 60
offset_second = int(abs(td.total_seconds())) % 60
if offset_hour == 0 and offset_minute == 0 and offset_second == 0:
return '0'
if offset_minute == 0 and offset_second == 0:
return '{}'.format(offset_hour)
if offset_second == 0:
return '{}:{}'.format(offset_hour, offset_minute)
return '{}:{}:{}'.format(offset_hour, offset_minute, offset_second)
# https://stackoverflow.com/a/16804556/8924614
def _week_of_month(dt):
first_day = dt.replace(day=1)
dom = dt.day
adjusted_dom = dom + first_day.weekday()
return int(math.ceil(adjusted_dom / 7.0))
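# Worked example (illustrative): for 2019-01-15 the first day of the month is a
# Tuesday (weekday() == 1), so adjusted_dom = 15 + 1 = 16 and
# ceil(16 / 7.0) == 3, i.e. the date falls in the third week of the month.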
def _tz_dst_str(dt):
td = datetime.timedelta(hours=dt.hour, minutes=dt.minute, seconds=dt.second)
return 'M{}.{}.{}/{}'.format(dt.month, _week_of_month(dt), dt.isoweekday() % 7,
_tz_timedelta(td))
def convert_tz(pytz_obj):
tz = pytz_obj
def _dst(dt, is_dst):
try:
return tz.dst(dt, is_dst=is_dst)
except TypeError: # stupid pytz...
return tz.dst(dt)
def _tzname(dt, is_dst):
try:
return tz.tzname(dt, is_dst=is_dst)
except TypeError: # stupid pytz...
return tz.tzname(dt)
def _utcoffset(dt, is_dst):
try:
return tz.utcoffset(dt, is_dst=is_dst)
except TypeError: # stupid pytz...
return tz.utcoffset(dt)
dst_begins = None
dst_tzname = None
dst_utcoffset = None
dst_ends = None
norm_tzname = None
norm_utcoffset = None
hour = datetime.timedelta(hours=1)
this_year = datetime.datetime.now().year
dt = datetime.datetime(year=this_year, month=1, day=1)
last_dst = None
while dt.year == this_year:
current_dst = _dst(dt, not last_dst)
is_dst = bool(current_dst)
if is_dst != last_dst:
if is_dst:
dst_begins = dt
dst_tzname = _tzname(dt, True)
dst_utcoffset = _utcoffset(dt, True)
else:
dst_ends = dt + hour
norm_tzname = _tzname(dt, False)
norm_utcoffset = _utcoffset(dt, False)
last_dst = is_dst
dt += hour
tzbase = '{}{}'.format(norm_tzname, _tz_timedelta(-1 * norm_utcoffset))
if dst_begins is None:
# No DST in this timezone
_LOGGER.info("Detected timezone '%s' with UTC offset %s",
norm_tzname, _tz_timedelta(norm_utcoffset))
return tzbase
tzext = '{}{},{},{}'.format(dst_tzname, _tz_timedelta(-1 * dst_utcoffset),
_tz_dst_str(dst_begins), _tz_dst_str(dst_ends))
_LOGGER.info("Detected timezone '%s' with UTC offset %s and daylight savings time from "
"%s to %s",
norm_tzname, _tz_timedelta(norm_utcoffset), dst_begins.strftime("%x %X"),
dst_ends.strftime("%x %X"))
return tzbase + tzext
def detect_tz():
try:
import tzlocal
import pytz
except ImportError:
raise vol.Invalid("No timezone specified and 'tzlocal' not installed. To automatically "
"detect the timezone please install tzlocal (pip install tzlocal)")
try:
tz = tzlocal.get_localzone()
except pytz.exceptions.UnknownTimeZoneError:
_LOGGER.warning("Could not auto-detect timezone. Using UTC...")
return 'UTC'
return convert_tz(tz)
def _parse_cron_int(value, special_mapping, message):
special_mapping = special_mapping or {}
if isinstance(value, string_types) and value in special_mapping:
return special_mapping[value]
try:
return int(value)
except ValueError:
raise vol.Invalid(message.format(value))
def _parse_cron_part(part, min_value, max_value, special_mapping):
if part in ('*', '?'):
return set(x for x in range(min_value, max_value + 1))
if '/' in part:
data = part.split('/')
if len(data) > 2:
raise vol.Invalid(u"Can't have more than two '/' in one time expression, got {}"
.format(part))
offset, repeat = data
offset_n = 0
if offset:
offset_n = _parse_cron_int(offset, special_mapping,
u"Offset for '/' time expression must be an integer, got {}")
try:
repeat_n = int(repeat)
except ValueError:
raise vol.Invalid(u"Repeat for '/' time expression must be an integer, got {}"
.format(repeat))
return set(x for x in range(offset_n, max_value + 1, repeat_n))
if '-' in part:
data = part.split('-')
if len(data) > 2:
raise vol.Invalid(u"Can't have more than two '-' in range time expression '{}'"
.format(part))
begin, end = data
begin_n = _parse_cron_int(begin, special_mapping, u"Number for time range must be integer, "
u"got {}")
end_n = _parse_cron_int(end, special_mapping, u"Number for time range must be integer, "
u"got {}")
if end_n < begin_n:
return set(x for x in range(end_n, max_value + 1)) | \
set(x for x in range(min_value, begin_n + 1))
return set(x for x in range(begin_n, end_n + 1))
return {_parse_cron_int(part, special_mapping, u"Number for time expression must be an "
u"integer, got {}")}
def cron_expression_validator(name, min_value, max_value, special_mapping=None):
def validator(value):
if isinstance(value, list):
for v in value:
if not isinstance(v, int):
raise vol.Invalid(
"Expected integer for {} '{}', got {}".format(v, name, type(v)))
if v < min_value or v > max_value:
raise vol.Invalid(
"{} {} is out of range (min={} max={}).".format(name, v, min_value,
max_value))
return list(sorted(value))
value = cv.string(value)
values = set()
for part in value.split(','):
values |= _parse_cron_part(part, min_value, max_value, special_mapping)
return validator(list(values))
return validator
validate_cron_seconds = cron_expression_validator('seconds', 0, 60)
validate_cron_minutes = cron_expression_validator('minutes', 0, 59)
validate_cron_hours = cron_expression_validator('hours', 0, 23)
validate_cron_days_of_month = cron_expression_validator('days of month', 1, 31)
validate_cron_months = cron_expression_validator('months', 1, 12, {
'JAN': 1, 'FEB': 2, 'MAR': 3, 'APR': 4, 'MAY': 5, 'JUN': 6, 'JUL': 7, 'AUG': 8,
'SEP': 9, 'OCT': 10, 'NOV': 11, 'DEC': 12
})
validate_cron_days_of_week = cron_expression_validator('days of week', 1, 7, {
'SUN': 1, 'MON': 2, 'TUE': 3, 'WED': 4, 'THU': 5, 'FRI': 6, 'SAT': 7
})
CRON_KEYS = [CONF_SECONDS, CONF_MINUTES, CONF_HOURS, CONF_DAYS_OF_MONTH, CONF_MONTHS,
CONF_DAYS_OF_WEEK]
def validate_cron_raw(value):
value = cv.string(value)
value = value.split(' ')
if len(value) != 6: | seconds, minutes, hours, days_of_month, months, days_of_week = value
return {
CONF_SECONDS: validate_cron_seconds(seconds),
CONF_MINUTES: validate_cron_minutes(minutes),
CONF_HOURS: validate_cron_hours(hours),
CONF_DAYS_OF_MONTH: validate_cron_days_of_month(days_of_month),
CONF_MONTHS: validate_cron_months(months),
CONF_DAYS_OF_WEEK: validate_cron_days_of_week(days_of_week),
}
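# Illustrative result: validate_cron_raw('0 30 12 * * MON') yields seconds [0],
# minutes [30], hours [12], every day of month, every month, and
# days of week [2] ('MON' maps to 2 in the table above).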
def validate_cron_keys(value):
if CONF_CRON in value:
for key in value.keys():
if key in CRON_KEYS:
raise vol.Invalid("Cannot use option {} when cron: is specified.".format(key))
cron_ = value[CONF_CRON]
value = {x: value[x] for x in value if x != CONF_CRON}
value.update(cron_)
return value
return cv.has_at_least_one_key(*CRON_KEYS)(value)
def validate_tz(value):
value = cv.string_strict(value)
try:
import pytz
return convert_tz(pytz.timezone(value))
except Exception: # pylint: disable=broad-except
return value
TIME_PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_TIMEZONE, default=detect_tz): validate_tz,
vol.Optional(CONF_ON_TIME): automation.validate_automation({
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(CronTrigger),
vol.Optional(CONF_SECONDS): validate_cron_seconds,
vol.Optional(CONF_MINUTES): validate_cron_minutes,
vol.Optional(CONF_HOURS): validate_cron_hours,
vol.Optional(CONF_DAYS_OF_MONTH): validate_cron_days_of_month,
vol.Optional(CONF_MONTHS): validate_cron_months,
vol.Optional(CONF_DAYS_OF_WEEK): validate_cron_days_of_week,
vol.Optional(CONF_CRON): validate_cron_raw,
}, validate_cron_keys),
})
def setup_time_core_(time_var, config):
add(time_var.set_timezone(config[CONF_TIMEZONE]))
for conf in config.get(CONF_ON_TIME, []):
rhs = App.register_component(time_var.Pmake_cron_trigger())
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
seconds = conf.get(CONF_SECONDS, [x for x in range(0, 61)])
add(trigger.add_seconds(seconds))
minutes = conf.get(CONF_MINUTES, [x for x in range(0, 60)])
add(trigger.add_minutes(minutes))
hours = conf.get(CONF_HOURS, [x for x in range(0, 24)])
add(trigger.add_hours(hours))
days_of_month = conf.get(CONF_DAYS_OF_MONTH, [x for x in range(1, 32)])
add(trigger.add_days_of_month(days_of_month))
months = conf.get(CONF_MONTHS, [x for x in range(1, 13)])
add(trigger.add_months(months))
days_of_week = conf.get(CONF_DAYS_OF_WEEK, [x for x in range(1, 8)])
add(trigger.add_days_of_week(days_of_week))
automation.build_automation(trigger, NoArg, conf)
def setup_time(time_var, config):
CORE.add_job(setup_time_core_, time_var, config)
BUILD_FLAGS = '-DUSE_TIME' | raise vol.Invalid("Cron expression must consist of exactly 6 space-separated parts, "
"not {}".format(len(value))) |
Tabs.js | import React, { Component } from 'react';
import { TabContent, TabPane, Nav, NavItem, NavLink } from 'reactstrap';
import classnames from 'classnames';
class Tabs extends Component {
constructor(props) {
super(props);
this.toggle = this.toggle.bind(this);
this.state = {
activeTab: '1',
};
}
toggle(tab) {
if (this.state.activeTab !== tab) {
this.setState({
activeTab: tab,
});
}
}
render() {
return (
<div className="animated fadeIn">
<div className="row">
<div className="col-md-6 mb-4">
<Nav tabs>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '1' })}
onClick={() => { this.toggle('1'); }}
>
Home
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '2' })}
onClick={() => { this.toggle('2'); }}
>
Profile
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '3' })}
onClick={() => { this.toggle('3'); }}
>
Messages
</NavLink>
</NavItem>
</Nav>
<TabContent activeTab={this.state.activeTab}>
<TabPane tabId="1">
1. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="2">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="3">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
</TabContent>
</div>
<div className="col-md-6 mb-4">
<Nav tabs>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '1' })}
onClick={() => { this.toggle('1'); }}
>
<i className="icon-calculator" />
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '2' })}
onClick={() => { this.toggle('2'); }}
>
<i className="icon-basket-loaded" />
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '3' })}
onClick={() => { this.toggle('3'); }}
>
<i className="icon-pie-chart" />
</NavLink>
</NavItem>
</Nav>
<TabContent activeTab={this.state.activeTab}>
<TabPane tabId="1">
1. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="2">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="3">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
</TabContent>
</div>
<div className="col-md-6 mb-4">
<Nav tabs>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '1' })}
onClick={() => { this.toggle('1'); }}
>
<i className="icon-calculator" /> Calculator
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '2' })}
onClick={() => { this.toggle('2'); }}
>
<i className="icon-basket-loaded" /> Shoping cart
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '3' })}
onClick={() => { this.toggle('3'); }}
> | </NavLink>
</NavItem>
</Nav>
<TabContent activeTab={this.state.activeTab}>
<TabPane tabId="1">
1. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="2">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="3">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
</TabContent>
</div>
<div className="col-md-6 mb-4">
<Nav tabs>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '1' })}
onClick={() => { this.toggle('1'); }}
>
<i className="icon-calculator" /> Calculator <span className="badge badge-success">New</span>
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '2' })}
onClick={() => { this.toggle('2'); }}
>
<i className="icon-basket-loaded" /> Shoping cart <span className="badge badge-pill badge-danger">29</span>
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '3' })}
onClick={() => { this.toggle('3'); }}
>
<i className="icon-pie-chart" /> Charts
</NavLink>
</NavItem>
</Nav>
<TabContent activeTab={this.state.activeTab}>
<TabPane tabId="1">
1. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="2">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
<TabPane tabId="3">
2. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
</TabPane>
</TabContent>
</div>
</div>
</div>
);
}
}
export default Tabs; | <i className="icon-pie-chart" /> Charts |
listAllLoadBalancers.ts | /*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
/**
* This sample demonstrates how to Gets all the load balancers in a subscription.
*
* @summary Gets all the load balancers in a subscription.
* x-ms-original-file: specification/network/resource-manager/Microsoft.Network/stable/2021-05-01/examples/LoadBalancerListAll.json
*/
import { NetworkManagementClient } from "@msinternal/network-resource-manager";
import { DefaultAzureCredential } from "@azure/identity";
async function | () {
const subscriptionId = "subid";
const credential = new DefaultAzureCredential();
const client = new NetworkManagementClient(credential, subscriptionId);
const resArray = new Array();
for await (let item of client.loadBalancers.listAll()) {
resArray.push(item);
}
console.log(resArray);
}
listAllLoadBalancers().catch(console.error);
| listAllLoadBalancers |
sensor.py | """Support gathering system information of hosts which are running glances."""
import logging
from homeassistant.const import CONF_NAME, STATE_UNAVAILABLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DATA_UPDATED, DOMAIN, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Glances sensors."""
client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = []
for sensor_type, sensor_details in SENSOR_TYPES.items():
if not sensor_details[0] in client.api.data:
continue
if sensor_details[0] in client.api.data:
if sensor_details[0] == "fs":
# fs will provide a list of disks attached
for disk in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
disk["mnt_point"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif sensor_details[0] == "sensors":
# sensors will provide temp for different devices
for sensor in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
sensor["label"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
"",
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
async_add_entities(dev, True)
class GlancesSensor(Entity):
"""Implementation of a Glances sensor."""
def __init__(
self,
glances_data,
name,
sensor_name_prefix,
sensor_name_suffix,
sensor_type,
sensor_details,
):
"""Initialize the sensor."""
self.glances_data = glances_data
self._sensor_name_prefix = sensor_name_prefix
self._sensor_name_suffix = sensor_name_suffix
self._name = name
self.type = sensor_type
self._state = None
self.sensor_details = sensor_details
self.unsub_update = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._sensor_name_prefix} {self._sensor_name_suffix}"
@property
def unique_id(self):
"""Set unique_id for sensor."""
return f"{self.glances_data.host}-{self.name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self.sensor_details[3]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.sensor_details[2]
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.glances_data.available
@property
def state(self):
"""Return the state of the resources."""
return self._state
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
async def async_added_to_hass(self):
"""Handle entity which will be added."""
self.unsub_update = async_dispatcher_connect(
self.hass, DATA_UPDATED, self._schedule_immediate_update
)
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
async def will_remove_from_hass(self):
|
async def async_update(self):
"""Get the latest data from REST API."""
value = self.glances_data.api.data
if value is None:
return
if value is not None:
if self.sensor_details[0] == "fs":
for var in value["fs"]:
if var["mnt_point"] == self._sensor_name_prefix:
disk = var
break
if self.type == "disk_use_percent":
self._state = disk["percent"]
elif self.type == "disk_use":
self._state = round(disk["used"] / 1024 ** 3, 1)
elif self.type == "disk_free":
try:
self._state = round(disk["free"] / 1024 ** 3, 1)
except KeyError:
self._state = round(
(disk["size"] - disk["used"]) / 1024 ** 3, 1,
)
elif self.type == "sensor_temp":
for sensor in value["sensors"]:
if sensor["label"] == self._sensor_name_prefix:
self._state = sensor["value"]
break
elif self.type == "memory_use_percent":
self._state = value["mem"]["percent"]
elif self.type == "memory_use":
self._state = round(value["mem"]["used"] / 1024 ** 2, 1)
elif self.type == "memory_free":
self._state = round(value["mem"]["free"] / 1024 ** 2, 1)
elif self.type == "swap_use_percent":
self._state = value["memswap"]["percent"]
elif self.type == "swap_use":
self._state = round(value["memswap"]["used"] / 1024 ** 3, 1)
elif self.type == "swap_free":
self._state = round(value["memswap"]["free"] / 1024 ** 3, 1)
elif self.type == "processor_load":
# Windows systems don't provide load details
try:
self._state = value["load"]["min15"]
except KeyError:
self._state = value["cpu"]["total"]
elif self.type == "process_running":
self._state = value["processcount"]["running"]
elif self.type == "process_total":
self._state = value["processcount"]["total"]
elif self.type == "process_thread":
self._state = value["processcount"]["thread"]
elif self.type == "process_sleeping":
self._state = value["processcount"]["sleeping"]
elif self.type == "cpu_use_percent":
self._state = value["quicklook"]["cpu"]
elif self.type == "docker_active":
count = 0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
count += 1
self._state = count
except KeyError:
self._state = count
elif self.type == "docker_cpu_use":
cpu_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
cpu_use += container["cpu"]["total"]
self._state = round(cpu_use, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
elif self.type == "docker_memory_use":
mem_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
mem_use += container["memory"]["usage"]
self._state = round(mem_use / 1024 ** 2, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
| """Unsubscribe from update dispatcher."""
if self.unsub_update:
self.unsub_update()
self.unsub_update = None |
result.py | # !/usr/bin/env python3
# Author: C.K
# Email: [email protected]
# DateTime:2021-04-27 21:41:14
# Description:
import os
import sys
class Solution(object):
def subsets(self, nums):
ret = []
self.dfs(nums, [], ret)
return ret
def dfs(self, nums, path, ret):
ret.append(path)
for i in range(len(nums)):
self.dfs(nums[i + 1:], path + [nums[i]], ret)
# Bit Manipulation
def | (self, nums):
res = []
nums.sort()
        for i in range(1 << len(nums)):
tmp = []
            for j in range(len(nums)):
if i & 1 << j: # if i >> j & 1:
tmp.append(nums[j])
res.append(tmp)
return res
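        # Illustrative trace for nums=[1, 2]: i runs over 0..3 and the set bits
        # of i select elements, producing [], [1], [2], [1, 2].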
# Iteratively
def subsets(self, nums):
res = [[]]
for num in sorted(nums):
res += [item + [num] for item in res]
return res
if __name__ == "__main__":
pass
| subsets2 |
inRange.ts | import { isGreaterOrEqual } from "./isGreaterOrEqual";
import { isLessOrEqual } from "./isLessOrEqual";
/**
* @memberof Number
* @name inRange
 * @description Determines whether a number falls within a closed interval
 * @param {number} num - The number to check
 * @param {number} low - Lower bound of the closed interval
 * @param {number} high - Upper bound of the closed interval
* @returns - {number} | const inRange = (num: number, low: number, high: number): boolean =>
isGreaterOrEqual(num, low) && isLessOrEqual(num, high);
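// Illustrative usage (both boundaries are inclusive):
//   inRange(5, 1, 10)  -> true
//   inRange(10, 1, 10) -> true
//   inRange(0, 1, 10)  -> false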
export { inRange }; | */
|
general.py | # Copyright (c) Facebook, Inc. and its affiliates.
import collections
import gc
import os
from bisect import bisect
import requests
import torch
import tqdm
import yaml
from torch import nn
def lr_lambda_update(i_iter, cfg):
if (
cfg["training_parameters"]["use_warmup"] is True
and i_iter <= cfg["training_parameters"]["warmup_iterations"]
):
alpha = float(i_iter) / float(cfg["training_parameters"]["warmup_iterations"])
return cfg["training_parameters"]["warmup_factor"] * (1.0 - alpha) + alpha
else:
idx = bisect(cfg["training_parameters"]["lr_steps"], i_iter)
return pow(cfg["training_parameters"]["lr_ratio"], idx)
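# Illustrative values (assuming warmup_iterations=1000, warmup_factor=0.2,
# lr_steps=[14000], lr_ratio=0.1): at i_iter=500 the warmup branch returns
# 0.2 * 0.5 + 0.5 = 0.6, and at i_iter=20000 the decay branch returns 0.1 ** 1.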
def clip_gradients(model, i_iter, writer, config):
# TODO: Fix question model retrieval
max_grad_l2_norm = config["training_parameters"]["max_grad_l2_norm"]
clip_norm_mode = config["training_parameters"]["clip_norm_mode"]
if max_grad_l2_norm is not None:
if clip_norm_mode == "all":
norm = nn.utils.clip_grad_norm_(model.parameters(), max_grad_l2_norm)
writer.add_scalars({"grad_norm": norm}, i_iter)
elif clip_norm_mode == "question":
question_embedding = model.module.question_embedding_module
norm = nn.utils.clip_grad_norm(
question_embedding.parameters(), max_grad_l2_norm
)
writer.add_scalars({"question_grad_norm": norm}, i_iter)
else:
raise NotImplementedError(
"Clip norm mode %s not implemented" % clip_norm_mode
)
def ckpt_name_from_core_args(config):
return "%s_%s_%s_%d" % (
config["tasks"],
config["datasets"],
config["model"],
config["training_parameters"]["seed"],
)
def foldername_from_config_override(args):
cfg_override = None
if hasattr(args, "config_override"):
cfg_override = args.config_override
elif "config_override" in args:
cfg_override = args["config_override"]
folder_name = ""
if cfg_override is not None and len(cfg_override) > 0:
folder_name = yaml.safe_dump(cfg_override, default_flow_style=True)
folder_name = folder_name.replace(":", ".").replace("\n", " ")
folder_name = folder_name.replace("/", "_")
folder_name = " ".join(folder_name.split())
folder_name = folder_name.replace(". ", ".").replace(" ", "_")
folder_name = "_" + folder_name
return folder_name
def get_pythia_root():
from pythia.common.registry import registry
pythia_root = registry.get("pythia_root", no_warning=True)
if pythia_root is None:
pythia_root = os.path.dirname(os.path.abspath(__file__))
pythia_root = os.path.abspath(os.path.join(pythia_root, ".."))
registry.register("pythia_root", pythia_root)
return pythia_root
def download_file(url, output_dir=".", filename=""):
if len(filename) == 0:
filename = os.path.join(".", url.split("/")[-1])
os.makedirs(output_dir, exist_ok=True)
filename = os.path.join(output_dir, filename)
r = requests.get(url, stream=True)
file_size = int(r.headers["Content-Length"])
chunk_size = 1024 * 1024
num_bars = int(file_size / chunk_size)
with open(filename, "wb") as fh:
for chunk in tqdm.tqdm(
r.iter_content(chunk_size=chunk_size),
total=num_bars,
unit="MB",
desc=filename,
leave=True,
):
fh.write(chunk)
def get_optimizer_parameters(model, config):
|
def dict_to_string(dictionary):
logs = []
if dictionary is None:
return ""
for key, val in dictionary.items():
if hasattr(val, "item"):
val = val.item()
# if key.count('_') == 2:
# key = key[key.find('_') + 1:]
logs.append("%s: %.4f" % (key, val))
return ", ".join(logs)
def get_overlap_score(candidate, target):
"""Takes a candidate word and a target word and returns the overlap
score between the two.
Parameters
----------
candidate : str
Candidate word whose overlap has to be detected.
target : str
Target word against which the overlap will be detected
Returns
-------
float
Overlap score betwen candidate and the target.
"""
if len(candidate) < len(target):
temp = candidate
candidate = target
target = temp
overlap = 0.0
while len(target) >= 2:
if target in candidate:
overlap = len(target)
return overlap * 1.0 / len(candidate)
else:
target = target[:-1]
return 0.0
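# Worked example (illustrative): get_overlap_score('hell', 'hello') swaps the
# arguments so candidate='hello', target='hell'; 'hell' is a substring of
# 'hello', so the score is 4 / 5 = 0.8.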
def updir(d, n):
"""Given path d, go up n dirs from d and return that path"""
ret_val = d
for _ in range(n):
ret_val = os.path.dirname(ret_val)
return ret_val
def print_cuda_usage():
print("Memory Allocated:", torch.cuda.memory_allocated() / (1024 * 1024))
print("Max Memory Allocated:", torch.cuda.max_memory_allocated() / (1024 * 1024))
print("Memory Cached:", torch.cuda.memory_cached() / (1024 * 1024))
print("Max Memory Cached:", torch.cuda.max_memory_cached() / (1024 * 1024))
def get_current_tensors():
for obj in gc.get_objects():
try:
if torch.is_tensor(obj) or (
hasattr(obj, "data") and torch.is_tensor(obj.data)
):
print(type(obj), obj.size())
except:
pass
| parameters = model.parameters()
has_custom = hasattr(model, "get_optimizer_parameters")
if has_custom:
parameters = model.get_optimizer_parameters(config)
is_parallel = isinstance(model, nn.DataParallel)
if is_parallel and hasattr(model.module, "get_optimizer_parameters"):
parameters = model.module.get_optimizer_parameters(config)
return parameters |
typescript-starter.ts | import { readFileSync, renameSync, writeFileSync } from 'fs';
import { join } from 'path';
import chalk from 'chalk';
import del from 'del';
import ora from 'ora';
import { replaceInFile } from 'replace-in-file';
import { Placeholders, Tasks } from './tasks';
import { normalizePath, Runner, TypescriptStarterOptions } from './utils';
const readPackageJson = (path: string) =>
JSON.parse(readFileSync(path, 'utf8'));
const writePackageJson = (path: string, pkg: unknown) => {
// write using the same format as npm:
// https://github.com/npm/npm/blob/latest/lib/install/update-package-json.js#L48
const stringified = JSON.stringify(pkg, null, 2) + '\n';
return writeFileSync(path, stringified);
};
export async function typescriptStarter(
{
appveyor,
circleci,
cspell,
description,
domDefinitions,
editorconfig,
email,
fullName,
githubUsername,
functional,
install,
nodeDefinitions,
projectName,
repoInfo,
runner,
strict,
travis,
vscode,
workingDirectory,
}: TypescriptStarterOptions,
tasks: Tasks
): Promise<void> {
console.log();
const { commitHash, gitHistoryDir } = await tasks.cloneRepo(
repoInfo,
workingDirectory,
projectName
);
await del([normalizePath(gitHistoryDir)]);
console.log(`
${chalk.dim(`Cloned at commit: ${commitHash}`)}
`);
const spinnerPackage = ora('Updating package.json').start();
const projectPath = join(workingDirectory, projectName);
const pkgPath = join(projectPath, 'package.json');
const keptDevDeps: ReadonlyArray<string> = [
'@ava/typescript',
'@istanbuljs/nyc-config-typescript',
'@typescript-eslint/eslint-plugin',
'@typescript-eslint/parser',
'ava',
'codecov',
'cspell',
'cz-conventional-changelog',
'eslint',
'eslint-config-prettier',
'eslint-plugin-eslint-comments',
...(functional ? ['eslint-plugin-functional'] : []),
'eslint-plugin-import',
'gh-pages',
'npm-run-all',
'nyc',
'open-cli',
'prettier',
'standard-version',
'trash-cli',
'ts-node',
'typedoc',
'typescript',
];
/**
* dependencies to retain for Node.js applications
*/
const nodeKeptDeps: ReadonlyArray<string> = ['@bitauth/libauth'];
const filterAllBut = (
keep: ReadonlyArray<string>,
from: { readonly [module: string]: number }
) =>
keep.reduce<{ readonly [module: string]: number }>(
(acc, moduleName: string) => {
return { ...acc, [moduleName]: from[moduleName] };
},
{}
);
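  // Illustrative behaviour: filterAllBut(['typescript'], { typescript: 1, ava: 2 })
  // returns { typescript: 1 }, i.e. only the listed module names survive.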
const pkg = readPackageJson(pkgPath);
const newPkg = {
...pkg,
dependencies: nodeDefinitions
? filterAllBut(nodeKeptDeps, pkg.dependencies)
: {},
description,
devDependencies: filterAllBut(keptDevDeps, pkg.devDependencies),
keywords: [],
name: projectName,
repository: `https://github.com/${githubUsername}/${projectName}`,
scripts:
runner === Runner.Yarn
? {
...pkg.scripts,
'reset-hard': `git clean -dfx && git reset --hard && yarn`,
}
: { ...pkg.scripts },
version: '1.0.0',
};
// eslint-disable-next-line functional/immutable-data
delete newPkg.bin;
// eslint-disable-next-line functional/immutable-data
delete newPkg.NOTE;
// eslint-disable-next-line functional/immutable-data
delete newPkg.NOTE_2;
writePackageJson(pkgPath, newPkg);
spinnerPackage.succeed();
const spinnerGitignore = ora('Updating .gitignore').start();
if (runner === Runner.Yarn) {
await replaceInFile({
files: join(projectPath, '.gitignore'),
from: 'yarn.lock',
to: 'package-lock.json',
});
}
spinnerGitignore.succeed();
const spinnerLicense = ora('Updating LICENSE').start();
await replaceInFile({
files: join(projectPath, 'LICENSE'),
// cspell: disable-next-line
from: 'Jason Dreyzehner',
to: fullName,
});
await replaceInFile({
files: join(projectPath, 'LICENSE'),
from: '2017',
to: new Date().getUTCFullYear().toString(),
});
spinnerLicense.succeed();
const spinnerDelete = ora('Deleting unnecessary files').start();
await del([
normalizePath(join(projectPath, 'CHANGELOG.md')),
normalizePath(join(projectPath, 'README.md')),
normalizePath(join(projectPath, 'package-lock.json')),
normalizePath(join(projectPath, 'bin')),
normalizePath(join(projectPath, 'src', 'cli')),
]);
if (!appveyor) {
del([normalizePath(join(projectPath, 'appveyor.yml'))]);
}
if (!circleci) {
del([normalizePath(join(projectPath, '.circleci'))]);
}
if (!cspell) {
del([normalizePath(join(projectPath, '.cspell.json'))]);
if (vscode) {
await replaceInFile({
files: join(projectPath, '.vscode', 'settings.json'),
from: ` "cSpell.userWords": [], // only use words from .cspell.json\n`,
to: '',
});
await replaceInFile({
files: join(projectPath, '.vscode', 'settings.json'),
from: ` "cSpell.enabled": true,\n`,
to: '',
});
}
}
if (!travis) {
del([normalizePath(join(projectPath, '.travis.yml'))]);
}
if (!editorconfig) {
del([normalizePath(join(projectPath, '.editorconfig'))]);
}
if (!vscode) {
del([normalizePath(join(projectPath, '.vscode'))]);
}
spinnerDelete.succeed();
const spinnerTsconfigModule = ora('Removing traces of the CLI').start();
await replaceInFile({
files: join(projectPath, 'tsconfig.module.json'),
from: /,\s+\/\/ typescript-starter:[\s\S]*"src\/cli\/\*\*\/\*\.ts"/,
to: '',
});
if (vscode) {
await replaceInFile({
files: join(projectPath, '.vscode', 'launch.json'),
from: /,[\s]*\/\/ --- cut here ---[\s\S]*]/,
to: ']',
});
}
spinnerTsconfigModule.succeed();
const spinnerReadme = ora('Creating README.md').start();
renameSync( | join(projectPath, 'README.md')
);
await replaceInFile({
files: join(projectPath, 'README.md'),
from: '[package-name]',
to: projectName,
});
await replaceInFile({
files: join(projectPath, 'README.md'),
from: '[description]',
to: description,
});
spinnerReadme.succeed();
if (!strict) {
const spinnerStrict = ora(`tsconfig: disable strict`).start();
await replaceInFile({
files: join(projectPath, 'tsconfig.json'),
from: '"strict": true',
to: '// "strict": true',
});
spinnerStrict.succeed();
}
if (!domDefinitions) {
const spinnerDom = ora(`tsconfig: don't include "dom" lib`).start();
await replaceInFile({
files: join(projectPath, 'tsconfig.json'),
from: '"lib": ["es2017", "dom"]',
to: '"lib": ["es2017"]',
});
spinnerDom.succeed();
}
if (!nodeDefinitions) {
const spinnerNode = ora(`tsconfig: don't include "node" types`).start();
await replaceInFile({
files: join(projectPath, 'tsconfig.json'),
from: '"types": ["node"]',
to: '"types": []',
});
await replaceInFile({
files: join(projectPath, 'src', 'index.ts'),
from: /^export[\S\s]*hash';\s*/,
to: '',
});
await del([
normalizePath(join(projectPath, 'src', 'lib', 'hash.ts')),
normalizePath(join(projectPath, 'src', 'lib', 'hash.spec.ts')),
normalizePath(join(projectPath, 'src', 'lib', 'async.ts')),
normalizePath(join(projectPath, 'src', 'lib', 'async.spec.ts')),
]);
spinnerNode.succeed();
}
if (!functional) {
const spinnerEslint = ora(
`eslint: disable eslint-plugin-functional`
).start();
await replaceInFile({
files: join(projectPath, '.eslintrc.json'),
from: '"plugins": ["import", "eslint-comments", "functional"]',
to: '"plugins": ["import", "eslint-comments"]',
});
await replaceInFile({
files: join(projectPath, '.eslintrc.json'),
from: '"plugin:functional/lite",\n',
to: '',
});
spinnerEslint.succeed();
}
if (install) {
await tasks.install(runner, projectPath);
}
  const gitIsConfigured =
    fullName !== Placeholders.name && email !== Placeholders.email;
if (gitIsConfigured) {
const spinnerGitInit = ora(`Initializing git repository...`).start();
await tasks.initialCommit(commitHash, projectPath, fullName);
spinnerGitInit.succeed();
}
console.log(`\n${chalk.blue.bold(`Created ${projectName} 🎉`)}\n`);
} | join(projectPath, 'README-starter.md'), |
main.go | /*
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
Configurator is a dedicated Magma Cloud service which maintains configurations
and meta data for the network and network entity structures.
*/
package main
import (
"magma/orc8r/cloud/go/orc8r"
"magma/orc8r/cloud/go/service"
"magma/orc8r/cloud/go/services/configurator"
"magma/orc8r/cloud/go/services/configurator/protos"
"magma/orc8r/cloud/go/services/configurator/servicers"
"magma/orc8r/cloud/go/services/configurator/storage"
"magma/orc8r/cloud/go/sqorc"
storage2 "magma/orc8r/cloud/go/storage"
"github.com/golang/glog"
)
const (
maxEntityLoadSizeConfigKey = "maxEntityLoadSize"
)
func | () {
// Create the service
srv, err := service.NewOrchestratorService(orc8r.ModuleName, configurator.ServiceName)
if err != nil {
glog.Fatalf("Error creating service: %s", err)
}
db, err := sqorc.Open(storage2.SQLDriver, storage2.DatabaseSource)
if err != nil {
glog.Fatalf("Failed to connect to database: %s", err)
}
maxEntityLoadSize, err := srv.Config.GetInt(maxEntityLoadSizeConfigKey)
if err != nil {
glog.Fatalf("Failed to load '%s' from config: %s", maxEntityLoadSizeConfigKey, err)
}
factory := storage.NewSQLConfiguratorStorageFactory(db, &storage2.UUIDGenerator{}, sqorc.GetSqlBuilder(), uint32(maxEntityLoadSize))
err = factory.InitializeServiceStorage()
if err != nil {
glog.Fatalf("Failed to initialize configurator database: %s", err)
}
nbServicer, err := servicers.NewNorthboundConfiguratorServicer(factory)
if err != nil {
glog.Fatalf("Failed to instantiate the user-facing configurator servicer: %v", nbServicer)
}
protos.RegisterNorthboundConfiguratorServer(srv.GrpcServer, nbServicer)
sbServicer, err := servicers.NewSouthboundConfiguratorServicer(factory)
if err != nil {
glog.Fatalf("Failed to instantiate the device-facing configurator servicer: %v", sbServicer)
}
protos.RegisterSouthboundConfiguratorServer(srv.GrpcServer, sbServicer)
err = srv.Run()
if err != nil {
glog.Fatalf("Failed to start configurator service: %v", err)
}
}
| main |
reserved-words-test.js | /* */
jest.autoMockOff();
describe('reserved-words', function() {
var transformFn;
var visitors;
beforeEach(function() {
require('mock-modules').dumpCache();
visitors = require('../reserved-words-visitors').visitorList;
transformFn = require('../../src/jstransform').transform;
});
function | (code, opts) {
return transformFn(visitors, code, opts).code;
}
describe('reserved words in member expressions', function() {
    it('should transform reserved word members to computed', function() {
var code = 'foo.delete;';
expect(transform(code)).toEqual('foo["delete"];');
code = '(foo++).delete;';
expect(transform(code)).toEqual('(foo++)["delete"];');
});
it('should handle call expressions', function() {
var code = 'foo.return();';
expect(transform(code)).toEqual('foo["return"]();');
});
it('should only quote ES3 reserved words', function() {
var code = 'foo.await();';
expect(transform(code)).toEqual('foo.await();');
});
});
describe('reserved words in properties', function() {
it('should quote reserved words in properties', function() {
var code = 'var x = {null: 1};';
expect(transform(code)).toEqual('var x = {"null": 1};');
});
it('should only quote ES3 reserved words', function() {
var code = 'var x = {await: 1};';
expect(transform(code)).toEqual('var x = {await: 1};');
});
});
});
| transform |
he_is_back.py | # -*- coding: utf-8 -*-
'''
Episode 7-3
'''
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.append('storybuilder')
from storybuilder.builder.world import World
# DEFINE
TITLE = "英雄の帰還"
# NOTE: outlines
ABSTRACT = """
変身して$sherlockたちを追い詰める$jake。しかし$sherlockの機転で工場に穴を開け、日光を浴びせかけることで$jakeは皮膚から大量に出血し、爆発した。
その爆音を聞いて$limeたちが駆けつける。$maryが身を挺して$sherlockを守っていたが、$maryは大怪我を負ってしまった。入院することになる$mary。
戻った$sherlockは、一旦$wilsonの家で$limeたちに事情を語る。
$morianoとの対決により滝壺に落下し、死を覚悟した$sherlockだったが、$maryが繕ってくれた服の裾が引っかかり、何とか死だけは免れた。
ただ大怪我をしており、そこを助けてくれたのが、$jackだった。彼女の別荘で回復するまで休養しながら各国の情報を集め、$moriano配下の動向を追いかけていた。
未だに$sherlockを探す動きが見えたので、おびき出すために空き家の件をでっち上げた。だがそれを利用した$jakeにより$maryがおびき出された、というのが今回の一件だった。
$sherlockは$maryに預けておいた$blue_stoneを取り戻す必要があると言う。
しかし$sherlockたちが病院に駆けつけると、$maryの姿が消えていた。
"""
# Episode
def main(w: World):
return w.episode(TITLE,
# NOTE
w.plot_setup("連続殺人犯$jakeは$maryを殺そうとする"),
w.plot_turnpoint("そこにホームレスが助けに入る"),
w.plot_develop("$sherlockは$jakeがどんな人生を歩んできたかを全て言い当て$jakeの牙を無力化しようとする"),
w.plot_turnpoint("$transformした$maryにより$sherlockが守られるが、彼女が負傷する"),
w.plot_resolve("$sherlockが呼んでおいた警察により$jakeは捕らえられた。$maryは入院し、$sherlockも治療を受ける"),
w.plot_turnpoint("入院している$maryから$blue_stoneを貰おうと思ったが$patosonにより連れ出された後だった"),
w.plot_note("$maryは病室で目覚める"),
w.plot_note("そこには$patsonの姿があった"),
w.plot_note("$maryは$sherlockは? と尋ねるが、わからないと言われる"),
w.plot_note("$patsonは$maryへの事情聴取を行う"),
w.plot_note("一体あそこで何を見たのか"),
| w.plot_note("$maryはその黒焦げの遺体が、連続猟奇殺人事件の犯人だと証言した"),
w.plot_note("$patsonは$jakeがそう告白したのか? と尋ねた"),
#
w.plot_note("$limeは$ignesたちから$maryが爆発現場で発見されたと聞く"),
w.plot_note("その$ignesはホームレスと仲良さそうに話している"),
w.plot_note("その男こそ$sherlockだった"),
w.plot_note("$limeは驚き、事情を聞く"),
w.plot_note("$sherlockは実はずいぶん前に国内に戻ってきていて、$ignesは事情を知らされていた"),
w.plot_note("$sherlockを狙う連中をごまかすために、色々と嘘の情報をばらまいていた"),
w.plot_note("空き家情報も嘘のものだったが、それを使って猟奇殺人犯の$jakeが細工をし、$maryをおびき出した"),
w.plot_note("それを先導した人間が誰かいる、と$sherlockは言う"),
w.plot_note("滝壺から落ちたあと、$jackに助けられ、彼女の隠れ家で治療をしてもらっていた"),
w.plot_note("今回殺害されていた$ronaldが所有していた最後の$black_stoneが盗まれたことがわかり、戻ってきた"),
w.plot_note("四つ$stoneを揃えられるとまずい、と$shserlockは言う"),
w.plot_note("ひとまず$maryの様子を見に行くことにし、タクシーを拾う(これが$jack)"),
#
w.plot_note("病院にやってくると先に様子をみにきていた$refiがいる"),
w.plot_note("$refiは泣きそうになって、$maryを$patsonが連れ出したという"),
w.plot_note("$sherlockはそれで理解し、すぐに大聖堂に向かうと"),
w.plot_note("しかし$wilsonがいない。タクシー運転手に頼んで向かってもらう"),
#
w.plot_note("車内で説明する$sherlock"),
w.plot_note("四つの$stoneは$boss復活の儀式に必要な祭具だった"),
w.plot_note("かつて$bossを倒した$heroたちの神器にはまっていたものだが、$bossの力を吸収し、封じ込めたもの"),
w.plot_note("それが時代を経て、売られたり、盗まれたりし、行方不明になった"),
w.plot_note("今ある多くはレプリカだという"),
w.plot_note("実際に四つ揃え、かつての$boss城があった場所で儀式を行う"),
w.plot_note("それが大聖堂だという"),
w.plot_note("$boss城を封じる目的であの場所に建っていたのだ"),
w.plot_note("昨年春にあった地震は儀式の失敗だという"),
w.plot_note("その頃はまだ何が必要なのか、すべて判明していなかった。だが$stein教授により解明された"),
w.plot_note("その資料は$morianoにより盗まれ、紛失している"),
w.plot_note("実際にどういうものなのかは$sherlockも知らない"),
#
"$wilsonは最後に登場",
w.plot_note("大聖堂にやってくると、何があったのか警官($parkerたち)が警備していた"),
w.plot_note("巨大な爆弾が見つかったというのでみんなを避難させるように言われたと"),
w.plot_note("そこに$restradeもやってきて、困惑している"),
w.plot_note("一体何をやってるんだ、$patsonはと"),
w.plot_note("$sherlockはすぐ$patsonの家を調べるように言う。彼が$cultXの手先だった"),
w.plot_note("$sherlockは中に入る"),
#
w.plot_note("大聖堂の中は人がいなくなり、静まり返っていた"),
w.plot_note("聖堂を進む"),
w.plot_note("偉人たちの墓が並ぶ聖廟でもあった"),
w.plot_note("その一つが開けられている。中身はない"),
w.plot_note("扉があり、奥にいくと地下への階段"),
w.plot_note("地下に降りていく$sherlockたち"),
w.plot_note("そこには巨大なホールが広がっていた"),
w.plot_note("祭壇には四つの$stoneが供えられ、$patsonが儀式を始めようとしている"),
w.plot_note("誰も入れるなと言ったのに、と不敵な顔の$patson"),
w.plot_note("$maryは倒れていた。服が少し破れている。中に$stoneを身に着けていたからだ"),
w.plot_note("$sherlockがすぐにやめるように忠告する"),
w.plot_note("儀式は失敗すると言った"),
w.plot_note("しかし$patsonは儀式を行うべく、祝詞をとなえる"),
w.plot_note("その$patsonを現れた$wilsonが$gunで撃ち抜いた"),
w.plot_note("「間に合ってよかったよ」という$wilson"),
outline=ABSTRACT)
| |
authmap.py | import re
import pwd
import grp
import ldap.asyncsearch
import ldap.filter
import ldap.dn
from ldapauthkeys.util import *
from ldapauthkeys.logging import get_logger
from ldapauthkeys.config import array_merge_unique
# List of paths where we'll search for an authmap
authmap_paths = [
'./authmap',
'/etc/openssh-ldap-authkeys/authmap',
]
class AuthorizedEntity:
"""
Represents a single LDAP entity (user or group) authorized to login to a
local account.
"""
entity = None
realm = None
def __init__(self, entity, realm):
self.entity = entity
self.realm = realm
def to_ldap_user_list(self, connection, config):
"""
Turn this authorized entity into a user, or list of users, permitted to
login to LDAP. Must return a list of (user, realm) tuples. Example
return value:
[("jdoe", "EXAMPLE.COM",),]
Arguments:
- LDAP connection handle
- Configuration YAML hash
"""
return []
class AuthorizedGroup(AuthorizedEntity):
"""
Represents an LDAP group.
"""
def to_ldap_user_list(self, connection, config):
# Initialize the result
users = []
# Attempt to obtain a record of this group from LDAP. Perform a search
# matching on the configured group filter, group name attribute and name
# of the group.
search = ldap.asyncsearch.List(connection)
search_filter = ldap.filter.filter_format(
"(&%s%s)" % (config['ldap']['filters']['group'], "(%s=%s)"),
[config['ldap']['attributes']['group_name'], self.entity]
)
get_logger('authmap').info(
'Attempting to find group "%s" in basedn "%s"' % (
self.entity,
self.realm
)
)
try:
# Search the basedn for the group. Retrieve only the group
# membership attribute - we don't care about the rest.
search.startSearch(self.realm, ldap.SCOPE_SUBTREE, search_filter,
[config['ldap']['attributes']['group_member']])
except Exception as e:
# On any exception, return an empty array.
get_logger('ldap').error(
'Failed to search for group "%s" in basedn "%s": %s: %s' % (
self.entity,
self.realm,
e.__class__.__name__,
repr(e.args)
)
)
return []
search.processResults()
for rcode, result in search.allResults:
# For each result, we have the DN of the group, and attributes.
group_dn, attrs = result
get_logger('authmap').info(
'Located group "%s" at DN "%s"' % (
self.entity,
group_dn
)
)
# Iterate through group members.
for user in attrs[config['ldap']['attributes']['group_member']]:
if config['ldap']['group_membership'] == 'uid':
# "user" contains just a username (i.e. "jdoe"). We'll
# search the user tree for this username, relying on the
# config to tell us which attribute the username must match.
users.append([user.decode('utf-8'), self.realm])
elif config['ldap']['group_membership'] == 'dn':
# FIXME: this LDAP server uses DNs to identify members of
# groups, but if the RDN attribute isn't the same as the
# configured username attribute, we would need to go get the
# user by DN and then search and retrieve their entry a
                    # second time to get their SSH keys. This isn't supported
# at the moment. Throw a nice warning and skip this user.
user_dn = ldap.dn.str2dn(user.decode('utf-8'))
attr, value = user_dn[0][0][0:2]
if attr != config['ldap']['attributes']['username']:
get_logger('authmap').error(
('At present, group membership lookups rely on the username attribute ' +
'being the RDN attribute for users. The group "%s" references the DN "%s", ' +
'which we can\'t look up as part of a search because the UID attribute ' +
'is not "%s" but rather "%s". This user won\'t be authorized.') % (
group_dn, user.decode('utf-8'), attr, config['ldap']['attributes']['username']
)
)
get_logger('authmap').info(
'Found user "%s" in group "%s"' % (
value,
self.entity
)
)
users.append([value, self.realm])
else:
raise ValueError('Unsupported group membership strategy "%s". Supported strategies are "uid" and "dn".' % (
config['ldap']['group_membership']
))
return users
class AuthorizedUser(AuthorizedEntity):
"""
Represents a single authorized user.
"""
def to_ldap_user_list(self, ldap, config):
"""
1:1 map of a username to an LDAP entry
"""
return [(self.entity, self.realm,)]
class AuthorizedEntityCollection:
"""
Collection of AuthorizedEntity objects.
"""
    def __init__(self):
        self.entries = []
def append(self, entity):
for e in self.entries:
if e.entity == entity.entity and e.realm == entity.realm and isinstance(e, entity.__class__):
return
self.entries.append(entity)
def length(self):
"""
Get the length of the collection.
"""
return len(self.entries)
def to_ldap_search(self, ldap, config):
"""
Generate a list of LDAP entries to search for. The return value of this
method is to be provided to fetch_ldap_authkeys() (ldap.py) for
execution of the search.
Returns a dict composed in the following manner:
- The keys are the realm name, which is used to set the basedn of the
search.
- The values are lists of usernames. (This is the stage at which
groups are flattened to lists of users.)
"""
searches = {}
for entry in self.entries:
users = entry.to_ldap_user_list(ldap, config)
for uid, realm in users:
if not realm in searches.keys():
searches[realm] = []
if not uid in searches[realm]:
searches[realm].append(uid)
return searches
def parse_authmap(fp):
"""
Low level parser for authmap files. Takes a file pointer to an open authmap
file and parses it into a dict.
The returned dict will be composed as follows:
- The keys are the names of local entities (users or groups)
- The values are lists of authorized LDAP entities, represented as
strings.
"""
authmap = {}
while True:
        line = fp.readline()
if line == '':
break
line = line.strip()
if line == '' or line[0] == '#' or line.rstrip() == '':
continue
localuser, entities = line.split(':')
if not localuser in authmap:
authmap[localuser] = []
for entity in entities.split(','):
authmap[localuser].append(entity.strip())
return authmap
def get_authmap():
|
def lookup_authmap(authmap, user, config):
"""
Take an authmap and return the list of LDAP entities authorized to log in as
the given user.
Input:
- The return value of parse_authmap()
- The local username being logged into
- The OLAK config dict (load_config() in config.py)
Output:
- An AuthorizedEntityCollection representing all of the LDAP entities
that are allowed to log in as that local user
"""
entities = []
for key in authmap.keys():
entry = authmap[key]
if key[0] == '&':
# Local group
try:
group = grp.getgrnam(key[1:])
if user in group.gr_mem:
entities.append(entry)
except KeyError as e:
get_logger('authmap').warn('Local group "%s" does not exist' % (key[1:]))
        elif key == '@all' or key == user:
            entities.append(entry)
entities_stripped = AuthorizedEntityCollection()
for entry in entities:
for e in entry:
try:
domain_user, realm = e.split('@')
realm = domain_to_basedn(realm.lower())
except ValueError:
domain_user = e
realm = config['ldap']['default_realm']
if domain_user == '~self':
domain_user = user
if domain_user[0] == '&':
entities_stripped.append(AuthorizedGroup(domain_user[1:], realm))
else:
entities_stripped.append(AuthorizedUser(domain_user, realm))
return entities_stripped
| """
Load and parse the authmap file.
Returns a dict in the format documented by parse_authmap().
"""
for path in authmap_paths:
try:
with open(path) as fp:
authmap = parse_authmap(fp)
get_logger('authmap').info("Loaded auth from %s" % (path))
return authmap
except Exception as e:
pass
raise FileNotFoundError("Unable to load the OLAK authorized entity file from any of these paths: %s" % (', '.join(authmap_paths))) |
simulation.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v3/common/simulation.proto
package common
import (
fmt "fmt"
math "math"
proto "github.com/catper/protobuf/proto"
wrappers "github.com/catper/protobuf/ptypes/wrappers"
_ "google.golang.org/genproto/googleapis/api/annotations"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// A container for simulation points for simulations of type BID_MODIFIER.
type BidModifierSimulationPointList struct {
// Projected metrics for a series of bid modifier amounts.
Points []*BidModifierSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BidModifierSimulationPointList) Reset() { *m = BidModifierSimulationPointList{} }
func (m *BidModifierSimulationPointList) String() string { return proto.CompactTextString(m) }
func (*BidModifierSimulationPointList) ProtoMessage() {}
func (*BidModifierSimulationPointList) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{0}
}
func (m *BidModifierSimulationPointList) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BidModifierSimulationPointList.Unmarshal(m, b)
}
func (m *BidModifierSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BidModifierSimulationPointList.Marshal(b, m, deterministic)
}
func (m *BidModifierSimulationPointList) XXX_Merge(src proto.Message) {
xxx_messageInfo_BidModifierSimulationPointList.Merge(m, src)
}
func (m *BidModifierSimulationPointList) XXX_Size() int {
return xxx_messageInfo_BidModifierSimulationPointList.Size(m)
}
func (m *BidModifierSimulationPointList) XXX_DiscardUnknown() {
xxx_messageInfo_BidModifierSimulationPointList.DiscardUnknown(m)
}
var xxx_messageInfo_BidModifierSimulationPointList proto.InternalMessageInfo
func (m *BidModifierSimulationPointList) GetPoints() []*BidModifierSimulationPoint {
if m != nil {
return m.Points
}
return nil
}
// A container for simulation points for simulations of type CPC_BID.
type CpcBidSimulationPointList struct {
// Projected metrics for a series of CPC bid amounts.
Points []*CpcBidSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CpcBidSimulationPointList) Reset() { *m = CpcBidSimulationPointList{} }
func (m *CpcBidSimulationPointList) String() string { return proto.CompactTextString(m) }
func (*CpcBidSimulationPointList) ProtoMessage() {}
func (*CpcBidSimulationPointList) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{1}
}
func (m *CpcBidSimulationPointList) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CpcBidSimulationPointList.Unmarshal(m, b)
}
func (m *CpcBidSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CpcBidSimulationPointList.Marshal(b, m, deterministic)
}
func (m *CpcBidSimulationPointList) XXX_Merge(src proto.Message) {
xxx_messageInfo_CpcBidSimulationPointList.Merge(m, src)
}
func (m *CpcBidSimulationPointList) XXX_Size() int {
return xxx_messageInfo_CpcBidSimulationPointList.Size(m)
}
func (m *CpcBidSimulationPointList) XXX_DiscardUnknown() {
xxx_messageInfo_CpcBidSimulationPointList.DiscardUnknown(m)
}
var xxx_messageInfo_CpcBidSimulationPointList proto.InternalMessageInfo
func (m *CpcBidSimulationPointList) GetPoints() []*CpcBidSimulationPoint {
if m != nil {
return m.Points
}
return nil
}
// A container for simulation points for simulations of type CPV_BID.
type CpvBidSimulationPointList struct {
// Projected metrics for a series of CPV bid amounts.
Points []*CpvBidSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CpvBidSimulationPointList) Reset() { *m = CpvBidSimulationPointList{} }
func (m *CpvBidSimulationPointList) String() string { return proto.CompactTextString(m) }
func (*CpvBidSimulationPointList) ProtoMessage() {}
func (*CpvBidSimulationPointList) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{2}
}
func (m *CpvBidSimulationPointList) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CpvBidSimulationPointList.Unmarshal(m, b)
}
func (m *CpvBidSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CpvBidSimulationPointList.Marshal(b, m, deterministic)
}
func (m *CpvBidSimulationPointList) XXX_Merge(src proto.Message) {
xxx_messageInfo_CpvBidSimulationPointList.Merge(m, src)
}
func (m *CpvBidSimulationPointList) XXX_Size() int {
return xxx_messageInfo_CpvBidSimulationPointList.Size(m)
}
func (m *CpvBidSimulationPointList) XXX_DiscardUnknown() {
xxx_messageInfo_CpvBidSimulationPointList.DiscardUnknown(m)
}
var xxx_messageInfo_CpvBidSimulationPointList proto.InternalMessageInfo
func (m *CpvBidSimulationPointList) GetPoints() []*CpvBidSimulationPoint {
if m != nil {
return m.Points
}
return nil
}
// A container for simulation points for simulations of type TARGET_CPA.
type TargetCpaSimulationPointList struct {
// Projected metrics for a series of target CPA amounts.
Points []*TargetCpaSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *TargetCpaSimulationPointList) Reset() { *m = TargetCpaSimulationPointList{} }
func (m *TargetCpaSimulationPointList) String() string { return proto.CompactTextString(m) }
func (*TargetCpaSimulationPointList) ProtoMessage() {}
func (*TargetCpaSimulationPointList) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{3}
}
func (m *TargetCpaSimulationPointList) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_TargetCpaSimulationPointList.Unmarshal(m, b)
}
func (m *TargetCpaSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_TargetCpaSimulationPointList.Marshal(b, m, deterministic)
}
func (m *TargetCpaSimulationPointList) XXX_Merge(src proto.Message) {
xxx_messageInfo_TargetCpaSimulationPointList.Merge(m, src)
}
func (m *TargetCpaSimulationPointList) XXX_Size() int {
return xxx_messageInfo_TargetCpaSimulationPointList.Size(m)
}
func (m *TargetCpaSimulationPointList) XXX_DiscardUnknown() {
xxx_messageInfo_TargetCpaSimulationPointList.DiscardUnknown(m)
}
var xxx_messageInfo_TargetCpaSimulationPointList proto.InternalMessageInfo
func (m *TargetCpaSimulationPointList) GetPoints() []*TargetCpaSimulationPoint {
if m != nil {
return m.Points
}
return nil
}
// Projected metrics for a specific bid modifier amount.
type BidModifierSimulationPoint struct {
// The simulated bid modifier upon which projected metrics are based.
BidModifier *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"`
// Projected number of biddable conversions.
// Only search advertising channel type supports this field.
BiddableConversions *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=biddable_conversions,json=biddableConversions,proto3" json:"biddable_conversions,omitempty"`
// Projected total value of biddable conversions.
// Only search advertising channel type supports this field.
BiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=biddable_conversions_value,json=biddableConversionsValue,proto3" json:"biddable_conversions_value,omitempty"`
// Projected number of clicks.
Clicks *wrappers.Int64Value `protobuf:"bytes,4,opt,name=clicks,proto3" json:"clicks,omitempty"`
// Projected cost in micros.
CostMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"`
// Projected number of impressions.
Impressions *wrappers.Int64Value `protobuf:"bytes,6,opt,name=impressions,proto3" json:"impressions,omitempty"`
// Projected number of top slot impressions.
// Only search advertising channel type supports this field.
TopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,7,opt,name=top_slot_impressions,json=topSlotImpressions,proto3" json:"top_slot_impressions,omitempty"`
// Projected number of biddable conversions for the parent resource.
// Only search advertising channel type supports this field.
ParentBiddableConversions *wrappers.DoubleValue `protobuf:"bytes,8,opt,name=parent_biddable_conversions,json=parentBiddableConversions,proto3" json:"parent_biddable_conversions,omitempty"`
// Projected total value of biddable conversions for the parent resource.
// Only search advertising channel type supports this field.
ParentBiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,9,opt,name=parent_biddable_conversions_value,json=parentBiddableConversionsValue,proto3" json:"parent_biddable_conversions_value,omitempty"`
// Projected number of clicks for the parent resource.
ParentClicks *wrappers.Int64Value `protobuf:"bytes,10,opt,name=parent_clicks,json=parentClicks,proto3" json:"parent_clicks,omitempty"`
// Projected cost in micros for the parent resource.
ParentCostMicros *wrappers.Int64Value `protobuf:"bytes,11,opt,name=parent_cost_micros,json=parentCostMicros,proto3" json:"parent_cost_micros,omitempty"`
// Projected number of impressions for the parent resource.
ParentImpressions *wrappers.Int64Value `protobuf:"bytes,12,opt,name=parent_impressions,json=parentImpressions,proto3" json:"parent_impressions,omitempty"`
// Projected number of top slot impressions for the parent resource.
// Only search advertising channel type supports this field.
ParentTopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,13,opt,name=parent_top_slot_impressions,json=parentTopSlotImpressions,proto3" json:"parent_top_slot_impressions,omitempty"`
// Projected minimum daily budget that must be available to the parent
// resource to realize this simulation.
ParentRequiredBudgetMicros *wrappers.Int64Value `protobuf:"bytes,14,opt,name=parent_required_budget_micros,json=parentRequiredBudgetMicros,proto3" json:"parent_required_budget_micros,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BidModifierSimulationPoint) Reset() { *m = BidModifierSimulationPoint{} }
func (m *BidModifierSimulationPoint) String() string { return proto.CompactTextString(m) }
func (*BidModifierSimulationPoint) ProtoMessage() {}
func (*BidModifierSimulationPoint) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{4}
}
func (m *BidModifierSimulationPoint) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BidModifierSimulationPoint.Unmarshal(m, b)
}
func (m *BidModifierSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BidModifierSimulationPoint.Marshal(b, m, deterministic)
}
func (m *BidModifierSimulationPoint) XXX_Merge(src proto.Message) {
xxx_messageInfo_BidModifierSimulationPoint.Merge(m, src)
}
func (m *BidModifierSimulationPoint) XXX_Size() int {
return xxx_messageInfo_BidModifierSimulationPoint.Size(m)
}
func (m *BidModifierSimulationPoint) XXX_DiscardUnknown() {
xxx_messageInfo_BidModifierSimulationPoint.DiscardUnknown(m)
}
var xxx_messageInfo_BidModifierSimulationPoint proto.InternalMessageInfo
func (m *BidModifierSimulationPoint) GetBidModifier() *wrappers.DoubleValue {
if m != nil {
return m.BidModifier
}
return nil
}
func (m *BidModifierSimulationPoint) GetBiddableConversions() *wrappers.DoubleValue {
if m != nil {
return m.BiddableConversions
}
return nil
}
func (m *BidModifierSimulationPoint) GetBiddableConversionsValue() *wrappers.DoubleValue {
if m != nil |
return nil
}
func (m *BidModifierSimulationPoint) GetClicks() *wrappers.Int64Value {
if m != nil {
return m.Clicks
}
return nil
}
func (m *BidModifierSimulationPoint) GetCostMicros() *wrappers.Int64Value {
if m != nil {
return m.CostMicros
}
return nil
}
func (m *BidModifierSimulationPoint) GetImpressions() *wrappers.Int64Value {
if m != nil {
return m.Impressions
}
return nil
}
func (m *BidModifierSimulationPoint) GetTopSlotImpressions() *wrappers.Int64Value {
if m != nil {
return m.TopSlotImpressions
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentBiddableConversions() *wrappers.DoubleValue {
if m != nil {
return m.ParentBiddableConversions
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentBiddableConversionsValue() *wrappers.DoubleValue {
if m != nil {
return m.ParentBiddableConversionsValue
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentClicks() *wrappers.Int64Value {
if m != nil {
return m.ParentClicks
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentCostMicros() *wrappers.Int64Value {
if m != nil {
return m.ParentCostMicros
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentImpressions() *wrappers.Int64Value {
if m != nil {
return m.ParentImpressions
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentTopSlotImpressions() *wrappers.Int64Value {
if m != nil {
return m.ParentTopSlotImpressions
}
return nil
}
func (m *BidModifierSimulationPoint) GetParentRequiredBudgetMicros() *wrappers.Int64Value {
if m != nil {
return m.ParentRequiredBudgetMicros
}
return nil
}
// Projected metrics for a specific CPC bid amount.
type CpcBidSimulationPoint struct {
// The simulated CPC bid upon which projected metrics are based.
CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"`
// Projected number of biddable conversions.
BiddableConversions *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=biddable_conversions,json=biddableConversions,proto3" json:"biddable_conversions,omitempty"`
// Projected total value of biddable conversions.
BiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=biddable_conversions_value,json=biddableConversionsValue,proto3" json:"biddable_conversions_value,omitempty"`
// Projected number of clicks.
Clicks *wrappers.Int64Value `protobuf:"bytes,4,opt,name=clicks,proto3" json:"clicks,omitempty"`
// Projected cost in micros.
CostMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"`
// Projected number of impressions.
Impressions *wrappers.Int64Value `protobuf:"bytes,6,opt,name=impressions,proto3" json:"impressions,omitempty"`
// Projected number of top slot impressions.
// Only search advertising channel type supports this field.
TopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,7,opt,name=top_slot_impressions,json=topSlotImpressions,proto3" json:"top_slot_impressions,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CpcBidSimulationPoint) Reset() { *m = CpcBidSimulationPoint{} }
func (m *CpcBidSimulationPoint) String() string { return proto.CompactTextString(m) }
func (*CpcBidSimulationPoint) ProtoMessage() {}
func (*CpcBidSimulationPoint) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{5}
}
func (m *CpcBidSimulationPoint) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CpcBidSimulationPoint.Unmarshal(m, b)
}
func (m *CpcBidSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CpcBidSimulationPoint.Marshal(b, m, deterministic)
}
func (m *CpcBidSimulationPoint) XXX_Merge(src proto.Message) {
xxx_messageInfo_CpcBidSimulationPoint.Merge(m, src)
}
func (m *CpcBidSimulationPoint) XXX_Size() int {
return xxx_messageInfo_CpcBidSimulationPoint.Size(m)
}
func (m *CpcBidSimulationPoint) XXX_DiscardUnknown() {
xxx_messageInfo_CpcBidSimulationPoint.DiscardUnknown(m)
}
var xxx_messageInfo_CpcBidSimulationPoint proto.InternalMessageInfo
func (m *CpcBidSimulationPoint) GetCpcBidMicros() *wrappers.Int64Value {
if m != nil {
return m.CpcBidMicros
}
return nil
}
func (m *CpcBidSimulationPoint) GetBiddableConversions() *wrappers.DoubleValue {
if m != nil {
return m.BiddableConversions
}
return nil
}
func (m *CpcBidSimulationPoint) GetBiddableConversionsValue() *wrappers.DoubleValue {
if m != nil {
return m.BiddableConversionsValue
}
return nil
}
func (m *CpcBidSimulationPoint) GetClicks() *wrappers.Int64Value {
if m != nil {
return m.Clicks
}
return nil
}
func (m *CpcBidSimulationPoint) GetCostMicros() *wrappers.Int64Value {
if m != nil {
return m.CostMicros
}
return nil
}
func (m *CpcBidSimulationPoint) GetImpressions() *wrappers.Int64Value {
if m != nil {
return m.Impressions
}
return nil
}
func (m *CpcBidSimulationPoint) GetTopSlotImpressions() *wrappers.Int64Value {
if m != nil {
return m.TopSlotImpressions
}
return nil
}
// Projected metrics for a specific CPV bid amount.
type CpvBidSimulationPoint struct {
// The simulated CPV bid upon which projected metrics are based.
CpvBidMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=cpv_bid_micros,json=cpvBidMicros,proto3" json:"cpv_bid_micros,omitempty"`
// Projected cost in micros.
CostMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"`
// Projected number of impressions.
Impressions *wrappers.Int64Value `protobuf:"bytes,3,opt,name=impressions,proto3" json:"impressions,omitempty"`
// Projected number of views.
Views *wrappers.Int64Value `protobuf:"bytes,4,opt,name=views,proto3" json:"views,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CpvBidSimulationPoint) Reset() { *m = CpvBidSimulationPoint{} }
func (m *CpvBidSimulationPoint) String() string { return proto.CompactTextString(m) }
func (*CpvBidSimulationPoint) ProtoMessage() {}
func (*CpvBidSimulationPoint) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{6}
}
func (m *CpvBidSimulationPoint) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CpvBidSimulationPoint.Unmarshal(m, b)
}
func (m *CpvBidSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CpvBidSimulationPoint.Marshal(b, m, deterministic)
}
func (m *CpvBidSimulationPoint) XXX_Merge(src proto.Message) {
xxx_messageInfo_CpvBidSimulationPoint.Merge(m, src)
}
func (m *CpvBidSimulationPoint) XXX_Size() int {
return xxx_messageInfo_CpvBidSimulationPoint.Size(m)
}
func (m *CpvBidSimulationPoint) XXX_DiscardUnknown() {
xxx_messageInfo_CpvBidSimulationPoint.DiscardUnknown(m)
}
var xxx_messageInfo_CpvBidSimulationPoint proto.InternalMessageInfo
func (m *CpvBidSimulationPoint) GetCpvBidMicros() *wrappers.Int64Value {
if m != nil {
return m.CpvBidMicros
}
return nil
}
func (m *CpvBidSimulationPoint) GetCostMicros() *wrappers.Int64Value {
if m != nil {
return m.CostMicros
}
return nil
}
func (m *CpvBidSimulationPoint) GetImpressions() *wrappers.Int64Value {
if m != nil {
return m.Impressions
}
return nil
}
func (m *CpvBidSimulationPoint) GetViews() *wrappers.Int64Value {
if m != nil {
return m.Views
}
return nil
}
// Projected metrics for a specific target CPA amount.
type TargetCpaSimulationPoint struct {
// The simulated target CPA upon which projected metrics are based.
TargetCpaMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=target_cpa_micros,json=targetCpaMicros,proto3" json:"target_cpa_micros,omitempty"`
// Projected number of biddable conversions.
BiddableConversions *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=biddable_conversions,json=biddableConversions,proto3" json:"biddable_conversions,omitempty"`
// Projected total value of biddable conversions.
BiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=biddable_conversions_value,json=biddableConversionsValue,proto3" json:"biddable_conversions_value,omitempty"`
// Projected number of clicks.
Clicks *wrappers.Int64Value `protobuf:"bytes,4,opt,name=clicks,proto3" json:"clicks,omitempty"`
// Projected cost in micros.
CostMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"`
// Projected number of impressions.
Impressions *wrappers.Int64Value `protobuf:"bytes,6,opt,name=impressions,proto3" json:"impressions,omitempty"`
// Projected number of top slot impressions.
// Only search advertising channel type supports this field.
TopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,7,opt,name=top_slot_impressions,json=topSlotImpressions,proto3" json:"top_slot_impressions,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *TargetCpaSimulationPoint) Reset() { *m = TargetCpaSimulationPoint{} }
func (m *TargetCpaSimulationPoint) String() string { return proto.CompactTextString(m) }
func (*TargetCpaSimulationPoint) ProtoMessage() {}
func (*TargetCpaSimulationPoint) Descriptor() ([]byte, []int) {
return fileDescriptor_5ab119bcaf279127, []int{7}
}
func (m *TargetCpaSimulationPoint) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_TargetCpaSimulationPoint.Unmarshal(m, b)
}
func (m *TargetCpaSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_TargetCpaSimulationPoint.Marshal(b, m, deterministic)
}
func (m *TargetCpaSimulationPoint) XXX_Merge(src proto.Message) {
xxx_messageInfo_TargetCpaSimulationPoint.Merge(m, src)
}
func (m *TargetCpaSimulationPoint) XXX_Size() int {
return xxx_messageInfo_TargetCpaSimulationPoint.Size(m)
}
func (m *TargetCpaSimulationPoint) XXX_DiscardUnknown() {
xxx_messageInfo_TargetCpaSimulationPoint.DiscardUnknown(m)
}
var xxx_messageInfo_TargetCpaSimulationPoint proto.InternalMessageInfo
func (m *TargetCpaSimulationPoint) GetTargetCpaMicros() *wrappers.Int64Value {
if m != nil {
return m.TargetCpaMicros
}
return nil
}
func (m *TargetCpaSimulationPoint) GetBiddableConversions() *wrappers.DoubleValue {
if m != nil {
return m.BiddableConversions
}
return nil
}
func (m *TargetCpaSimulationPoint) GetBiddableConversionsValue() *wrappers.DoubleValue {
if m != nil {
return m.BiddableConversionsValue
}
return nil
}
func (m *TargetCpaSimulationPoint) GetClicks() *wrappers.Int64Value {
if m != nil {
return m.Clicks
}
return nil
}
func (m *TargetCpaSimulationPoint) GetCostMicros() *wrappers.Int64Value {
if m != nil {
return m.CostMicros
}
return nil
}
func (m *TargetCpaSimulationPoint) GetImpressions() *wrappers.Int64Value {
if m != nil {
return m.Impressions
}
return nil
}
func (m *TargetCpaSimulationPoint) GetTopSlotImpressions() *wrappers.Int64Value {
if m != nil {
return m.TopSlotImpressions
}
return nil
}
func init() {
proto.RegisterType((*BidModifierSimulationPointList)(nil), "google.ads.googleads.v3.common.BidModifierSimulationPointList")
proto.RegisterType((*CpcBidSimulationPointList)(nil), "google.ads.googleads.v3.common.CpcBidSimulationPointList")
proto.RegisterType((*CpvBidSimulationPointList)(nil), "google.ads.googleads.v3.common.CpvBidSimulationPointList")
proto.RegisterType((*TargetCpaSimulationPointList)(nil), "google.ads.googleads.v3.common.TargetCpaSimulationPointList")
proto.RegisterType((*BidModifierSimulationPoint)(nil), "google.ads.googleads.v3.common.BidModifierSimulationPoint")
proto.RegisterType((*CpcBidSimulationPoint)(nil), "google.ads.googleads.v3.common.CpcBidSimulationPoint")
proto.RegisterType((*CpvBidSimulationPoint)(nil), "google.ads.googleads.v3.common.CpvBidSimulationPoint")
proto.RegisterType((*TargetCpaSimulationPoint)(nil), "google.ads.googleads.v3.common.TargetCpaSimulationPoint")
}
func init() {
proto.RegisterFile("google/ads/googleads/v3/common/simulation.proto", fileDescriptor_5ab119bcaf279127)
}
var fileDescriptor_5ab119bcaf279127 = []byte{
// 741 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x97, 0xcd, 0x6e, 0xd3, 0x4e,
0x14, 0xc5, 0x95, 0xa4, 0xcd, 0xbf, 0xff, 0x49, 0x3f, 0xa8, 0x29, 0x92, 0x9b, 0x96, 0xa8, 0x64,
0xd5, 0x95, 0x2d, 0x6a, 0x40, 0xc8, 0x80, 0x20, 0x49, 0xa5, 0xaa, 0x88, 0x88, 0x2a, 0xad, 0xb2,
0xa8, 0x22, 0x2c, 0x7f, 0x4c, 0xad, 0x01, 0xdb, 0x33, 0x78, 0x26, 0xee, 0x2b, 0x20, 0x76, 0xbc,
0x02, 0x4b, 0x1e, 0x85, 0x17, 0x41, 0xea, 0x53, 0x20, 0x7b, 0xc6, 0x8e, 0x69, 0xed, 0x3a, 0x15,
0x6c, 0x2a, 0x75, 0x95, 0x49, 0xe6, 0x9e, 0xdf, 0x71, 0xee, 0xb9, 0x9e, 0xc4, 0x40, 0x75, 0x31,
0x76, 0x3d, 0xa8, 0x9a, 0x0e, 0x15, 0xcb, 0x78, 0x15, 0x69, 0xaa, 0x8d, 0x7d, 0x1f, 0x07, 0x2a,
0x45, 0xfe, 0xd4, 0x33, 0x19, 0xc2, 0x81, 0x42, 0x42, 0xcc, 0xb0, 0xd4, 0xe1, 0x55, 0x8a, 0xe9,
0x50, 0x25, 0x13, 0x28, 0x91, 0xa6, 0x70, 0x41, 0x5b, 0xec, 0xab, 0x49, 0xb5, 0x35, 0x3d, 0x53,
0xcf, 0x43, 0x93, 0x10, 0x18, 0x52, 0xae, 0x6f, 0x6f, 0xa7, 0x86, 0x04, 0xa9, 0x66, 0x10, 0x60,
0x96, 0xc0, 0xc5, 0x6e, 0x97, 0x81, 0x4e, 0x1f, 0x39, 0x43, 0xec, 0xa0, 0x33, 0x04, 0xc3, 0xe3,
0xcc, 0xfc, 0x08, 0xa3, 0x80, 0xbd, 0x43, 0x94, 0x49, 0x23, 0xd0, 0x24, 0xf1, 0x1b, 0x2a, 0xd7,
0x76, 0x1a, 0xbb, 0xad, 0x3d, 0x5d, 0xb9, 0xfe, 0x82, 0x94, 0x72, 0xde, 0x48, 0x90, 0xba, 0x1f,
0xc1, 0xe6, 0x80, 0xd8, 0x7d, 0xe4, 0x14, 0x19, 0x0e, 0x2f, 0x19, 0x3e, 0xad, 0x32, 0x2c, 0x44,
0xfd, 0xe9, 0x15, 0xfd, 0x3b, 0xaf, 0xe8, 0x1a, 0x2f, 0x02, 0xb6, 0x4f, 0xcc, 0xd0, 0x85, 0x6c,
0x40, 0xcc, 0x22, 0xbb, 0xa3, 0x4b, 0x76, 0xcf, 0xab, 0xec, 0xca, 0x68, 0x99, 0xe3, 0xaf, 0x25,
0xd0, 0x2e, 0x6f, 0xb8, 0xf4, 0x1a, 0x2c, 0x5b, 0xc8, 0x31, 0x7c, 0xb1, 0x2d, 0xd7, 0x76, 0x6a,
0xbb, 0xad, 0xbd, 0xed, 0xd4, 0x36, 0x9d, 0x19, 0x65, 0x1f, 0x4f, 0x2d, 0x0f, 0x8e, 0x4d, 0x6f,
0x0a, 0x47, 0x2d, 0x6b, 0xc6, 0x93, 0xde, 0x83, 0x0d, 0x0b, 0x39, 0x8e, 0x69, 0x79, 0xd0, 0xb0,
0x71, 0x10, 0xc1, 0x90, 0xc6, 0xd3, 0x23, 0xd7, 0xe7, 0x00, 0xdd, 0x4f, 0x95, 0x83, 0x99, 0x50,
0x3a, 0x05, 0xed, 0x22, 0xa0, 0x11, 0xc5, 0x12, 0xb9, 0x31, 0x07, 0x56, 0x2e, 0xc0, 0x26, 0x3b,
0x92, 0x06, 0x9a, 0xb6, 0x87, 0xec, 0x4f, 0x54, 0x5e, 0x48, 0x38, 0x5b, 0x57, 0x38, 0x87, 0x01,
0x7b, 0xf6, 0x84, 0x63, 0x44, 0xa9, 0xf4, 0x12, 0xb4, 0x6c, 0x4c, 0x99, 0xe1, 0x23, 0x3b, 0xc4,
0x54, 0x5e, 0xac, 0x56, 0x82, 0xb8, 0x7e, 0x98, 0x94, 0x4b, 0xaf, 0x40, 0x0b, 0xf9, 0x24, 0x84,
0x94, 0xb7, 0xa5, 0x59, 0xad, 0xce, 0xd7, 0x4b, 0x43, 0xb0, 0xc1, 0x30, 0x31, 0xa8, 0x87, 0x99,
0x91, 0xe7, 0xfc, 0x57, 0xcd, 0x91, 0x18, 0x26, 0xc7, 0x1e, 0x66, 0x87, 0x39, 0xdc, 0x04, 0x6c,
0x11, 0x33, 0x84, 0x01, 0x33, 0x0a, 0x43, 0x5b, 0x9a, 0xa3, 0xbb, 0x9b, 0x1c, 0xd0, 0x2f, 0x88,
0xce, 0x05, 0x8f, 0xae, 0xa1, 0x8b, 0x04, 0xff, 0x9f, 0xc3, 0xa3, 0x53, 0xea, 0xc1, 0x73, 0x7c,
0x03, 0x56, 0x84, 0x91, 0x88, 0x13, 0x54, 0xb7, 0x63, 0x99, 0x2b, 0x06, 0x3c, 0xd4, 0x43, 0x20,
0xa5, 0x84, 0x5c, 0xb6, 0xad, 0x6a, 0xcc, 0x3d, 0x81, 0x99, 0x25, 0xfc, 0x36, 0x43, 0xe5, 0x03,
0x5a, 0xae, 0x46, 0xad, 0x73, 0x59, 0x3e, 0x9f, 0xd3, 0x2c, 0x9f, 0xc2, 0xd4, 0x57, 0xaa, 0xa1,
0x32, 0xd7, 0x9f, 0x5c, 0xcd, 0xfe, 0x03, 0x78, 0x28, 0xd8, 0x21, 0xfc, 0x3c, 0x45, 0x21, 0x74,
0x0c, 0x6b, 0xea, 0xb8, 0x30, 0xfb, 0xf6, 0xab, 0xd5, 0xf4, 0x36, 0x27, 0x8c, 0x04, 0xa0, 0x9f,
0xe8, 0x79, 0x1f, 0xba, 0x5f, 0x16, 0xc0, 0x83, 0xc2, 0x93, 0x56, 0xea, 0x81, 0x55, 0x9b, 0xd8,
0x46, 0x72, 0xd0, 0x70, 0xab, 0xda, 0x1c, 0x79, 0xd9, 0x09, 0x4b, 0x34, 0xf9, 0xee, 0x98, 0xb9,
0x45, 0xc7, 0x4c, 0xf7, 0x6b, 0x3d, 0x1e, 0x85, 0xa8, 0x6c, 0x14, 0xa2, 0x9b, 0x8f, 0x42, 0x34,
0x1b, 0x85, 0x4b, 0x8d, 0xaa, 0xff, 0x55, 0xa3, 0x1a, 0x37, 0x6c, 0xd4, 0x63, 0xb0, 0x18, 0x21,
0x78, 0x3e, 0x57, 0xb2, 0xbc, 0xb2, 0xfb, 0x6d, 0x01, 0xc8, 0x65, 0x3f, 0xd3, 0xd2, 0x01, 0x58,
0x67, 0xc9, 0x9e, 0x61, 0x13, 0xf3, 0x06, 0x2d, 0x59, 0x63, 0x29, 0xf1, 0xee, 0x06, 0xb9, 0x7d,
0x37, 0x48, 0xff, 0xa2, 0x06, 0xba, 0x36, 0xf6, 0x2b, 0xfe, 0xdd, 0xf5, 0xd7, 0x72, 0xe3, 0x12,
0x93, 0x8f, 0x6a, 0xa7, 0xfb, 0x42, 0xe2, 0x62, 0xcf, 0x0c, 0x5c, 0x05, 0x87, 0xae, 0xea, 0xc2,
0x20, 0xf1, 0x4d, 0x1f, 0x17, 0x08, 0xa2, 0x65, 0x4f, 0x0f, 0x2f, 0xf8, 0xcb, 0xf7, 0x7a, 0xe3,
0xa0, 0xd7, 0xfb, 0x51, 0xef, 0x1c, 0x70, 0x58, 0xcf, 0xa1, 0x0a, 0x5f, 0xc6, 0xab, 0xb1, 0xa6,
0x0c, 0x92, 0xb2, 0x9f, 0x69, 0xc1, 0xa4, 0xe7, 0xd0, 0x49, 0x56, 0x30, 0x19, 0x6b, 0x13, 0x5e,
0x70, 0x51, 0xef, 0xf2, 0x4f, 0x75, 0xbd, 0xe7, 0x50, 0x5d, 0xcf, 0x4a, 0x74, 0x7d, 0xac, 0xe9,
0x3a, 0x2f, 0xb2, 0x9a, 0xc9, 0xd5, 0x69, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x53, 0x82, 0xf1,
0xae, 0xda, 0x0c, 0x00, 0x00,
}
| {
return m.BiddableConversionsValue
} |
lib.rs | mod utils;
extern crate rustoku;
use wasm_bindgen::prelude::*;
use std::error::Error;
use std::convert::TryInto;
use rustoku::solve::move_change::{ChangeType, IndexValuePair};
use std::fmt::{Display, Formatter};
extern crate js_sys;
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
/*
The Rustoku library does not implement wasm_bindgen itself, so we create wrappers for the needed
data structures
*/
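// Illustrative note, not from the source: because the wrappers below only own a
// rustoku value and forward calls, a hypothetical JS caller would use them as
// `const s = Sudoku.new(puzzleString); s.values();`, where `puzzleString` must be
// whatever textual format `rustoku::Sudoku::new` accepts.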
#[wasm_bindgen]
pub struct Sudoku {
puz: rustoku::Sudoku,
solver: rustoku::HumanSolve,
}
#[wasm_bindgen]
pub struct Square(rustoku::Square);
#[wasm_bindgen]
pub struct Move(rustoku::Move);
#[wasm_bindgen]
impl Sudoku {
pub fn new(input_str: &str) -> Sudoku {
let puz = rustoku::Sudoku::new(input_str).unwrap();
let solver = rustoku::HumanSolve::new();
Sudoku {
puz,
solver
}
}
pub fn values(&self) -> Vec<u8> {
self.puz.iter().map(|sq| sq.num()).collect()
}
pub fn square(&self, ind: usize) -> Square {
Square(self.puz[ind])
}
pub fn value(&self, ind: u32) -> u8 |
pub fn poss(&self, ind: u32) -> Vec<u8>{
self.puz[ind as usize].possibilities()
}
pub fn board_size() -> u32 {
rustoku::Sudoku::board_size() as u32
}
pub fn hint(&self) -> Move {
Move(self.solver.next(&self.puz).unwrap())
}
pub fn is_fixed(&self, ind: u32) -> bool {
self.puz[ind as usize].is_fixed()
}
}
#[wasm_bindgen]
impl Square {
pub fn value(&self) -> u8 {
self.0.num()
}
pub fn possibilities (&self) -> Vec<u8> {
self.0.possibilities()
}
}
#[wasm_bindgen]
impl Move {
pub fn update_board(&self) {
let window = web_sys::window().expect("no global `window` exists");
let document = window.document().expect("should have a document on window");
for pair in self.0.involved_vec() {
Self::update_candidates(&document, pair, "candidate--highlight")
}
for pair in self.0.changes_vec() {
// Does not matter right now if the change type removes potentials or sets a value
let v = match pair {
ChangeType::RemovedPot(v) => v,
ChangeType::SetValue(v) => v,
};
Self::update_candidates(&document, v, "candidate--to-remove");
}
let message = document.get_element_by_id("message-p").unwrap();
message.set_inner_html(&format!("{}", self.0.method()));
}
// This function interacts with the JS window document directly, making it easier to deal
// with the more complex data type over the wasm boundary.
fn update_candidates(document: &web_sys::Document, pair: &IndexValuePair, class_to_add: &str) {
let ind = pair.index();
let values = pair.value_vec();
        let _sq = document.get_element_by_id(&format!("sq{}", ind)).unwrap();
        let sqp = document.get_element_by_id(&format!("sq{}-cand", ind)).unwrap();
web_sys::console::log_1(&JsValue::from_str(&format!("{:?}", values)));
for value in values {
sqp.children().get_with_index(value as u32 - 1).unwrap().class_list().add_1(class_to_add);
}
}
pub fn apply(&self, puz: &mut Sudoku) {
let amove = self.0.clone();
amove.apply(&mut puz.puz);
}
}
| {
self.puz[ind as usize].num()
} |
vector_tile.pb.go | // Code generated by protoc-gen-go.
// source: vector_tile.proto
// DO NOT EDIT!
/*
Package vectorTile is a generated protocol buffer package.
It is generated from these files:
vector_tile.proto
It has these top-level messages:
Tile
*/
package vectorTile
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// GeomType is described in section 4.3.4 of the specification
type Tile_GeomType int32
const (
Tile_UNKNOWN Tile_GeomType = 0
Tile_POINT Tile_GeomType = 1
Tile_LINESTRING Tile_GeomType = 2
Tile_POLYGON Tile_GeomType = 3
)
var Tile_GeomType_name = map[int32]string{
0: "UNKNOWN",
1: "POINT",
2: "LINESTRING",
3: "POLYGON",
}
var Tile_GeomType_value = map[string]int32{
"UNKNOWN": 0,
"POINT": 1,
"LINESTRING": 2,
"POLYGON": 3,
}
func (x Tile_GeomType) Enum() *Tile_GeomType {
p := new(Tile_GeomType)
*p = x
return p
}
func (x Tile_GeomType) String() string {
return proto.EnumName(Tile_GeomType_name, int32(x))
}
func (x *Tile_GeomType) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Tile_GeomType_value, data, "Tile_GeomType")
if err != nil {
return err
}
*x = Tile_GeomType(value)
return nil
}
func (Tile_GeomType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} }
type Tile struct {
Layers []*Tile_Layer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"`
proto.XXX_InternalExtensions `json:"-"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Tile) Reset() { *m = Tile{} }
func (m *Tile) String() string { return proto.CompactTextString(m) }
func (*Tile) ProtoMessage() {}
func (*Tile) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
var extRange_Tile = []proto.ExtensionRange{
{16, 8191},
}
func (*Tile) ExtensionRangeArray() []proto.ExtensionRange {
return extRange_Tile
}
func (m *Tile) GetLayers() []*Tile_Layer {
if m != nil {
return m.Layers
}
return nil
}
// Variant type encoding
// The use of values is described in section 4.1 of the specification
type Tile_Value struct {
// Exactly one of these values must be present in a valid message
StringValue *string `protobuf:"bytes,1,opt,name=string_value" json:"string_value,omitempty"`
FloatValue *float32 `protobuf:"fixed32,2,opt,name=float_value" json:"float_value,omitempty"`
DoubleValue *float64 `protobuf:"fixed64,3,opt,name=double_value" json:"double_value,omitempty"`
IntValue *int64 `protobuf:"varint,4,opt,name=int_value" json:"int_value,omitempty"`
UintValue *uint64 `protobuf:"varint,5,opt,name=uint_value" json:"uint_value,omitempty"`
SintValue *int64 `protobuf:"zigzag64,6,opt,name=sint_value" json:"sint_value,omitempty"`
BoolValue *bool `protobuf:"varint,7,opt,name=bool_value" json:"bool_value,omitempty"`
proto.XXX_InternalExtensions `json:"-"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Tile_Value) Reset() { *m = Tile_Value{} }
func (m *Tile_Value) String() string { return proto.CompactTextString(m) }
func (*Tile_Value) ProtoMessage() {}
func (*Tile_Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} }
var extRange_Tile_Value = []proto.ExtensionRange{
{8, 536870911},
}
func (*Tile_Value) ExtensionRangeArray() []proto.ExtensionRange {
return extRange_Tile_Value
}
func (m *Tile_Value) GetStringValue() string {
if m != nil && m.StringValue != nil |
return ""
}
func (m *Tile_Value) GetFloatValue() float32 {
if m != nil && m.FloatValue != nil {
return *m.FloatValue
}
return 0
}
func (m *Tile_Value) GetDoubleValue() float64 {
if m != nil && m.DoubleValue != nil {
return *m.DoubleValue
}
return 0
}
func (m *Tile_Value) GetIntValue() int64 {
if m != nil && m.IntValue != nil {
return *m.IntValue
}
return 0
}
func (m *Tile_Value) GetUintValue() uint64 {
if m != nil && m.UintValue != nil {
return *m.UintValue
}
return 0
}
func (m *Tile_Value) GetSintValue() int64 {
if m != nil && m.SintValue != nil {
return *m.SintValue
}
return 0
}
func (m *Tile_Value) GetBoolValue() bool {
if m != nil && m.BoolValue != nil {
return *m.BoolValue
}
return false
}
// Features are described in section 4.2 of the specification
type Tile_Feature struct {
Id *uint64 `protobuf:"varint,1,opt,name=id,def=0" json:"id,omitempty"`
// Tags of this feature are encoded as repeated pairs of
// integers.
// A detailed description of tags is located in sections
// 4.2 and 4.4 of the specification
Tags []uint32 `protobuf:"varint,2,rep,packed,name=tags" json:"tags,omitempty"`
// The type of geometry stored in this feature.
Type *Tile_GeomType `protobuf:"varint,3,opt,name=type,enum=vector_tile.Tile_GeomType,def=0" json:"type,omitempty"`
// Contains a stream of commands and parameters (vertices).
// A detailed description on geometry encoding is located in
// section 4.3 of the specification.
Geometry []uint32 `protobuf:"varint,4,rep,packed,name=geometry" json:"geometry,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
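// Illustrative note, not part of the generated code: as described in sections 4.2
// and 4.4 of the specification, Tags holds alternating indexes into the parent
// layer's Keys and Values dictionaries. For a hypothetical feature with
// Tags = []uint32{0, 2, 1, 0}, the attributes are Keys[0] -> Values[2] and
// Keys[1] -> Values[0].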
func (m *Tile_Feature) Reset() { *m = Tile_Feature{} }
func (m *Tile_Feature) String() string { return proto.CompactTextString(m) }
func (*Tile_Feature) ProtoMessage() {}
func (*Tile_Feature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 1} }
const Default_Tile_Feature_Id uint64 = 0
const Default_Tile_Feature_Type Tile_GeomType = Tile_UNKNOWN
func (m *Tile_Feature) GetId() uint64 {
if m != nil && m.Id != nil {
return *m.Id
}
return Default_Tile_Feature_Id
}
func (m *Tile_Feature) GetTags() []uint32 {
if m != nil {
return m.Tags
}
return nil
}
func (m *Tile_Feature) GetType() Tile_GeomType {
if m != nil && m.Type != nil {
return *m.Type
}
return Default_Tile_Feature_Type
}
func (m *Tile_Feature) GetGeometry() []uint32 {
if m != nil {
return m.Geometry
}
return nil
}
// Layers are described in section 4.1 of the specification
type Tile_Layer struct {
// Any compliant implementation must first read the version
// number encoded in this message and choose the correct
// implementation for this version number before proceeding to
// decode other parts of this message.
Version *uint32 `protobuf:"varint,15,req,name=version,def=1" json:"version,omitempty"`
Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
// The actual features in this tile.
Features []*Tile_Feature `protobuf:"bytes,2,rep,name=features" json:"features,omitempty"`
// Dictionary encoding for keys
Keys []string `protobuf:"bytes,3,rep,name=keys" json:"keys,omitempty"`
// Dictionary encoding for values
Values []*Tile_Value `protobuf:"bytes,4,rep,name=values" json:"values,omitempty"`
// Although this is an "optional" field it is required by the specification.
// See https://github.com/mapbox/vector-tile-spec/issues/47
Extent *uint32 `protobuf:"varint,5,opt,name=extent,def=4096" json:"extent,omitempty"`
proto.XXX_InternalExtensions `json:"-"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Tile_Layer) Reset() { *m = Tile_Layer{} }
func (m *Tile_Layer) String() string { return proto.CompactTextString(m) }
func (*Tile_Layer) ProtoMessage() {}
func (*Tile_Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 2} }
var extRange_Tile_Layer = []proto.ExtensionRange{
{16, 536870911},
}
func (*Tile_Layer) ExtensionRangeArray() []proto.ExtensionRange {
return extRange_Tile_Layer
}
const Default_Tile_Layer_Version uint32 = 1
const Default_Tile_Layer_Extent uint32 = 4096
func (m *Tile_Layer) GetVersion() uint32 {
if m != nil && m.Version != nil {
return *m.Version
}
return Default_Tile_Layer_Version
}
func (m *Tile_Layer) GetName() string {
if m != nil && m.Name != nil {
return *m.Name
}
return ""
}
func (m *Tile_Layer) GetFeatures() []*Tile_Feature {
if m != nil {
return m.Features
}
return nil
}
func (m *Tile_Layer) GetKeys() []string {
if m != nil {
return m.Keys
}
return nil
}
func (m *Tile_Layer) GetValues() []*Tile_Value {
if m != nil {
return m.Values
}
return nil
}
func (m *Tile_Layer) GetExtent() uint32 {
if m != nil && m.Extent != nil {
return *m.Extent
}
return Default_Tile_Layer_Extent
}
func init() {
proto.RegisterType((*Tile)(nil), "vector_tile.Tile")
proto.RegisterType((*Tile_Value)(nil), "vector_tile.Tile.Value")
proto.RegisterType((*Tile_Feature)(nil), "vector_tile.Tile.Feature")
proto.RegisterType((*Tile_Layer)(nil), "vector_tile.Tile.Layer")
proto.RegisterEnum("vector_tile.Tile_GeomType", Tile_GeomType_name, Tile_GeomType_value)
}
var fileDescriptor0 = []byte{
// 420 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x6c, 0x91, 0xdf, 0x8e, 0x93, 0x40,
0x14, 0xc6, 0x65, 0x18, 0x0a, 0x1c, 0x96, 0xca, 0x8e, 0x26, 0x56, 0xae, 0x9a, 0xbd, 0x91, 0xac,
0x49, 0xb3, 0xdb, 0x18, 0x13, 0xb9, 0xd1, 0x6c, 0xa2, 0xb5, 0xb1, 0x81, 0x8d, 0xa2, 0x46, 0x6f,
0x36, 0xac, 0x3b, 0xdb, 0x10, 0x59, 0xa6, 0x81, 0xa1, 0x91, 0x3b, 0x5f, 0xc5, 0x07, 0xf0, 0x1d,
0x7c, 0x34, 0xcf, 0xf0, 0x27, 0x6d, 0xd2, 0xe5, 0x0a, 0x7e, 0x7c, 0x33, 0xe7, 0xfb, 0xbe, 0x03,
0xc7, 0x5b, 0xfe, 0x43, 0x8a, 0xf2, 0x4a, 0x66, 0x39, 0x9f, 0x6d, 0x4a, 0x21, 0x05, 0x73, 0xf6,
0xd0, 0xc9, 0x3f, 0x0a, 0x34, 0xc1, 0x17, 0xf6, 0x0c, 0x46, 0x79, 0xda, 0xf0, 0xb2, 0x9a, 0xe8,
0x53, 0x3d, 0x70, 0xe6, 0x4f, 0x66, 0xfb, 0x27, 0x95, 0x64, 0xb6, 0x52, 0xff, 0xfd, 0x3f, 0x1a,
0x18, 0x5f, 0xd2, 0xbc, 0xe6, 0xec, 0x31, 0x1c, 0x55, 0xb2, 0xcc, 0x8a, 0xf5, 0xd5, 0x56, 0x7d,
0x4f, 0xb4, 0xa9, 0x16, 0xd8, 0xec, 0x11, 0x38, 0xb7, 0xb9, 0x48, 0x65, 0x0f, 0x09, 0x42, 0xa2,
0xa4, 0x37, 0xa2, 0xbe, 0xce, 0x79, 0x4f, 0x75, 0xa4, 0x1a, 0x3b, 0x06, 0x3b, 0x2b, 0x06, 0x21,
0x45, 0xa4, 0x33, 0x06, 0x50, 0xef, 0x98, 0x81, 0x8c, 0x2a, 0x56, 0xed, 0xd8, 0x08, 0x19, 0x53,
0xec, 0x5a, 0x88, 0xbc, 0x67, 0x26, 0x32, 0xeb, 0xd4, 0xb2, 0x2c, 0xef, 0x37, 0x3e, 0xc4, 0xaf,
0xc0, 0x7c, 0xc7, 0x53, 0x59, 0x97, 0x9c, 0xb9, 0x40, 0xb2, 0x9b, 0xd6, 0x1a, 0x0d, 0xb5, 0x33,
0xe6, 0x01, 0x95, 0xe9, 0xba, 0x42, 0x5b, 0x7a, 0xe0, 0x5e, 0x10, 0x4f, 0x63, 0xe7, 0x48, 0x9a,
0x4d, 0x67, 0x69, 0x3c, 0xf7, 0x0f, 0x63, 0x2f, 0xb8, 0xb8, 0x4b, 0x50, 0x11, 0x9a, 0x9f, 0xa3,
0x0f, 0x51, 0xfc, 0x35, 0xc2, 0x34, 0xd6, 0x1a, 0x21, 0x97, 0x65, 0x83, 0xb6, 0xfb, 0x8b, 0xfc,
0xbf, 0x58, 0x4c, 0x5b, 0x11, 0x9a, 0x33, 0xb7, 0xd8, 0x64, 0x26, 0x8a, 0xc9, 0xc3, 0x29, 0x09,
0xdc, 0x50, 0x3b, 0x67, 0x47, 0x40, 0x8b, 0xf4, 0x4e, 0x95, 0x44, 0xb0, 0xa4, 0xe7, 0x60, 0xdd,
0x76, 0x06, 0x3b, 0x2b, 0xce, 0xfc, 0xe9, 0xe1, 0xe0, 0x21, 0x02, 0x1e, 0xfd, 0xc9, 0x9b, 0x6e,
0x31, 0xb6, 0x5a, 0x54, 0x1b, 0xba, 0x6a, 0x47, 0xdf, 0xbb, 0xa8, 0x61, 0x3d, 0x23, 0xfe, 0x4b,
0xf2, 0x42, 0xb6, 0x35, 0xba, 0x21, 0x7d, 0x71, 0xf6, 0xea, 0x25, 0x96, 0xe4, 0x75, 0x25, 0x9d,
0xbc, 0x06, 0x6b, 0x88, 0xc6, 0x1c, 0x18, 0xc2, 0x79, 0x0f, 0x98, 0x0d, 0xc6, 0x65, 0xbc, 0x8c,
0x12, 0x2c, 0x67, 0x0c, 0xb0, 0x5a, 0x46, 0x6f, 0x3f, 0x25, 0x1f, 0x97, 0xd1, 0xc2, 0x23, 0x4a,
0x77, 0x19, 0xaf, 0xbe, 0x2d, 0xe2, 0xc8, 0xd3, 0x4f, 0x0d, 0x75, 0xd5, 0x9b, 0x8b, 0xf1, 0x7b,
0xfd, 0x3b, 0x74, 0x1e, 0xd4, 0xf4, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x3b, 0xb8, 0x13, 0x0c,
0x73, 0x02, 0x00, 0x00,
}
| {
return *m.StringValue
} |
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod billing_accounts {
use crate::models::*;
pub async fn list(
operation_config: &crate::OperationConfig,
expand: Option<&str>,
) -> std::result::Result<BillingAccountListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Billing/billingAccounts", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingAccountListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
expand: Option<&str>,
) -> std::result::Result<BillingAccount, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingAccount =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
parameters: &BillingAccountUpdateRequest,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingAccount =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(BillingAccount),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_invoice_sections_by_create_subscription_permission(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<InvoiceSectionListWithCreateSubPermissionResult, list_invoice_sections_by_create_subscription_permission::Error>
{
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/listInvoiceSectionsWithCreateSubscriptionPermission",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_invoice_sections_by_create_subscription_permission::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_invoice_sections_by_create_subscription_permission::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_invoice_sections_by_create_subscription_permission::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_invoice_sections_by_create_subscription_permission::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceSectionListWithCreateSubPermissionResult = serde_json::from_slice(rsp_body).map_err(|source| {
list_invoice_sections_by_create_subscription_permission::Error::DeserializeError(source, rsp_body.clone())
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| {
list_invoice_sections_by_create_subscription_permission::Error::DeserializeError(source, rsp_body.clone())
})?;
Err(list_invoice_sections_by_create_subscription_permission::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_invoice_sections_by_create_subscription_permission {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod address {
use crate::models::*;
pub async fn validate(
operation_config: &crate::OperationConfig,
address: &AddressDetails,
) -> std::result::Result<ValidateAddressResponse, validate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Billing/validateAddress", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(validate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(validate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(address).map_err(validate::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(validate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(validate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ValidateAddressResponse =
serde_json::from_slice(rsp_body).map_err(|source| validate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| validate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(validate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod validate {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod available_balances {
use crate::models::*;
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<AvailableBalance, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/availableBalance/default",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AvailableBalance =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod instructions {
use crate::models::*;
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<InstructionListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/instructions",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InstructionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
instruction_name: &str,
) -> std::result::Result<Instruction, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/instructions/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
instruction_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Instruction =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn put(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
instruction_name: &str,
parameters: &Instruction,
) -> std::result::Result<Instruction, put::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/instructions/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
instruction_name
);
let mut url = url::Url::parse(url_str).map_err(put::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(put::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(put::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(put::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(put::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Instruction =
serde_json::from_slice(rsp_body).map_err(|source| put::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| put::Error::DeserializeError(source, rsp_body.clone()))?;
Err(put::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod put {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod billing_profiles {
use crate::models::*;
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
expand: Option<&str>,
) -> std::result::Result<BillingProfileListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingProfileListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
expand: Option<&str>,
) -> std::result::Result<BillingProfile, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingProfile =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
parameters: &BillingProfile,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingProfile = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(BillingProfile),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod customers {
use crate::models::*;
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
search: Option<&str>,
filter: Option<&str>,
) -> std::result::Result<CustomerListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/customers",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(search) = search {
url.query_pairs_mut().append_pair("$search", search);
}
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: CustomerListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
search: Option<&str>,
filter: Option<&str>,
) -> std::result::Result<CustomerListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(search) = search {
url.query_pairs_mut().append_pair("$search", search);
}
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: CustomerListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
expand: Option<&str>,
) -> std::result::Result<Customer, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}",
operation_config.base_path(),
billing_account_name,
customer_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Customer =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod invoice_sections {
use crate::models::*;
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<InvoiceSectionListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceSectionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<InvoiceSection, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceSection =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
parameters: &InvoiceSection,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceSection = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(InvoiceSection),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod billing_permissions {
use crate::models::*;
pub async fn list_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_customer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/billingPermissions",
operation_config.base_path(),
billing_account_name,
customer_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_customer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_customer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_customer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_customer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingPermissionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_customer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_customer {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingPermissions",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingPermissionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
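/// Lists the billing permissions for an invoice section.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/invoiceSections/{invoice_section_name}/billingPermissions`
/// and deserializes a 200 response into `BillingPermissionsListResult`.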
pub async fn list_by_invoice_sections(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_invoice_sections::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingPermissions",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_invoice_sections::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_invoice_sections::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_invoice_sections::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_invoice_sections::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingPermissionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_sections::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_sections::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_invoice_sections::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_invoice_sections {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
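/// Lists the billing permissions for a billing profile.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/billingPermissions`
/// and deserializes a 200 response into `BillingPermissionsListResult`.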
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingPermissions",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingPermissionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
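/// Operations on the billing subscriptions under a billing account: listing them by customer,
/// billing account, billing profile, or invoice section, plus get, update, move, and
/// move-eligibility validation for an individual subscription.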
pub mod billing_subscriptions {
use crate::models::*;
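/// Lists the billing subscriptions for a customer.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/customers/{customer_name}/billingSubscriptions`
/// and deserializes a 200 response into `BillingSubscriptionsListResult`.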
pub async fn list_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_customer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/billingSubscriptions",
operation_config.base_path(),
billing_account_name,
customer_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_customer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_customer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_customer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_customer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscriptionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_customer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_customer {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
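/// Lists the billing subscriptions for a billing account.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingSubscriptions`
/// and deserializes a 200 response into `BillingSubscriptionsListResult`.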
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscriptionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
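/// Lists the billing subscriptions for a billing profile.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/billingSubscriptions`
/// and deserializes a 200 response into `BillingSubscriptionsListResult`.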
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingSubscriptions",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscriptionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
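/// Lists the billing subscriptions for an invoice section.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/invoiceSections/{invoice_section_name}/billingSubscriptions`
/// and deserializes a 200 response into `BillingSubscriptionsListResult`.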
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingSubscriptions",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscriptionsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
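/// Gets a billing subscription by its ID.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingSubscriptions/{subscription_id}`
/// and deserializes a 200 response into `BillingSubscription`.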
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
) -> std::result::Result<BillingSubscription, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}",
operation_config.base_path(),
billing_account_name,
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscription =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
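/// Updates the properties of a billing subscription.
///
/// Serializes `parameters` as the body of a `PATCH {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingSubscriptions/{subscription_id}`
/// request and deserializes a 200 response into the updated `BillingSubscription`.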
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
parameters: &BillingSubscription,
) -> std::result::Result<BillingSubscription, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}",
operation_config.base_path(),
billing_account_name,
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscription =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
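/// Moves a billing subscription according to `parameters`.
///
/// Sends `POST {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingSubscriptions/{subscription_id}/move`.
/// A 200 response carries the moved `BillingSubscription` as `move_::Response::Ok200`; a 202 response
/// is reported as `move_::Response::Accepted202` without a body.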
pub async fn move_(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
parameters: &TransferBillingSubscriptionRequestProperties,
) -> std::result::Result<move_::Response, move_::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}/move",
operation_config.base_path(),
billing_account_name,
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(move_::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(move_::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(move_::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(move_::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(move_::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingSubscription =
serde_json::from_slice(rsp_body).map_err(|source| move_::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(move_::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(move_::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| move_::Error::DeserializeError(source, rsp_body.clone()))?;
Err(move_::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod move_ {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(BillingSubscription),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
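/// Validates whether the subscription move described by `parameters` is eligible.
///
/// Sends `POST {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingSubscriptions/{subscription_id}/validateMoveEligibility`
/// and deserializes a 200 response into `ValidateSubscriptionTransferEligibilityResult`.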
pub async fn validate_move(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
parameters: &TransferBillingSubscriptionRequestProperties,
) -> std::result::Result<ValidateSubscriptionTransferEligibilityResult, validate_move::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}/validateMoveEligibility",
operation_config.base_path(),
billing_account_name,
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(validate_move::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(validate_move::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(validate_move::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(validate_move::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(validate_move::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ValidateSubscriptionTransferEligibilityResult =
serde_json::from_slice(rsp_body).map_err(|source| validate_move::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| validate_move::Error::DeserializeError(source, rsp_body.clone()))?;
Err(validate_move::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod validate_move {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
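/// Operations on the products under a billing account: listing them by customer, billing account,
/// billing profile, or invoice section (optionally filtered), plus get, update, move, and
/// move-eligibility validation for an individual product.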
pub mod products {
use crate::models::*;
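/// Lists the products for a customer.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/customers/{customer_name}/products`
/// and deserializes a 200 response into `ProductsListResult`.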
pub async fn list_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<ProductsListResult, list_by_customer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/products",
operation_config.base_path(),
billing_account_name,
customer_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_customer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_customer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_customer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_customer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ProductsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_customer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_customer {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
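/// Lists the products for a billing account.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/products`,
/// appending a `$filter` query parameter when `filter` is `Some`, and deserializes a 200 response
/// into `ProductsListResult`.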
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
filter: Option<&str>,
) -> std::result::Result<ProductsListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ProductsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
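/// Lists the products for a billing profile.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/products`,
/// appending a `$filter` query parameter when `filter` is `Some`, and deserializes a 200 response
/// into `ProductsListResult`.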
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
filter: Option<&str>,
) -> std::result::Result<ProductsListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/products",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ProductsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
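/// Lists the products for an invoice section.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/invoiceSections/{invoice_section_name}/products`,
/// appending a `$filter` query parameter when `filter` is `Some`, and deserializes a 200 response
/// into `ProductsListResult`.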
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
filter: Option<&str>,
) -> std::result::Result<ProductsListResult, list_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/products",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ProductsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
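/// Gets a product by its name.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/products/{product_name}`
/// and deserializes a 200 response into `Product`.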
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
) -> std::result::Result<Product, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}",
operation_config.base_path(),
billing_account_name,
product_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Product =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
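/// Updates the properties of a product.
///
/// Serializes `parameters` as the body of a `PATCH {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/products/{product_name}`
/// request and deserializes a 200 response into the updated `Product`.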
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
parameters: &Product,
) -> std::result::Result<Product, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}",
operation_config.base_path(),
billing_account_name,
product_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Product =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
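/// Moves a product according to `parameters`.
///
/// Sends `POST {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/products/{product_name}/move`.
/// A 200 response carries the moved `Product` as `move_::Response::Ok200`; a 202 response is reported
/// as `move_::Response::Accepted202` without a body.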
pub async fn move_(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
parameters: &TransferProductRequestProperties,
) -> std::result::Result<move_::Response, move_::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}/move",
operation_config.base_path(),
billing_account_name,
product_name
);
let mut url = url::Url::parse(url_str).map_err(move_::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(move_::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(move_::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(move_::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(move_::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Product =
serde_json::from_slice(rsp_body).map_err(|source| move_::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(move_::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(move_::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| move_::Error::DeserializeError(source, rsp_body.clone()))?;
Err(move_::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod move_ {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(Product),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
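/// Validates whether the product move described by `parameters` is eligible.
///
/// Sends `POST {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/products/{product_name}/validateMoveEligibility`
/// and deserializes a 200 response into `ValidateProductTransferEligibilityResult`.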
pub async fn validate_move(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
parameters: &TransferProductRequestProperties,
) -> std::result::Result<ValidateProductTransferEligibilityResult, validate_move::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}/validateMoveEligibility",
operation_config.base_path(),
billing_account_name,
product_name
);
let mut url = url::Url::parse(url_str).map_err(validate_move::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(validate_move::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(validate_move::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(validate_move::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(validate_move::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ValidateProductTransferEligibilityResult =
serde_json::from_slice(rsp_body).map_err(|source| validate_move::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| validate_move::Error::DeserializeError(source, rsp_body.clone()))?;
Err(validate_move::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod validate_move {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
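/// Operations for listing invoices over a billing period, retrieving an individual invoice, and
/// requesting an invoice download.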
pub mod invoices {
use crate::models::*;
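/// Lists the invoices for a billing account over the given period.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/invoices`
/// with `periodStartDate` and `periodEndDate` query parameters and deserializes a 200 response into
/// `InvoiceListResult`.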
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
period_start_date: &str,
period_end_date: &str,
) -> std::result::Result<InvoiceListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
url.query_pairs_mut().append_pair("periodStartDate", period_start_date);
url.query_pairs_mut().append_pair("periodEndDate", period_end_date);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
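/// Lists the invoices for a billing profile over the given period.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/invoices`
/// with `periodStartDate` and `periodEndDate` query parameters and deserializes a 200 response into
/// `InvoiceListResult`.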
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
period_start_date: &str,
period_end_date: &str,
) -> std::result::Result<InvoiceListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoices",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
url.query_pairs_mut().append_pair("periodStartDate", period_start_date);
url.query_pairs_mut().append_pair("periodEndDate", period_end_date);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
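/// Gets an invoice by its name.
///
/// Sends `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/invoices/{invoice_name}`
/// and deserializes a 200 response into `Invoice`.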
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
invoice_name: &str,
) -> std::result::Result<Invoice, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices/{}",
operation_config.base_path(),
billing_account_name,
invoice_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Invoice =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
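    /// Gets an invoice by name under the default billing account scope.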
pub async fn get_by_id(
operation_config: &crate::OperationConfig,
invoice_name: &str,
) -> std::result::Result<Invoice, get_by_id::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/invoices/{}",
operation_config.base_path(),
invoice_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_id::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_id::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_by_id::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_id::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Invoice =
serde_json::from_slice(rsp_body).map_err(|source| get_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_id::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_id {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
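    /// Requests a download URL for an invoice, identified by billing account name, invoice
    /// name and a download token. HTTP 202 maps to `Accepted202` (the document is typically
    /// still being prepared) and HTTP 200 to `Ok200` with the download URL.
    ///
    /// Minimal usage sketch with illustrative placeholder arguments:
    ///
    /// ```ignore
    /// match download_invoice(&config, "my-billing-account", "my-invoice", "my-token").await? {
    ///     download_invoice::Response::Ok200(url) => { /* the download URL is ready */ }
    ///     download_invoice::Response::Accepted202 => { /* not ready yet; poll again later */ }
    /// }
    /// ```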
pub async fn download_invoice(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
invoice_name: &str,
download_token: &str,
) -> std::result::Result<download_invoice::Response, download_invoice::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices/{}/download",
operation_config.base_path(),
billing_account_name,
invoice_name
);
let mut url = url::Url::parse(url_str).map_err(download_invoice::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(download_invoice::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
url.query_pairs_mut().append_pair("downloadToken", download_token);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(download_invoice::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(download_invoice::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(download_invoice::Response::Accepted202),
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: DownloadUrl = serde_json::from_slice(rsp_body)
.map_err(|source| download_invoice::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(download_invoice::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| download_invoice::Error::DeserializeError(source, rsp_body.clone()))?;
Err(download_invoice::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod download_invoice {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(DownloadUrl),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
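    /// Requests a download URL for multiple invoice documents of a billing account; the
    /// request body is the list of individual document download URLs.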
pub async fn download_multiple_billing_profile_invoices(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
download_urls: &Vec<&str>,
) -> std::result::Result<download_multiple_billing_profile_invoices::Response, download_multiple_billing_profile_invoices::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/downloadDocuments",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(download_multiple_billing_profile_invoices::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(download_multiple_billing_profile_invoices::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(download_urls).map_err(download_multiple_billing_profile_invoices::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(download_multiple_billing_profile_invoices::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(download_multiple_billing_profile_invoices::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(download_multiple_billing_profile_invoices::Response::Accepted202),
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: DownloadUrl = serde_json::from_slice(rsp_body)
.map_err(|source| download_multiple_billing_profile_invoices::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(download_multiple_billing_profile_invoices::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| download_multiple_billing_profile_invoices::Error::DeserializeError(source, rsp_body.clone()))?;
Err(download_multiple_billing_profile_invoices::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod download_multiple_billing_profile_invoices {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(DownloadUrl),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
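    /// Lists the invoices of a billing subscription (under the default billing account)
    /// for the period bounded by `periodStartDate` and `periodEndDate`.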
pub async fn list_by_billing_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
period_start_date: &str,
period_end_date: &str,
) -> std::result::Result<InvoiceListResult, list_by_billing_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/invoices",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
url.query_pairs_mut().append_pair("periodStartDate", period_start_date);
url.query_pairs_mut().append_pair("periodEndDate", period_end_date);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: InvoiceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_subscription {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
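    /// Gets an invoice of a billing subscription by subscription ID and invoice name.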
pub async fn get_by_subscription_and_invoice_id(
operation_config: &crate::OperationConfig,
subscription_id: &str,
invoice_name: &str,
) -> std::result::Result<Invoice, get_by_subscription_and_invoice_id::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/invoices/{}",
operation_config.base_path(),
subscription_id,
invoice_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_subscription_and_invoice_id::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_subscription_and_invoice_id::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_subscription_and_invoice_id::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_subscription_and_invoice_id::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Invoice = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_subscription_and_invoice_id::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_subscription_and_invoice_id::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_subscription_and_invoice_id::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_subscription_and_invoice_id {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
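    /// Requests a download URL for a billing subscription invoice using a download token;
    /// HTTP 202 maps to `Accepted202` and HTTP 200 to `Ok200` with the download URL.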
pub async fn download_billing_subscription_invoice(
operation_config: &crate::OperationConfig,
subscription_id: &str,
invoice_name: &str,
download_token: &str,
) -> std::result::Result<download_billing_subscription_invoice::Response, download_billing_subscription_invoice::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/invoices/{}/download",
operation_config.base_path(),
subscription_id,
invoice_name
);
let mut url = url::Url::parse(url_str).map_err(download_billing_subscription_invoice::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(download_billing_subscription_invoice::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
url.query_pairs_mut().append_pair("downloadToken", download_token);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(download_billing_subscription_invoice::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(download_billing_subscription_invoice::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(download_billing_subscription_invoice::Response::Accepted202),
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: DownloadUrl = serde_json::from_slice(rsp_body)
.map_err(|source| download_billing_subscription_invoice::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(download_billing_subscription_invoice::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| download_billing_subscription_invoice::Error::DeserializeError(source, rsp_body.clone()))?;
Err(download_billing_subscription_invoice::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod download_billing_subscription_invoice {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(DownloadUrl),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
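    /// Requests a download URL for multiple invoice documents of a billing subscription;
    /// the request body is the list of individual document download URLs.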
pub async fn download_multiple_billing_subscription_invoices(
operation_config: &crate::OperationConfig,
subscription_id: &str,
download_urls: &Vec<&str>,
) -> std::result::Result<
download_multiple_billing_subscription_invoices::Response,
download_multiple_billing_subscription_invoices::Error,
> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/downloadDocuments",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(download_multiple_billing_subscription_invoices::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(download_multiple_billing_subscription_invoices::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body =
azure_core::to_json(download_urls).map_err(download_multiple_billing_subscription_invoices::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(download_multiple_billing_subscription_invoices::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(download_multiple_billing_subscription_invoices::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(download_multiple_billing_subscription_invoices::Response::Accepted202),
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: DownloadUrl = serde_json::from_slice(rsp_body)
.map_err(|source| download_multiple_billing_subscription_invoices::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(download_multiple_billing_subscription_invoices::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| download_multiple_billing_subscription_invoices::Error::DeserializeError(source, rsp_body.clone()))?;
Err(download_multiple_billing_subscription_invoices::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod download_multiple_billing_subscription_invoices {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(DownloadUrl),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod transactions {
use crate::models::*;
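    /// Lists the transactions of an invoice under a billing account.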
pub async fn list_by_invoice(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
invoice_name: &str,
) -> std::result::Result<TransactionListResult, list_by_invoice::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices/{}/transactions",
operation_config.base_path(),
billing_account_name,
invoice_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_invoice::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_invoice::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_invoice::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_invoice::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TransactionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_invoice::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_invoice {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod policies {
use crate::models::*;
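    /// Gets the default policy of a billing profile.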
pub async fn get_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<Policy, get_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/policies/default",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Policy = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
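    /// Updates the default policy of a billing profile via HTTP PUT.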
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
parameters: &Policy,
) -> std::result::Result<Policy, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/policies/default",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Policy =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
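    /// Gets the default policy of a customer under a billing account.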
pub async fn get_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<CustomerPolicy, get_by_customer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/policies/default",
operation_config.base_path(),
billing_account_name,
customer_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_customer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_customer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_by_customer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_customer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: CustomerPolicy = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_customer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_customer {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
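    /// Updates the default policy of a customer via HTTP PUT.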
pub async fn update_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
parameters: &CustomerPolicy,
) -> std::result::Result<CustomerPolicy, update_customer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/policies/default",
operation_config.base_path(),
billing_account_name,
customer_name
);
let mut url = url::Url::parse(url_str).map_err(update_customer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_customer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(update_customer::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_customer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_customer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: CustomerPolicy = serde_json::from_slice(rsp_body)
.map_err(|source| update_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| update_customer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_customer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_customer {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod billing_property {
use crate::models::*;
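    /// Gets the default billing property of a subscription.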
pub async fn get(operation_config: &crate::OperationConfig, subscription_id: &str) -> std::result::Result<BillingProperty, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Billing/billingProperty/default",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingProperty =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
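    /// Updates the default billing property of a subscription via HTTP PATCH.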
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
parameters: &BillingProperty,
) -> std::result::Result<BillingProperty, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Billing/billingProperty/default",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingProperty =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod operations {
use crate::models::*;
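    /// Lists the operations available from the Microsoft.Billing resource provider.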
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Billing/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod billing_role_definitions {
use crate::models::*;
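    /// Gets a billing role definition by name on a billing account.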
pub async fn get_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_role_definition_name: &str,
) -> std::result::Result<BillingRoleDefinition, get_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleDefinitions/{}",
operation_config.base_path(),
billing_account_name,
billing_role_definition_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleDefinition = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
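    /// Gets a billing role definition by name on an invoice section.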
pub async fn get_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
billing_role_definition_name: &str,
) -> std::result::Result<BillingRoleDefinition, get_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleDefinitions/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name,
billing_role_definition_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleDefinition = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
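    /// Gets a billing role definition by name on a billing profile.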
pub async fn get_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
billing_role_definition_name: &str,
) -> std::result::Result<BillingRoleDefinition, get_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleDefinitions/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
billing_role_definition_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: BillingRoleDefinition = serde_json::from_slice(rsp_body)
                    .map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
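    /// Lists the billing role definitions of a billing account.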
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingRoleDefinitionListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleDefinitions",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleDefinitionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
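    /// Lists the billing role definitions of an invoice section.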
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingRoleDefinitionListResult, list_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleDefinitions",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleDefinitionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
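    /// Lists the billing role definitions of a billing profile.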
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingRoleDefinitionListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleDefinitions",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleDefinitionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod billing_role_assignments {
use crate::models::*;
pub async fn get_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, get_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleAssignments/{}",
operation_config.base_path(),
billing_account_name,
billing_role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignment = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, delete_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleAssignments/{}",
operation_config.base_path(),
billing_account_name,
billing_role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(delete_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignment = serde_json::from_slice(rsp_body)
.map_err(|source| delete_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| delete_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, get_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleAssignments/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name,
billing_role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignment = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, delete_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleAssignments/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name,
billing_role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(delete_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignment = serde_json::from_slice(rsp_body)
.map_err(|source| delete_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| delete_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, get_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleAssignments/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
billing_role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(get_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignment = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, delete_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleAssignments/{}",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
billing_role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(delete_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignment = serde_json::from_slice(rsp_body)
.map_err(|source| delete_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| delete_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingRoleAssignmentListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleAssignments",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignmentListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingRoleAssignmentListResult, list_by_invoice_section::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleAssignments",
operation_config.base_path(),
billing_account_name,
billing_profile_name,
invoice_section_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_invoice_section::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_invoice_section::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_invoice_section::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_invoice_section::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignmentListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_invoice_section::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_invoice_section::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_invoice_section {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingRoleAssignmentListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleAssignments",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: BillingRoleAssignmentListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
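// ---------------------------------------------------------------------------
// Illustrative caller sketch (not generated code): shows one way the async
// operations in `billing_role_assignments` above can be driven. The account
// and assignment names are placeholders, and the `OperationConfig` is assumed
// to be constructed elsewhere; no fields of the returned model are accessed,
// since its layout is not shown here.
// ---------------------------------------------------------------------------
pub async fn example_fetch_role_assignment(
    operation_config: &crate::OperationConfig,
) -> std::result::Result<(), billing_role_assignments::get_by_billing_account::Error> {
    // `?` propagates the operation's own error enum (parse, build, execute,
    // deserialize, token), exactly as the generated functions do internally.
    let _assignment = billing_role_assignments::get_by_billing_account(
        operation_config,
        "example-billing-account",
        "example-role-assignment",
    )
    .await?;
    Ok(())
}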
pub mod agreements {
use crate::models::*;
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
expand: Option<&str>,
) -> std::result::Result<AgreementListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/agreements",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AgreementListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
agreement_name: &str,
expand: Option<&str>,
) -> std::result::Result<Agreement, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/agreements/{}",
operation_config.base_path(),
billing_account_name,
agreement_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Agreement =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod reservations {
use crate::models::*;
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
filter: Option<&str>,
orderby: Option<&str>,
refresh_summary: Option<&str>,
selected_state: Option<&str>,
) -> std::result::Result<ReservationsListResult, list_by_billing_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/reservations",
operation_config.base_path(),
billing_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(refresh_summary) = refresh_summary {
url.query_pairs_mut().append_pair("refreshSummary", refresh_summary);
}
if let Some(selected_state) = selected_state {
url.query_pairs_mut().append_pair("selectedState", selected_state);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
filter: Option<&str>,
orderby: Option<&str>,
refresh_summary: Option<&str>,
selected_state: Option<&str>,
) -> std::result::Result<ReservationsListResult, list_by_billing_profile::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/reservations",
operation_config.base_path(),
billing_account_name,
billing_profile_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_billing_profile::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_billing_profile::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(refresh_summary) = refresh_summary {
url.query_pairs_mut().append_pair("refreshSummary", refresh_summary);
}
if let Some(selected_state) = selected_state {
url.query_pairs_mut().append_pair("selectedState", selected_state);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_billing_profile::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_billing_profile::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationsListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_billing_profile::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod promotion {
use crate::models::*;
pub async fn get(operation_config: &crate::OperationConfig, promotion_id: &str) -> std::result::Result<PromotionResponse, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/promotions/{}",
operation_config.base_path(),
promotion_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: PromotionResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn check_eligibility(
operation_config: &crate::OperationConfig,
promotion_sku_id: &str,
subscription_id: &str,
) -> std::result::Result<PromotionCheckEligibilityResponse, check_eligibility::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Billing/promotions/{}/checkEligibility",
operation_config.base_path(),
subscription_id,
promotion_sku_id
);
let mut url = url::Url::parse(url_str).map_err(check_eligibility::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_eligibility::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(check_eligibility::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_eligibility::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: PromotionCheckEligibilityResponse = serde_json::from_slice(rsp_body)
.map_err(|source| check_eligibility::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| check_eligibility::Error::DeserializeError(source, rsp_body.clone()))?;
Err(check_eligibility::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod check_eligibility {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod activate {
use crate::models::*;
pub async fn promotion(
operation_config: &crate::OperationConfig,
promotion_id: &str,
parameters: &PromotionCreateRequest,
) -> std::result::Result<PromotionResponse, promotion::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Billing/promotions/{}",
operation_config.base_path(),
promotion_id
);
let mut url = url::Url::parse(url_str).map_err(promotion::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(promotion::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(promotion::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(promotion::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(promotion::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: PromotionResponse =
serde_json::from_slice(rsp_body).map_err(|source| promotion::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| promotion::Error::DeserializeError(source, rsp_body.clone()))?;
Err(promotion::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod promotion {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod promotions {
use crate::models::*;
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<PromotionList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Billing/promotions", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: PromotionList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
| {
let rsp_body = rsp.body();
let rsp_value: BillingRoleDefinition = serde_json::from_slice(rsp_body)
.map_err(|source| get_by_billing_profile::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
} |
usersync.go | package smartadserver
import (
"text/template"
"github.com/prebid/prebid-server/adapters"
"github.com/prebid/prebid-server/usersync"
)
func | (temp *template.Template) usersync.Usersyncer {
return adapters.NewSyncer("smartadserver", temp, adapters.SyncTypeRedirect)
}
| NewSmartadserverSyncer |
entity.rs | // Copyright (c) 2021, BlockProject 3D
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of BlockProject 3D nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::{
component::{Component},
object::ObjectRef
};
use crate::component::ComponentRef;
use crate::component::pool::{Attachments, ComponentManager};
pub struct ComponentType<T: Component>
{
useless: std::marker::PhantomData<T>
}
impl<T: Component> ComponentType<T>
{
pub fn new() -> ComponentType<T>
{
return ComponentType {
useless: std::marker::PhantomData::default()
};
}
}
pub trait ComponentTypeProvider<T: Component>
{
fn class() -> ComponentType<T>;
}
impl<T: Component> ComponentTypeProvider<T> for T
{
fn class() -> ComponentType<T>
{
return ComponentType::<T>::new();
}
}
pub struct Entity<'a, ComponentManager>
{
mgr: &'a mut ComponentManager,
entity: ObjectRef
}
impl<'a, ComponentManager> Entity<'a, ComponentManager>
{
pub fn aquire(&mut self, other: ObjectRef) -> Entity<ComponentManager>
{
return Entity {
mgr: self.mgr,
entity: other
};
}
}
pub trait EntityPart<T: Component, CM: ComponentManager<T>>
{
fn add(&mut self, comp: T) -> ComponentRef<T>;
fn get_mut(&mut self, r: ComponentRef<T>) -> &mut T;
fn get(&self, r: ComponentRef<T>) -> &T;
fn remove(&mut self, r: ComponentRef<T>);
fn list(&self, _: ComponentType<T>) -> Option<Vec<ComponentRef<T>>>;
fn get_first(&self, _: ComponentType<T>) -> Option<&T>;
fn get_first_mut(&mut self, _: ComponentType<T>) -> Option<&mut T>;
}
impl<'a, T: Component, CM: ComponentManager<T>>
EntityPart<T, CM> for Entity<'a, CM>
where
T::Pool: Attachments<T>
{
fn | (&mut self, comp: T) -> ComponentRef<T>
{
let r = self.mgr.add_component(comp);
self.mgr.get_mut().attach(self.entity, r);
return r;
}
fn get_mut(&mut self, r: ComponentRef<T>) -> &mut T
{
self.mgr.get_component_mut(r)
}
fn get(&self, r: ComponentRef<T>) -> &T
{
self.mgr.get_component(r)
}
fn remove(&mut self, r: ComponentRef<T>)
{
self.mgr.remove_component(r)
}
fn list(&self, _: ComponentType<T>) -> Option<Vec<ComponentRef<T>>>
{
return self.mgr.get().list(self.entity);
}
fn get_first(&self, _: ComponentType<T>) -> Option<&T>
{
self.mgr.get().get_first(self.entity)
}
fn get_first_mut(&mut self, _: ComponentType<T>) -> Option<&mut T>
{
self.mgr.get_mut().get_first_mut(self.entity)
}
}
impl<'a, ComponentManager> Entity<'a, ComponentManager>
{
pub fn new(mgr: &'a mut ComponentManager, entity: ObjectRef) -> Entity<'a, ComponentManager>
{
return Entity { mgr, entity };
}
}
| add |
main.rs | use std::cmp::Reverse;
use std::collections::BinaryHeap;
fn main()
{
let bots = include_str!("../input.txt").lines().map(Bot::parse).collect::<Vec<Bot>>();
// part 1: how many bots are in range of the bot with the largest signal radius?
let largest = bots.iter().max_by_key(|b| b.radius).unwrap();
println!("{}", bots.iter().filter(|b| largest.contains(b.position)).count());
    // find the bounding box of the bots' radii to construct the initial OctreeNode
let min_corner = (i32::MIN, i32::MIN, i32::MIN);
let max_corner = (i32::MAX, i32::MAX, i32::MAX);
let (min_bound, max_bound) = bots.iter().fold((max_corner, min_corner), |((min_x, min_y, min_z), (max_x, max_y, max_z)), b|
{
let (x, y, z) = b.min_bound();
let min_bound = (min_x.min(x), min_y.min(y), min_z.min(z));
let (x, y, z) = b.max_bound();
let max_bound = (max_x.max(x), max_y.max(y), max_z.max(z));
(min_bound, max_bound)
});
// compute the priority information that will be stored in the queue
let priority = |node : OctreeNode|
{
let bot_count = bots.iter().filter(|b| node.in_range(b)).count();
let distance = manhattan((0, 0, 0), node.min_corner);
(bot_count, Reverse(node.size), Reverse(distance), node)
};
    // part 2: priority search for an OctreeNode of size 1 that maximises the
// number of bots in range. Print the manhattan distance to the origin.
let mut queue = BinaryHeap::new();
queue.push(priority(OctreeNode::new(min_bound, max_bound)));
while let Some((_, Reverse(size), Reverse(distance), node)) = queue.pop()
{
if size == 1
{
println!("{}", distance);
break
}
queue.extend(node.split().map(priority));
}
}
type Pos = (i32, i32, i32);
fn manhattan((x1, y1, z1) : Pos, (x2, y2, z2) : Pos) -> u32
{
((x1 - x2).abs() + (y1 - y2).abs() + (z1 - z2).abs()) as u32
}
struct Bot
{
position: Pos,
radius: u32
}
impl Bot
{
fn parse(s : &str) -> Bot
{
fn parse_integer(s : &str) -> (i32, &str)
{
let (integer, rest) = s.split_at(s.find(|c : char| !(c.is_ascii_digit() || c == '-')).unwrap_or_else(|| s.len()));
(integer.parse().unwrap(), rest)
}
let (x, s) = parse_integer(&s[5..]);
let (y, s) = parse_integer(&s[1..]);
let (z, s) = parse_integer(&s[1..]);
let (r, _) = parse_integer(&s[5..]);
Bot { position: (x, y, z), radius: r as u32 }
}
fn contains(&self, position : Pos) -> bool
{
manhattan(position, self.position) <= self.radius
}
fn min_bound(&self) -> Pos
{
let (x, y, z) = self.position;
let r = self.radius as i32;
(x - r, y - r, z - r)
}
fn max_bound(&self) -> Pos
{
let (x, y, z) = self.position;
let r = self.radius as i32;
(x + r, y + r, z + r)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct OctreeNode
{
size: u32,
min_corner: Pos
}
impl OctreeNode
{
fn new((min_x, min_y, min_z) : Pos, (max_x, max_y, max_z) : Pos) -> OctreeNode
{
        // find the largest side-length of the bounding box
let side_length = 1 + (max_x - min_x).max(max_y - min_y).max(max_z - min_z) as u32;
// the size is the smallest power of two which is at least side_length
OctreeNode
{
size: (0 ..).map(|n| 2_u32.pow(n)).find(|&s| s >= side_length).unwrap(),
min_corner: (min_x, min_y, min_z)
}
}
fn max_corner(&self) -> Pos
|
// a node is in range of a bot if the manhattan distance from
    // the bot to any of the node's edges is at most the bot's radius
fn in_range(&self, bot : &Bot) -> bool
{
let (min_x, min_y, min_z) = self.min_corner;
let (max_x, max_y, max_z) = self.max_corner();
let (bot_x, bot_y, bot_z) = bot.position;
let mut distance = 0;
if bot_x < min_x { distance += min_x - bot_x }
if bot_x > max_x { distance += bot_x - max_x }
if bot_y < min_y { distance += min_y - bot_y }
if bot_y > max_y { distance += bot_y - max_y }
if bot_z < min_z { distance += min_z - bot_z }
if bot_z > max_z { distance += bot_z - max_z }
distance as u32 <= bot.radius
}
// split this node into eight smaller nodes by subdividing each axis by two
fn split(&self) -> impl Iterator<Item = OctreeNode>
{
let size = self.size / 2;
let s = size as i32;
let (x, y, z) = self.min_corner;
vec![(x, y, z ),
(x, y, z+s),
(x, y+s, z ),
(x, y+s, z+s),
(x+s, y, z ),
(x+s, y, z+s),
(x+s, y+s, z ),
(x+s, y+s, z+s)].into_iter()
.map(move |min_corner| OctreeNode { size, min_corner })
}
}
| {
let (x, y, z) = self.min_corner;
let offset = self.size as i32 - 1;
(x + offset, y + offset, z + offset)
} |
writer.go | package tar
import (
"archive/tar"
"io"
"path"
"time"
cxt "context"
proto "gx/ipfs/QmZ4Qi3GaRbjcx28Sme5eMH7RQjGkt8wHxt2a65oLaeFEV/gogo-protobuf/proto"
mdag "github.com/ipfs/go-ipfs/merkledag"
ft "github.com/ipfs/go-ipfs/unixfs"
uio "github.com/ipfs/go-ipfs/unixfs/io"
upb "github.com/ipfs/go-ipfs/unixfs/pb"
)
// Writer is a utility structure that helps to write
// unixfs merkledag nodes as a tar archive.
// It wraps any io.Writer.
type Writer struct {
Dag mdag.DAGService
TarW *tar.Writer
ctx cxt.Context
}
// NewWriter wraps given io.Writer.
func NewWriter(ctx cxt.Context, dag mdag.DAGService, archive bool, compression int, w io.Writer) (*Writer, error) {
return &Writer{
Dag: dag,
TarW: tar.NewWriter(w),
ctx: ctx,
}, nil
}
func (w *Writer) writeDir(nd *mdag.ProtoNode, fpath string) error {
if err := writeDirHeader(w.TarW, fpath); err != nil {
return err
}
for i, ng := range mdag.GetDAG(w.ctx, w.Dag, nd) {
child, err := ng.Get(w.ctx)
if err != nil {
return err
}
childpb, ok := child.(*mdag.ProtoNode)
if !ok {
return mdag.ErrNotProtobuf
}
npath := path.Join(fpath, nd.Links()[i].Name)
if err := w.WriteNode(childpb, npath); err != nil {
return err
}
}
return nil
}
func (w *Writer) writeFile(nd *mdag.ProtoNode, pb *upb.Data, fpath string) error {
if err := writeFileHeader(w.TarW, fpath, pb.GetFilesize()); err != nil {
return err
}
dagr := uio.NewDataFileReader(w.ctx, nd, pb, w.Dag)
if _, err := dagr.WriteTo(w.TarW); err != nil {
return err
}
w.TarW.Flush()
return nil
}
func (w *Writer) WriteNode(nd *mdag.ProtoNode, fpath string) error {
pb := new(upb.Data)
if err := proto.Unmarshal(nd.Data(), pb); err != nil {
return err
}
switch pb.GetType() {
case upb.Data_Metadata:
fallthrough
case upb.Data_Directory:
return w.writeDir(nd, fpath)
case upb.Data_Raw:
fallthrough
case upb.Data_File:
return w.writeFile(nd, pb, fpath)
case upb.Data_Symlink:
return writeSymlinkHeader(w.TarW, string(pb.GetData()), fpath)
default:
return ft.ErrUnrecognizedType
}
}
func (w *Writer) Close() error {
return w.TarW.Close()
}
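// Illustrative use of Writer (a sketch only; it assumes a context `ctx`, a DAG
// service `dag`, a unixfs root *mdag.ProtoNode `root` and an io.Writer `out`
// already exist):
//
//	w, err := NewWriter(ctx, dag, false, 0, out)
//	if err != nil {
//		return err
//	}
//	defer w.Close()
//	return w.WriteNode(root, "root")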
func writeDirHeader(w *tar.Writer, fpath string) error {
return w.WriteHeader(&tar.Header{
Name: fpath,
Typeflag: tar.TypeDir,
Mode: 0777,
ModTime: time.Now(),
// TODO: set mode, dates, etc. when added to unixFS
})
}
func writeFileHeader(w *tar.Writer, fpath string, size uint64) error |
func writeSymlinkHeader(w *tar.Writer, target, fpath string) error {
return w.WriteHeader(&tar.Header{
Name: fpath,
Linkname: target,
Mode: 0777,
Typeflag: tar.TypeSymlink,
})
}
| {
return w.WriteHeader(&tar.Header{
Name: fpath,
Size: int64(size),
Typeflag: tar.TypeReg,
Mode: 0644,
ModTime: time.Now(),
// TODO: set mode, dates, etc. when added to unixFS
})
} |
DemandPeriodFilter.tsx | import { TimeFrame } from '@energyweb/exchange-irec-react-query-client';
import { SelectRegular } from '@energyweb/origin-ui-core';
import { periodTypeOptions } from '@energyweb/origin-ui-exchange-logic';
import { IconButton, InputAdornment } from '@mui/material';
import { Close } from '@mui/icons-material';
import React from 'react';
import { FC } from 'react'; |
interface PeriodFilterProps {
value: TimeFrame;
handleFilterChange: (newValue: TimeFrame | undefined) => void;
}
export const DemandPeriodFilter: FC<PeriodFilterProps> = ({
value,
handleFilterChange,
}) => {
const { t } = useTranslation();
const handleChange = (event: React.ChangeEvent<HTMLInputElement>) => {
handleFilterChange(event.target.value as TimeFrame);
};
const handleClear = () => {
handleFilterChange(undefined);
};
return (
<SelectRegular
field={{
name: 'periodFilter',
label: t('exchange.myOrders.period'),
options: periodTypeOptions(t, false),
textFieldProps: {
margin: 'dense',
InputProps: value
? {
endAdornment: (
<InputAdornment position="end" style={{ marginRight: 15 }}>
<IconButton onClick={handleClear}>
<Close />
</IconButton>
</InputAdornment>
),
}
: undefined,
},
}}
variant="filled"
value={value}
onChange={handleChange}
/>
);
}; | import { useTranslation } from 'react-i18next'; |
QueuePlayNextTwoTone.tsx | import createSvgIcon from './helpers/createSvgIcon';
export default createSvgIcon(
<path d="M13 15v-3h3v-2h-3V7h-2v3H8v2h3v3zm5 0l3 3-3 3 1.5 1.5L24 18l-4.5-4.5zM8 19v2h8v-2h2v-2H3V5h18v8h2V5c0-1.11-.9-2-2-2H3c-1.11 0-2 .89-2 2v12c0 1.1.89 2 2 2h5z" />,
'QueuePlayNextTwoTone',
); | import React from 'react'; |
|
worktree.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Worktree """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
import virtualenv
from qisys import ui
import qisys.worktree
import qisys.qixml
import qipy.project
class PythonWorkTree(qisys.worktree.WorkTreeObserver):
""" Container for Python projects """
def __init__(self, worktree, config="system"):
""" PythonWorkTree Init """
self.worktree = worktree
self.python_projects = list()
self._load_python_projects()
self.config = config
worktree.register(self)
def reload(self):
""" Reload """
self._load_python_projects()
@property
def root(self):
""" Root """
return self.worktree.root
def _load_python_projects(self):
""" Load Python Projects """
seen_names = dict()
self.python_projects = list()
for project in self.worktree.projects:
qiproject_xml = os.path.join(project.path, "qiproject.xml")
if not os.path.exists(qiproject_xml):
continue
new_project = new_python_project(self, project)
if not new_project:
continue
if new_project.name in seen_names:
mess = """ \
Found two projects with the same name. (%s)
%s
%s
""" % (new_project.name, seen_names[new_project.name], new_project.src)
raise Exception(mess)
self.python_projects.append(new_project)
seen_names[new_project.name] = new_project.src
def get_python_project(self, name, raises=False):
|
def bin_path(self, name):
""" Path to the virtualenv's binaries """
binaries_path = virtualenv.path_locations(self.venv_path)[-1]
return os.path.join(binaries_path, name)
@property
def venv_path(self):
""" Path to the virtualenv """
res = os.path.join(self.worktree.dot_qi, "venvs",
self.config)
return res
@property
def pip(self):
""" Path to the pip binary """
return self.bin_path("pip")
@property
def python(self):
""" Path to the python executable in the virtualenv """
return self.bin_path("python")
def activate_this(self):
""" Activate this virtualenv """
activate_this_dot_py = self.bin_path("activate_this.py")
execfile(activate_this_dot_py, {"__file__": activate_this_dot_py})
def new_python_project(worktree, project):
""" New Python Project """
qiproject_xml = project.qiproject_xml
tree = qisys.qixml.read(qiproject_xml)
qipython_elem = tree.find("qipython")
if qipython_elem is None:
return None
name = qisys.qixml.parse_required_attr(qipython_elem, "name",
xml_path=qiproject_xml)
python_project = qipy.project.PythonProject(worktree, project.src, name)
script_elems = qipython_elem.findall("script")
for script_elem in script_elems:
src = qisys.qixml.parse_required_attr(script_elem, "src",
xml_path=qiproject_xml)
script = qipy.project.Script(src)
python_project.scripts.append(script)
module_elems = qipython_elem.findall("module")
for module_elem in module_elems:
src = module_elem.get("src", "")
name = qisys.qixml.parse_required_attr(module_elem, "name",
xml_path=qiproject_xml)
module = qipy.project.Module(name, src)
module.qimodule = qisys.qixml.parse_bool_attr(module_elem, "qimodule")
python_project.modules.append(module)
package_elems = qipython_elem.findall("package")
for package_elem in package_elems:
name = qisys.qixml.parse_required_attr(package_elem, "name",
xml_path=qiproject_xml)
src = package_elem.get("src", "")
package = qipy.project.Package(name, src)
package.qimodule = qisys.qixml.parse_bool_attr(package_elem, "qimodule")
python_project.packages.append(package)
setup_elem = qipython_elem.find("setup")
if setup_elem is not None:
python_project.setup_with_distutils = \
qisys.qixml.parse_bool_attr(setup_elem, "with_distutils")
return python_project
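# Illustrative shape of the qiproject.xml fragment parsed above (element and
# attribute names follow the parsing code; the concrete values are made up):
#
# <project>
#   <qipython name="mypkg">
#     <script src="bin/run.py" />
#     <module name="mymodule" src="lib" qimodule="true" />
#     <package name="mypkg" src="src" />
#     <setup with_distutils="true" />
#   </qipython>
# </project>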
| """ Get a Python project given its name """
for project in self.python_projects:
if project.name == name:
return project
if raises:
mess = ui.did_you_mean("No such python project: %s" % name,
name, [x.name for x in self.python_projects])
raise qisys.worktree.NoSuchProject(name, mess)
else:
return None |
build.go | package main
import (
"bytes"
"flag"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"runtime"
"strconv"
"strings"
log "github.com/Sirupsen/logrus"
"github.com/moby/tool/src/moby"
)
const defaultNameForStdin = "moby"
type outputList []string
func (o *outputList) String() string {
return fmt.Sprint(*o)
}
func (o *outputList) Set(value string) error {
	// allow comma separated options or multiple options
for _, cs := range strings.Split(value, ",") {
*o = append(*o, cs)
}
return nil
}
// getDiskSizeMB parses a string which is either a plain number of MB, or a number
// with either M (for megabytes) or G (for gigabytes) as a suffix, and returns
// the size in MB. It returns 0 if the string is empty.
func getDiskSizeMB(s string) (int, error) {
if s == "" {
return 0, nil
}
sz := len(s)
if strings.HasSuffix(s, "G") {
i, err := strconv.Atoi(s[:sz-1])
if err != nil {
return 0, err
}
return i * 1024, nil
}
if strings.HasSuffix(s, "M") {
s = s[:sz-1]
}
return strconv.Atoi(s)
}
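// For example (illustrative values): getDiskSizeMB("") returns 0, getDiskSizeMB("512")
// and getDiskSizeMB("512M") both return 512, and getDiskSizeMB("2G") returns 2048.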
// Process the build arguments and execute build
func build(args []string) {
var buildOut outputList
outputTypes := moby.OutputTypes()
buildCmd := flag.NewFlagSet("build", flag.ExitOnError)
buildCmd.Usage = func() {
fmt.Printf("USAGE: %s build [options] <file>[.yml] | -\n\n", os.Args[0])
fmt.Printf("Options:\n")
buildCmd.PrintDefaults()
}
buildName := buildCmd.String("name", "", "Name to use for output files")
buildDir := buildCmd.String("dir", "", "Directory for output files, default current directory")
buildOutputFile := buildCmd.String("o", "", "File to use for a single output, or '-' for stdout")
buildSize := buildCmd.String("size", "1024M", "Size for output image, if supported and fixed size")
buildPull := buildCmd.Bool("pull", false, "Always pull images")
buildDisableTrust := buildCmd.Bool("disable-content-trust", false, "Skip image trust verification specified in trust section of config (default false)")
buildHyperkit := buildCmd.Bool("hyperkit", runtime.GOOS == "darwin", "Use hyperkit for LinuxKit based builds where possible")
buildCmd.Var(&buildOut, "output", "Output types to create [ "+strings.Join(outputTypes, " ")+" ]")
if err := buildCmd.Parse(args); err != nil {
log.Fatal("Unable to parse args")
}
remArgs := buildCmd.Args()
if len(remArgs) == 0 {
fmt.Println("Please specify a configuration file")
buildCmd.Usage()
os.Exit(1)
}
name := *buildName
if name == "" {
conf := remArgs[len(remArgs)-1]
if conf == "-" {
name = defaultNameForStdin
} else {
name = strings.TrimSuffix(filepath.Base(conf), filepath.Ext(conf))
}
}
	// There are two types of output; they will probably be split into "build" and "package" later.
	// The basic outputs are tarballs, while the packaged ones are the LinuxKit output formats that
	// cannot be streamed, but we do allow multiple ones to be built.
if len(buildOut) == 0 {
if *buildOutputFile == "" {
buildOut = outputList{"kernel+initrd"}
} else {
buildOut = outputList{"tar"}
}
}
log.Debugf("Outputs selected: %s", buildOut.String())
if len(buildOut) > 1 {
for _, o := range buildOut {
if moby.Streamable(o) {
log.Fatalf("Output type %s must be the only output specified", o)
}
}
}
if len(buildOut) == 1 && moby.Streamable(buildOut[0]) {
if *buildOutputFile == "" {
*buildOutputFile = filepath.Join(*buildDir, name+"."+buildOut[0])
// stop the errors in the validation below
*buildName = ""
*buildDir = ""
}
} else {
err := moby.ValidateOutputs(buildOut)
if err != nil {
log.Errorf("Error parsing outputs: %v", err)
buildCmd.Usage()
os.Exit(1)
}
}
var outputFile *os.File
if *buildOutputFile != "" {
if len(buildOut) > 1 {
log.Fatal("The -output option can only be specified when generating a single output format")
}
if *buildName != "" {
log.Fatal("The -output option cannot be specified with -name")
}
if *buildDir != "" {
log.Fatal("The -output option cannot be specified with -dir")
}
if !moby.Streamable(buildOut[0]) {
log.Fatalf("The -output option cannot be specified for build type %s as it cannot be streamed", buildOut[0])
}
if *buildOutputFile == "-" {
outputFile = os.Stdout
} else {
var err error
outputFile, err = os.Create(*buildOutputFile)
if err != nil {
log.Fatalf("Cannot open output file: %v", err)
}
defer outputFile.Close()
}
}
size, err := getDiskSizeMB(*buildSize)
if err != nil {
log.Fatalf("Unable to parse disk size: %v", err)
}
var m moby.Moby
for _, arg := range remArgs {
var config []byte
if conf := arg; conf == "-" {
var err error
config, err = ioutil.ReadAll(os.Stdin)
if err != nil {
log.Fatalf("Cannot read stdin: %v", err)
} | response, err := http.Get(arg)
if err != nil {
log.Fatal("Cannot fetch remote yaml file: %v", err)
}
defer response.Body.Close()
_, err = io.Copy(buffer, response.Body)
if err != nil {
log.Fatalf("Error reading http body: %v", err)
}
config = buffer.Bytes()
} else {
var err error
config, err = ioutil.ReadFile(conf)
if err != nil {
log.Fatalf("Cannot open config file: %v", err)
}
}
c, err := moby.NewConfig(config)
if err != nil {
log.Fatalf("Invalid config: %v", err)
}
m, err = moby.AppendConfig(m, c)
if err != nil {
log.Fatalf("Cannot append config files: %v", err)
}
}
if *buildDisableTrust {
log.Debugf("Disabling content trust checks for this build")
m.Trust = moby.TrustConfig{}
}
var buf *bytes.Buffer
var w io.Writer
if outputFile != nil {
w = outputFile
} else {
buf = new(bytes.Buffer)
w = buf
}
	// this is a weird interface, but currently only streamable types can have additional files;
	// we need to split up the base tarball outputs from the secondary stages
var tp string
if moby.Streamable(buildOut[0]) {
tp = buildOut[0]
}
err = moby.Build(m, w, *buildPull, tp)
if err != nil {
log.Fatalf("%v", err)
}
if outputFile == nil {
image := buf.Bytes()
log.Infof("Create outputs:")
err = moby.Outputs(filepath.Join(*buildDir, name), image, buildOut, size, *buildHyperkit)
if err != nil {
log.Fatalf("Error writing outputs: %v", err)
}
}
} | } else if strings.HasPrefix(arg, "http://") || strings.HasPrefix(arg, "https://") {
buffer := new(bytes.Buffer) |
intents_client.js | // Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
'use strict';
const gapicConfig = require('./intents_client_config');
const gax = require('google-gax');
const merge = require('lodash.merge');
const path = require('path');
const protobuf = require('protobufjs');
const VERSION = require('../../package.json').version;
/**
* An intent represents a mapping between input from a user and an action to
* be taken by your application. When you pass user input to the
* DetectIntent (or
* StreamingDetectIntent) method, the
* Dialogflow API analyzes the input and searches
* for a matching intent. If no match is found, the Dialogflow API returns a
* fallback intent (`is_fallback` = true).
*
* You can provide additional information for the Dialogflow API to use to
* match user input to an intent by adding the following to your intent.
*
* * **Contexts** - provide additional context for intent analysis. For
* example, if an intent is related to an object in your application that
* plays music, you can provide a context to determine when to match the
* intent if the user input is “turn it off”. You can include a context
* that matches the intent when there is previous user input of
* "play music", and not when there is previous user input of
* "turn on the light".
*
* * **Events** - allow for matching an intent by using an event name
* instead of user input. Your application can provide an event name and
* related parameters to the Dialogflow API to match an intent. For
* example, when your application starts, you can send a welcome event
* with a user name parameter to the Dialogflow API to match an intent with
* a personalized welcome message for the user.
*
* * **Training phrases** - provide examples of user input to train the
* Dialogflow API agent to better match intents.
*
* For more information about intents, see the
* [Dialogflow documentation](https://dialogflow.com/docs/intents).
*
* @class
* @memberof v2
*/
class IntentsClient {
/**
* Construct an instance of IntentsClient.
*
* @param {object} [options] - The configuration object. See the subsequent
* parameters for more details.
* @param {object} [options.credentials] - Credentials object.
* @param {string} [options.credentials.client_email]
* @param {string} [options.credentials.private_key]
* @param {string} [options.email] - Account email address. Required when
* using a .pem or .p12 keyFilename.
* @param {string} [options.keyFilename] - Full path to the a .json, .pem, or
* .p12 key downloaded from the Google Developers Console. If you provide
* a path to a JSON file, the projectId option below is not necessary.
* NOTE: .pem and .p12 require you to specify options.email as well.
* @param {number} [options.port] - The port on which to connect to
* the remote host.
* @param {string} [options.projectId] - The project ID from the Google
* Developer's Console, e.g. 'grape-spaceship-123'. We will also check
* the environment variable GCLOUD_PROJECT for your project ID. If your
* app is running in an environment which supports
* {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials},
* your project ID will be detected automatically.
* @param {function} [options.promise] - Custom promise module to use instead
* of native Promises.
* @param {string} [options.servicePath] - The domain name of the
* API remote host.
*/
constructor(opts) {
this._descriptors = {};
// Ensure that options include the service address and port.
opts = Object.assign(
{
clientConfig: {},
port: this.constructor.port,
servicePath: this.constructor.servicePath,
},
opts
);
// Create a `gaxGrpc` object, with any grpc-specific options
// sent to the client.
opts.scopes = this.constructor.scopes;
var gaxGrpc = new gax.GrpcClient(opts);
// Save the auth object to the client, for use by other methods.
this.auth = gaxGrpc.auth;
// Determine the client header string.
var clientHeader = [
`gl-node/${process.version}`,
`grpc/${gaxGrpc.grpcVersion}`,
`gax/${gax.version}`,
`gapic/${VERSION}`,
];
if (opts.libName && opts.libVersion) {
clientHeader.push(`${opts.libName}/${opts.libVersion}`);
}
// Load the applicable protos.
var protos = merge(
{},
gaxGrpc.loadProto(
path.join(__dirname, '..', '..', 'protos'),
'google/cloud/dialogflow/v2/intent.proto'
)
);
// This API contains "path templates"; forward-slash-separated
// identifiers to uniquely identify resources within the API.
// Create useful helper objects for these.
this._pathTemplates = {
projectAgentPathTemplate: new gax.PathTemplate(
'projects/{project}/agent'
),
intentPathTemplate: new gax.PathTemplate(
'projects/{project}/agent/intents/{intent}'
),
agentPathTemplate: new gax.PathTemplate(
'projects/{project}/agents/{agent}'
),
};
// Some of the methods on this service return "paged" results,
// (e.g. 50 results at a time, with tokens to get subsequent
// pages). Denote the keys used for pagination and results.
this._descriptors.page = {
listIntents: new gax.PageDescriptor(
'pageToken',
'nextPageToken',
'intents'
),
};
var protoFilesRoot = new gax.GoogleProtoFilesRoot();
protoFilesRoot = protobuf.loadSync(
path.join(
__dirname,
'..',
'..',
'protos',
'google/cloud/dialogflow/v2/intent.proto'
),
protoFilesRoot
);
// This API contains "long-running operations", which return a
// an Operation object that allows for tracking of the operation,
// rather than holding a request open.
this.operationsClient = new gax.lro({
auth: gaxGrpc.auth,
grpc: gaxGrpc.grpc,
}).operationsClient(opts);
var batchUpdateIntentsResponse = protoFilesRoot.lookup(
'google.cloud.dialogflow.v2.BatchUpdateIntentsResponse'
);
var batchUpdateIntentsMetadata = protoFilesRoot.lookup(
'google.protobuf.Struct'
);
var batchDeleteIntentsResponse = protoFilesRoot.lookup(
'google.protobuf.Empty'
);
var batchDeleteIntentsMetadata = protoFilesRoot.lookup(
'google.protobuf.Struct'
);
this._descriptors.longrunning = {
batchUpdateIntents: new gax.LongrunningDescriptor(
this.operationsClient,
batchUpdateIntentsResponse.decode.bind(batchUpdateIntentsResponse),
batchUpdateIntentsMetadata.decode.bind(batchUpdateIntentsMetadata)
),
batchDeleteIntents: new gax.LongrunningDescriptor(
this.operationsClient,
batchDeleteIntentsResponse.decode.bind(batchDeleteIntentsResponse),
batchDeleteIntentsMetadata.decode.bind(batchDeleteIntentsMetadata)
),
};
// Put together the default options sent with requests.
var defaults = gaxGrpc.constructSettings(
'google.cloud.dialogflow.v2.Intents',
gapicConfig,
opts.clientConfig,
{'x-goog-api-client': clientHeader.join(' ')}
);
// Set up a dictionary of "inner API calls"; the core implementation
// of calling the API is handled in `google-gax`, with this code
// merely providing the destination and request information.
this._innerApiCalls = {};
// Put together the "service stub" for
// google.cloud.dialogflow.v2.Intents.
var intentsStub = gaxGrpc.createStub(
protos.google.cloud.dialogflow.v2.Intents,
opts
);
// Iterate over each of the methods that the service provides
// and create an API call method for each.
var intentsStubMethods = [
'listIntents',
'getIntent',
'createIntent',
'updateIntent',
'deleteIntent',
'batchUpdateIntents',
'batchDeleteIntents',
];
for (let methodName of intentsStubMethods) {
this._innerApiCalls[methodName] = gax.createApiCall(
intentsStub.then(
stub =>
function() {
var args = Array.prototype.slice.call(arguments, 0);
return stub[methodName].apply(stub, args);
}
),
defaults[methodName],
this._descriptors.page[methodName] ||
this._descriptors.longrunning[methodName]
);
}
}
/**
* The DNS address for this API service.
*/
static get servicePath() {
return 'dialogflow.googleapis.com';
}
/**
* The port for this API service.
*/
static get port() {
return 443;
}
/**
* The scopes needed to make gRPC calls for every method defined
* in this service.
*/
static get scopes() {
return ['https://www.googleapis.com/auth/cloud-platform'];
}
/**
* Return the project ID used by this class.
* @param {function(Error, string)} callback - the callback to | * be called with the current project Id.
*/
getProjectId(callback) {
return this.auth.getProjectId(callback);
}
// -------------------
// -- Service calls --
// -------------------
/**
* Returns the list of all intents in the specified agent.
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
* Required. The agent to list all intents from.
* Format: `projects/<Project ID>/agent`.
* @param {string} [request.languageCode]
* Optional. The language to list training phrases, parameters and rich
* messages for. If not specified, the agent's default language is used.
* [More than a dozen
* languages](https://dialogflow.com/docs/reference/language) are supported.
* Note: languages must be enabled in the agent before they can be used.
* @param {number} [request.intentView]
* Optional. The resource view to apply to the returned intent.
*
* The number should be among the values of [IntentView]{@link google.cloud.dialogflow.v2.IntentView}
* @param {number} [request.pageSize]
* The maximum number of resources contained in the underlying API
* response. If page streaming is performed per-resource, this
* parameter does not affect the return value. If page streaming is
* performed per-page, this determines the maximum number of
* resources in a page.
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error, ?Array, ?Object, ?Object)} [callback]
* The function which will be called with the result of the API call.
*
* The second parameter to the callback is Array of [Intent]{@link google.cloud.dialogflow.v2.Intent}.
*
* When autoPaginate: false is specified through options, it contains the result
* in a single response. If the response indicates the next page exists, the third
* parameter is set to be used for the next request object. The fourth parameter keeps
* the raw response object of an object representing [ListIntentsResponse]{@link google.cloud.dialogflow.v2.ListIntentsResponse}.
* @returns {Promise} - The promise which resolves to an array.
* The first element of the array is Array of [Intent]{@link google.cloud.dialogflow.v2.Intent}.
*
* When autoPaginate: false is specified through options, the array has three elements.
* The first element is Array of [Intent]{@link google.cloud.dialogflow.v2.Intent} in a single response.
* The second element is the next request object if the response
* indicates the next page exists, or null. The third element is
* an object representing [ListIntentsResponse]{@link google.cloud.dialogflow.v2.ListIntentsResponse}.
*
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* // Iterate over all elements.
* var formattedParent = client.projectAgentPath('[PROJECT]');
*
* client.listIntents({parent: formattedParent})
* .then(responses => {
* var resources = responses[0];
* for (let i = 0; i < resources.length; i += 1) {
* // doThingsWith(resources[i])
* }
* })
* .catch(err => {
* console.error(err);
* });
*
* // Or obtain the paged response.
* var formattedParent = client.projectAgentPath('[PROJECT]');
*
*
* var options = {autoPaginate: false};
* var callback = responses => {
* // The actual resources in a response.
* var resources = responses[0];
* // The next request if the response shows that there are more responses.
* var nextRequest = responses[1];
* // The actual response object, if necessary.
* // var rawResponse = responses[2];
* for (let i = 0; i < resources.length; i += 1) {
* // doThingsWith(resources[i]);
* }
* if (nextRequest) {
* // Fetch the next page.
* return client.listIntents(nextRequest, options).then(callback);
* }
* }
* client.listIntents({parent: formattedParent}, options)
* .then(callback)
* .catch(err => {
* console.error(err);
* });
*/
listIntents(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.listIntents(request, options, callback);
}
/**
* Equivalent to {@link listIntents}, but returns a NodeJS Stream object.
*
* This fetches the paged responses for {@link listIntents} continuously
* and invokes the callback registered for 'data' event for each element in the
* responses.
*
* The returned object has 'end' method when no more elements are required.
*
* autoPaginate option will be ignored.
*
* @see {@link https://nodejs.org/api/stream.html}
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
* Required. The agent to list all intents from.
* Format: `projects/<Project ID>/agent`.
* @param {string} [request.languageCode]
* Optional. The language to list training phrases, parameters and rich
* messages for. If not specified, the agent's default language is used.
* [More than a dozen
* languages](https://dialogflow.com/docs/reference/language) are supported.
* Note: languages must be enabled in the agent before they can be used.
* @param {number} [request.intentView]
* Optional. The resource view to apply to the returned intent.
*
* The number should be among the values of [IntentView]{@link google.cloud.dialogflow.v2.IntentView}
* @param {number} [request.pageSize]
* The maximum number of resources contained in the underlying API
* response. If page streaming is performed per-resource, this
* parameter does not affect the return value. If page streaming is
* performed per-page, this determines the maximum number of
* resources in a page.
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @returns {Stream}
* An object stream which emits an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent} on 'data' event.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var formattedParent = client.projectAgentPath('[PROJECT]');
* client.listIntentsStream({parent: formattedParent})
* .on('data', element => {
* // doThingsWith(element)
* }).on('error', err => {
* console.log(err);
* });
*/
listIntentsStream(request, options) {
options = options || {};
return this._descriptors.page.listIntents.createStream(
this._innerApiCalls.listIntents,
request,
options
);
}
/**
* Retrieves the specified intent.
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.name
* Required. The name of the intent.
* Format: `projects/<Project ID>/agent/intents/<Intent ID>`.
* @param {string} [request.languageCode]
* Optional. The language to retrieve training phrases, parameters and rich
* messages for. If not specified, the agent's default language is used.
* [More than a dozen
* languages](https://dialogflow.com/docs/reference/language) are supported.
* Note: languages must be enabled in the agent, before they can be used.
* @param {number} [request.intentView]
* Optional. The resource view to apply to the returned intent.
*
* The number should be among the values of [IntentView]{@link google.cloud.dialogflow.v2.IntentView}
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error, ?Object)} [callback]
* The function which will be called with the result of the API call.
*
* The second parameter to the callback is an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent}.
* @returns {Promise} - The promise which resolves to an array.
* The first element of the array is an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent}.
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var formattedName = client.intentPath('[PROJECT]', '[INTENT]');
* client.getIntent({name: formattedName})
* .then(responses => {
* var response = responses[0];
* // doThingsWith(response)
* })
* .catch(err => {
* console.error(err);
* });
*/
getIntent(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.getIntent(request, options, callback);
}
/**
* Creates an intent in the specified agent.
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
   * Required. The agent to create an intent for.
* Format: `projects/<Project ID>/agent`.
* @param {Object} request.intent
* Required. The intent to create.
*
* This object should have the same structure as [Intent]{@link google.cloud.dialogflow.v2.Intent}
* @param {string} [request.languageCode]
* Optional. The language of training phrases, parameters and rich messages
* defined in `intent`. If not specified, the agent's default language is
* used. [More than a dozen
* languages](https://dialogflow.com/docs/reference/language) are supported.
* Note: languages must be enabled in the agent, before they can be used.
* @param {number} [request.intentView]
* Optional. The resource view to apply to the returned intent.
*
* The number should be among the values of [IntentView]{@link google.cloud.dialogflow.v2.IntentView}
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error, ?Object)} [callback]
* The function which will be called with the result of the API call.
*
* The second parameter to the callback is an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent}.
* @returns {Promise} - The promise which resolves to an array.
* The first element of the array is an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent}.
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var formattedParent = client.projectAgentPath('[PROJECT]');
* var intent = {};
* var request = {
* parent: formattedParent,
* intent: intent,
* };
* client.createIntent(request)
* .then(responses => {
* var response = responses[0];
* // doThingsWith(response)
* })
* .catch(err => {
* console.error(err);
* });
*/
createIntent(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.createIntent(request, options, callback);
}
/**
* Updates the specified intent.
*
* @param {Object} request
* The request object that will be sent.
* @param {Object} request.intent
* Required. The intent to update.
* Format: `projects/<Project ID>/agent/intents/<Intent ID>`.
*
* This object should have the same structure as [Intent]{@link google.cloud.dialogflow.v2.Intent}
* @param {string} request.languageCode
* Optional. The language of training phrases, parameters and rich messages
* defined in `intent`. If not specified, the agent's default language is
* used. [More than a dozen
* languages](https://dialogflow.com/docs/reference/language) are supported.
* Note: languages must be enabled in the agent, before they can be used.
* @param {Object} [request.updateMask]
* Optional. The mask to control which fields get updated.
*
* This object should have the same structure as [FieldMask]{@link google.protobuf.FieldMask}
* @param {number} [request.intentView]
* Optional. The resource view to apply to the returned intent.
*
* The number should be among the values of [IntentView]{@link google.cloud.dialogflow.v2.IntentView}
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error, ?Object)} [callback]
* The function which will be called with the result of the API call.
*
* The second parameter to the callback is an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent}.
* @returns {Promise} - The promise which resolves to an array.
* The first element of the array is an object representing [Intent]{@link google.cloud.dialogflow.v2.Intent}.
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var intent = {};
* var languageCode = '';
* var request = {
* intent: intent,
* languageCode: languageCode,
* };
* client.updateIntent(request)
* .then(responses => {
* var response = responses[0];
* // doThingsWith(response)
* })
* .catch(err => {
* console.error(err);
* });
*/
updateIntent(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.updateIntent(request, options, callback);
}
/**
* Deletes the specified intent.
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.name
* Required. The name of the intent to delete.
* Format: `projects/<Project ID>/agent/intents/<Intent ID>`.
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error)} [callback]
* The function which will be called with the result of the API call.
* @returns {Promise} - The promise which resolves when API call finishes.
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var formattedName = client.intentPath('[PROJECT]', '[INTENT]');
* client.deleteIntent({name: formattedName}).catch(err => {
* console.error(err);
* });
*/
deleteIntent(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.deleteIntent(request, options, callback);
}
/**
* Updates/Creates multiple intents in the specified agent.
*
* Operation <response: BatchUpdateIntentsResponse>
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
* Required. The name of the agent to update or create intents in.
* Format: `projects/<Project ID>/agent`.
* @param {string} request.languageCode
* Optional. The language of training phrases, parameters and rich messages
* defined in `intents`. If not specified, the agent's default language is
* used. [More than a dozen
* languages](https://dialogflow.com/docs/reference/language) are supported.
* Note: languages must be enabled in the agent, before they can be used.
* @param {string} [request.intentBatchUri]
* The URI to a Google Cloud Storage file containing intents to update or
* create. The file format can either be a serialized proto (of IntentBatch
* type) or JSON object. Note: The URI must start with "gs://".
* @param {Object} [request.intentBatchInline]
* The collection of intents to update or create.
*
* This object should have the same structure as [IntentBatch]{@link google.cloud.dialogflow.v2.IntentBatch}
* @param {Object} [request.updateMask]
* Optional. The mask to control which fields get updated.
*
* This object should have the same structure as [FieldMask]{@link google.protobuf.FieldMask}
* @param {number} [request.intentView]
* Optional. The resource view to apply to the returned intent.
*
* The number should be among the values of [IntentView]{@link google.cloud.dialogflow.v2.IntentView}
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error, ?Object)} [callback]
* The function which will be called with the result of the API call.
*
* The second parameter to the callback is a [gax.Operation]{@link https://googleapis.github.io/gax-nodejs/Operation} object.
* @returns {Promise} - The promise which resolves to an array.
* The first element of the array is a [gax.Operation]{@link https://googleapis.github.io/gax-nodejs/Operation} object.
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var formattedParent = client.projectAgentPath('[PROJECT]');
* var languageCode = '';
* var request = {
* parent: formattedParent,
* languageCode: languageCode,
* };
*
* // Handle the operation using the promise pattern.
* client.batchUpdateIntents(request)
* .then(responses => {
* var operation = responses[0];
* var initialApiResponse = responses[1];
*
* // Operation#promise starts polling for the completion of the LRO.
* return operation.promise();
* })
* .then(responses => {
* // The final result of the operation.
* var result = responses[0];
*
* // The metadata value of the completed operation.
* var metadata = responses[1];
*
* // The response of the api call returning the complete operation.
* var finalApiResponse = responses[2];
* })
* .catch(err => {
* console.error(err);
* });
*
* var formattedParent = client.projectAgentPath('[PROJECT]');
* var languageCode = '';
* var request = {
* parent: formattedParent,
* languageCode: languageCode,
* };
*
* // Handle the operation using the event emitter pattern.
* client.batchUpdateIntents(request)
* .then(responses => {
* var operation = responses[0];
* var initialApiResponse = responses[1];
*
* // Adding a listener for the "complete" event starts polling for the
* // completion of the operation.
* operation.on('complete', (result, metadata, finalApiResponse) => {
* // doSomethingWith(result);
* });
*
* // Adding a listener for the "progress" event causes the callback to be
* // called on any change in metadata when the operation is polled.
* operation.on('progress', (metadata, apiResponse) => {
* // doSomethingWith(metadata)
* });
*
* // Adding a listener for the "error" event handles any errors found during polling.
* operation.on('error', err => {
* // throw(err);
* });
* })
* .catch(err => {
* console.error(err);
* });
*/
batchUpdateIntents(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.batchUpdateIntents(request, options, callback);
}
/**
* Deletes intents in the specified agent.
*
* Operation <response: google.protobuf.Empty>
*
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
   * Required. The name of the agent to delete all intents for. Format:
* `projects/<Project ID>/agent`.
* @param {Object[]} request.intents
* Required. The collection of intents to delete. Only intent `name` must be
* filled in.
*
* This object should have the same structure as [Intent]{@link google.cloud.dialogflow.v2.Intent}
* @param {Object} [options]
* Optional parameters. You can override the default settings for this call, e.g, timeout,
* retries, paginations, etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the details.
* @param {function(?Error, ?Object)} [callback]
* The function which will be called with the result of the API call.
*
* The second parameter to the callback is a [gax.Operation]{@link https://googleapis.github.io/gax-nodejs/Operation} object.
* @returns {Promise} - The promise which resolves to an array.
* The first element of the array is a [gax.Operation]{@link https://googleapis.github.io/gax-nodejs/Operation} object.
* The promise has a method named "cancel" which cancels the ongoing API call.
*
* @example
*
* const dialogflow = require('dialogflow.v2');
*
* var client = new dialogflow.v2.IntentsClient({
* // optional auth parameters.
* });
*
* var formattedParent = client.projectAgentPath('[PROJECT]');
* var intents = [];
* var request = {
* parent: formattedParent,
* intents: intents,
* };
*
* // Handle the operation using the promise pattern.
* client.batchDeleteIntents(request)
* .then(responses => {
* var operation = responses[0];
* var initialApiResponse = responses[1];
*
* // Operation#promise starts polling for the completion of the LRO.
* return operation.promise();
* })
* .then(responses => {
* // The final result of the operation.
* var result = responses[0];
*
* // The metadata value of the completed operation.
* var metadata = responses[1];
*
* // The response of the api call returning the complete operation.
* var finalApiResponse = responses[2];
* })
* .catch(err => {
* console.error(err);
* });
*
* var formattedParent = client.projectAgentPath('[PROJECT]');
* var intents = [];
* var request = {
* parent: formattedParent,
* intents: intents,
* };
*
* // Handle the operation using the event emitter pattern.
* client.batchDeleteIntents(request)
* .then(responses => {
* var operation = responses[0];
* var initialApiResponse = responses[1];
*
* // Adding a listener for the "complete" event starts polling for the
* // completion of the operation.
* operation.on('complete', (result, metadata, finalApiResponse) => {
* // doSomethingWith(result);
* });
*
* // Adding a listener for the "progress" event causes the callback to be
* // called on any change in metadata when the operation is polled.
* operation.on('progress', (metadata, apiResponse) => {
* // doSomethingWith(metadata)
* });
*
* // Adding a listener for the "error" event handles any errors found during polling.
* operation.on('error', err => {
* // throw(err);
* });
* })
* .catch(err => {
* console.error(err);
* });
*/
batchDeleteIntents(request, options, callback) {
if (options instanceof Function && callback === undefined) {
callback = options;
options = {};
}
options = options || {};
return this._innerApiCalls.batchDeleteIntents(request, options, callback);
}
// --------------------
// -- Path templates --
// --------------------
/**
* Return a fully-qualified project_agent resource name string.
*
* @param {String} project
* @returns {String}
*/
projectAgentPath(project) {
return this._pathTemplates.projectAgentPathTemplate.render({
project: project,
});
}
/**
* Return a fully-qualified intent resource name string.
*
* @param {String} project
* @param {String} intent
* @returns {String}
*/
intentPath(project, intent) {
return this._pathTemplates.intentPathTemplate.render({
project: project,
intent: intent,
});
}
/**
* Return a fully-qualified agent resource name string.
*
* @param {String} project
* @param {String} agent
* @returns {String}
*/
agentPath(project, agent) {
return this._pathTemplates.agentPathTemplate.render({
project: project,
agent: agent,
});
}
/**
* Parse the projectAgentName from a project_agent resource.
*
* @param {String} projectAgentName
   *   A fully-qualified path representing a project_agent resource.
* @returns {String} - A string representing the project.
*/
matchProjectFromProjectAgentName(projectAgentName) {
return this._pathTemplates.projectAgentPathTemplate.match(projectAgentName)
.project;
}
/**
   * Parse the intentName from an intent resource.
   *
   * @param {String} intentName
   *   A fully-qualified path representing an intent resource.
* @returns {String} - A string representing the project.
*/
matchProjectFromIntentName(intentName) {
return this._pathTemplates.intentPathTemplate.match(intentName).project;
}
/**
   * Parse the intentName from an intent resource.
   *
   * @param {String} intentName
   *   A fully-qualified path representing an intent resource.
* @returns {String} - A string representing the intent.
*/
matchIntentFromIntentName(intentName) {
return this._pathTemplates.intentPathTemplate.match(intentName).intent;
}
/**
   * Parse the agentName from an agent resource.
   *
   * @param {String} agentName
   *   A fully-qualified path representing an agent resource.
* @returns {String} - A string representing the project.
*/
matchProjectFromAgentName(agentName) {
return this._pathTemplates.agentPathTemplate.match(agentName).project;
}
/**
   * Parse the agentName from an agent resource.
   *
   * @param {String} agentName
   *   A fully-qualified path representing an agent resource.
* @returns {String} - A string representing the agent.
*/
matchAgentFromAgentName(agentName) {
return this._pathTemplates.agentPathTemplate.match(agentName).agent;
}
}
module.exports = IntentsClient; | |
typeDefs.js | // import the gql tagged template function
const { gql } = require('apollo-server-express');
// create our typeDefs
const typeDefs = gql`
type Query {
user: User
}
type User {
_id: ID!
username: String
email: String
password: String
bookCount: Int
savedBooks: [Book]
}
type Mutation {
login(email: String!, password: String!): Auth
addUser(username: String!, email: String!, password: String!) : Auth
saveBook(body: saveBookInput): User
removeBook(bookId: String!): User
}
  input saveBookInput {
description: String!,
    title: String
bookId: String
image: String
link: String
authors: [String]
}
type Book {
_id: ID
authors: [String]
description: String | title: String
bookId: String
image: String
link: String
}
type Auth {
token: ID!
user: User
}
`;
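// An example operation against this schema (all values are illustrative only):
// mutation {
//   saveBook(body: { description: "A classic", title: "Dune", bookId: "abc123", authors: ["Frank Herbert"] }) {
//     username
//     bookCount
//   }
// }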
// export the typeDefs
module.exports = typeDefs; | |
app.js | // S E C T I O N 3 : ES Modules
//Example 1
import {
message
} from './message.js'
const h1 = document.createElement('h1');
h1.textContent = message |
//Example 2
import {
message,
setMessage
} from './greeting.js';
console.log(message); // 'Hi'
setMessage('Hello');
console.log(message); // 'Hello'
//Example 3
import {
foo
} from './foo.js';
console.log(foo); // 10; |
document.body.appendChild(h1) |
http.go | package main
import (
"bytes"
"embed"
"html/template"
"image"
"image/png"
"io/fs"
"log"
"net/http"
"os"
"strconv"
api "github.com/StevenWeathers/thunderdome-planning-poker/api"
"github.com/anthonynsimon/bild/transform"
"github.com/gorilla/mux"
"github.com/ipsn/go-adorable"
"github.com/o1egl/govatar"
"github.com/spf13/viper"
)
//go:embed dist
var f embed.FS
func | (useOS bool) (http.FileSystem, fs.FS) {
if useOS {
log.Print("using live mode")
return http.FS(os.DirFS("dist")), fs.FS(os.DirFS("dist"))
}
fsys, err := fs.Sub(f, "dist")
if err != nil {
panic(err)
}
return http.FS(fsys), fs.FS(fsys)
}
func (s *server) routes() {
HFS, FSS := getFileSystem(embedUseOS)
staticHandler := http.FileServer(HFS)
// api (used by the webapp but can be enabled for external use)
apiConfig := &api.Config{
AppDomain: s.config.AppDomain,
FrontendCookieName: s.config.FrontendCookieName,
SecureCookieName: viper.GetString("http.backend_cookie_name"),
SecureCookieFlag: viper.GetBool("http.secure_cookie"),
SessionCookieName: viper.GetString("http.session_cookie_name"),
PathPrefix: s.config.PathPrefix,
ExternalAPIEnabled: s.config.ExternalAPIEnabled,
UserAPIKeyLimit: s.config.UserAPIKeyLimit,
LdapEnabled: s.config.LdapEnabled,
FeaturePoker: viper.GetBool("feature.poker"),
FeatureRetro: viper.GetBool("feature.retro"),
FeatureStoryboard: viper.GetBool("feature.storyboard"),
}
api.Init(apiConfig, s.router, s.db, s.email, s.cookie)
// static assets
s.router.PathPrefix("/static/").Handler(http.StripPrefix(s.config.PathPrefix, staticHandler))
s.router.PathPrefix("/img/").Handler(http.StripPrefix(s.config.PathPrefix, staticHandler))
s.router.PathPrefix("/lang/").Handler(http.StripPrefix(s.config.PathPrefix, staticHandler))
// user avatar generation
if s.config.AvatarService == "goadorable" || s.config.AvatarService == "govatar" {
s.router.PathPrefix("/avatar/{width}/{id}/{avatar}").Handler(s.handleUserAvatar()).Methods("GET")
s.router.PathPrefix("/avatar/{width}/{id}").Handler(s.handleUserAvatar()).Methods("GET")
}
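	// e.g. GET /avatar/48/<user id> (or /avatar/48/<user id>/female with the govatar
	// service) returns a 48x48 PNG generated from the user ID; the width and id shown
	// here are placeholders, not values used by this project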
// handle index.html
s.router.PathPrefix("/").HandlerFunc(s.handleIndex(FSS))
}
// get the index template from embedded filesystem
func (s *server) getIndexTemplate(FSS fs.FS) *template.Template {
// get the html template from dist, have it ready for requests
tmplContent, ioErr := fs.ReadFile(FSS, "static/index.html")
if ioErr != nil {
log.Println("Error opening index template")
if !embedUseOS {
log.Fatal(ioErr)
}
}
tmplString := string(tmplContent)
tmpl, tmplErr := template.New("index").Parse(tmplString)
if tmplErr != nil {
log.Println("Error parsing index template")
if !embedUseOS {
log.Fatal(tmplErr)
}
}
return tmpl
}
/*
Handlers
*/
// handleIndex parses the index html file, injecting any relevant data
func (s *server) handleIndex(FSS fs.FS) http.HandlerFunc {
type AppConfig struct {
AllowedPointValues []string
DefaultPointValues []string
ShowWarriorRank bool
AvatarService string
ToastTimeout int
AllowGuests bool
AllowRegistration bool
AllowJiraImport bool
DefaultLocale string
FriendlyUIVerbs bool
AppVersion string
CookieName string
PathPrefix string
ExternalAPIEnabled bool
UserAPIKeyLimit int
CleanupGuestsDaysOld int
CleanupBattlesDaysOld int
CleanupRetrosDaysOld int
ShowActiveCountries bool
LdapEnabled bool
FeaturePoker bool
FeatureRetro bool
FeatureStoryboard bool
}
type UIConfig struct {
AnalyticsEnabled bool
AnalyticsID string
AppConfig AppConfig
ActiveAlerts []interface{}
}
tmpl := s.getIndexTemplate(FSS)
appConfig := AppConfig{
AllowedPointValues: viper.GetStringSlice("config.allowedPointValues"),
DefaultPointValues: viper.GetStringSlice("config.defaultPointValues"),
ShowWarriorRank: viper.GetBool("config.show_warrior_rank"),
AvatarService: viper.GetString("config.avatar_service"),
ToastTimeout: viper.GetInt("config.toast_timeout"),
AllowGuests: viper.GetBool("config.allow_guests"),
AllowRegistration: viper.GetBool("config.allow_registration") && viper.GetString("auth.method") == "normal",
AllowJiraImport: viper.GetBool("config.allow_jira_import"),
DefaultLocale: viper.GetString("config.default_locale"),
FriendlyUIVerbs: viper.GetBool("config.friendly_ui_verbs"),
ExternalAPIEnabled: s.config.ExternalAPIEnabled,
UserAPIKeyLimit: s.config.UserAPIKeyLimit,
AppVersion: s.config.Version,
CookieName: s.config.FrontendCookieName,
PathPrefix: s.config.PathPrefix,
CleanupGuestsDaysOld: viper.GetInt("config.cleanup_guests_days_old"),
CleanupBattlesDaysOld: viper.GetInt("config.cleanup_battles_days_old"),
CleanupRetrosDaysOld: viper.GetInt("config.cleanup_retros_days_old"),
ShowActiveCountries: viper.GetBool("config.show_active_countries"),
LdapEnabled: s.config.LdapEnabled,
FeaturePoker: viper.GetBool("feature.poker"),
FeatureRetro: viper.GetBool("feature.retro"),
FeatureStoryboard: viper.GetBool("feature.storyboard"),
}
data := UIConfig{
AnalyticsEnabled: s.config.AnalyticsEnabled,
AnalyticsID: s.config.AnalyticsID,
AppConfig: appConfig,
}
api.ActiveAlerts = s.db.GetActiveAlerts() // prime the active alerts cache
return func(w http.ResponseWriter, r *http.Request) {
data.ActiveAlerts = api.ActiveAlerts // get latest alerts from memory
if embedUseOS {
tmpl = s.getIndexTemplate(FSS)
}
tmpl.Execute(w, data)
}
}
// handleUserAvatar creates an avatar for the given user by ID
func (s *server) handleUserAvatar() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
Width, _ := strconv.Atoi(vars["width"])
UserID := vars["id"]
AvatarGender := govatar.MALE
userGender, ok := vars["avatar"]
if ok {
if userGender == "female" {
AvatarGender = govatar.FEMALE
}
}
var avatar image.Image
if s.config.AvatarService == "govatar" {
avatar, _ = govatar.GenerateForUsername(AvatarGender, UserID)
} else { // must be goadorable
var err error
avatar, _, err = image.Decode(bytes.NewReader(adorable.PseudoRandom([]byte(UserID))))
if err != nil {
log.Fatalln(err)
}
}
img := transform.Resize(avatar, Width, Width, transform.Linear)
buffer := new(bytes.Buffer)
if err := png.Encode(buffer, img); err != nil {
log.Println("unable to encode image.")
w.WriteHeader(http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "image/png")
w.Header().Set("Content-Length", strconv.Itoa(len(buffer.Bytes())))
if _, err := w.Write(buffer.Bytes()); err != nil {
log.Println("unable to write image.")
w.WriteHeader(http.StatusInternalServerError)
return
}
}
}
| getFileSystem |
python3.go | package python3
import (
"net/http"
"time"
"github.com/yookoala/gofast"
)
// NewHandler returns a fastcgi web server implementation as an http.Handler
// Please note that this handler doesn't handle the fastcgi application process.
// You'd need to start it with other means.
//
// entrypoint: the full path to the application entrypoint file (e.g. webapp.py)
//             or equivalent path for the fastcgi application to identify itself.
// network: network protocol (tcp / tcp4 / tcp6)
// or if it is a unix socket, "unix"
// address: IP address and port, or the socket physical address of the fastcgi
// application.
func NewHandler(entrypoint, network, address string) http.Handler | {
connFactory := gofast.SimpleConnFactory(network, address)
pool := gofast.NewClientPool(
gofast.SimpleClientFactory(connFactory),
10,
60*time.Second,
)
h := gofast.NewHandler(
gofast.NewFileEndpoint(entrypoint)(gofast.BasicSession),
pool.CreateClient,
)
return h
} |
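// Minimal usage sketch (an assumption on my part, not part of this file): mount
// the returned handler on a plain net/http server and point it at a FastCGI
// process already listening on the given address.
//
//   h := NewHandler("/var/www/webapp.py", "tcp", "127.0.0.1:9000")
//   http.Handle("/", h)
//   log.Fatal(http.ListenAndServe(":8080", nil))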
|
session_cols_test.go | // Copyright 2017 The Xorm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xorm
import (
"testing"
| "github.com/stretchr/testify/assert"
)
func TestSetExpr(t *testing.T) {
assert.NoError(t, prepareEngine())
type User struct {
Id int64
Show bool
}
assert.NoError(t, testEngine.Sync2(new(User)))
cnt, err := testEngine.Insert(&User{
Show: true,
})
assert.NoError(t, err)
assert.EqualValues(t, 1, cnt)
var not = "NOT"
if testEngine.dialect.DBType() == core.MSSQL {
not = "~"
}
cnt, err = testEngine.SetExpr("show", not+" `show`").Id(1).Update(new(User))
assert.NoError(t, err)
assert.EqualValues(t, 1, cnt)
}
func TestCols(t *testing.T) {
assert.NoError(t, prepareEngine())
type ColsTable struct {
Id int64
Col1 string
Col2 string
}
assertSync(t, new(ColsTable))
_, err := testEngine.Insert(&ColsTable{
Col1: "1",
Col2: "2",
})
assert.NoError(t, err)
sess := testEngine.ID(1)
_, err = sess.Cols("col1").Cols("col2").Update(&ColsTable{
Col1: "",
Col2: "",
})
assert.NoError(t, err)
var tb ColsTable
has, err := testEngine.ID(1).Get(&tb)
assert.NoError(t, err)
assert.True(t, has)
assert.EqualValues(t, "", tb.Col1)
assert.EqualValues(t, "", tb.Col2)
} | "github.com/go-xorm/core" |
events_txframestart.rs | #[doc = "Reader of register EVENTS_TXFRAMESTART"]
pub type R = crate::R<u32, super::EVENTS_TXFRAMESTART>;
#[doc = "Writer for register EVENTS_TXFRAMESTART"]
pub type W = crate::W<u32, super::EVENTS_TXFRAMESTART>;
#[doc = "Register EVENTS_TXFRAMESTART `reset()`'s with value 0"]
impl crate::ResetValue for super::EVENTS_TXFRAMESTART {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Marks the start of the first symbol of a transmitted frame\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EVENTS_TXFRAMESTART_A {
#[doc = "0: Event not generated"]
NOTGENERATED,
#[doc = "1: Event generated"]
GENERATED,
}
impl From<EVENTS_TXFRAMESTART_A> for bool {
#[inline(always)]
fn from(variant: EVENTS_TXFRAMESTART_A) -> Self {
match variant {
EVENTS_TXFRAMESTART_A::NOTGENERATED => false,
EVENTS_TXFRAMESTART_A::GENERATED => true,
}
}
}
#[doc = "Reader of field `EVENTS_TXFRAMESTART`"]
pub type EVENTS_TXFRAMESTART_R = crate::R<bool, EVENTS_TXFRAMESTART_A>;
impl EVENTS_TXFRAMESTART_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> EVENTS_TXFRAMESTART_A {
match self.bits {
false => EVENTS_TXFRAMESTART_A::NOTGENERATED,
true => EVENTS_TXFRAMESTART_A::GENERATED,
}
}
#[doc = "Checks if the value of the field is `NOTGENERATED`"]
#[inline(always)]
pub fn is_not_generated(&self) -> bool {
*self == EVENTS_TXFRAMESTART_A::NOTGENERATED
}
#[doc = "Checks if the value of the field is `GENERATED`"]
#[inline(always)]
pub fn is_generated(&self) -> bool {
*self == EVENTS_TXFRAMESTART_A::GENERATED
}
}
#[doc = "Write proxy for field `EVENTS_TXFRAMESTART`"]
pub struct EVENTS_TXFRAMESTART_W<'a> {
w: &'a mut W,
}
impl<'a> EVENTS_TXFRAMESTART_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: EVENTS_TXFRAMESTART_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Event not generated"]
#[inline(always)]
pub fn not_generated(self) -> &'a mut W {
self.variant(EVENTS_TXFRAMESTART_A::NOTGENERATED)
}
#[doc = "Event generated"]
#[inline(always)]
pub fn generated(self) -> &'a mut W {
self.variant(EVENTS_TXFRAMESTART_A::GENERATED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
impl R {
#[doc = "Bit 0 - Marks the start of the first symbol of a transmitted frame"]
#[inline(always)]
pub fn events_txframestart(&self) -> EVENTS_TXFRAMESTART_R |
}
impl W {
#[doc = "Bit 0 - Marks the start of the first symbol of a transmitted frame"]
#[inline(always)]
pub fn events_txframestart(&mut self) -> EVENTS_TXFRAMESTART_W {
EVENTS_TXFRAMESTART_W { w: self }
}
}
| {
EVENTS_TXFRAMESTART_R::new((self.bits & 0x01) != 0)
} |
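// Hypothetical usage sketch (assumes a peripheral handle generated by the same
// PAC, e.g. an nRF radio instance; names outside this file are guesses):
//
//   if radio.events_txframestart.read().events_txframestart().is_generated() {
//       // acknowledge the event by clearing the bit
//       radio.events_txframestart.write(|w| w.events_txframestart().clear_bit());
//   }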
activityIcon.tsx | /* THIS FILE IS AUTO-GENERATED – DO NOT EDIT */
import React, {forwardRef} from 'react'
/**
* @public
*/
export const ActivityIcon = forwardRef(function ActivityIcon(
props: React.SVGProps<SVGSVGElement>,
ref: React.Ref<SVGSVGElement>
) {
return (
<svg
data-sanity-icon="activity"
width="1em"
height="1em" | fill="none"
xmlns="http://www.w3.org/2000/svg"
ref={ref}
{...props}
>
<path d="M21 15H19L15.5 7L11 18L8 12L6 15H4" stroke="currentColor" strokeWidth={1.2} />
</svg>
)
}) | viewBox="0 0 25 25" |
sharp-settings-input-composite.js | var data = {
"body": "<path d=\"M5 2c0-.55-.45-1-1-1s-1 .45-1 1v4H1v6h6V6H5V2zm4 16.82h2V23h2v-4.18h2V14H9v4.82zm-8 0h2V23h2v-4.18h2V14H1v4.82zM21 6V2c0-.55-.45-1-1-1s-1 .45-1 1v4h-2v6h6V6h-2zm-8-4c0-.55-.45-1-1-1s-1 .45-1 1v4H9v6h6V6h-2V2zm4 16.82h2V23h2v-4.18h2V14h-6v4.82z\" fill=\"currentColor\"/>",
"width": 24,
"height": 24
}; | exports.__esModule = true;
exports.default = data; |
|
admin.rs | use goose::prelude::*;
use crate::common;
use rand::seq::SliceRandom;
use std::env;
/// Log into the website.
pub async fn log_in(user: &GooseUser) -> GooseTaskResult {
// Use ADMIN_USERNAME= to set custom admin username.
let admin_username = match env::var("ADMIN_USERNAME") {
Ok(username) => username,
Err(_) => "admin".to_string(),
};
// Use ADMIN_PASSWORD= to set custom admin password.
let admin_password = match env::var("ADMIN_PASSWORD") {
Ok(password) => password,
Err(_) => "P@ssw0rd1234".to_string(),
};
// Load the log in page.
let mut goose = user.get("/en/user/login").await?;
// We can't just invoke common::validate_and_load_static_assets here: while it's
// important to validate the page and load static elements, we also need to extract
// form elements from the HTML of the page. So we duplicate some of that logic,
// enhancing it for form processing.
let mut logged_in_user;
match goose.response {
Ok(response) => {
// Copy the headers so we have them for logging if there are errors.
let headers = &response.headers().clone();
match response.text().await {
Ok(html) => {
// Be sure we've properly loaded the log in page.
let title = "Log in";
if !common::valid_title(&html, title) {
return user.set_failure(
&format!("{}: title not found: {}", &goose.request.url, title),
&mut goose.request,
Some(&headers),
Some(&html),
);
}
// Load all static elements on the page, as a real user would.
common::load_static_elements(user, &html).await;
// Scrape the HTML to get the values needed in order to POST to the
// log in form.
let form_build_id = common::get_form_value(&html, "form_build_id");
if form_build_id.is_none() {
return user.set_failure(
&format!("{}: no form_build_id on page", goose.request.url),
&mut goose.request,
Some(&headers),
Some(&html),
);
}
// Build log in form with username and password from environment.
let params = [
("name", &admin_username),
("pass", &admin_password),
("form_build_id", &form_build_id.unwrap()),
("form_id", &"user_login_form".to_string()),
("op", &"Log+in".to_string()),
];
let request_builder = user.goose_post("/en/user/login").await?;
logged_in_user = user.goose_send(request_builder.form(¶ms), None).await?;
// A successful log in is redirected.
if !logged_in_user.request.redirected {
return user.set_failure(
&format!(
"{}: login failed (check ADMIN_USERNAME and ADMIN_PASSWORD)",
logged_in_user.request.final_url
),
&mut logged_in_user.request,
Some(&headers),
None,
);
}
}
Err(e) => {
return user.set_failure(
&format!("{}: failed to parse page: {}", goose.request.url, e),
&mut goose.request,
Some(&headers),
None,
);
}
}
}
Err(e) => {
return user.set_failure(
&format!("{}: no response from server: {}", goose.request.url, e),
&mut goose.request,
None,
None,
);
}
}
// Check the title to verify that the user is logged in.
common::validate_and_load_static_assets(user, logged_in_user, &admin_username).await?;
Ok(())
}
/// Load and edit a random article.
pub async fn edit_article(user: &GooseUser) -> GooseTaskResult {
// First, load a random article.
let nodes = common::get_nodes(&common::ContentType::Article);
let article = nodes.choose(&mut rand::thread_rng());
let goose = user.get(article.unwrap().url_en).await?;
common::validate_and_load_static_assets(user, goose, article.unwrap().title_en).await?;
// Next, load the edit link for the chosen article.
let mut goose = user
.get(&format!("/en/node/{}/edit", article.unwrap().nid))
.await?;
let mut saved_article;
match goose.response {
Ok(response) => {
// Copy the headers so we have them for logging if there are errors.
let headers = &response.headers().clone();
match response.text().await {
Ok(html) => {
// Be sure we've properly loaded the edit page.
let title = "Edit Article";
if !common::valid_title(&html, title) {
return user.set_failure(
&format!("{}: title not found: {}", &goose.request.url, title),
&mut goose.request,
Some(&headers),
Some(&html),
);
}
// Load all static elements on the page, as a real user would.
common::load_static_elements(user, &html).await;
// Scrape the HTML to get the values needed in order to POST to the
// article edit form.
let form_build_id = common::get_form_value(&html, "form_build_id");
if form_build_id.is_none() {
return user.set_failure(
&format!("{}: no form_build_id on page", goose.request.url),
&mut goose.request,
Some(&headers),
Some(&html),
);
}
let form_token = common::get_form_value(&html, "form_token");
if form_token.is_none() {
return user.set_failure(
&format!("{}: no form_token on page", goose.request.url),
&mut goose.request,
Some(&headers),
Some(&html),
);
}
// Build node form with random word from title.
let params = [
("form_build_id", &form_build_id.unwrap()),
("form_token", &form_token.unwrap()),
("form_id", &"node_article_edit_form".to_string()),
("op", &"Save (this translation)".to_string()),
];
let request_builder = user
.goose_post(&format!("/en/node/{}/edit", article.unwrap().nid))
.await?;
saved_article = user.goose_send(request_builder.form(¶ms), None).await?;
// A successful node save is redirected.
if !saved_article.request.redirected {
return user.set_failure(
&format!("{}: saving article failed", saved_article.request.final_url),
&mut saved_article.request,
Some(&headers),
None,
);
}
}
Err(e) => {
return user.set_failure(
&format!("{}: failed to parse page: {}", goose.request.url, e),
&mut goose.request,
Some(&headers),
None,
);
}
}
}
Err(e) => {
return user.set_failure(
&format!("{}: no response from server: {}", goose.request.url, e),
&mut goose.request, | None,
None,
);
}
}
// Be sure we're viewing the same article after editing it.
common::validate_and_load_static_assets(user, saved_article, article.unwrap().title_en).await?;
Ok(())
} | |
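// Sketch of how these tasks are typically registered (an assumption — the real
// wiring lives in the example's main.rs, not in this module):
//
//   GooseAttack::initialize()?
//       .register_taskset(
//           taskset!("Admin user")
//               .register_task(task!(log_in).set_on_start())
//               .register_task(task!(edit_article)),
//       )
//       .execute()?;
//
// Credentials come from the environment, falling back to the defaults above:
//   ADMIN_USERNAME=admin ADMIN_PASSWORD=P@ssw0rd1234 cargo run --release -- --host https://example.com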
index.tsx | import React, { useEffect, useCallback } from 'react'
// Redux connection
import { connect } from 'react-redux'
// Actions
import actions from 'store/actions/game'
import global from 'store/actions/global'
import TileActions from 'store/actions/tableTile'
// Interfaces
import StoreState from 'interfaces/store-state'
// Components
import Controls from 'components/UI/Controls'
// CSS styles
import './styles.css'
function Game(props: any) {
const {
type,
history,
freeTiles,
onGameWon,
gameState,
difficulty,
onGameTied,
placedTiles,
currentTile,
onPlaceTile,
playerPlayed,
onChangeTurn,
onRestartGame,
currentPlayer,
onReturnToMenu,
onPlayerPlayed
} = props
function gotoMenu() {
onReturnToMenu()
history.push('/')
}
const gameOptionsConfig = [
{ name: 'Restart Game', onclick: onRestartGame },
{ name: 'Return to Menu', onclick: gotoMenu }
]
useEffect(() => {
if (gameState === 'results') history.push('/game/results')
else if (gameState !== 'playing') gotoMenu()
}, [gameState]) // eslint-disable-line
function verifyTurnShift() {
if (currentPlayer !== 'computer') {
if (playerPlayed) {
onPlayerPlayed(false)
onChangeTurn()
}
}
else onChangeTurn()
}
// Game logic
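  // Board cells are indexed 0..8 in row-major order:
  //   0 1 2
  //   3 4 5
  //   6 7 8
  // verifyRowCol(1, 9, 3) walks the three rows, verifyRowCol(3, 3, 1) the three
  // columns, and the two diagonals are the index sets [0, 4, 8] and [2, 4, 6].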
useEffect(() => {
if (placedTiles.length >= 5) {
let win = false
function getFilled(checkTiles: number[]) {
return placedTiles.filter(
(tile: { id: number, tile: string }) => checkTiles.includes(tile.id)
)
}
function verifyRowCol(checkTileInc: number, loopLimit: number, loopInc: number) {
for (let i = 0; i < loopLimit; i += loopInc) {
const filled = getFilled([i, i + checkTileInc, i + (2 * checkTileInc)])
if (filled.length === 3) {
const firstTile = filled[0].tile
let equal = 0
filled.forEach((tile: { tile: string }) => {
if (tile.tile === firstTile) equal++
})
if (equal === 3) {
win = true
break
}
}
}
}
function verifyDiagonal(diagonalTiles: number[]) {
const filled = getFilled(diagonalTiles)
if (filled.length === 3) {
const firstTile = filled[0].tile
let equal = 0
filled.forEach((tile: { tile: string }) => {
if (tile.tile === firstTile) equal++
})
if (equal === 3) win = true
}
}
//Rows
verifyRowCol(1, 9, 3)
// Columns
if (!win) verifyRowCol(3, 3, 1)
// First diagonal
if (!win) verifyDiagonal([0, 4, 8])
// Second diagonal
if (!win) verifyDiagonal([2, 4, 6])
if (win) onGameWon()
else if (placedTiles.length === 9) onGameTied()
else verifyTurnShift()
} else verifyTurnShift()
}, [placedTiles]) // eslint-disable-line
const verifySpecifiedWin = useCallback((player: string) => {
const otherTile = currentTile === 'X' ? 'O' : 'X'
const thisTile = player === currentPlayer ? currentTile : otherTile
let played = false
function filterFilled(tilesId: number[]) {
let freetile = [...tilesId]
return [placedTiles.filter(
(tile: { id: number, tile: string }) => {
freetile = freetile.filter(t => t !== tile.id)
return tilesId.includes(tile.id)
}
), freetile]
}
function verifyPlay(checkTileInc: number, loopLimit: number, loopInc: number) {
for (let i = 0; i < loopLimit; i += loopInc) {
const [filled, freetile] = filterFilled([i, i + checkTileInc, i + (2 * checkTileInc)])
if (filled.length === 2) {
let equal = 0
filled.forEach((tile: { tile: string }) => {
if (tile.tile === thisTile) equal++
})
if (equal === 2) {
if (Math.floor(Math.random() * 100) <= (+difficulty * 100)) {
const tileIndex = freetile[0]
onPlaceTile({ id: tileIndex, tile: currentTile })
played = true
break
}
}
}
}
}
function | (diagTiles: number[]) {
const [filled, freetile] = filterFilled(diagTiles)
if (filled.length === 2) {
let equal = 0
filled.forEach((tile: { tile: string }) => {
if (tile.tile === thisTile) equal++
})
if (equal === 2) {
if (Math.floor(Math.random() * 100) <= (+difficulty * 100)) {
const tileIndex = freetile[0]
onPlaceTile({ id: tileIndex, tile: currentTile })
played = true
}
}
}
}
//Rows
verifyPlay(1, 9, 3)
// Columns
if (!played) verifyPlay(3, 3, 1)
// First diagonal
if (!played) verifyDiagonal([0, 4, 8])
// Second diagonal
if (!played) verifyDiagonal([2, 4, 6])
return played
}, [placedTiles, currentTile]) // eslint-disable-line
function playRandomly() {
const randomTileIndex = freeTiles[Math.floor(Math.random() * freeTiles.length)]
onPlaceTile({ id: randomTileIndex, tile: currentTile })
}
// Computer logic
useEffect(() => {
if (type === 'pve' && currentPlayer === 'computer') {
if (placedTiles.length >= 3) {
let played = verifySpecifiedWin('computer')
if (!played) {
played = verifySpecifiedWin('player')
if (!played) playRandomly()
}
} else playRandomly()
}
}, [currentPlayer])// eslint-disable-line
let playerTurn = 'Your'
switch (props.currentPlayer) {
case 'player1':
{
playerTurn = 'Player 1'
break
}
case 'player2':
{
playerTurn = 'Player 2'
break
}
case 'computer':
{
playerTurn = 'Computer'
break
}
default: playerTurn = 'Your'
}
return (
<div className="Game">
<div className="GameInfo">
<p>{playerTurn} Turn - {props.currentTile}</p>
</div>
<div className="GameOptions">
<Controls
numberOfControls={gameOptionsConfig.length}
controlConfig={gameOptionsConfig}
/>
</div>
</div>
)
}
function mapStateToProps(state: StoreState) {
return {
gameState: state.gameState,
currentPlayer: state.currentPlayer,
currentTile: state.currentTile,
placedTiles: state.placedTiles,
freeTiles: state.freeTiles,
type: state.type,
difficulty: state.difficulty,
playerPlayed: state.playerPlayed
}
}
function mapDispatchToProps(dispatch: any) {
return {
onReturnToMenu: () => dispatch(global.onReturnToMenu()),
onRestartGame: () => dispatch(global.onRestart()),
onPlayerPlayed: (played: boolean) => dispatch(global.onChangePlayerPlayed(played)),
onChangeTurn: () => dispatch(actions.onChangeTurn()),
onGameWon: () => dispatch(actions.onGameWon()),
onGameTied: () => dispatch(actions.onGameTied()),
onPlaceTile: (tileId: number) => dispatch(TileActions.tileClick(tileId)),
}
}
export default connect(mapStateToProps, mapDispatchToProps)(Game) | verifyDiagonal |
request.go | package authCodeFlow
import (
"context"
"fmt"
"log"
"net/http"
"github.com/NathanBeddoeWebDev/xoauth/pkg/db"
"github.com/NathanBeddoeWebDev/xoauth/pkg/interop"
"github.com/NathanBeddoeWebDev/xoauth/pkg/oidc"
"github.com/gookit/color"
)
type CodeFlowInteractor struct {
wellKnownConfig oidc.WellKnownConfiguration
database *db.CredentialStore
operatingSystem string
}
func | (wellKnownConfig oidc.WellKnownConfiguration, database *db.CredentialStore, operatingSystem string) CodeFlowInteractor {
return CodeFlowInteractor{
wellKnownConfig: wellKnownConfig,
database: database,
operatingSystem: operatingSystem,
}
}
func (interactor *CodeFlowInteractor) Request(client db.OidcClient, dryRun bool, localHostPort int) {
interactor.initRequest(client, "", "", dryRun, localHostPort)
}
func (interactor *CodeFlowInteractor) RequestWithProofOfKeyExchange(client db.OidcClient, dryRun bool, localHostPort int) {
var verifierSet, verifierErr = oidc.GenerateCodeVerifier()
if verifierErr != nil {
log.Fatalln(verifierErr)
}
interactor.initRequest(client, verifierSet.CodeVerifier, verifierSet.CodeChallenge, dryRun, localHostPort)
}
func (interactor *CodeFlowInteractor) initRequest(client db.OidcClient, codeVerifier string, codeChallenge string, dryRun bool, localHostPort int) {
redirectUri := fmt.Sprintf("http://localhost:%d/callback", localHostPort)
state, stateErr := oidc.GenerateRandomStringURLSafe(24)
if stateErr != nil {
panic("failed to generate random state. Check that your OS has a crypto implementation available")
}
authorisationUrl := oidc.BuildCodeAuthorisationRequest(
interactor.wellKnownConfig,
client.ClientId,
redirectUri,
client.Scopes,
state,
codeChallenge,
)
if dryRun {
log.Printf("%s\n%s\n",
color.FgWhite.Sprint("Dry run, printing the authorisation request URL"),
color.FgYellow.Sprint(authorisationUrl))
return
}
m := http.NewServeMux()
s := http.Server{Addr: fmt.Sprintf(":%d", localHostPort), Handler: m}
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
// Open a web server to receive the redirect
m.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
interactor.handleOidcCallback(w, r,
client.Alias,
client.ClientId,
client.ClientSecret,
redirectUri,
state,
codeVerifier,
cancel,
)
})
log.Printf("%s", color.FgYellow.Sprintf("Opening browser window"))
openErr := interop.OpenBrowser(interactor.operatingSystem, authorisationUrl)
if openErr != nil {
log.Fatalf("failed to open browser window %v", openErr)
}
go func() {
if err := s.ListenAndServe(); err != nil && err != http.ErrServerClosed {
log.Fatal(err)
}
}()
select {
case <-ctx.Done():
// Shutdown the server when the context is canceled
err := s.Shutdown(ctx)
if err != nil && err != context.Canceled {
log.Fatalln(err)
} else {
log.Println("")
}
}
}
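// Usage sketch (hypothetical; the constructor above is wired up by the CLI layer
// of xoauth, and the client/port values here are placeholders):
//
//   interactor := NewCodeFlowInteractor(wellKnown, &credStore, runtime.GOOS)
//   interactor.RequestWithProofOfKeyExchange(client, false, 8080)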
| NewCodeFlowInteractor |
freebsd.rs | // vim: tw=80
use std::error::Error;
use super::Snapshot;
use sysctl::{Ctl, CtlIter, Sysctl, SysctlError, CtlValue};
pub(super) struct SnapshotIter {
ctl_iter: CtlIter,
objset_name: Option<String>,
dataset_name: Option<String>,
nunlinked: Option<u64>,
nunlinks: Option<u64>,
nread: Option<u64>,
reads: Option<u64>,
nwritten: Option<u64>,
writes: Option<u64>,
}
impl SnapshotIter {
pub(crate) fn new(pool: Option<&str>) -> Result<Self, Box<dyn Error>> {
let root = if let Some(s) = pool {
Ctl::new(&format!("kstat.zfs.{}.dataset", s.replace(".", "%25")))
.unwrap_or_else(|_e| {
eprintln!("Statistics not found for pool {}", s);
std::process::exit(1);
})
} else {
Ctl::new("kstat.zfs").unwrap()
};
let ctl_iter = CtlIter::below(root);
Ok(SnapshotIter{
ctl_iter,
objset_name: None,
dataset_name: None,
nunlinked: None,
nunlinks: None,
nread: None,
reads: None,
nwritten: None,
writes: None,
})
}
/// Progressively build the next Snapshot
///
/// # Returns
///
/// If all of the sysctls relevant to the snapshot have been received,
/// returns `Some(snapshot)` and prepares `self` to build the next Snapshot.
fn build(&mut self, name: String, value: CtlValue) -> Option<Snapshot> {
let mut fields = name.split('.');
let on = fields.nth(4).unwrap();
if let Some(son) = &self.objset_name {
assert_eq!(son, on);
} else {
self.objset_name = Some(on.to_owned());
}
let field = fields.next().unwrap();
match value {
CtlValue::String(s) => {
if field != "dataset_name" {
eprintln!("Unknown sysctl {:?}", name);
}
assert_eq!(self.dataset_name.replace(s), None);
},
CtlValue::U64(x) => | ,
_ => eprintln!("Unknown sysctl {:?}", name),
};
if self.dataset_name.is_some() &&
self.nunlinked.is_some() &&
self.nunlinks.is_some() &&
self.nread.is_some() &&
self.reads.is_some() &&
self.nwritten.is_some() &&
self.writes.is_some()
{
self.objset_name = None;
Some(Snapshot {
name: self.dataset_name.take().unwrap(),
nunlinked: self.nunlinked.take().unwrap(),
nunlinks: self.nunlinks.take().unwrap(),
nread: self.nread.take().unwrap(),
reads: self.reads.take().unwrap(),
nwritten: self.nwritten.take().unwrap(),
writes: self.writes.take().unwrap(),
})
} else {
None
}
}
/// Return the next Ctl that ztop cares about
fn next_ztop(&mut self) -> Option<Result<(Ctl, String), SysctlError>> {
loop {
match self.ctl_iter.next() {
Some(Ok(ctl)) => {
match ctl.name() {
Ok(name) => {
if name.splitn(4, '.')
.last()
.map(|l| l.starts_with("dataset"))
.unwrap_or(false)
{
break Some(Ok((ctl, name)));
} else {
continue;
}
}
Err(e) => {return Some(Err(e));}
}
}
Some(Err(e)) => {return Some(Err(e));}
None => {return None;}
}
}
}
}
impl Iterator for SnapshotIter {
type Item=Result<Snapshot, Box<SysctlError>>;
fn next(&mut self) -> Option<Self::Item> {
// We need to read several values from the internal iterator to assemble
// a Snapshot. AFAIK they will always be returned in the same order on
// every system. If not this code will grow more complicated.
loop {
match self.next_ztop() {
Some(Ok((ctl, name))) => {
match ctl.value() {
Ok(value) => {
if let Some(snapshot) = self.build(name, value) {
break Some(Ok(snapshot));
}
// else continue
}
Err(e) => {break Some(Err(Box::new(e)))}
}
}
Some(Err(e)) => {break Some(Err(Box::new(e)))},
None => {break None}
}
}
}
}
| {
match field {
"nunlinked" => {self.nunlinked = Some(x);}
"nunlinks" => {self.nunlinks = Some(x);}
"nread" => {self.nread = Some(x);}
"reads" => {self.reads = Some(x);}
"nwritten" => {self.nwritten = Some(x);}
"writes" => {self.writes = Some(x);}
_ => eprintln!("Unknown sysctl {:?}", name),
}
} |
test_misc.py | import os.path
import sys
from nose.tools import assert_raises
from cx_Freeze.common import ConfigError, process_path_specs
rootdir = "C:\\" if sys.platform == "win32" else "/"
def test_process_path_specs():
|
def test_process_path_specs_bad():
with assert_raises(ConfigError):
process_path_specs(
[(os.path.join(rootdir, "foo"), os.path.join(rootdir, "bar"))]
)
with assert_raises(ConfigError):
process_path_specs([("a", "b", "c")])
| inp = [
os.path.join(rootdir, "foo", "bar"),
(os.path.join(rootdir, "foo", "qux"), os.path.join("baz", "xyz")),
]
outp = process_path_specs(inp)
assert outp == [
(os.path.join(rootdir, "foo", "bar"), "bar"),
(os.path.join(rootdir, "foo", "qux"), os.path.join("baz", "xyz")),
] |
selectors.test.js | import {
selectEmployees,
makeSelectEmployeesStatus,
selectSingleEmployee,
makeSelectSingleEmployeeStatus,
} from '../selectors';
const employeesList = [
{
id: 1,
employeeId: 'Emp123',
name: 'john dow',
age: 18,
addresses: [
{
id: 1,
location: 'Kolkata',
},
{
id: 2,
location: 'Delhi',
},
{
id: 3,
location: 'Bombay',
},
{
id: 4,
location: 'Hydrabad',
},
],
},
];
const employeeState = {
employees: {
list: employeesList,
single: employeesList[0],
},
};
describe('selectEmployees', () => {
it('should select the Employees', () => {
expect(selectEmployees(employeeState)).toEqual(employeesList);
});
});
describe('selectSingleEmployee', () => {
it('should select the single Employees', () => {
expect(selectSingleEmployee(employeeState)).toEqual(employeesList[0]);
});
});
describe('makeSelectEmployeesStatus', () => {
const employeesSelector = makeSelectEmployeesStatus(); | expect(employeesSelector(employeeState)).toEqual(employeesList);
});
});
describe('makeSelectSingleEmployeeStatus', () => {
const employeeSingleSelector = makeSelectSingleEmployeeStatus();
it('should select employees', () => {
expect(employeeSingleSelector(employeeState)).toEqual(employeesList[0]);
});
}); | it('should select employees', () => { |
template.rs | use crate::common::*;
#[derive(PartialEq, Debug)]
pub(crate) struct Template {
source: PathBuf,
directory: PathBuf,
filename: OsString,
}
impl Template {
pub(crate) fn new(path: &Path) -> io::Result<Template> {
let source = path.canonicalize()?;
let components = source.components().collect::<Vec<Component>>();
assert!(!components.is_empty());
if components.len() == 1 {
return Err(io::Error::new(io::ErrorKind::InvalidInput, Error::Root));
}
let directory = components[..components.len() - 1]
.iter()
.collect::<PathBuf>();
let filename = if let Component::Normal(filename) = components[components.len() - 1] {
filename.to_owned()
} else {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
Error::UnexpectedFinalPathComponent,
));
};
Ok(Template {
source,
directory,
filename,
})
}
pub(crate) fn source(&self) -> &Path {
&self.source
}
pub(crate) fn destination(&self, extension: &OsStr) -> io::Result<PathBuf> {
let filename_with_extension = {
let mut filename = self.filename.clone();
filename.push(".");
filename.push(extension);
filename
};
for n in 0u128.. {
let candidate_filename = {
let mut candidate_filename = filename_with_extension.clone();
if n > 0 {
candidate_filename.push(".");
candidate_filename.push((n - 1).to_string());
}
candidate_filename
};
let candidate = self.directory.join(candidate_filename);
match fs::symlink_metadata(&candidate) {
Ok(_) => continue,
Err(io_error) => {
if io_error.kind() == io::ErrorKind::NotFound {
return Ok(candidate);
} else {
return Err(io_error);
}
}
}
}
unreachable!();
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::testing;
use std::fs;
#[test]
fn root() {
assert_eq!(
Template::new("/".as_ref()).unwrap_err().kind(),
io::ErrorKind::InvalidInput
);
}
#[test]
fn simple() -> io::Result<()> {
let filename = "foo";
let tempdir = testing::tempdir(&[filename])?;
let tempdir_path = tempdir.path().canonicalize()?;
let path = tempdir_path.join(filename);
let template = Template::new(&path)?;
assert_eq!(template.source, path);
assert_eq!(template.directory, tempdir_path);
assert_eq!(template.filename, OsStr::new("foo"));
|
#[test]
fn dot() -> io::Result<()> {
let tempdir = tempfile::tempdir()?;
let tempdir_path = tempdir.path().canonicalize()?;
let foo = tempdir_path.join("foo");
fs::create_dir(&foo)?;
let path = foo.join(".");
let have = Template::new(&path)?;
assert_eq!(have.source, path);
assert_eq!(have.directory, tempdir_path);
assert_eq!(have.filename, OsStr::new("foo"));
Ok(())
}
#[test]
fn dotdot() -> io::Result<()> {
let tempdir = tempfile::tempdir()?;
let tempdir_path = tempdir.path().canonicalize()?;
let foo = tempdir_path.join("foo");
fs::create_dir(&foo)?;
let path = foo.join("..");
let have = Template::new(&path)?;
assert_eq!(have.source, tempdir_path);
assert_eq!(have.directory, tempdir_path.parent().unwrap());
assert_eq!(have.filename, tempdir_path.file_name().unwrap());
Ok(())
}
} | Ok(())
} |
powderBot.py |
#Weather
#Functions TODO
# precip accumulation works well hourly
# sign up for storm alert per IKON or EPIC resort
# timer to check the 3 day for storms
# highest winter in state
from datetime import datetime, timedelta
from dateutil import tz
import discord
import googlemaps
import aiohttp
import asyncio
from PIL import Image, ImageDraw, ImageFont
client = discord.Client()
#Keys
gmaps_key = ''
api_key = ''
gmaps = googlemaps.Client(key=gmaps_key)
#Coordinates
latitude = 0
longitude = 0
#URLs
api_url = 'https://api.darksky.net/forecast/'
excludeExceptHourly = "currently,minutely,daily"
excludeExceptDaily = "currently,hourly,minutely"
@client.event
async def | ():
print('We have logged in as {0.user}'.format(client))
#help()
#func: Takes message author mentionable string and returns a list of commands with an @ author
#param: author: mentionable string for the author of the message
def help(author):
return author + "\n __**Command List:**__ \n **!help:** Displays list of commands \n **!current location:** Displays hourly weather for specified location \n **!forecast location:** Displays 5 day forecast for specified location"
###
### Helper Functions
###
#get_url()
#func: Receives the message content and exclusion parameter and splits the string, takes second string and any after as location. Inputs into geocoder to gather coordinates and formatted address
#params: message: string contents of message sent, e.g. "!current location" or "!forecast location", exclude: string that inputs which data to exclude in API JSON request
#returns URL and Location
def get_url(message, exclude):
temp = message.split()
if len(temp) > 2:
count = 1
location = ""
while count < len(temp):
location = location + " " + temp[count]
count = count + 1
#if out of range
else:
try:
location = temp[1]
except IndexError:
return "Index Error", None
geocode_result = gmaps.geocode(location)
#if bad input
if not geocode_result:
return "Input Error", None
latitude = geocode_result[0]["geometry"]["location"]['lat']
longitude = geocode_result[0]["geometry"]["location"]['lng']
location = geocode_result[0]["formatted_address"]
# print(geocode_result[0]["geometry"]["location"])
url = api_url + str(api_key) + "/" + str(latitude) + "," + str(longitude) + "?units=us&exclude=" + exclude
return url, location
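# Illustrative example (values are made up, not from a real request):
#   get_url("!current Denver", excludeExceptHourly)
#   -> ("https://api.darksky.net/forecast/<api_key>/39.73,-104.99?units=us&exclude=currently,minutely,daily",
#       "Denver, CO, USA")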
#time_zone_util()
#func: Receives time in UTC and timezone and converts time to specified time zone, returns new time's hour in 12 hour format and either AM or PM
def time_zone_util(time, time_zone):
to_zone = tz.gettz(time_zone)
new_time = int(time.astimezone(to_zone).strftime('%#I'))
am_pm = time.astimezone(to_zone).strftime('%p')
return new_time, am_pm
###
### Primary Functions
###
#currentWeather()
#func: receives weather API JSON and the formatted address and fills list of data every 3 hours for a total of 12 hours. Creates image to display values
#params: JSON_data is weather API JSON, location is the formatted address for location
def currentWeather(json_data, location):
count = 0
temp, precipChance, precipType, precipIntensity, icon = [None] * 5, [None] * 5, [None] * 5, [None] * 5, [None] * 5
time = json_data["hourly"]["data"][0]["time"]
time_zone = json_data["timezone"]
#Loop goes through the JSON file and outputs the temperature and precip every 3 hours for the next 12 hours
while count < 5:
hours = 3*count
summary = json_data["hourly"]["summary"]
temp[count]= round(json_data["hourly"]["data"][hours]["temperature"])
icon[count] = json_data["hourly"]["data"][hours]["icon"]
if(icon[count] == "clear-day"):
icon[count] = "clear_day"
if (icon[count] == "clear-night"):
icon[count] = "clear_night"
if (icon[count] == "partly-cloudy-day"):
icon[count] = "partly_cloudy_day"
if (icon[count] == "partly-cloudy-night"):
icon[count] = "partly_cloudy_night"
precipChance[count] = "{:.0%}".format(json_data["hourly"]["data"][hours]["precipProbability"])
if precipChance[count] != "0%" and precipChance[count] != "1%" and precipChance[count] != "2%" and precipChance[count] != "3%" and precipChance[count] != "4%":
precipType[count] = json_data["hourly"]["data"][hours]["precipType"]
precipIntensity[count] = json_data["hourly"]["data"][hours]["precipIntensity"]
if precipType[count] != "snow" and precipIntensity[count] <= .01:
icon[count] = "drizzle"
if precipType[count] != "snow" and .3 <= precipIntensity[count]:
icon[count] = "storm"
count = count + 1
img = Image.new('RGB', (1050, 375), color='white')
#Declare fonts
title_font = ImageFont.truetype('Lib/Fonts/FiraSans-ExtraBold.ttf', 50)
location_font = ImageFont.truetype('Lib/Fonts/FiraSans-Regular.ttf', 34)
summary_font = ImageFont.truetype('Lib/Fonts/FiraSans-Regular.ttf', 21)
time_font = ImageFont.truetype('Lib/Fonts/FiraSans-ExtraBold.ttf', 31)
degree_font = ImageFont.truetype('Lib/Fonts/FiraSans-SemiBold.ttf', 34)
precip_font = ImageFont.truetype('Lib/Fonts/FiraSans-Bold.ttf', 24)
precip_value_font = ImageFont.truetype('Lib/Fonts/FiraSans-Regular.ttf', 24)
#Icons
clear_day = Image.open('Lib/Icons/Sun.jpg')
clear_night = Image.open('Lib/Icons/Moon.jpg')
rain = Image.open('Lib/Icons/Cloud-Rain.jpg')
partly_cloudy_day = Image.open('Lib/Icons/Cloud-Sun.jpg')
partly_cloudy_night = Image.open('Lib/Icons/Cloud-Moon.jpg')
cloudy = Image.open('Lib/Icons/Cloud.jpg')
snow = Image.open('Lib/Icons/Cloud-Snow-Alt.jpg')
sleet = Image.open('Lib/Icons/Cloud-Snow-Alt.jpg')
wind = Image.open('Lib/Icons/Cloud-Wind.jpg')
fog = Image.open('Lib/Icons/Cloud-Fog-Alt.jpg')
drizzle = Image.open('Lib/Icons/Cloud-Drizzle.jpg')
storm = Image.open('Lib/Icons/Cloud-Lightning.jpg')
#Title + Subtitle
d = ImageDraw.Draw(img)
d.text((35, 11), "Hourly Forecast", font=title_font, fill='black')
d.text((400, 26), location, font=location_font, fill='black')
d.text((35, 68), summary, font=summary_font, fill='black')
# Rectangle
d.rectangle([(24, 96), (218, 352)], fill=(214, 214, 214), outline=None)
d.rectangle([(226, 96), (420, 352)], fill=(214, 214, 214), outline=None)
d.rectangle([(427, 96), (621, 352)], fill=(214, 214, 214), outline=None)
d.rectangle([(629, 96), (823, 352)], fill=(214, 214, 214), outline=None)
d.rectangle([(830, 96), (1024, 352)], fill=(214, 214, 214), outline=None)
# Time
from_zone = tz.gettz('UTC')
utc = datetime.utcnow()
time_utc = utc.replace(tzinfo = from_zone)
time_hour1, am_pm1 = time_zone_util(time_utc, time_zone)
time_hour2,am_pm2 = time_zone_util(time_utc + timedelta(hours=3), time_zone)
time_hour3,am_pm3 = time_zone_util(time_utc + timedelta(hours=6),time_zone)
time_hour4,am_pm4 = time_zone_util(time_utc + timedelta(hours=9),time_zone)
time_hour5,am_pm5 = time_zone_util(time_utc + timedelta(hours=12),time_zone)
# Time Width
time_width, trash = d.textsize(str(time_hour1)+ am_pm1, font=time_font)
time_width2, trash = d.textsize(str(time_hour2)+ am_pm2, font=time_font)
time_width3, trash = d.textsize(str(time_hour3)+ am_pm3, font=time_font)
time_width4, trash = d.textsize(str(time_hour4)+ am_pm4, font=time_font)
time_width5, trash = d.textsize(str(time_hour5)+ am_pm5, font=time_font)
# Time input
d.text((((194 - time_width) / 2) + 24, 105), str(time_hour1) + am_pm1, font=time_font, fill="black")
d.text((((194 - time_width2) / 2) + 226, 105), str(time_hour2) + am_pm2, font=time_font, fill="black")
d.text((((194 - time_width3) / 2) + 427, 105), str(time_hour3) + am_pm3, font=time_font, fill="black")
d.text((((194 - time_width4) / 2) + 629, 105), str(time_hour4) + am_pm4, font=time_font, fill="black")
d.text((((194 - time_width5) / 2) + 830, 105), str(time_hour5) + am_pm5, font=time_font, fill="black")
# Icon
img.paste(eval(icon[0]), (59, 147))
img.paste(eval(icon[1]), (261, 147))
img.paste(eval(icon[2]), (462, 147))
img.paste(eval(icon[3]), (664, 147))
img.paste(eval(icon[4]), (865, 147))
# Degree Text Width
temp_holder = str(str(temp[0]) + u"\u00b0" + "F")
temp_width, throwaway = d.textsize(temp_holder, font=degree_font)
# Degree
d.text((((194 - temp_width) / 2) + 24, 263), str(temp[0]) + u"\u00b0" + "F",font=degree_font, fill="black")
d.text((((194 - temp_width) / 2) + 226, 263), str(temp[1]) + u"\u00b0" + "F",font=degree_font, fill="black")
d.text((((194 - temp_width) / 2) + 427, 263), str(temp[2]) + u"\u00b0" + "F",font=degree_font, fill="black")
d.text((((194 - temp_width) / 2) + 629, 263), str(temp[3]) + u"\u00b0" + "F",font=degree_font, fill="black")
d.text((((194 - temp_width) / 2) + 830, 263), str(temp[4]) + u"\u00b0" + "F",font=degree_font, fill="black")
# Precip
d.text((61, 300), "Precip", font=precip_font, fill=(43, 43, 43))
d.text((263, 300), "Precip", font=precip_font, fill=(43, 43, 43))
d.text((465, 300), "Precip", font=precip_font, fill=(43, 43, 43))
d.text((666, 300), "Precip", font=precip_font, fill=(43, 43, 43))
d.text((867, 300), "Precip", font=precip_font, fill=(43, 43, 43))
# Precip Value
d.text((139, 300), str(precipChance[0]), font=precip_value_font, fill="black")
d.text((341, 300), str(precipChance[1]), font=precip_value_font, fill="black")
d.text((541, 300), str(precipChance[2]), font=precip_value_font, fill="black")
d.text((744, 300), str(precipChance[3]), font=precip_value_font, fill="black")
d.text((945, 300), str(precipChance[4]), font=precip_value_font, fill="black")
img.save("hourly_rendered_image.png")
return
#forecast()
#func: Receives weather API JSON and the formatted address and fills list of data for every day for a total of 5 days. Creates image to display values
#param: json_data: weather data from API, location: formatted address of location
def forecast(json_data, location):
count = 0
#Loop goes through the JSON file and outputs the temperature range and precip for each of the next 5 days
icon, temp_high, temp_low, precipChance, precipType, precipIntensity = [None] * 5, [None] * 5, [None] * 5, [0] * 5, [None] * 5, [None] * 5
while count < 5:
hours = count
summary = json_data["daily"]["summary"]
temp_high[count] = round(json_data["daily"]["data"][hours]["temperatureHigh"])
temp_low[count] = round(json_data["daily"]["data"][hours]["temperatureLow"])
icon[count] = json_data["daily"]["data"][hours]["icon"]
if(icon[count] == "clear-day"):
icon[count] = "clear_day"
if (icon[count] == "clear-night"):
icon[count] = "clear_night"
if (icon[count] == "partly-cloudy-day"):
icon[count] = "partly_cloudy_day"
if (icon[count] == "partly-cloudy-night"):
icon[count] = "partly_cloudy_night"
precipChance[count] = "{:.0%}".format(json_data["daily"]["data"][hours]["precipProbability"])
#Below 4% rain type is not displayed
if precipChance[count] != "0%" and precipChance[count] != "1%" and precipChance[count] != "2%" and precipChance[count] != "3%" and precipChance[count] != "4%":
precipType[count] = json_data["daily"]["data"][hours]["precipType"]
precipIntensity[count] = json_data["daily"]["data"][hours]["precipIntensity"]
if precipType[count] != "snow" and precipIntensity[count] <= .01:
icon[count] = "drizzle"
if precipType[count] != "snow" and .3 <= precipIntensity[count]:
icon[count] = "storm"
count+=1
img = Image.new('RGB', (1050, 375), color='white')
#Declare fonts
title_font = ImageFont.truetype('Lib/Fonts/FiraSans-ExtraBold.ttf', 50)
location_font = ImageFont.truetype('Lib/Fonts/FiraSans-Regular.ttf', 34)
summary_font = ImageFont.truetype('Lib/Fonts/FiraSans-Regular.ttf', 21)
day_font = ImageFont.truetype('Lib/Fonts/FiraSans-ExtraBold.ttf', 31)
degree_font = ImageFont.truetype('Lib/Fonts/FiraSans-SemiBold.ttf', 34)
precip_font = ImageFont.truetype('Lib/Fonts/FiraSans-Bold.ttf', 24)
precip_value_font = ImageFont.truetype('Lib/Fonts/FiraSans-Regular.ttf', 24)
#Day Values
day_of_week = datetime.today().weekday()
week = ["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
forecast_days = [None] * 5
#For Loop to get next 5 days
day_count = 0
for day_count in range(0,5):
forecast_days[day_count] = week[day_of_week]
day_of_week = day_of_week + 1
day_count = day_count + 1
if day_of_week == 7:
day_of_week = 0
#Icons
clear_day = Image.open('Lib/Icons/Sun.jpg')
clear_night = Image.open('Lib/Icons/Moon.jpg')
rain = Image.open('Lib/Icons/Cloud-Rain.jpg')
partly_cloudy_day = Image.open('Lib/Icons/Cloud-Sun.jpg')
partly_cloudy_night = Image.open('Lib/Icons/Cloud-Moon.jpg')
cloudy = Image.open('Lib/Icons/Cloud.jpg')
snow = Image.open('Lib/Icons/Cloud-Snow-Alt.jpg')
sleet = Image.open('Lib/Icons/Cloud-Snow-Alt.jpg')
wind = Image.open('Lib/Icons/Cloud-Wind.jpg')
fog = Image.open('Lib/Icons/Cloud-Fog-Alt.jpg')
drizzle = Image.open('Lib/Icons/Cloud-Drizzle.jpg')
storm = Image.open('Lib/Icons/Cloud-Lightning.jpg')
#Title + Subtitle
d = ImageDraw.Draw(img)
d.text((35, 11), "5 Day Forecast", font=title_font, fill='black')
d.text((375, 26), location, font=location_font, fill='black')
d.text((35, 68), summary, font=summary_font, fill= 'black')
#Rectangle
d.rectangle([(24,96), (218,352)], fill = (214,214,214), outline=None)
d.rectangle([(226,96), (420,352)], fill = (214,214,214), outline=None)
d.rectangle([(427,96), (621,352)], fill = (214,214,214), outline=None)
d.rectangle([(629,96), (823,352)], fill = (214,214,214), outline=None)
d.rectangle([(830,96), (1024,352)], fill = (214,214,214), outline=None)
#Day of The Week Text Width
text_width, trash =d.textsize(forecast_days[0], font=day_font)
text_width2, trash =d.textsize(forecast_days[1], font=day_font)
text_width3, trash =d.textsize(forecast_days[2], font=day_font)
text_width4, trash =d.textsize(forecast_days[3], font=day_font)
text_width5, trash =d.textsize(forecast_days[4], font=day_font)
#Day of The Week
d.text((((194 - text_width) / 2) + 24, 105), forecast_days[0], font=day_font, fill= "black")
d.text((((194 - text_width2) / 2) + 226, 105), forecast_days[1], font=day_font, fill= "black")
d.text((((194 - text_width3) / 2) + 427, 105), forecast_days[2], font=day_font, fill= "black")
d.text((((194 - text_width4) / 2) + 629, 105), forecast_days[3], font=day_font, fill= "black")
d.text((((194 - text_width5) / 2) + 830, 105), forecast_days[4], font=day_font, fill= "black")
#Icon
img.paste(eval(icon[0]), (59, 147))
img.paste(eval(icon[1]), (261, 147))
img.paste(eval(icon[2]), (462, 147))
img.paste(eval(icon[3]), (664, 147))
img.paste(eval(icon[4]), (865, 147))
#Degree Text Width
temp_holder = str(temp_high[0]) + " - " + str(temp_low[0]) + u"\u00b0" + "F"
temp_width, throwaway = d.textsize(temp_holder, font=degree_font)
#Degree
d.text((((194 - temp_width) / 2) + 24, 263), str(temp_high[0]) + " - " + str(temp_low[0]) + u"\u00b0" + "F", font=degree_font, fill= "black")
d.text((((194 - temp_width) / 2) + 226, 263),str(temp_high[1]) + " - " + str(temp_low[1]) + u"\u00b0" + "F", font=degree_font, fill= "black")
d.text((((194 - temp_width) / 2) + 427, 263), str(temp_high[2]) + " - " + str(temp_low[2]) + u"\u00b0" + "F", font=degree_font, fill= "black")
d.text((((194 - temp_width) / 2) + 629, 263), str(temp_high[3]) + " - " + str(temp_low[3]) + u"\u00b0" + "F", font=degree_font, fill= "black")
d.text((((194 - temp_width) / 2) + 830, 263), str(temp_high[4]) + " - " + str(temp_low[4]) + u"\u00b0" + "F", font=degree_font, fill= "black")
#Precip
d.text((61, 300), "Precip", font=precip_font, fill= (43, 43, 43))
d.text((263, 300), "Precip", font=precip_font, fill= (43, 43, 43))
d.text((465, 300), "Precip", font=precip_font, fill= (43, 43, 43))
d.text((666, 300), "Precip", font=precip_font, fill= (43, 43, 43))
d.text((867, 300), "Precip", font=precip_font, fill= (43, 43, 43))
#Precip Value
d.text((139, 300), str(precipChance[0]), font=precip_value_font, fill= "black")
d.text((341, 300), str(precipChance[1]), font=precip_value_font, fill= "black")
d.text((541, 300), str(precipChance[2]), font=precip_value_font, fill= "black")
d.text((744, 300), str(precipChance[3]), font=precip_value_font, fill= "black")
d.text((945, 300), str(precipChance[4]), font=precip_value_font, fill= "black")
img.save("forecast_rendered_image.png")
return
#Event Function that activates different functions on command message
@client.event
async def on_message(message):
if message.author == client.user:
return
if message.content.startswith('!help'):
output = help(message.author.mention)
await message.channel.send(output)
if message.content.startswith('!current'):
url, location = get_url(message.content, excludeExceptHourly)
print(url)
if url == "Index Error" or url == "Input Error":
if url == "Index Error":
await message.channel.send(message.author.mention + "\n**Error:** Incorrect format, ```!current location``` ")
if url == "Input Error":
await message.channel.send(message.author.mention + "\n**Error:** Invalid input, input name or address of location ```!current location``` ")
else:
async with aiohttp.ClientSession() as session:
async with session.get(url) as r:
if r.status == 200:
json_data = await r.json()
print(await r.json())
output = currentWeather(json_data, location)
await message.channel.send(file=discord.File('hourly_rendered_image.png'))
if message.content.startswith('!forecast'):
url, location = get_url(message.content, excludeExceptDaily)
print(url)
if url == "Index Error" or url == "Input Error":
if url == "Index Error":
await message.channel.send(message.author.mention + "**\nError:** Incorrect format, ```!forecast location``` ")
if url == "Input Error":
await message.channel.send(message.author.mention + "**\nError:** Invalid input, input name or address of location ```!forecast location``` ")
else:
async with aiohttp.ClientSession() as session:
async with session.get(url) as r:
if r.status == 200:
json_data = await r.json()
#print(await r.json())
output = forecast(json_data, location)
await message.channel.send(file=discord.File('forecast_rendered_image.png'))
client.run('.XRMUFw.-kdM')
| on_ready |
nascell.py | # Copyright 2018 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""rnn_cell.NASCell adapted to support transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
class NASCell(tf.nn.rnn_cell.RNNCell):
"""Neural Architecture Search (NAS) recurrent network cell.
This implements the recurrent cell from the paper:
https://arxiv.org/abs/1611.01578
Barret Zoph and Quoc V. Le.
"Neural Architecture Search with Reinforcement Learning" Proc. ICLR 2017.
The class uses an optional projection layer.
"""
def __init__(self, num_units, num_proj=None,
use_biases=False, reuse=None,
initializer=None,
input_transform=None,
state_transform=None,
update_transform=None):
"""Initialize the parameters for a NAS cell.
Args:
num_units: int, The number of units in the NAS cell
num_proj: (optional) int, The output dimensionality for the projection
matrices. If None, no projection is performed.
use_biases: (optional) bool, If True then use biases within the cell. This
is False by default.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
initializer: Initializer for the variables.
input_transform: None, or a function of one argument that
massages the input in some way. For example, variational
dropout can be implemented by passing a Dropout object here.
state_transform: Similar to input_transform, this is
applied to the recurrent state.
update_transform: Similar to input_transform, this is
applied to the proposed update ('j').
"""
super(NASCell, self).__init__(_reuse=reuse)
self._num_units = num_units
self._num_proj = num_proj
self._use_biases = use_biases
self._reuse = reuse
if num_proj is not None:
self._state_size = tf.nn.rnn_cell.LSTMStateTuple(num_units, num_proj)
self._output_size = num_proj
else:
self._state_size = tf.nn.rnn_cell.LSTMStateTuple(num_units, num_units)
self._output_size = num_units
self._initializer = initializer
self._input_transform = input_transform
self._state_transform = state_transform
assert update_transform is None
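    # Rough usage sketch (my own illustration, not from the original module):
    #   cell = NASCell(num_units=128, num_proj=64, use_biases=True)
    #   outputs, final_state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)
    # `inputs` is assumed to be a [batch, time, features] float Tensor.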
@property
def state_size(self):
|
@property
def output_size(self):
return self._output_size
def call(self, inputs, state):
"""Run one step of NAS Cell.
Args:
inputs: input Tensor, 2D, batch x num_units.
state: This must be a tuple of state Tensors, both `2-D`, with column
sizes `c_state` and `m_state`.
Returns:
A tuple containing:
- A `2-D, [batch x output_dim]`, Tensor representing the output of the
NAS Cell after reading `inputs` when previous state was `state`.
Here output_dim is:
num_proj if num_proj was set,
num_units otherwise.
- Tensor(s) representing the new state of NAS Cell after reading `inputs`
when the previous state was `state`. Same type and shape(s) as `state`.
Raises:
ValueError: If input size cannot be inferred from inputs via
static shape inference.
"""
sigmoid = tf.sigmoid
tanh = tf.tanh
relu = tf.nn.relu
num_proj = self._num_units if self._num_proj is None else self._num_proj
def maybe_transform(transform, x):
if transform is None:
return x
else:
return transform(x)
(c_prev, m_prev) = state
m_prev = maybe_transform(self._state_transform, m_prev)
dtype = inputs.dtype
input_size = inputs.get_shape().with_rank(2)[1]
inputs = maybe_transform(self._input_transform, inputs)
if input_size.value is None:
raise ValueError("Could not infer input size from inputs.get_shape()[-1]")
# Variables for the NAS cell. W_m is all matrices multiplying the
# hidden state and W_inputs is all matrices multiplying the inputs.
concat_w_m = tf.get_variable(
"recurrent_kernel", [num_proj, 8 * self._num_units],
initializer=self._initializer, dtype=dtype)
concat_w_inputs = tf.get_variable(
"kernel", [input_size.value, 8 * self._num_units],
initializer=self._initializer, dtype=dtype)
m_matrix = tf.matmul(m_prev, concat_w_m)
inputs_matrix = tf.matmul(inputs, concat_w_inputs)
if self._use_biases:
b = tf.get_variable(
"bias",
shape=[8 * self._num_units],
initializer=tf.zeros_initializer(),
dtype=dtype)
m_matrix = tf.nn.bias_add(m_matrix, b)
# The NAS cell branches into 8 different splits for both the hidden state
# and the input
m_matrix_splits = tf.split(axis=1, num_or_size_splits=8,
value=m_matrix)
inputs_matrix_splits = tf.split(axis=1, num_or_size_splits=8,
value=inputs_matrix)
# First layer
layer1_0 = sigmoid(inputs_matrix_splits[0] + m_matrix_splits[0])
layer1_1 = relu(inputs_matrix_splits[1] + m_matrix_splits[1])
layer1_2 = sigmoid(inputs_matrix_splits[2] + m_matrix_splits[2])
layer1_3 = relu(inputs_matrix_splits[3] * m_matrix_splits[3])
layer1_4 = tanh(inputs_matrix_splits[4] + m_matrix_splits[4])
layer1_5 = sigmoid(inputs_matrix_splits[5] + m_matrix_splits[5])
layer1_6 = tanh(inputs_matrix_splits[6] + m_matrix_splits[6])
layer1_7 = sigmoid(inputs_matrix_splits[7] + m_matrix_splits[7])
# Second layer
l2_0 = tanh(layer1_0 * layer1_1)
l2_1 = tanh(layer1_2 + layer1_3)
l2_2 = tanh(layer1_4 * layer1_5)
l2_3 = sigmoid(layer1_6 + layer1_7)
# Inject the cell
l2_0 = tanh(l2_0 + c_prev)
# Third layer
l3_0_pre = l2_0 * l2_1
new_c = l3_0_pre # create new cell
l3_0 = l3_0_pre
l3_1 = tanh(l2_2 + l2_3)
# Final layer
new_m = tanh(l3_0 * l3_1)
# Projection layer if specified
if self._num_proj is not None:
concat_w_proj = tf.get_variable(
"projection_weights", [self._num_units, self._num_proj],
dtype)
new_m = tf.matmul(new_m, concat_w_proj)
new_state = tf.nn.rnn_cell.LSTMStateTuple(new_c, new_m)
return new_m, new_state
| return self._state_size |
main.rs | use anyhow::{anyhow, bail, format_err, Result};
use rustyline::completion::{Completer, FilenameCompleter, Pair};
use rustyline::config::OutputStreamType;
use rustyline::error::ReadlineError;
use rustyline::highlight::{Highlighter, MatchingBracketHighlighter};
use rustyline::hint::{Hinter, HistoryHinter};
use rustyline::validate::{self, MatchingBracketValidator, Validator};
use rustyline::{Cmd, CompletionType, Config, Context, EditMode, Editor, KeyPress};
use rustyline_derive::Helper;
use std::borrow::Cow::{self, Borrowed, Owned};
use std::boxed::Box;
use structopt::StructOpt;
mod nrepl;
mod prepl;
mod repl;
mod util;
use repl::{Repl, Response};
#[derive(Helper)]
struct MyHelper {
completer: FilenameCompleter,
highlighter: MatchingBracketHighlighter,
validator: MatchingBracketValidator,
hinter: HistoryHinter,
colored_prompt: String,
}
impl Completer for MyHelper {
type Candidate = Pair;
fn | (
&self,
line: &str,
pos: usize,
ctx: &Context<'_>,
) -> Result<(usize, Vec<Pair>), ReadlineError> {
self.completer.complete(line, pos, ctx)
}
}
impl Hinter for MyHelper {
fn hint(&self, line: &str, pos: usize, ctx: &Context<'_>) -> Option<String> {
self.hinter.hint(line, pos, ctx)
}
}
impl Highlighter for MyHelper {
fn highlight_prompt<'b, 's: 'b, 'p: 'b>(
&'s self,
prompt: &'p str,
default: bool,
) -> Cow<'b, str> {
if default {
Borrowed(&self.colored_prompt)
} else {
Borrowed(prompt)
}
}
fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
Owned("\x1b[1m".to_owned() + hint + "\x1b[m")
}
fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> {
self.highlighter.highlight(line, pos)
}
fn highlight_char(&self, line: &str, pos: usize) -> bool {
self.highlighter.highlight_char(line, pos)
}
}
impl Validator for MyHelper {
fn validate(
&self,
ctx: &mut validate::ValidationContext,
) -> rustyline::Result<validate::ValidationResult> {
self.validator.validate(ctx)
}
fn validate_while_typing(&self) -> bool {
self.validator.validate_while_typing()
}
}
fn handle_response(repl: &mut Box<dyn Repl>) -> Result<()> {
loop {
match repl.recv()? {
Response::StdErr(s) => {
print!("{}", &s);
}
Response::StdOut(s) => {
print!("{}", &s);
}
Response::Exception(s) => {
println!("{}", &s);
break;
}
Response::Other(_) => {}
Response::Done(opt) => {
if let Some(s) = opt {
println!("{}", &s);
}
break;
}
}
}
Ok(())
}
fn main_loop(mut repl: Box<dyn Repl>) -> Result<()> {
let config = Config::builder()
.history_ignore_space(true)
.completion_type(CompletionType::List)
.edit_mode(EditMode::Emacs)
.output_stream(OutputStreamType::Stdout)
.build();
let h = MyHelper {
completer: rustyline::completion::FilenameCompleter::new(),
highlighter: rustyline::highlight::MatchingBracketHighlighter::new(),
hinter: rustyline::hint::HistoryHinter {},
colored_prompt: "".to_owned(),
validator: rustyline::validate::MatchingBracketValidator::new(),
};
let mut rl = Editor::with_config(config);
rl.set_helper(Some(h));
rl.bind_sequence(KeyPress::Down, Cmd::LineDownOrNextHistory);
rl.bind_sequence(KeyPress::Up, Cmd::LineUpOrPreviousHistory);
rl.bind_sequence(KeyPress::Meta('N'), Cmd::HistorySearchForward);
rl.bind_sequence(KeyPress::Meta('P'), Cmd::HistorySearchBackward);
loop {
let p = &format!("{}=> ", repl.namespace());
rl.helper_mut().expect("No helper").colored_prompt = format!("\x1b[1;32m{}\x1b[0m", p);
match rl.readline(&p) {
Ok(line) => {
if !line.trim().is_empty() {
rl.add_history_entry(&line);
repl.send(&line)?;
} else {
continue;
}
}
Err(ReadlineError::Interrupted) => {
println!("CTRL-C");
break;
}
Err(ReadlineError::Eof) => {
println!("CTRL-D");
break;
}
Err(err) => {
println!("Error: {:?}", err);
}
}
handle_response(&mut repl)?;
}
Ok(())
}
#[derive(StructOpt, Debug)]
#[structopt(name = "ruply")]
struct Opt {
/// Repl host
#[structopt(short, default_value = "127.0.0.1")]
host: String,
/// Repl port
#[structopt(short)]
port: usize,
/// Code snippet for single-shot evaluation
#[structopt(short)]
eval: Option<String>,
}
fn main() -> Result<()> {
let opt = Opt::from_args();
let mut repl = repl::get_repl(&opt.host, opt.port)?;
match opt.eval {
Some(code) => {
repl.send(&code)?;
handle_response(&mut repl)?;
}
None => {
println!(
"\nConnected to {} at {}:{}",
repl.name(),
&opt.host,
opt.port
);
println!("Exit: CTRL+D\n");
main_loop(repl)?;
}
}
Ok(())
}
| complete |
scale.go | package cmd
import (
"encoding/json"
"fmt"
"io/ioutil"
"math/rand"
"os"
"path"
"sort"
"strings"
"time"
"github.com/Azure/acs-engine/pkg/acsengine"
"github.com/Azure/acs-engine/pkg/acsengine/transform"
"github.com/Azure/acs-engine/pkg/api"
"github.com/Azure/acs-engine/pkg/armhelpers"
"github.com/Azure/acs-engine/pkg/armhelpers/utils"
"github.com/Azure/acs-engine/pkg/i18n"
"github.com/Azure/acs-engine/pkg/operations"
"github.com/leonelquinteros/gotext"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
type scaleCmd struct {
authArgs
// user input
resourceGroupName string
deploymentDirectory string
newDesiredAgentCount int
containerService *api.ContainerService
apiVersion string
location string
agentPoolToScale string
classicMode bool
// derived
apiModelPath string
agentPool *api.AgentPoolProfile
client armhelpers.ACSEngineClient
locale *gotext.Locale
nameSuffix string
agentPoolIndex int
masterFQDN string
logger *log.Entry
}
const (
scaleName = "scale"
scaleShortDescription = "scale a deployed cluster"
scaleLongDescription = "scale a deployed cluster"
)
// newScaleCmd returns a command to scale an agent pool in a deployed cluster
func newScaleCmd() *cobra.Command {
sc := scaleCmd{}
scaleCmd := &cobra.Command{
Use: scaleName,
Short: scaleShortDescription,
Long: scaleLongDescription,
RunE: func(cmd *cobra.Command, args []string) error {
return sc.run(cmd, args)
},
}
f := scaleCmd.Flags()
f.StringVarP(&sc.location, "location", "l", "", "location the cluster is deployed in")
f.StringVarP(&sc.resourceGroupName, "resource-group", "g", "", "the resource group where the cluster is deployed")
f.StringVar(&sc.deploymentDirectory, "deployment-dir", "", "the location of the output from `generate`")
f.IntVar(&sc.newDesiredAgentCount, "new-node-count", 0, "desired number of nodes")
f.BoolVar(&sc.classicMode, "classic-mode", false, "enable classic parameters and outputs")
f.StringVar(&sc.agentPoolToScale, "node-pool", "", "node pool to scale")
f.StringVar(&sc.masterFQDN, "master-FQDN", "", "FQDN for the master load balancer; needed to scale down Kubernetes agent pools")
addAuthFlags(&sc.authArgs, f) | func (sc *scaleCmd) validate(cmd *cobra.Command, args []string) {
log.Infoln("validating...")
sc.logger = log.New().WithField("source", "scaling command line")
var err error
sc.locale, err = i18n.LoadTranslations()
if err != nil {
log.Fatalf("error loading translation files: %s", err.Error())
}
if sc.resourceGroupName == "" {
cmd.Usage()
log.Fatal("--resource-group must be specified")
}
if sc.location == "" {
cmd.Usage()
log.Fatal("--location must be specified")
}
if sc.newDesiredAgentCount == 0 {
cmd.Usage()
log.Fatal("--new-node-count must be specified")
}
if sc.client, err = sc.authArgs.getClient(); err != nil {
log.Error("Failed to get client:", err)
}
if sc.deploymentDirectory == "" {
cmd.Usage()
log.Fatal("--deployment-dir must be specified")
}
_, err = sc.client.EnsureResourceGroup(sc.resourceGroupName, sc.location, nil)
if err != nil {
log.Fatalln(err)
}
// load apimodel from the deployment directory
sc.apiModelPath = path.Join(sc.deploymentDirectory, "apimodel.json")
if _, err = os.Stat(sc.apiModelPath); os.IsNotExist(err) {
log.Fatalf("specified api model does not exist (%s)", sc.apiModelPath)
}
apiloader := &api.Apiloader{
Translator: &i18n.Translator{
Locale: sc.locale,
},
}
sc.containerService, sc.apiVersion, err = apiloader.LoadContainerServiceFromFile(sc.apiModelPath, true, true, nil)
if err != nil {
log.Fatalf("error parsing the api model: %s", err.Error())
}
if sc.containerService.Location == "" {
sc.containerService.Location = sc.location
} else if sc.containerService.Location != sc.location {
log.Fatalf("--location does not match api model location")
}
if sc.agentPoolToScale == "" {
agentPoolCount := len(sc.containerService.Properties.AgentPoolProfiles)
if agentPoolCount > 1 {
log.Fatal("--node-pool is required if more than one agent pool is defined in the container service")
} else if agentPoolCount == 1 {
sc.agentPool = sc.containerService.Properties.AgentPoolProfiles[0]
sc.agentPoolIndex = 0
sc.agentPoolToScale = sc.containerService.Properties.AgentPoolProfiles[0].Name
} else {
log.Fatal("No node pools found to scale")
}
} else {
agentPoolIndex := -1
for i, pool := range sc.containerService.Properties.AgentPoolProfiles {
if pool.Name == sc.agentPoolToScale {
agentPoolIndex = i
sc.agentPool = pool
sc.agentPoolIndex = i
}
}
if agentPoolIndex == -1 {
log.Fatalf("node pool %s wasn't in the deployed api model", sc.agentPoolToScale)
}
}
templatePath := path.Join(sc.deploymentDirectory, "azuredeploy.json")
contents, _ := ioutil.ReadFile(templatePath)
var template interface{}
json.Unmarshal(contents, &template)
templateMap := template.(map[string]interface{})
templateParameters := templateMap["parameters"].(map[string]interface{})
nameSuffixParam := templateParameters["nameSuffix"].(map[string]interface{})
sc.nameSuffix = nameSuffixParam["defaultValue"].(string)
log.Infoln(fmt.Sprintf("Name suffix: %s", sc.nameSuffix))
}
func (sc *scaleCmd) run(cmd *cobra.Command, args []string) error {
sc.validate(cmd, args)
orchestratorInfo := sc.containerService.Properties.OrchestratorProfile
var currentNodeCount, highestUsedIndex int
indexes := make([]int, 0)
indexToVM := make(map[int]string)
if sc.agentPool.IsAvailabilitySets() {
//TODO handle when there is a nextLink in the response and get more nodes
vms, err := sc.client.ListVirtualMachines(sc.resourceGroupName)
if err != nil {
log.Fatalln("failed to get vms in the resource group. Error: %s", err.Error())
} else if len(*vms.Value) < 1 {
log.Fatalln("The provided resource group does not contain any vms.")
}
for _, vm := range *vms.Value {
poolName, nameSuffix, index, err := utils.K8sLinuxVMNameParts(*vm.Name)
if err != nil || !strings.EqualFold(poolName, sc.agentPoolToScale) || !strings.EqualFold(nameSuffix, sc.nameSuffix) {
continue
}
indexToVM[index] = *vm.Name
indexes = append(indexes, index)
}
sortedIndexes := sort.IntSlice(indexes)
sortedIndexes.Sort()
indexes = []int(sortedIndexes)
currentNodeCount = len(indexes)
if currentNodeCount == sc.newDesiredAgentCount {
log.Info("Cluster is currently at the desired agent count.")
return nil
}
highestUsedIndex = indexes[len(indexes)-1]
// Scale down Scenario
if currentNodeCount > sc.newDesiredAgentCount {
if sc.masterFQDN == "" {
cmd.Usage()
log.Fatal("master-FQDN is required to scale down a kubernetes cluster's agent pool")
}
vmsToDelete := make([]string, 0)
for i := currentNodeCount - 1; i >= sc.newDesiredAgentCount; i-- {
vmsToDelete = append(vmsToDelete, indexToVM[i])
}
if orchestratorInfo.OrchestratorType == api.Kubernetes {
err = sc.drainNodes(vmsToDelete)
if err != nil {
log.Errorf("Got error %+v, while draining the nodes to be deleted", err)
return err
}
}
errList := operations.ScaleDownVMs(sc.client, sc.logger, sc.resourceGroupName, vmsToDelete...)
if errList != nil {
errorMessage := ""
for element := errList.Front(); element != nil; element = element.Next() {
vmError, ok := element.Value.(*operations.VMScalingErrorDetails)
if ok {
error := fmt.Sprintf("Node '%s' failed to delete with error: '%s'", vmError.Name, vmError.Error.Error())
errorMessage = errorMessage + error
}
}
return fmt.Errorf(errorMessage)
}
return nil
}
} else {
vmssList, err := sc.client.ListVirtualMachineScaleSets(sc.resourceGroupName)
if err != nil {
log.Fatalln("failed to get vmss list in the resource group. Error: %s", err.Error())
}
for _, vmss := range *vmssList.Value {
poolName, nameSuffix, err := utils.VmssNameParts(*vmss.Name)
if err != nil || !strings.EqualFold(poolName, sc.agentPoolToScale) || !strings.EqualFold(nameSuffix, sc.nameSuffix) {
continue
}
currentNodeCount = int(*vmss.Sku.Capacity)
highestUsedIndex = 0
}
}
ctx := acsengine.Context{
Translator: &i18n.Translator{
Locale: sc.locale,
},
}
templateGenerator, err := acsengine.InitializeTemplateGenerator(ctx, sc.classicMode)
if err != nil {
log.Fatalln("failed to initialize template generator: %s", err.Error())
}
sc.containerService.Properties.AgentPoolProfiles = []*api.AgentPoolProfile{sc.agentPool}
template, parameters, _, err := templateGenerator.GenerateTemplate(sc.containerService, acsengine.DefaultGeneratorCode, false)
if err != nil {
log.Fatalf("error generating template %s: %s", sc.apiModelPath, err.Error())
os.Exit(1)
}
if template, err = transform.PrettyPrintArmTemplate(template); err != nil {
log.Fatalf("error pretty printing template: %s \n", err.Error())
}
templateJSON := make(map[string]interface{})
parametersJSON := make(map[string]interface{})
err = json.Unmarshal([]byte(template), &templateJSON)
if err != nil {
log.Fatalln(err)
}
err = json.Unmarshal([]byte(parameters), ¶metersJSON)
if err != nil {
log.Fatalln(err)
}
transformer := transform.Transformer{Translator: ctx.Translator}
// Our templates generate a range of nodes based on a count and an offset, so it is possible for there to be holes in the template.
// We therefore set the count in the template high enough to cover the whole range; if there are holes, that number will be larger than the desired count.
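// For example (hypothetical numbers): newDesiredAgentCount=5, highestUsedIndex=5, currentNodeCount=3
// gives countForTemplate = 5 + (5 + 1 - 3) = 8 below.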
countForTemplate := sc.newDesiredAgentCount
if highestUsedIndex != 0 {
countForTemplate += highestUsedIndex + 1 - currentNodeCount
}
addValue(parametersJSON, sc.agentPool.Name+"Count", countForTemplate)
switch orchestratorInfo.OrchestratorType {
case api.Kubernetes:
err = transformer.NormalizeForK8sVMASScalingUp(sc.logger, templateJSON)
if err != nil {
log.Fatalf("error tranforming the template for scaling template %s: %s", sc.apiModelPath, err.Error())
os.Exit(1)
}
if sc.agentPool.IsAvailabilitySets() {
addValue(parametersJSON, fmt.Sprintf("%sOffset", sc.agentPool.Name), highestUsedIndex+1)
}
case api.Swarm:
case api.SwarmMode:
case api.DCOS:
if sc.agentPool.IsAvailabilitySets() {
log.Fatalf("scaling isn't supported for orchestrator %s, with availability sets", orchestratorInfo.OrchestratorType)
os.Exit(1)
}
transformer.NormalizeForVMSSScaling(sc.logger, templateJSON)
}
random := rand.New(rand.NewSource(time.Now().UnixNano()))
deploymentSuffix := random.Int31()
_, err = sc.client.DeployTemplate(
sc.resourceGroupName,
fmt.Sprintf("%s-%d", sc.resourceGroupName, deploymentSuffix),
templateJSON,
parametersJSON,
nil)
if err != nil {
log.Fatalln(err)
}
apiloader := &api.Apiloader{
Translator: &i18n.Translator{
Locale: sc.locale,
},
}
var apiVersion string
sc.containerService, apiVersion, err = apiloader.LoadContainerServiceFromFile(sc.apiModelPath, false, true, nil)
if err != nil {
return err
}
sc.containerService.Properties.AgentPoolProfiles[sc.agentPoolIndex].Count = sc.newDesiredAgentCount
b, err := apiloader.SerializeContainerService(sc.containerService, apiVersion)
if err != nil {
return err
}
f := acsengine.FileSaver{
Translator: &i18n.Translator{
Locale: sc.locale,
},
}
return f.SaveFile(sc.deploymentDirectory, "apimodel.json", b)
}
type paramsMap map[string]interface{}
func addValue(m paramsMap, k string, v interface{}) {
m[k] = paramsMap{
"value": v,
}
}
func (sc *scaleCmd) drainNodes(vmsToDelete []string) error {
kubeConfig, err := acsengine.GenerateKubeConfig(sc.containerService.Properties, sc.location)
if err != nil {
log.Fatalf("failed to generate kube config: %v", err) // TODO: cleanup
}
masterURL := sc.masterFQDN
if !strings.HasPrefix(masterURL, "https://") {
masterURL = fmt.Sprintf("https://%s", masterURL)
}
numVmsToDrain := len(vmsToDelete)
errChan := make(chan *operations.VMScalingErrorDetails, numVmsToDrain)
defer close(errChan)
for _, vmName := range vmsToDelete {
go func(vmName string) {
err = operations.SafelyDrainNode(sc.client, sc.logger,
masterURL, kubeConfig, vmName, time.Duration(60)*time.Minute)
if err != nil {
log.Errorf("Failed to drain node %s, got error %v", vmName, err)
errChan <- &operations.VMScalingErrorDetails{Error: err, Name: vmName}
return
}
errChan <- nil
}(vmName)
}
for i := 0; i < numVmsToDrain; i++ {
errDetails := <-errChan
if errDetails != nil {
return fmt.Errorf("Node %q failed to drain with error: %v", errDetails.Name, errDetails.Error)
}
}
return nil
} |
return scaleCmd
}
|
_document.tsx | import * as React from 'react';
import Document, { Html, Head, Main, NextScript } from 'next/document';
export default class MyDocument extends Document {
render() {
return (
<Html lang="en"> | <link
rel="stylesheet"
href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700&display=swap"
/>
</Head>
<body>
<Main />
<NextScript />
</body>
</Html>
);
}
} | <Head>
{/* PWA primary color */}
<meta name="theme-color" /> |
file_io.go | package main
import (
"bufio"
"encoding/json"
"fmt"
"os"
)
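// ReadFile reads the file at path line by line and returns the concatenated bytes with newlines stripped.
// Open errors are printed rather than returned.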
func ReadFile(path string) []byte {
var f, err = os.Open(path)
defer f.Close()
if err != nil |
scanner := bufio.NewScanner(f)
var jsonData []byte
for scanner.Scan() {
jsonData = append(jsonData, scanner.Bytes()...)
}
return jsonData
}
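// GetParsedValues unmarshals JSON bytes into a generic map; on failure it prints the error and returns a nil map.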
func GetParsedValues(data []byte) map[string]interface{} {
var parsedValue map[string]interface{}
err := json.Unmarshal(data, &parsedValue)
if err != nil {
fmt.Println(err)
}
return parsedValue
}
| {
fmt.Println(err.Error())
} |
auto-ref.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo {
x: int,
}
|
impl Stuff for Foo {
fn printme(&self) {
io::println(fmt!("%d", self.x));
}
}
pub fn main() {
let x = Foo { x: 3 };
x.printme();
} | trait Stuff {
fn printme(&self);
} |
target_queue_test.go | package buildcontrol
import (
"context"
"fmt"
"testing"
"github.com/docker/distribution/reference"
"github.com/stretchr/testify/assert"
"github.com/windmilleng/tilt/internal/testutils"
"github.com/windmilleng/tilt/internal/container"
"github.com/windmilleng/tilt/internal/store"
"github.com/windmilleng/tilt/pkg/model"
)
func TestTargetQueue_Simple(t *testing.T) {
f := newTargetQueueFixture(t)
t1 := model.NewImageTarget(container.MustParseSelector("vigoda"))
s1 := store.BuildState{}
targets := []model.ImageTarget{t1}
buildStateSet := store.BuildStateSet{
t1.ID(): s1,
}
f.run(targets, buildStateSet)
expectedCalls := map[model.TargetID]fakeBuildHandlerCall{
t1.ID(): newFakeBuildHandlerCall(t1, s1, 1, []store.BuildResult{}),
}
assert.Equal(t, expectedCalls, f.handler.calls)
}
func TestTargetQueue_DepsBuilt(t *testing.T) {
f := newTargetQueueFixture(t)
fooTarget := model.NewImageTarget(container.MustParseSelector("foo"))
s1 := store.BuildState{LastSuccessfulResult: store.NewImageBuildResult(fooTarget.ID(), container.MustParseNamedTagged("foo:1234"))}
barTarget := model.NewImageTarget(container.MustParseSelector("bar")).WithDependencyIDs([]model.TargetID{fooTarget.ID()})
s2 := store.BuildState{}
targets := []model.ImageTarget{fooTarget, barTarget}
buildStateSet := store.BuildStateSet{
fooTarget.ID(): s1,
barTarget.ID(): s2,
}
f.run(targets, buildStateSet)
barCall := newFakeBuildHandlerCall(barTarget, s2, 1, []store.BuildResult{
store.NewImageBuildResult(fooTarget.ID(), store.ImageFromBuildResult(s1.LastSuccessfulResult)),
})
// foo has a valid last result, so only bar gets rebuilt
expectedCalls := map[model.TargetID]fakeBuildHandlerCall{
barTarget.ID(): barCall,
}
assert.Equal(t, expectedCalls, f.handler.calls)
}
func TestTargetQueue_DepsUnbuilt(t *testing.T) {
f := newTargetQueueFixture(t)
fooTarget := model.NewImageTarget(container.MustParseSelector("foo"))
s1 := store.BuildState{}
barTarget := model.NewImageTarget(container.MustParseSelector("bar")).WithDependencyIDs([]model.TargetID{fooTarget.ID()})
var s2 = store.BuildState{LastSuccessfulResult: store.NewImageBuildResult(
barTarget.ID(),
container.MustParseNamedTagged("bar:54321"),
)}
targets := []model.ImageTarget{fooTarget, barTarget}
buildStateSet := store.BuildStateSet{
fooTarget.ID(): s1,
barTarget.ID(): s2,
}
f.run(targets, buildStateSet)
fooCall := newFakeBuildHandlerCall(fooTarget, s1, 1, []store.BuildResult{})
// bar's dep is dirty, so bar should not get its old state
barCall := newFakeBuildHandlerCall(barTarget, store.BuildState{}, 2, []store.BuildResult{fooCall.result})
expectedCalls := map[model.TargetID]fakeBuildHandlerCall{
fooTarget.ID(): fooCall,
barTarget.ID(): barCall,
}
assert.Equal(t, expectedCalls, f.handler.calls)
}
func TestTargetQueue_IncrementalBuild(t *testing.T) {
f := newTargetQueueFixture(t)
fooTarget := model.NewImageTarget(container.MustParseSelector("foo"))
s1 := store.BuildState{
LastSuccessfulResult: store.NewImageBuildResult(
fooTarget.ID(),
container.MustParseNamedTagged("foo:1234"),
),
FilesChangedSet: map[string]bool{"hello.txt": true},
}
targets := []model.ImageTarget{fooTarget}
buildStateSet := store.BuildStateSet{fooTarget.ID(): s1}
f.run(targets, buildStateSet)
fooCall := newFakeBuildHandlerCall(fooTarget, s1, 1, []store.BuildResult{})
expectedCalls := map[model.TargetID]fakeBuildHandlerCall{
fooTarget.ID(): fooCall,
}
assert.Equal(t, expectedCalls, f.handler.calls)
}
func TestTargetQueue_CachedBuild(t *testing.T) {
f := newTargetQueueFixture(t)
fooTarget := model.NewImageTarget(container.MustParseSelector("foo"))
s1 := store.BuildState{
LastSuccessfulResult: store.NewImageBuildResult(
fooTarget.ID(),
container.MustParseNamedTagged("foo:1234"),
),
}
targets := []model.ImageTarget{fooTarget}
buildStateSet := store.BuildStateSet{fooTarget.ID(): s1}
f.run(targets, buildStateSet)
// last result is still valid, so handler doesn't get called at all
expectedCalls := map[model.TargetID]fakeBuildHandlerCall{}
assert.Equal(t, expectedCalls, f.handler.calls)
}
func TestTargetQueue_DepsBuiltButReaped(t *testing.T) |
func newFakeBuildHandlerCall(target model.ImageTarget, state store.BuildState, num int, depResults []store.BuildResult) fakeBuildHandlerCall {
return fakeBuildHandlerCall{
target: target,
state: state,
result: store.NewImageBuildResult(
target.ID(),
container.MustParseNamedTagged(fmt.Sprintf("%s:%d", target.ConfigurationRef.String(), num)),
),
depResults: depResults,
}
}
type fakeBuildHandlerCall struct {
target model.TargetSpec
state store.BuildState
depResults []store.BuildResult
result store.BuildResult
}
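// fakeBuildHandler stands in for the real build handler: it records every call and returns a deterministic image tag per build.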
type fakeBuildHandler struct {
buildNum int
calls map[model.TargetID]fakeBuildHandlerCall
}
func newFakeBuildHandler() *fakeBuildHandler {
return &fakeBuildHandler{
calls: make(map[model.TargetID]fakeBuildHandlerCall),
}
}
func (fbh *fakeBuildHandler) handle(target model.TargetSpec, state store.BuildState, depResults []store.BuildResult) (store.BuildResult, error) {
iTarget := target.(model.ImageTarget)
fbh.buildNum++
namedTagged := container.MustParseNamedTagged(fmt.Sprintf("%s:%d", iTarget.ConfigurationRef, fbh.buildNum))
result := store.NewImageBuildResult(target.ID(), namedTagged)
fbh.calls[target.ID()] = fakeBuildHandlerCall{target, state, depResults, result}
return result, nil
}
type targetQueueFixture struct {
t *testing.T
ctx context.Context
handler *fakeBuildHandler
missingImages []reference.NamedTagged
}
func newTargetQueueFixture(t *testing.T) *targetQueueFixture {
ctx, _, _ := testutils.CtxAndAnalyticsForTest()
return &targetQueueFixture{
t: t,
ctx: ctx,
handler: newFakeBuildHandler(),
}
}
func (f *targetQueueFixture) imageExists(ctx context.Context, namedTagged reference.NamedTagged) (b bool, e error) {
for _, ref := range f.missingImages {
if ref == namedTagged {
return false, nil
}
}
return true, nil
}
func (f *targetQueueFixture) setMissingImage(namedTagged reference.NamedTagged) {
f.missingImages = append(f.missingImages, namedTagged)
}
func (f *targetQueueFixture) run(targets []model.ImageTarget, buildStateSet store.BuildStateSet) {
tq, err := NewImageTargetQueue(f.ctx, targets, buildStateSet, f.imageExists)
if err != nil {
f.t.Fatal(err)
}
err = tq.RunBuilds(f.handler.handle)
if err != nil {
f.t.Fatal(err)
}
}
| {
f := newTargetQueueFixture(t)
fooTarget := model.NewImageTarget(container.MustParseSelector("foo"))
s1 := store.BuildState{LastSuccessfulResult: store.NewImageBuildResult(fooTarget.ID(), container.MustParseNamedTagged("foo:1234"))}
barTarget := model.NewImageTarget(container.MustParseSelector("bar")).WithDependencyIDs([]model.TargetID{fooTarget.ID()})
s2 := store.BuildState{}
targets := []model.ImageTarget{fooTarget, barTarget}
buildStateSet := store.BuildStateSet{
fooTarget.ID(): s1,
barTarget.ID(): s2,
}
f.setMissingImage(store.ImageFromBuildResult(s1.LastSuccessfulResult))
f.run(targets, buildStateSet)
fooCall := newFakeBuildHandlerCall(fooTarget, s1, 1, []store.BuildResult{})
barCall := newFakeBuildHandlerCall(barTarget, s2, 2, []store.BuildResult{
store.NewImageBuildResult(fooTarget.ID(), store.ImageFromBuildResult(fooCall.result)),
})
// foo has a valid last result, but its image is missing, so we have to rebuild it and its deps
expectedCalls := map[model.TargetID]fakeBuildHandlerCall{
fooTarget.ID(): fooCall,
barTarget.ID(): barCall,
}
assert.Equal(t, expectedCalls, f.handler.calls)
} |
init.go | // Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lang
import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/tpl/internal"
)
const name = "lang"
func | () {
f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
ctx := New(d, langs.GetTranslator(d.Language))
ns := &internal.TemplateFuncsNamespace{
Name: name,
Context: func(args ...interface{}) (interface{}, error) { return ctx, nil },
}
ns.AddMethodMapping(ctx.Translate,
[]string{"i18n", "T"},
[][2]string{},
)
ns.AddMethodMapping(ctx.FormatNumber,
nil,
[][2]string{
{`{{ 512.5032 | lang.FormatNumber 2 }}`, `512.50`},
},
)
ns.AddMethodMapping(ctx.FormatPercent,
nil,
[][2]string{
{`{{ 512.5032 | lang.FormatPercent 2 }}`, `512.50%`},
},
)
ns.AddMethodMapping(ctx.FormatCurrency,
nil,
[][2]string{
{`{{ 512.5032 | lang.FormatCurrency 2 "USD" }}`, `$512.50`},
},
)
ns.AddMethodMapping(ctx.FormatAccounting,
nil,
[][2]string{
{`{{ 512.5032 | lang.FormatAccounting 2 "NOK" }}`, `NOK512.50`},
},
)
ns.AddMethodMapping(ctx.FormatNumberCustom,
nil,
[][2]string{
{`{{ lang.FormatNumberCustom 2 12345.6789 }}`, `12,345.68`},
{`{{ lang.FormatNumberCustom 2 12345.6789 "- , ." }}`, `12.345,68`},
{`{{ lang.FormatNumberCustom 6 -12345.6789 "- ." }}`, `-12345.678900`},
{`{{ lang.FormatNumberCustom 0 -12345.6789 "- . ," }}`, `-12,346`},
{`{{ -98765.4321 | lang.FormatNumberCustom 2 }}`, `-98,765.43`},
},
)
return ns
}
internal.AddTemplateFuncsNamespace(f)
}
| init |
getUnitFromResultUnitString.test.ts | import getUnitFromResultUnitString from '../getUnitFromResultUnitString';
describe('getUnitFromResultUnitString', () => {
it('should return unit based on result unit string', () => {
// given
const resultUnitString = '[kg]';
// when
const { unit, error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(unit).toEqual([{ unit: 'kg', power: 1 }]);
expect(error).toEqual(null);
});
it('should not return an error when there are leading and trailing spaces', () => {
// given
const resultUnitString = ' [kg] ';
// when
const { error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(error).toEqual(null);
});
it('should return an error when there are spaces in between characters', () => {
// given
const resultUnitString = '[ kg]';
// when
const { error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(error).not.toEqual(null);
});
it('should return an error when there are characters before square brackets', () => {
// given
const resultUnitString = 'm[kg]';
// when
const { error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(error).not.toEqual(null);
});
it('should return an error when there are characters after square brackets', () => {
// given
const resultUnitString = '[m]kg';
// when
const { error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(error).not.toEqual(null);
});
| const resultUnitString = '[kg+mg]';
// when
const { error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(error).not.toEqual(null);
});
it('should return an error when the unit has adjacent operators', () => {
// given
const resultUnitString = '[kg/*m]';
// when
const { error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(error).not.toEqual(null);
});
it('should return unit based on a complex result unit string', () => {
// given
const resultUnitString = '[kg*N^2/m/s]';
// when
const { unit, error } = getUnitFromResultUnitString(resultUnitString);
// then
expect(unit).toEqual([
{ unit: 'kg', power: 1 },
{ unit: 'N', power: 2 },
{ unit: 'm', power: -1 },
{ unit: 's', power: -1 },
]);
expect(error).toEqual(null);
});
}); | it('should return an error when the unit has invalid characters', () => {
// given |
parser_test_result.py | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ParserTestResult(object):
| """
ParserTestResult
"""
def __init__(self, **kwargs):
"""
Initializes a new ParserTestResult object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param additional_info:
The value to assign to the additional_info property of this ParserTestResult.
:type additional_info: dict(str, str)
:param entries:
The value to assign to the entries property of this ParserTestResult.
:type entries: list[oci.log_analytics.models.AbstractParserTestResultLogEntry]
:param example_content:
The value to assign to the example_content property of this ParserTestResult.
:type example_content: str
:param lines:
The value to assign to the lines property of this ParserTestResult.
:type lines: list[oci.log_analytics.models.AbstractParserTestResultLogLine]
:param named_capture_groups:
The value to assign to the named_capture_groups property of this ParserTestResult.
:type named_capture_groups: list[str]
"""
self.swagger_types = {
'additional_info': 'dict(str, str)',
'entries': 'list[AbstractParserTestResultLogEntry]',
'example_content': 'str',
'lines': 'list[AbstractParserTestResultLogLine]',
'named_capture_groups': 'list[str]'
}
self.attribute_map = {
'additional_info': 'additionalInfo',
'entries': 'entries',
'example_content': 'exampleContent',
'lines': 'lines',
'named_capture_groups': 'namedCaptureGroups'
}
self._additional_info = None
self._entries = None
self._example_content = None
self._lines = None
self._named_capture_groups = None
@property
def additional_info(self):
"""
Gets the additional_info of this ParserTestResult.
Additional information for the test result.
:return: The additional_info of this ParserTestResult.
:rtype: dict(str, str)
"""
return self._additional_info
@additional_info.setter
def additional_info(self, additional_info):
"""
Sets the additional_info of this ParserTestResult.
Additional information for the test result.
:param additional_info: The additional_info of this ParserTestResult.
:type: dict(str, str)
"""
self._additional_info = additional_info
@property
def entries(self):
"""
Gets the entries of this ParserTestResult.
The test result log entries.
:return: The entries of this ParserTestResult.
:rtype: list[oci.log_analytics.models.AbstractParserTestResultLogEntry]
"""
return self._entries
@entries.setter
def entries(self, entries):
"""
Sets the entries of this ParserTestResult.
The test result log entries.
:param entries: The entries of this ParserTestResult.
:type: list[oci.log_analytics.models.AbstractParserTestResultLogEntry]
"""
self._entries = entries
@property
def example_content(self):
"""
Gets the example_content of this ParserTestResult.
The example content.
:return: The example_content of this ParserTestResult.
:rtype: str
"""
return self._example_content
@example_content.setter
def example_content(self, example_content):
"""
Sets the example_content of this ParserTestResult.
The example content.
:param example_content: The example_content of this ParserTestResult.
:type: str
"""
self._example_content = example_content
@property
def lines(self):
"""
Gets the lines of this ParserTestResult.
The test result log lines.
:return: The lines of this ParserTestResult.
:rtype: list[oci.log_analytics.models.AbstractParserTestResultLogLine]
"""
return self._lines
@lines.setter
def lines(self, lines):
"""
Sets the lines of this ParserTestResult.
The test result log lines.
:param lines: The lines of this ParserTestResult.
:type: list[oci.log_analytics.models.AbstractParserTestResultLogLine]
"""
self._lines = lines
@property
def named_capture_groups(self):
"""
Gets the named_capture_groups of this ParserTestResult.
The named capture groups.
:return: The named_capture_groups of this ParserTestResult.
:rtype: list[str]
"""
return self._named_capture_groups
@named_capture_groups.setter
def named_capture_groups(self, named_capture_groups):
"""
Sets the named_capture_groups of this ParserTestResult.
The named capture groups.
:param named_capture_groups: The named_capture_groups of this ParserTestResult.
:type: list[str]
"""
self._named_capture_groups = named_capture_groups
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other |
|
forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SelectField, IntegerField
from wtforms.validators import InputRequired, EqualTo, Regexp, Length, NumberRange, Optional, Email
from reminder.custom_wtforms import MxRecordValidator
class NewUserForm(FlaskForm):
"""
Validators for a new user account.
"""
username = StringField(validators=[InputRequired(),
Length(min=3, max=40),
Regexp(regex='^[a-zA-Z0-9][a-zA-Z0-9\._-]{1,39}[a-zA-Z0-9]$',
message='Username should contain chars (min 3): a-z, A-Z, 0-9, . _ -')])
email = StringField(validators=[InputRequired(),
Email(message='Please enter valid email address'),
Length(max=70),
MxRecordValidator()])
role = SelectField(choices=[('user', 'User'), ('admin', 'Admin')])
access = SelectField(label='Can log in?',
choices=[('False', 'No'), ('True', 'Yes')])
pass_reset = SelectField(label='Change password on next login?',
choices=[('False', 'No'), ('True', 'Yes')])
password = PasswordField(validators=[Regexp(regex='^(?=.*[A-Za-z])(?=.*\d)(?=.*[@$!%*#?&])[A-Za-z\d@$!%*#?&]'
'{8,40}$',
message='Password must contain minimum 8 characters, at least one '
'letter, one number and one special character')])
password2 = PasswordField(label='Confirm password',
validators=[EqualTo('password')])
class EditUserForm(NewUserForm):
|
class NotifyForm(FlaskForm):
"""
Validators for notification settings
"""
notify_status = StringField(label='Notification status',
validators=[Regexp(regex='^on$'), Optional()])
notify_unit = SelectField('Notification interval time units',
choices=[('hours', 'hours'), ('minutes', 'minutes'), ('seconds', 'seconds')])
notify_interval = IntegerField(label='Notification interval',
validators=[InputRequired(), NumberRange(min=1)])
mail_server = StringField(label='Mail server',
validators=[InputRequired(), Length(max=70)])
mail_port = IntegerField(label='Mail port',
validators=[InputRequired(), NumberRange(min=1)])
mail_security = SelectField(label='Mail security',
choices=[('tls', 'TLS'), ('ssl', 'SSL')])
mail_username = StringField(label='Mail username',
validators=[InputRequired(), Length(max=70)])
mail_password = PasswordField(label='Mail Password') | """
Validators for the user being edited
"""
# the password field can be blank (empty) or match the regex pattern
password = PasswordField(label='Password',
validators=[Regexp(regex='^(?=.*[A-Za-z])(?=.*\d)(?=.*[@$!%*#?&])[A-Za-z\d@$!%*#?&]'
'{8,40}$|^$',
message='Password must contain minimum 8 characters, at least one '
'letter, one number and one special character')])
password2 = PasswordField(label='Confirm password', validators=[EqualTo('password')]) |
test_sync_reports_rotate.py | # coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
ref: https://github.com/swagger-api/swagger-codegen
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.sync_reports_rotate import SyncReportsRotate
class TestSyncReportsRotate(unittest.TestCase):
""" SyncReportsRotate unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testSyncReportsRotate(self):
"""
Test SyncReportsRotate
"""
model = swagger_client.models.sync_reports_rotate.SyncReportsRotate()
|
if __name__ == '__main__':
unittest.main() |
|
index.js | export { default as Select } from './select' |
||
octave.py | """
Octave (and Matlab) code printer
The `OctaveCodePrinter` converts SymPy expressions into Octave expressions.
It uses a subset of the Octave language for Matlab compatibility.
A complete code generator, which uses `octave_code` extensively, can be found
in `sympy.utilities.codegen`. The `codegen` module can be used to generate
complete source code files.
"""
from __future__ import print_function, division
from sympy.core import Mul, Pow, S, Rational
from sympy.core.compatibility import string_types, range
from sympy.core.mul import _keep_coeff
from sympy.codegen.ast import Assignment
from sympy.printing.codeprinter import CodePrinter
from sympy.printing.precedence import precedence
from re import search
# List of known functions. First, those that have the same name in
# SymPy and Octave. This is almost certainly incomplete!
known_fcns_src1 = ["sin", "cos", "tan", "cot", "sec", "csc",
"asin", "acos", "acot", "atan", "atan2", "asec", "acsc",
"sinh", "cosh", "tanh", "coth", "csch", "sech",
"asinh", "acosh", "atanh", "acoth", "asech", "acsch",
"erfc", "erfi", "erf", "erfinv", "erfcinv",
"besseli", "besselj", "besselk", "bessely",
"exp", "factorial", "floor", "fresnelc", "fresnels",
"gamma", "log", "polylog", "sign", "zeta"]
# These functions have different names ("Sympy": "Octave"), more
# generally a mapping to (argument_conditions, octave_function).
known_fcns_src2 = {
"Abs": "abs",
"ceiling": "ceil",
"Chi": "coshint",
"Ci": "cosint",
"conjugate": "conj",
"DiracDelta": "dirac",
"Heaviside": "heaviside",
"laguerre": "laguerreL",
"li": "logint",
"loggamma": "gammaln",
"polygamma": "psi",
"Shi": "sinhint",
"Si": "sinint",
}
class OctaveCodePrinter(CodePrinter):
"""
A printer to convert expressions to strings of Octave/Matlab code.
"""
printmethod = "_octave"
language = "Octave"
_operators = {
'and': '&',
'or': '|',
'not': '~',
}
_default_settings = {
'order': None,
'full_prec': 'auto',
'precision': 16,
'user_functions': {},
'human': True,
'contract': True,
'inline': True,
}
# Note: contract is for expressing tensors as loops (if True), or just
# assignment (if False). FIXME: this should be looked at more carefully
# for Octave.
def __init__(self, settings={}):
super(OctaveCodePrinter, self).__init__(settings)
self.known_functions = dict(zip(known_fcns_src1, known_fcns_src1))
self.known_functions.update(dict(known_fcns_src2))
userfuncs = settings.get('user_functions', {})
self.known_functions.update(userfuncs)
def _rate_index_position(self, p):
return p*5
def _get_statement(self, codestring):
return "%s;" % codestring
def _get_comment(self, text):
return "% {0}".format(text)
def _declare_number_const(self, name, value):
return "{0} = {1};".format(name, value)
def _format_code(self, lines):
return self.indent_code(lines)
def _traverse_matrix_indices(self, mat):
# Octave uses Fortran order (column-major)
rows, cols = mat.shape
return ((i, j) for j in range(cols) for i in range(rows))
def _get_loop_opening_ending(self, indices):
open_lines = []
close_lines = []
for i in indices:
# Octave arrays start at 1 and end at dimension
var, start, stop = map(self._print,
[i.label, i.lower + 1, i.upper + 1])
open_lines.append("for %s = %s:%s" % (var, start, stop))
close_lines.append("end")
return open_lines, close_lines
def _print_Mul(self, expr):
# print complex numbers nicely in Octave
if (expr.is_number and expr.is_imaginary and
expr.as_coeff_Mul()[0].is_integer):
return "%si" % self._print(-S.ImaginaryUnit*expr)
# cribbed from str.py
prec = precedence(expr)
c, e = expr.as_coeff_Mul()
if c < 0:
expr = _keep_coeff(-c, e)
sign = "-"
else:
sign = ""
a = [] # items in the numerator
b = [] # items that are in the denominator (if any)
if self.order not in ('old', 'none'):
args = expr.as_ordered_factors()
else:
# use make_args in case expr was something like -x -> x
args = Mul.make_args(expr)
# Gather args for numerator/denominator
for item in args:
if (item.is_commutative and item.is_Pow and item.exp.is_Rational
and item.exp.is_negative):
if item.exp != -1:
b.append(Pow(item.base, -item.exp, evaluate=False))
else:
b.append(Pow(item.base, -item.exp))
elif item.is_Rational and item is not S.Infinity:
if item.p != 1:
a.append(Rational(item.p))
if item.q != 1:
b.append(Rational(item.q))
else:
a.append(item)
a = a or [S.One]
a_str = [self.parenthesize(x, prec) for x in a]
b_str = [self.parenthesize(x, prec) for x in b]
# from here it differs from str.py to deal with "*" and ".*"
def multjoin(a, a_str):
# here we probably are assuming the constants will come first
r = a_str[0]
for i in range(1, len(a)):
mulsym = '*' if a[i-1].is_number else '.*'
r = r + mulsym + a_str[i]
return r
if len(b) == 0:
return sign + multjoin(a, a_str)
elif len(b) == 1:
divsym = '/' if b[0].is_number else './'
return sign + multjoin(a, a_str) + divsym + b_str[0]
else:
divsym = '/' if all([bi.is_number for bi in b]) else './'
return (sign + multjoin(a, a_str) +
divsym + "(%s)" % multjoin(b, b_str))
def _print_Pow(self, expr):
powsymbol = '^' if all([x.is_number for x in expr.args]) else '.^'
PREC = precedence(expr)
if expr.exp == S.Half:
return "sqrt(%s)" % self._print(expr.base)
if expr.is_commutative:
if expr.exp == -S.Half:
sym = '/' if expr.base.is_number else './'
return "1" + sym + "sqrt(%s)" % self._print(expr.base)
if expr.exp == -S.One:
sym = '/' if expr.base.is_number else './'
return "1" + sym + "%s" % self.parenthesize(expr.base, PREC)
return '%s%s%s' % (self.parenthesize(expr.base, PREC), powsymbol,
self.parenthesize(expr.exp, PREC))
def _print_MatPow(self, expr):
PREC = precedence(expr)
return '%s^%s' % (self.parenthesize(expr.base, PREC),
self.parenthesize(expr.exp, PREC))
def _print_Pi(self, expr):
return 'pi'
def _print_ImaginaryUnit(self, expr):
return "1i"
def _print_Exp1(self, expr):
return "exp(1)"
def _print_GoldenRatio(self, expr):
# FIXME: how to do better, e.g., for octave_code(2*GoldenRatio)?
#return self._print((1+sqrt(S(5)))/2)
return "(1+sqrt(5))/2"
def _print_NumberSymbol(self, expr):
if self._settings["inline"]:
return self._print(expr.evalf(self._settings["precision"]))
else:
# assign to a variable, perhaps more readable for longer program
return super(OctaveCodePrinter, self)._print_NumberSymbol(expr)
def _print_Assignment(self, expr):
from sympy.functions.elementary.piecewise import Piecewise
from sympy.tensor.indexed import IndexedBase
# Copied from codeprinter, but remove special MatrixSymbol treatment
lhs = expr.lhs
rhs = expr.rhs
# We special case assignments that take multiple lines
if not self._settings["inline"] and isinstance(expr.rhs, Piecewise):
# Here we modify Piecewise so each expression is now
# an Assignment, and then continue on the print.
expressions = []
conditions = []
for (e, c) in rhs.args:
expressions.append(Assignment(lhs, e))
conditions.append(c)
temp = Piecewise(*zip(expressions, conditions))
return self._print(temp)
if self._settings["contract"] and (lhs.has(IndexedBase) or
rhs.has(IndexedBase)):
# Here we check if there is looping to be done, and if so
# print the required loops.
return self._doprint_loops(rhs, lhs)
else:
lhs_code = self._print(lhs)
rhs_code = self._print(rhs)
return self._get_statement("%s = %s" % (lhs_code, rhs_code))
def _print_Infinity(self, expr):
return 'inf'
def _print_NegativeInfinity(self, expr):
return '-inf'
def _print_NaN(self, expr):
return 'NaN'
def _print_list(self, expr):
return '{' + ', '.join(self._print(a) for a in expr) + '}'
_print_tuple = _print_list
_print_Tuple = _print_list
def _print_BooleanTrue(self, expr):
return "true"
def _print_BooleanFalse(self, expr):
return "false"
def _print_bool(self, expr):
return str(expr).lower()
# Could generate quadrature code for definite Integrals?
#_print_Integral = _print_not_supported
def _print_MatrixBase(self, A):
# Handle zero dimensions:
if (A.rows, A.cols) == (0, 0):
return '[]'
elif A.rows == 0 or A.cols == 0:
return 'zeros(%s, %s)' % (A.rows, A.cols)
elif (A.rows, A.cols) == (1, 1):
# Octave does not distinguish between scalars and 1x1 matrices
return self._print(A[0, 0])
elif A.rows == 1:
return "[%s]" % A.table(self, rowstart='', rowend='', colsep=' ')
elif A.cols == 1:
# note .table would unnecessarily equispace the rows
return "[%s]" % "; ".join([self._print(a) for a in A])
return "[%s]" % A.table(self, rowstart='', rowend='',
rowsep=';\n', colsep=' ')
def _print_SparseMatrix(self, A):
from sympy.matrices import Matrix
L = A.col_list()
# make row vectors of the indices and entries
I = Matrix([[k[0] + 1 for k in L]])
J = Matrix([[k[1] + 1 for k in L]])
AIJ = Matrix([[k[2] for k in L]])
return "sparse(%s, %s, %s, %s, %s)" % (self._print(I), self._print(J),
self._print(AIJ), A.rows, A.cols)
# FIXME: Str/CodePrinter could define each of these to call the _print
# method from higher up the class hierarchy (see _print_NumberSymbol).
# Then subclasses like us would not need to repeat all this.
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
_print_MatrixBase
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_SparseMatrix
def _print_MatrixElement(self, expr):
return self._print(expr.parent) + '(%s, %s)'%(expr.i+1, expr.j+1)
def _print_MatrixSlice(self, expr):
def strslice(x, lim):
l = x[0] + 1
h = x[1]
step = x[2]
lstr = self._print(l)
hstr = 'end' if h == lim else self._print(h)
if step == 1:
if l == 1 and h == lim:
return ':'
if l == h:
return lstr
else:
return lstr + ':' + hstr
else:
return ':'.join((lstr, self._print(step), hstr))
return (self._print(expr.parent) + '(' +
strslice(expr.rowslice, expr.parent.shape[0]) + ', ' +
strslice(expr.colslice, expr.parent.shape[1]) + ')')
def _print_Indexed(self, expr):
inds = [ self._print(i) for i in expr.indices ]
return "%s(%s)" % (self._print(expr.base.label), ", ".join(inds))
def _print_Idx(self, expr):
return self._print(expr.label)
def _print_Identity(self, expr):
return "eye(%s)" % self._print(expr.shape[0])
def _print_uppergamma(self, expr):
return "gammainc(%s, %s, 'upper')" % (self._print(expr.args[1]),
self._print(expr.args[0]))
def _print_lowergamma(self, expr):
return "gammainc(%s, %s, 'lower')" % (self._print(expr.args[1]),
self._print(expr.args[0]))
def _print_sinc(self, expr):
#Note: Divide by pi because Octave implements normalized sinc function.
return "sinc(%s)" % self._print(expr.args[0]/S.Pi)
def _print_hankel1(self, expr):
return "besselh(%s, 1, %s)" % (self._print(expr.order),
self._print(expr.argument))
def _print_hankel2(self, expr):
return "besselh(%s, 2, %s)" % (self._print(expr.order), |
# Note: as of 2015, Octave doesn't have spherical Bessel functions
def _print_jn(self, expr):
from sympy.functions import sqrt, besselj
x = expr.argument
expr2 = sqrt(S.Pi/(2*x))*besselj(expr.order + S.Half, x)
return self._print(expr2)
def _print_yn(self, expr):
from sympy.functions import sqrt, bessely
x = expr.argument
expr2 = sqrt(S.Pi/(2*x))*bessely(expr.order + S.Half, x)
return self._print(expr2)
def _print_airyai(self, expr):
return "airy(0, %s)" % self._print(expr.args[0])
def _print_airyaiprime(self, expr):
return "airy(1, %s)" % self._print(expr.args[0])
def _print_airybi(self, expr):
return "airy(2, %s)" % self._print(expr.args[0])
def _print_airybiprime(self, expr):
return "airy(3, %s)" % self._print(expr.args[0])
def _print_Piecewise(self, expr):
if expr.args[-1].cond != True:
# We need the last conditional to be a True, otherwise the resulting
# function may not return a result.
raise ValueError("All Piecewise expressions must contain an "
"(expr, True) statement to be used as a default "
"condition. Without one, the generated "
"expression may not evaluate to anything under "
"some condition.")
lines = []
if self._settings["inline"]:
# Express each (cond, expr) pair in a nested Horner form:
# (condition) .* (expr) + (not cond) .* (<others>)
# Expressions that result in multiple statements won't work here.
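# e.g. Piecewise((x + 1, x > 0), (x, True)) prints as ((x > 0).*(x + 1) + (~(x > 0)).*(x))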
ecpairs = ["({0}).*({1}) + (~({0})).*(".format
(self._print(c), self._print(e))
for e, c in expr.args[:-1]]
elast = "%s" % self._print(expr.args[-1].expr)
pw = " ...\n".join(ecpairs) + elast + ")"*len(ecpairs)
# Note: we currently need these outer brackets for 2*pw. Would be
# nicer to teach parenthesize() to do this for us when needed!
return "(" + pw + ")"
else:
for i, (e, c) in enumerate(expr.args):
if i == 0:
lines.append("if (%s)" % self._print(c))
elif i == len(expr.args) - 1 and c == True:
lines.append("else")
else:
lines.append("elseif (%s)" % self._print(c))
code0 = self._print(e)
lines.append(code0)
if i == len(expr.args) - 1:
lines.append("end")
return "\n".join(lines)
def indent_code(self, code):
"""Accepts a string of code or a list of code lines"""
# code mostly copied from ccode
if isinstance(code, string_types):
code_lines = self.indent_code(code.splitlines(True))
return ''.join(code_lines)
tab = " "
inc_regex = ('^function ', '^if ', '^elseif ', '^else$', '^for ')
dec_regex = ('^end$', '^elseif ', '^else$')
# pre-strip left-space from the code
code = [ line.lstrip(' \t') for line in code ]
increase = [ int(any([search(re, line) for re in inc_regex]))
for line in code ]
decrease = [ int(any([search(re, line) for re in dec_regex]))
for line in code ]
pretty = []
level = 0
for n, line in enumerate(code):
if line == '' or line == '\n':
pretty.append(line)
continue
level -= decrease[n]
pretty.append("%s%s" % (tab*level, line))
level += increase[n]
return pretty
def octave_code(expr, assign_to=None, **settings):
r"""Converts `expr` to a string of Octave (or Matlab) code.
The string uses a subset of the Octave language for Matlab compatibility.
Parameters
==========
expr : Expr
A sympy expression to be converted.
assign_to : optional
When given, the argument is used as the name of the variable to which
the expression is assigned. Can be a string, ``Symbol``,
``MatrixSymbol``, or ``Indexed`` type. This can be helpful for
expressions that generate multi-line statements.
precision : integer, optional
The precision for numbers such as pi [default=16].
user_functions : dict, optional
A dictionary where keys are ``FunctionClass`` instances and values are
their string representations. Alternatively, the dictionary value can
be a list of tuples i.e. [(argument_test, cfunction_string)]. See
below for examples.
human : bool, optional
If True, the result is a single string that may contain some constant
declarations for the number symbols. If False, the same information is
returned in a tuple of (symbols_to_declare, not_supported_functions,
code_text). [default=True].
contract: bool, optional
If True, ``Indexed`` instances are assumed to obey tensor contraction
rules and the corresponding nested loops over indices are generated.
Setting contract=False will not generate loops, instead the user is
responsible to provide values for the indices in the code.
[default=True].
inline: bool, optional
If True, we try to create single-statement code instead of multiple
statements. [default=True].
Examples
========
>>> from sympy import octave_code, symbols, sin, pi
>>> x = symbols('x')
>>> octave_code(sin(x).series(x).removeO())
'x.^5/120 - x.^3/6 + x'
>>> from sympy import Rational, ceiling, Abs
>>> x, y, tau = symbols("x, y, tau")
>>> octave_code((2*tau)**Rational(7, 2))
'8*sqrt(2)*tau.^(7/2)'
Note that element-wise (Hadamard) operations are used by default between
symbols. This is because its very common in Octave to write "vectorized"
code. It is harmless if the values are scalars.
>>> octave_code(sin(pi*x*y), assign_to="s")
's = sin(pi*x.*y);'
If you need a matrix product "*" or matrix power "^", you can specify the
symbol as a ``MatrixSymbol``.
>>> from sympy import Symbol, MatrixSymbol
>>> n = Symbol('n', integer=True, positive=True)
>>> A = MatrixSymbol('A', n, n)
>>> octave_code(3*pi*A**3)
'(3*pi)*A^3'
This class uses several rules to decide which symbol to use for a product.
Pure numbers use "*", Symbols use ".*" and MatrixSymbols use "*".
A HadamardProduct can be used to specify componentwise multiplication ".*"
of two MatrixSymbols. There is currently no easy way to specify
scalar symbols, so sometimes the code might have some minor cosmetic
issues. For example, suppose x and y are scalars and A is a Matrix, then
while a human programmer might write "(x^2*y)*A^3", we generate:
>>> octave_code(x**2*y*A**3)
'(x.^2.*y)*A^3'
Matrices are supported using Octave inline notation. When using
``assign_to`` with matrices, the name can be specified either as a string
or as a ``MatrixSymbol``. The dimensions must align in the latter case.
>>> from sympy import Matrix, MatrixSymbol
>>> mat = Matrix([[x**2, sin(x), ceiling(x)]])
>>> octave_code(mat, assign_to='A')
'A = [x.^2 sin(x) ceil(x)];'
``Piecewise`` expressions are implemented with logical masking by default.
Alternatively, you can pass "inline=False" to use if-else conditionals.
Note that if the ``Piecewise`` lacks a default term, represented by
``(expr, True)`` then an error will be thrown. This is to prevent
generating an expression that may not evaluate to anything.
>>> from sympy import Piecewise
>>> pw = Piecewise((x + 1, x > 0), (x, True))
>>> octave_code(pw, assign_to=tau)
'tau = ((x > 0).*(x + 1) + (~(x > 0)).*(x));'
Note that any expression that can be generated normally can also exist
inside a Matrix:
>>> mat = Matrix([[x**2, pw, sin(x)]])
>>> octave_code(mat, assign_to='A')
'A = [x.^2 ((x > 0).*(x + 1) + (~(x > 0)).*(x)) sin(x)];'
Custom printing can be defined for certain types by passing a dictionary of
"type" : "function" to the ``user_functions`` kwarg. Alternatively, the
dictionary value can be a list of tuples i.e., [(argument_test,
cfunction_string)]. This can be used to call a custom Octave function.
>>> from sympy import Function
>>> f = Function('f')
>>> g = Function('g')
>>> custom_functions = {
... "f": "existing_octave_fcn",
... "g": [(lambda x: x.is_Matrix, "my_mat_fcn"),
... (lambda x: not x.is_Matrix, "my_fcn")]
... }
>>> mat = Matrix([[1, x]])
>>> octave_code(f(x) + g(x) + g(mat), user_functions=custom_functions)
'existing_octave_fcn(x) + my_fcn(x) + my_mat_fcn([1 x])'
Support for loops is provided through ``Indexed`` types. With
``contract=True`` these expressions will be turned into loops, whereas
``contract=False`` will just print the assignment expression that should be
looped over:
>>> from sympy import Eq, IndexedBase, Idx, ccode
>>> len_y = 5
>>> y = IndexedBase('y', shape=(len_y,))
>>> t = IndexedBase('t', shape=(len_y,))
>>> Dy = IndexedBase('Dy', shape=(len_y-1,))
>>> i = Idx('i', len_y-1)
>>> e = Eq(Dy[i], (y[i+1]-y[i])/(t[i+1]-t[i]))
>>> octave_code(e.rhs, assign_to=e.lhs, contract=False)
'Dy(i) = (y(i + 1) - y(i))./(t(i + 1) - t(i));'
"""
return OctaveCodePrinter(settings).doprint(expr, assign_to)
def print_octave_code(expr, **settings):
"""Prints the Octave (or Matlab) representation of the given expression.
See `octave_code` for the meaning of the optional arguments.
"""
print(octave_code(expr, **settings)) | self._print(expr.argument))
|
activity_instance_dto.rs | /*
* Camunda BPM REST API
*
* OpenApi Spec for Camunda BPM REST API.
*
* The version of the OpenAPI document: 7.14.0
*
* Generated by: https://openapi-generator.tech
*/
/// ActivityInstanceDto : A JSON object corresponding to the Activity Instance tree of the given process instance.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActivityInstanceDto {
/// The id of the activity instance.
#[serde(rename = "id", skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
/// The id of the parent activity instance, for example a sub process instance.
#[serde(rename = "parentActivityInstanceId", skip_serializing_if = "Option::is_none")]
pub parent_activity_instance_id: Option<String>,
/// The id of the activity.
#[serde(rename = "activityId", skip_serializing_if = "Option::is_none")]
pub activity_id: Option<String>,
/// The name of the activity
#[serde(rename = "activityName", skip_serializing_if = "Option::is_none")]
pub activity_name: Option<String>,
/// The type of activity (corresponds to the XML element name in the BPMN 2.0, e.g., 'userTask')
#[serde(rename = "activityType", skip_serializing_if = "Option::is_none")]
pub activity_type: Option<String>,
/// The id of the process instance this activity instance is part of.
#[serde(rename = "processInstanceId", skip_serializing_if = "Option::is_none")]
pub process_instance_id: Option<String>,
/// The id of the process definition.
#[serde(rename = "processDefinitionId", skip_serializing_if = "Option::is_none")]
pub process_definition_id: Option<String>,
/// A list of child activity instances.
#[serde(rename = "childActivityInstances", skip_serializing_if = "Option::is_none")]
pub child_activity_instances: Option<Vec<crate::models::ActivityInstanceDto>>,
/// A list of child transition instances. A transition instance represents an execution waiting in an asynchronous continuation.
#[serde(rename = "childTransitionInstances", skip_serializing_if = "Option::is_none")]
pub child_transition_instances: Option<Vec<crate::models::TransitionInstanceDto>>,
/// A list of execution ids.
#[serde(rename = "executionIds", skip_serializing_if = "Option::is_none")]
pub execution_ids: Option<Vec<String>>,
/// A list of incident ids.
#[serde(rename = "incidentIds", skip_serializing_if = "Option::is_none")]
pub incident_ids: Option<Vec<String>>,
/// A list of JSON objects containing incident specific properties: * `id`: the id of the incident * `activityId`: the activity id in which the incident occurred
#[serde(rename = "incidents", skip_serializing_if = "Option::is_none")]
pub incidents: Option<Vec<crate::models::ActivityInstanceIncidentDto>>,
}
impl ActivityInstanceDto {
/// A JSON object corresponding to the Activity Instance tree of the given process instance.
pub fn new() -> ActivityInstanceDto { | parent_activity_instance_id: None,
activity_id: None,
activity_name: None,
activity_type: None,
process_instance_id: None,
process_definition_id: None,
child_activity_instances: None,
child_transition_instances: None,
execution_ids: None,
incident_ids: None,
incidents: None,
}
}
} | ActivityInstanceDto {
id: None, |
CirculantGraphs.py | import numpy as np
import matplotlib.pyplot as plt
import scipy.sparse as sparse
import sys
sys.path.append("..")
from Laplacian import *
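# Sparse adjacency matrix of a circulant graph on N nodes: ring edges i->i+1 and i->i-1,
# plus an edge i->i+lag (mod N) for each of the given lags.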
def getCirculantAdj(N, lags):
#Setup circular parts
I = range(N)*(len(lags)+2)
J = range(1, N+1) + range(-1, N-1)
J[N-1] = 0
J[N] = N-1
for lag in lags:
J = J + (np.mod(np.arange(N) + lag, N)).tolist()
V = np.ones(len(I))
return sparse.coo_matrix((V, (I, J)), shape=(N, N)).tocsr()
def getOneOnK(N, k):
lags = [i*N/k for i in range(1, k)]
return getCirculantAdj(N, lags)
def getCircleEigs(N):
lambdas = np.zeros(N)
for i in range(1, N/2+1):
val = 2 - 2*np.cos(2*np.pi*i/N)
i1 = i*2-1
i2 = i*2
lambdas[i1] = val
if i2 < N:
lambdas[i2] = val
return lambdas
def getMoebiusEigs(N):
|
def get3WayEigs(N):
lambdas = np.zeros(N)
for i in range(1, N/2+1):
val = 4 - 2*np.cos(2*np.pi*i/N) - 2*np.cos(2*np.pi*i/3)
i1 = i*2-1
i2 = i*2
lambdas[i1] = val
if i2 < N:
lambdas[i2] = val
return (lambdas, np.sort(lambdas))
if __name__ == '__main__':
N = 100
A = getOneOnK(N, 2)
#A = getCirculantAdj(N, [30, 60, 80])
A = A.toarray()
(w, v, L) = getLaplacianEigsDense(A, A.shape[0])
(lambdas, lambdassorted) = get3WayEigs(N)
plt.figure(figsize=(15, 4))
plt.subplot(132)
plt.plot(lambdas)
plt.title("Eigenvalues")
plt.xlabel("Eigenvalue Number")
plt.ylabel("Eigenvalue")
# plt.subplot(224)
# plt.scatter(w, lambdassorted)
# plt.xlabel("Numerically Computed")
# plt.ylabel("Analytic")
# plt.axis('equal')
# plt.title("Checking accuracy")
plt.subplot(131)
plt.imshow(A, interpolation = 'nearest', cmap = 'gray')
plt.title("Adjacency Matrix")
plt.subplot(133)
plt.imshow(v, cmap = 'afmhot', aspect = 'auto', interpolation = 'nearest')
plt.xlabel("k-th Smallest Eigenvector")
plt.title("Eigenvectors")
plt.savefig("Eigs.svg", bbox_inches = 'tight')
| lambdas = np.zeros(N)
for i in range(1, N/2+1):
val = 3 - 2*np.cos(2*np.pi*i/N) - (-1)**i
i1 = i*2-1
i2 = i*2
lambdas[i1] = val
if i2 < N:
lambdas[i2] = val
return (lambdas, np.sort(lambdas)) |
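# Illustrative sketch (standalone, not part of CirculantGraphs.py above): the
# closed-form spectrum used in getCircleEigs comes from the circulant structure
# of the cycle Laplacian, lambda_k = 2 - 2*cos(2*pi*k/N). The check below
# compares it against a dense eigensolver; all helper names here are our own.
import numpy as np

def cycle_laplacian(N):
    # degree 2 on the diagonal, -1 towards each ring neighbour
    A = np.roll(np.eye(N), 1, axis=1) + np.roll(np.eye(N), -1, axis=1)
    return 2 * np.eye(N) - A

N = 12
numeric = np.sort(np.linalg.eigvalsh(cycle_laplacian(N)))
analytic = np.sort([2 - 2 * np.cos(2 * np.pi * k / N) for k in range(N)])
assert np.allclose(numeric, analytic)  # spectra agree up to floating-point error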
struct_cen.go | package cbn
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and | //
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// Cen is a nested struct in cbn response
type Cen struct {
CenId string `json:"CenId" xml:"CenId"`
Name string `json:"Name" xml:"Name"`
Description string `json:"Description" xml:"Description"`
ProtectionLevel string `json:"ProtectionLevel" xml:"ProtectionLevel"`
Status string `json:"Status" xml:"Status"`
CreationTime string `json:"CreationTime" xml:"CreationTime"`
Ipv6Level string `json:"Ipv6Level" xml:"Ipv6Level"`
ResourceGroupId string `json:"ResourceGroupId" xml:"ResourceGroupId"`
CenBandwidthPackageIds CenBandwidthPackageIds `json:"CenBandwidthPackageIds" xml:"CenBandwidthPackageIds"`
Tags TagsInDescribeCens `json:"Tags" xml:"Tags"`
} | //limitations under the License. |
main.rs | fn | () {
// println!("Hello, world!");
let s1 = Some(3);
let s2 = Some(6);
let n = None;
let fn_is_even = |x: &i8| x % 2 == 0;
assert_eq!(s1.filter(fn_is_even), n); // Some(3) -> 3 is not even -> None
assert_eq!(s2.filter(fn_is_even), s2); // Some(6) -> 6 is even -> Some(6)
assert_eq!(n.filter(fn_is_even), n); // None -> no value -> None
}
| main |
ordereddicttest.py | #!/usr/bin/env python
from random import shuffle
import copy
import inspect
import pickle
import unittest
from ordereddict import OrderedDict
class TestOrderedDict(unittest.TestCase):
def | (self):
self.assertRaises(TypeError, OrderedDict, ([('a', 1), ('b', 2)], None))
# too many args
pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
self.assertEqual(sorted(OrderedDict(dict(pairs)).items()), pairs) # dict input
self.assertEqual(sorted(OrderedDict(**dict(pairs)).items()), pairs) # kwds input
self.assertEqual(list(OrderedDict(pairs).items()), pairs) # pairs input
self.assertEqual(list(OrderedDict([('a', 1), ('b', 2), ('c', 9), ('d', 4)],
c=3, e=5).items()), pairs) # mixed input
# make sure no positional args conflict with possible kwdargs
self.assertEqual(inspect.getargspec(OrderedDict.__dict__['__init__'])[0],
['self'])
# Make sure that direct calls to __init__ do not clear previous contents
d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
d.__init__([('e', 5), ('f', 6)], g=7, d=4)
self.assertEqual(list(d.items()),
[('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)])
def test_update(self):
self.assertRaises(TypeError, OrderedDict().update, [('a', 1), ('b',
2)], None) # too many args
pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
od = OrderedDict()
od.update(dict(pairs))
self.assertEqual(sorted(od.items()), pairs) # dict input
od = OrderedDict()
od.update(**dict(pairs))
self.assertEqual(sorted(od.items()), pairs) # kwds input
od = OrderedDict()
od.update(pairs)
self.assertEqual(list(od.items()), pairs) # pairs input
od = OrderedDict()
od.update([('a', 1), ('b', 2), ('c', 9), ('d', 4)], c=3, e=5)
self.assertEqual(list(od.items()), pairs) # mixed input
# Make sure that direct calls to update do not clear previous contents
        # and that updated items are not moved to the end
d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
d.update([('e', 5), ('f', 6)], g=7, d=4)
self.assertEqual(list(d.items()),
[('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)])
def test_clear(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
self.assertEqual(len(od), len(pairs))
od.clear()
self.assertEqual(len(od), 0)
def test_delitem(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
od = OrderedDict(pairs)
del od['a']
self.assert_('a' not in od)
self.assertRaises(KeyError, od.__delitem__, 'a')
self.assertEqual(list(od.items()), pairs[:2] + pairs[3:])
def test_setitem(self):
od = OrderedDict([('d', 1), ('b', 2), ('c', 3), ('a', 4), ('e', 5)])
od['c'] = 10 # existing element
od['f'] = 20 # new element
self.assertEqual(list(od.items()),
[('d', 1), ('b', 2), ('c', 10), ('a', 4), ('e', 5), ('f', 20)])
def test_iterators(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
self.assertEqual(list(od), [t[0] for t in pairs])
self.assertEqual(list(od.keys()), [t[0] for t in pairs])
self.assertEqual(list(od.values()), [t[1] for t in pairs])
self.assertEqual(list(od.items()), pairs)
self.assertEqual(list(reversed(od)),
[t[0] for t in reversed(pairs)])
def test_popitem(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
while pairs:
self.assertEqual(od.popitem(), pairs.pop())
self.assertRaises(KeyError, od.popitem)
self.assertEqual(len(od), 0)
def test_pop(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
shuffle(pairs)
while pairs:
k, v = pairs.pop()
self.assertEqual(od.pop(k), v)
self.assertRaises(KeyError, od.pop, 'xyz')
self.assertEqual(len(od), 0)
self.assertEqual(od.pop(k, 12345), 12345)
def test_equality(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od1 = OrderedDict(pairs)
od2 = OrderedDict(pairs)
self.assertEqual(od1, od2) # same order implies equality
pairs = pairs[2:] + pairs[:2]
od2 = OrderedDict(pairs)
self.assertNotEqual(od1, od2) # different order implies inequality
# comparison to regular dict is not order sensitive
self.assertEqual(od1, dict(od2))
self.assertEqual(dict(od2), od1)
        # different length implies inequality
self.assertNotEqual(od1, OrderedDict(pairs[:-1]))
def test_copying(self):
# Check that ordered dicts are copyable, deepcopyable, picklable,
# and have a repr/eval round-trip
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
od = OrderedDict(pairs)
update_test = OrderedDict()
update_test.update(od)
for i, dup in enumerate([
od.copy(),
copy.copy(od),
copy.deepcopy(od),
pickle.loads(pickle.dumps(od, 0)),
pickle.loads(pickle.dumps(od, 1)),
pickle.loads(pickle.dumps(od, 2)),
pickle.loads(pickle.dumps(od, -1)),
eval(repr(od)),
update_test,
OrderedDict(od),
]):
self.assert_(dup is not od)
self.assertEquals(dup, od)
self.assertEquals(list(dup.items()), list(od.items()))
self.assertEquals(len(dup), len(od))
self.assertEquals(type(dup), type(od))
def test_repr(self):
od = OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])
self.assertEqual(repr(od),
"OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])")
self.assertEqual(eval(repr(od)), od)
self.assertEqual(repr(OrderedDict()), "OrderedDict()")
def test_setdefault(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
pair_order = list(od.items())
self.assertEqual(od.setdefault('a', 10), 3)
# make sure order didn't change
self.assertEqual(list(od.items()), pair_order)
self.assertEqual(od.setdefault('x', 10), 10)
# make sure 'x' is added to the end
self.assertEqual(list(od.items())[-1], ('x', 10))
def test_reinsert(self):
# Given insert a, insert b, delete a, re-insert a,
# verify that a is now later than b.
od = OrderedDict()
od['a'] = 1
od['b'] = 2
del od['a']
od['a'] = 1
self.assertEqual(list(od.items()), [('b', 2), ('a', 1)])
if __name__ == "__main__":
unittest.main()
| test_init |
main.rs | use structopt::StructOpt;
use stay_awake::Args;
use std::process;
fn main() | {
let args: Args = Args::from_args();
if let Err(e) = stay_awake::run(args) {
println!("Stopping with error: {}", e);
process::exit(1);
}
process::exit(0);
} |
|
int128.rs | use std;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
pub struct u128 {
high: u64,
low: u64,
}
impl u128 {
pub fn zero() -> u128 {
u128::from_parts(0, 0)
}
pub fn from_parts(high: u64, low: u64) -> u128 {
u128 {
high: high,
low: low,
}
}
pub fn parts(&self) -> (u64, u64) {
(self.high, self.low)
}
}
impl std::ops::Add<u128> for u128 {
type Output = u128;
fn add(self, rhs: u128) -> u128 {
let low = self.low + rhs.low;
let high = self.high + rhs.high + if low < self.low { 1 } else { 0 };
u128::from_parts(high, low)
}
}
impl<'a> std::ops::Add<&'a u128> for u128 {
type Output = u128;
fn add(self, rhs: &'a u128) -> u128 {
let low = self.low + rhs.low;
let high = self.high + rhs.high + if low < self.low { 1 } else { 0 };
u128::from_parts(high, low)
}
}
impl std::convert::From<u8> for u128 {
fn from(n: u8) -> u128 {
u128::from_parts(0, n as u64)
}
}
impl std::ops::Mul<u128> for u128 {
type Output = u128;
fn mul(self, rhs: u128) -> u128 {
let top: [u64; 4] = [
self.high >> 32,
self.high & 0xFFFFFFFF,
self.low >> 32,
self.low & 0xFFFFFFFF,
];
let bottom: [u64; 4] = [
rhs.high >> 32,
rhs.high & 0xFFFFFFFF,
rhs.low >> 32,
rhs.low & 0xFFFFFFFF,
];
let mut rows = [u128::zero(); 16];
for i in 0..4 {
for j in 0..4 {
let shift = i + j;
let product = top[3 - i] * bottom[3 - j];
let (high, low) = match shift {
0 => (0, product),
1 => (product >> 32, product << 32),
2 => (product, 0),
3 => (product << 32, 0),
_ => {
if product == 0 | else {
panic!("Overflow on mul {:?} {:?} ({} {})", self, rhs, i, j)
}
}
};
rows[j * 4 + i] = u128::from_parts(high, low);
}
}
rows.iter().fold(u128::zero(), std::ops::Add::add)
}
}
| {
(0, 0)
} |
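# Illustrative sketch (standalone, not part of int128.rs above): the Rust Mul
# impl splits each 64-bit half into 32-bit limbs and sums 16 shifted partial
# products, panicking when a partial product would not fit in 128 bits. This
# Python model mirrors that limb decomposition, simply reducing mod 2**128
# instead of panicking; every name below is ours.
def mul_via_limbs(a, b):
    # four 32-bit limbs per operand, most significant first (like `top`/`bottom`)
    limbs = lambda x: [(x >> s) & 0xFFFFFFFF for s in (96, 64, 32, 0)]
    ta, tb = limbs(a), limbs(b)
    acc = 0
    for i in range(4):
        for j in range(4):
            shift = 32 * (i + j)          # limb positions, as in the Rust loop
            if shift >= 128:
                continue                  # contributes 0 mod 2**128
            acc += (ta[3 - i] * tb[3 - j]) << shift
    return acc & ((1 << 128) - 1)

a = 0x0123456789ABCDEF0011223344556677
b = 0x00000000000000020000000000000003   # small enough that no partial product overflows
assert mul_via_limbs(a, b) == (a * b) & ((1 << 128) - 1)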
__init__.py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
from azext_identitydirmgt.generated._help import helps # pylint: disable=unused-import
try:
from azext_identitydirmgt.manual._help import helps # pylint: disable=reimported
except ImportError as e:
if e.name.endswith('manual._help'):
pass
else:
raise e
class IdentityDirectoryManagementCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core.commands import CliCommandType
from azext_identitydirmgt.generated._client_factory import cf_identitydirmgt_cl
identitydirmgt_custom = CliCommandType(
operations_tmpl='azext_identitydirmgt.custom#{}',
client_factory=cf_identitydirmgt_cl)
parent = super(IdentityDirectoryManagementCommandsLoader, self)
parent.__init__(cli_ctx=cli_ctx, custom_command_type=identitydirmgt_custom)
def load_command_table(self, args):
from azext_identitydirmgt.generated.commands import load_command_table
load_command_table(self, args)
try:
from azext_identitydirmgt.manual.commands import load_command_table as load_command_table_manual
load_command_table_manual(self, args)
except ImportError as e:
if e.name.endswith('manual.commands'):
pass
else:
raise e
return self.command_table
def | (self, command):
from azext_identitydirmgt.generated._params import load_arguments
load_arguments(self, command)
try:
from azext_identitydirmgt.manual._params import load_arguments as load_arguments_manual
load_arguments_manual(self, command)
except ImportError as e:
if e.name.endswith('manual._params'):
pass
else:
raise e
COMMAND_LOADER_CLS = IdentityDirectoryManagementCommandsLoader
| load_arguments |
Ada.d.ts | export declare const AdaDimensions: {
height: number;
width: number;
}; | import { StyledIcon } from '@styled-icons/styled-icon';
export declare const Ada: StyledIcon; |
|
combo_box.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::Accessible;
use crate::AccessibleRole;
use crate::Align;
use crate::Buildable;
use crate::CellEditable;
use crate::CellLayout;
use crate::ConstraintTarget;
use crate::LayoutManager;
use crate::Overflow;
use crate::ScrollType;
use crate::SensitivityType;
use crate::TreeIter;
use crate::TreeModel;
use crate::Widget;
use glib::object::Cast;
use glib::object::IsA;
use glib::object::ObjectExt;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use glib::ToValue;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
#[doc(alias = "GtkComboBox")]
pub struct ComboBox(Object<ffi::GtkComboBox, ffi::GtkComboBoxClass>) @extends Widget, @implements Accessible, Buildable, ConstraintTarget, CellEditable, CellLayout;
match fn {
type_ => || ffi::gtk_combo_box_get_type(),
}
}
impl ComboBox {
#[doc(alias = "gtk_combo_box_new")]
pub fn new() -> ComboBox {
assert_initialized_main_thread!();
unsafe { Widget::from_glib_none(ffi::gtk_combo_box_new()).unsafe_cast() }
}
#[doc(alias = "gtk_combo_box_new_with_entry")]
#[doc(alias = "new_with_entry")]
pub fn with_entry() -> ComboBox {
assert_initialized_main_thread!();
unsafe { Widget::from_glib_none(ffi::gtk_combo_box_new_with_entry()).unsafe_cast() }
}
#[doc(alias = "gtk_combo_box_new_with_model")]
#[doc(alias = "new_with_model")]
pub fn with_model<P: IsA<TreeModel>>(model: &P) -> ComboBox {
skip_assert_initialized!();
unsafe {
Widget::from_glib_none(ffi::gtk_combo_box_new_with_model(
model.as_ref().to_glib_none().0,
))
.unsafe_cast()
}
}
#[doc(alias = "gtk_combo_box_new_with_model_and_entry")]
#[doc(alias = "new_with_model_and_entry")]
pub fn with_model_and_entry<P: IsA<TreeModel>>(model: &P) -> ComboBox {
skip_assert_initialized!();
unsafe {
Widget::from_glib_none(ffi::gtk_combo_box_new_with_model_and_entry(
model.as_ref().to_glib_none().0,
))
.unsafe_cast()
}
}
// rustdoc-stripper-ignore-next
/// Creates a new builder-pattern struct instance to construct [`ComboBox`] objects.
///
/// This method returns an instance of [`ComboBoxBuilder`] which can be used to create [`ComboBox`] objects.
pub fn builder() -> ComboBoxBuilder {
ComboBoxBuilder::default()
}
}
impl Default for ComboBox {
fn default() -> Self {
Self::new()
}
}
#[derive(Clone, Default)]
// rustdoc-stripper-ignore-next
/// A [builder-pattern] type to construct [`ComboBox`] objects.
///
/// [builder-pattern]: https://doc.rust-lang.org/1.0.0/style/ownership/builders.html
pub struct ComboBoxBuilder {
active: Option<i32>,
active_id: Option<String>,
button_sensitivity: Option<SensitivityType>,
child: Option<Widget>,
entry_text_column: Option<i32>,
has_entry: Option<bool>,
has_frame: Option<bool>,
id_column: Option<i32>,
model: Option<TreeModel>,
popup_fixed_width: Option<bool>,
can_focus: Option<bool>,
can_target: Option<bool>,
css_classes: Option<Vec<String>>,
css_name: Option<String>,
cursor: Option<gdk::Cursor>,
focus_on_click: Option<bool>,
focusable: Option<bool>,
halign: Option<Align>,
has_tooltip: Option<bool>,
height_request: Option<i32>,
hexpand: Option<bool>,
hexpand_set: Option<bool>,
layout_manager: Option<LayoutManager>,
margin_bottom: Option<i32>,
margin_end: Option<i32>,
margin_start: Option<i32>,
margin_top: Option<i32>,
name: Option<String>,
opacity: Option<f64>,
overflow: Option<Overflow>,
receives_default: Option<bool>,
sensitive: Option<bool>,
tooltip_markup: Option<String>,
tooltip_text: Option<String>,
valign: Option<Align>,
vexpand: Option<bool>,
vexpand_set: Option<bool>,
visible: Option<bool>,
width_request: Option<i32>,
accessible_role: Option<AccessibleRole>,
editing_canceled: Option<bool>,
}
impl ComboBoxBuilder {
// rustdoc-stripper-ignore-next
/// Create a new [`ComboBoxBuilder`].
pub fn new() -> Self {
Self::default()
}
// rustdoc-stripper-ignore-next
/// Build the [`ComboBox`].
pub fn build(self) -> ComboBox {
let mut properties: Vec<(&str, &dyn ToValue)> = vec![];
if let Some(ref active) = self.active {
properties.push(("active", active));
}
if let Some(ref active_id) = self.active_id {
properties.push(("active-id", active_id));
}
if let Some(ref button_sensitivity) = self.button_sensitivity {
properties.push(("button-sensitivity", button_sensitivity));
}
if let Some(ref child) = self.child {
properties.push(("child", child));
}
if let Some(ref entry_text_column) = self.entry_text_column {
properties.push(("entry-text-column", entry_text_column));
}
if let Some(ref has_entry) = self.has_entry {
properties.push(("has-entry", has_entry));
}
if let Some(ref has_frame) = self.has_frame {
properties.push(("has-frame", has_frame));
}
if let Some(ref id_column) = self.id_column {
properties.push(("id-column", id_column));
}
if let Some(ref model) = self.model {
properties.push(("model", model));
}
if let Some(ref popup_fixed_width) = self.popup_fixed_width {
properties.push(("popup-fixed-width", popup_fixed_width));
}
if let Some(ref can_focus) = self.can_focus {
properties.push(("can-focus", can_focus));
}
if let Some(ref can_target) = self.can_target {
properties.push(("can-target", can_target));
}
if let Some(ref css_classes) = self.css_classes {
properties.push(("css-classes", css_classes));
}
if let Some(ref css_name) = self.css_name {
properties.push(("css-name", css_name));
}
if let Some(ref cursor) = self.cursor {
properties.push(("cursor", cursor));
}
if let Some(ref focus_on_click) = self.focus_on_click {
properties.push(("focus-on-click", focus_on_click));
}
if let Some(ref focusable) = self.focusable {
properties.push(("focusable", focusable));
}
if let Some(ref halign) = self.halign {
properties.push(("halign", halign));
}
if let Some(ref has_tooltip) = self.has_tooltip {
properties.push(("has-tooltip", has_tooltip));
}
if let Some(ref height_request) = self.height_request {
properties.push(("height-request", height_request));
}
if let Some(ref hexpand) = self.hexpand {
properties.push(("hexpand", hexpand));
}
if let Some(ref hexpand_set) = self.hexpand_set {
properties.push(("hexpand-set", hexpand_set));
}
if let Some(ref layout_manager) = self.layout_manager {
properties.push(("layout-manager", layout_manager));
}
if let Some(ref margin_bottom) = self.margin_bottom {
properties.push(("margin-bottom", margin_bottom));
}
if let Some(ref margin_end) = self.margin_end {
properties.push(("margin-end", margin_end));
}
if let Some(ref margin_start) = self.margin_start {
properties.push(("margin-start", margin_start));
}
if let Some(ref margin_top) = self.margin_top {
properties.push(("margin-top", margin_top));
}
if let Some(ref name) = self.name {
properties.push(("name", name));
}
if let Some(ref opacity) = self.opacity {
properties.push(("opacity", opacity));
}
if let Some(ref overflow) = self.overflow {
properties.push(("overflow", overflow));
}
if let Some(ref receives_default) = self.receives_default {
properties.push(("receives-default", receives_default));
}
if let Some(ref sensitive) = self.sensitive {
properties.push(("sensitive", sensitive));
}
if let Some(ref tooltip_markup) = self.tooltip_markup {
properties.push(("tooltip-markup", tooltip_markup));
}
if let Some(ref tooltip_text) = self.tooltip_text {
properties.push(("tooltip-text", tooltip_text));
}
if let Some(ref valign) = self.valign {
properties.push(("valign", valign));
}
if let Some(ref vexpand) = self.vexpand {
properties.push(("vexpand", vexpand));
}
if let Some(ref vexpand_set) = self.vexpand_set {
properties.push(("vexpand-set", vexpand_set));
}
if let Some(ref visible) = self.visible {
properties.push(("visible", visible));
}
if let Some(ref width_request) = self.width_request {
properties.push(("width-request", width_request));
}
if let Some(ref accessible_role) = self.accessible_role {
properties.push(("accessible-role", accessible_role));
}
if let Some(ref editing_canceled) = self.editing_canceled {
properties.push(("editing-canceled", editing_canceled));
}
glib::Object::new::<ComboBox>(&properties)
.expect("Failed to create an instance of ComboBox")
}
pub fn active(mut self, active: i32) -> Self {
self.active = Some(active);
self
}
pub fn active_id(mut self, active_id: &str) -> Self {
self.active_id = Some(active_id.to_string());
self
}
pub fn button_sensitivity(mut self, button_sensitivity: SensitivityType) -> Self {
self.button_sensitivity = Some(button_sensitivity);
self
}
pub fn child<P: IsA<Widget>>(mut self, child: &P) -> Self {
self.child = Some(child.clone().upcast());
self
}
pub fn entry_text_column(mut self, entry_text_column: i32) -> Self {
self.entry_text_column = Some(entry_text_column);
self
}
pub fn has_entry(mut self, has_entry: bool) -> Self {
self.has_entry = Some(has_entry);
self
}
pub fn has_frame(mut self, has_frame: bool) -> Self {
self.has_frame = Some(has_frame);
self
}
pub fn id_column(mut self, id_column: i32) -> Self {
self.id_column = Some(id_column);
self
}
pub fn model<P: IsA<TreeModel>>(mut self, model: &P) -> Self {
self.model = Some(model.clone().upcast());
self
}
pub fn popup_fixed_width(mut self, popup_fixed_width: bool) -> Self {
self.popup_fixed_width = Some(popup_fixed_width);
self
}
pub fn can_focus(mut self, can_focus: bool) -> Self {
self.can_focus = Some(can_focus);
self
}
pub fn can_target(mut self, can_target: bool) -> Self {
self.can_target = Some(can_target);
self
}
pub fn css_classes(mut self, css_classes: Vec<String>) -> Self {
self.css_classes = Some(css_classes);
self
}
pub fn css_name(mut self, css_name: &str) -> Self {
self.css_name = Some(css_name.to_string());
self
}
pub fn cursor(mut self, cursor: &gdk::Cursor) -> Self {
self.cursor = Some(cursor.clone());
self
}
pub fn focus_on_click(mut self, focus_on_click: bool) -> Self {
self.focus_on_click = Some(focus_on_click);
self
}
pub fn focusable(mut self, focusable: bool) -> Self {
self.focusable = Some(focusable);
self
}
pub fn halign(mut self, halign: Align) -> Self {
self.halign = Some(halign);
self
}
pub fn has_tooltip(mut self, has_tooltip: bool) -> Self {
self.has_tooltip = Some(has_tooltip);
self
}
pub fn height_request(mut self, height_request: i32) -> Self {
self.height_request = Some(height_request);
self
}
pub fn hexpand(mut self, hexpand: bool) -> Self {
self.hexpand = Some(hexpand);
self
}
pub fn hexpand_set(mut self, hexpand_set: bool) -> Self {
self.hexpand_set = Some(hexpand_set);
self
}
pub fn layout_manager<P: IsA<LayoutManager>>(mut self, layout_manager: &P) -> Self {
self.layout_manager = Some(layout_manager.clone().upcast());
self
}
pub fn margin_bottom(mut self, margin_bottom: i32) -> Self {
self.margin_bottom = Some(margin_bottom);
self
}
pub fn margin_end(mut self, margin_end: i32) -> Self {
self.margin_end = Some(margin_end);
self
}
pub fn margin_start(mut self, margin_start: i32) -> Self {
self.margin_start = Some(margin_start);
self
}
pub fn margin_top(mut self, margin_top: i32) -> Self {
self.margin_top = Some(margin_top);
self
}
pub fn name(mut self, name: &str) -> Self {
self.name = Some(name.to_string());
self
}
pub fn opacity(mut self, opacity: f64) -> Self {
self.opacity = Some(opacity);
self
}
pub fn overflow(mut self, overflow: Overflow) -> Self {
self.overflow = Some(overflow);
self
}
pub fn receives_default(mut self, receives_default: bool) -> Self {
self.receives_default = Some(receives_default);
self
}
pub fn sensitive(mut self, sensitive: bool) -> Self {
self.sensitive = Some(sensitive);
self
}
pub fn tooltip_markup(mut self, tooltip_markup: &str) -> Self {
self.tooltip_markup = Some(tooltip_markup.to_string());
self
}
pub fn tooltip_text(mut self, tooltip_text: &str) -> Self {
self.tooltip_text = Some(tooltip_text.to_string());
self
}
pub fn valign(mut self, valign: Align) -> Self {
self.valign = Some(valign);
self
}
pub fn vexpand(mut self, vexpand: bool) -> Self {
self.vexpand = Some(vexpand);
self
}
pub fn vexpand_set(mut self, vexpand_set: bool) -> Self {
self.vexpand_set = Some(vexpand_set);
self
}
pub fn visible(mut self, visible: bool) -> Self {
self.visible = Some(visible);
self
}
pub fn width_request(mut self, width_request: i32) -> Self {
self.width_request = Some(width_request);
self
}
pub fn accessible_role(mut self, accessible_role: AccessibleRole) -> Self {
self.accessible_role = Some(accessible_role);
self
}
pub fn editing_canceled(mut self, editing_canceled: bool) -> Self {
self.editing_canceled = Some(editing_canceled);
self
}
}
pub const NONE_COMBO_BOX: Option<&ComboBox> = None;
pub trait ComboBoxExt: 'static {
#[doc(alias = "gtk_combo_box_get_active_id")]
#[doc(alias = "get_active_id")]
fn active_id(&self) -> Option<glib::GString>;
#[doc(alias = "gtk_combo_box_get_active_iter")]
#[doc(alias = "get_active_iter")]
fn active_iter(&self) -> Option<TreeIter>;
#[doc(alias = "gtk_combo_box_get_button_sensitivity")]
#[doc(alias = "get_button_sensitivity")]
fn button_sensitivity(&self) -> SensitivityType;
#[doc(alias = "gtk_combo_box_get_child")]
#[doc(alias = "get_child")]
fn child(&self) -> Option<Widget>;
#[doc(alias = "gtk_combo_box_get_entry_text_column")]
#[doc(alias = "get_entry_text_column")]
fn entry_text_column(&self) -> i32;
#[doc(alias = "gtk_combo_box_get_has_entry")]
#[doc(alias = "get_has_entry")]
fn has_entry(&self) -> bool;
#[doc(alias = "gtk_combo_box_get_id_column")]
#[doc(alias = "get_id_column")]
fn id_column(&self) -> i32;
#[doc(alias = "gtk_combo_box_get_model")]
#[doc(alias = "get_model")]
fn model(&self) -> Option<TreeModel>;
#[doc(alias = "gtk_combo_box_get_popup_fixed_width")]
#[doc(alias = "get_popup_fixed_width")]
fn is_popup_fixed_width(&self) -> bool;
//#[doc(alias = "gtk_combo_box_get_row_separator_func")]
//#[doc(alias = "get_row_separator_func")]
//fn row_separator_func(&self) -> Option<Box_<dyn Fn(&TreeModel, &TreeIter) -> bool + 'static>>;
#[doc(alias = "gtk_combo_box_popdown")]
fn popdown(&self);
#[doc(alias = "gtk_combo_box_popup")]
fn popup(&self);
#[doc(alias = "gtk_combo_box_popup_for_device")]
fn popup_for_device(&self, device: &gdk::Device);
#[doc(alias = "gtk_combo_box_set_active_id")]
fn set_active_id(&self, active_id: Option<&str>) -> bool;
#[doc(alias = "gtk_combo_box_set_active_iter")]
fn set_active_iter(&self, iter: Option<&TreeIter>);
#[doc(alias = "gtk_combo_box_set_button_sensitivity")]
fn set_button_sensitivity(&self, sensitivity: SensitivityType);
#[doc(alias = "gtk_combo_box_set_child")]
fn set_child<P: IsA<Widget>>(&self, child: Option<&P>);
#[doc(alias = "gtk_combo_box_set_entry_text_column")]
fn set_entry_text_column(&self, text_column: i32);
#[doc(alias = "gtk_combo_box_set_id_column")]
fn set_id_column(&self, id_column: i32);
#[doc(alias = "gtk_combo_box_set_model")]
fn set_model<P: IsA<TreeModel>>(&self, model: Option<&P>);
#[doc(alias = "gtk_combo_box_set_popup_fixed_width")]
fn set_popup_fixed_width(&self, fixed: bool);
#[doc(alias = "gtk_combo_box_set_row_separator_func")]
fn set_row_separator_func<P: Fn(&TreeModel, &TreeIter) -> bool + 'static>(&self, func: P);
#[doc(alias = "has-frame")]
fn has_frame(&self) -> bool;
#[doc(alias = "has-frame")]
fn set_has_frame(&self, has_frame: bool);
#[doc(alias = "popup-shown")]
fn is_popup_shown(&self) -> bool;
#[doc(alias = "changed")]
fn connect_changed<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "format-entry-text")]
fn connect_format_entry_text<F: Fn(&Self, &str) -> String + 'static>(
&self,
f: F,
) -> SignalHandlerId;
#[doc(alias = "move-active")]
fn connect_move_active<F: Fn(&Self, ScrollType) + 'static>(&self, f: F) -> SignalHandlerId;
fn emit_move_active(&self, scroll_type: ScrollType);
#[doc(alias = "popdown")]
fn connect_popdown<F: Fn(&Self) -> bool + 'static>(&self, f: F) -> SignalHandlerId;
fn emit_popdown(&self) -> bool;
#[doc(alias = "popup")]
fn connect_popup<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn emit_popup(&self);
#[doc(alias = "active")]
fn connect_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "active-id")]
fn connect_active_id_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "button-sensitivity")]
fn connect_button_sensitivity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "child")]
fn connect_child_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "entry-text-column")]
fn connect_entry_text_column_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "has-frame")]
fn connect_has_frame_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "id-column")]
fn connect_id_column_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "model")]
fn connect_model_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "popup-fixed-width")]
fn connect_popup_fixed_width_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[doc(alias = "popup-shown")]
fn connect_popup_shown_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<ComboBox>> ComboBoxExt for O {
fn active_id(&self) -> Option<glib::GString> {
unsafe {
from_glib_none(ffi::gtk_combo_box_get_active_id(
self.as_ref().to_glib_none().0,
))
}
}
fn active_iter(&self) -> Option<TreeIter> {
unsafe {
let mut iter = TreeIter::uninitialized();
let ret = from_glib(ffi::gtk_combo_box_get_active_iter(
self.as_ref().to_glib_none().0,
iter.to_glib_none_mut().0,
));
if ret {
Some(iter)
} else {
None
}
}
}
fn button_sensitivity(&self) -> SensitivityType {
unsafe {
from_glib(ffi::gtk_combo_box_get_button_sensitivity(
self.as_ref().to_glib_none().0,
))
}
}
fn child(&self) -> Option<Widget> {
unsafe { from_glib_none(ffi::gtk_combo_box_get_child(self.as_ref().to_glib_none().0)) }
}
fn entry_text_column(&self) -> i32 {
unsafe { ffi::gtk_combo_box_get_entry_text_column(self.as_ref().to_glib_none().0) }
}
fn has_entry(&self) -> bool {
unsafe {
from_glib(ffi::gtk_combo_box_get_has_entry(
self.as_ref().to_glib_none().0,
))
}
}
fn id_column(&self) -> i32 {
unsafe { ffi::gtk_combo_box_get_id_column(self.as_ref().to_glib_none().0) }
}
fn model(&self) -> Option<TreeModel> {
unsafe { from_glib_none(ffi::gtk_combo_box_get_model(self.as_ref().to_glib_none().0)) }
}
fn is_popup_fixed_width(&self) -> bool {
unsafe {
from_glib(ffi::gtk_combo_box_get_popup_fixed_width(
self.as_ref().to_glib_none().0,
))
}
}
//fn row_separator_func(&self) -> Option<Box_<dyn Fn(&TreeModel, &TreeIter) -> bool + 'static>> {
// unsafe { TODO: call ffi:gtk_combo_box_get_row_separator_func() }
//}
fn popdown(&self) {
unsafe {
ffi::gtk_combo_box_popdown(self.as_ref().to_glib_none().0);
}
}
fn popup(&self) {
unsafe {
ffi::gtk_combo_box_popup(self.as_ref().to_glib_none().0);
}
}
fn popup_for_device(&self, device: &gdk::Device) {
unsafe {
ffi::gtk_combo_box_popup_for_device(
self.as_ref().to_glib_none().0,
device.to_glib_none().0,
);
}
}
fn set_active_id(&self, active_id: Option<&str>) -> bool {
unsafe {
from_glib(ffi::gtk_combo_box_set_active_id(
self.as_ref().to_glib_none().0,
active_id.to_glib_none().0,
))
}
}
fn set_active_iter(&self, iter: Option<&TreeIter>) {
unsafe {
ffi::gtk_combo_box_set_active_iter(
self.as_ref().to_glib_none().0,
mut_override(iter.to_glib_none().0),
);
}
}
fn set_button_sensitivity(&self, sensitivity: SensitivityType) {
unsafe {
ffi::gtk_combo_box_set_button_sensitivity(
self.as_ref().to_glib_none().0,
sensitivity.into_glib(),
);
}
}
fn set_child<P: IsA<Widget>>(&self, child: Option<&P>) {
unsafe {
ffi::gtk_combo_box_set_child(
self.as_ref().to_glib_none().0,
child.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn set_entry_text_column(&self, text_column: i32) {
unsafe {
ffi::gtk_combo_box_set_entry_text_column(self.as_ref().to_glib_none().0, text_column);
}
}
fn set_id_column(&self, id_column: i32) {
unsafe {
ffi::gtk_combo_box_set_id_column(self.as_ref().to_glib_none().0, id_column);
}
}
fn set_model<P: IsA<TreeModel>>(&self, model: Option<&P>) {
unsafe {
ffi::gtk_combo_box_set_model(
self.as_ref().to_glib_none().0,
model.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn set_popup_fixed_width(&self, fixed: bool) {
unsafe {
ffi::gtk_combo_box_set_popup_fixed_width(
self.as_ref().to_glib_none().0,
fixed.into_glib(),
);
}
}
fn set_row_separator_func<P: Fn(&TreeModel, &TreeIter) -> bool + 'static>(&self, func: P) {
let func_data: Box_<P> = Box_::new(func);
unsafe extern "C" fn func_func<P: Fn(&TreeModel, &TreeIter) -> bool + 'static>(
model: *mut ffi::GtkTreeModel,
iter: *mut ffi::GtkTreeIter,
data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let model = from_glib_borrow(model);
let iter = from_glib_borrow(iter);
let callback: &P = &*(data as *mut _);
let res = (*callback)(&model, &iter);
res.into_glib()
}
let func = Some(func_func::<P> as _);
unsafe extern "C" fn destroy_func<P: Fn(&TreeModel, &TreeIter) -> bool + 'static>(
data: glib::ffi::gpointer,
) {
let _callback: Box_<P> = Box_::from_raw(data as *mut _);
}
let destroy_call3 = Some(destroy_func::<P> as _);
let super_callback0: Box_<P> = func_data;
unsafe {
ffi::gtk_combo_box_set_row_separator_func(
self.as_ref().to_glib_none().0,
func,
Box_::into_raw(super_callback0) as *mut _,
destroy_call3,
);
}
}
fn has_frame(&self) -> bool {
unsafe {
let mut value = glib::Value::from_type(<bool as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"has-frame\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `has-frame` getter")
}
}
fn set_has_frame(&self, has_frame: bool) {
unsafe {
glib::gobject_ffi::g_object_set_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"has-frame\0".as_ptr() as *const _,
has_frame.to_value().to_glib_none().0,
);
}
}
fn | (&self) -> bool {
unsafe {
let mut value = glib::Value::from_type(<bool as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"popup-shown\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `popup-shown` getter")
}
}
fn connect_changed<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn changed_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
changed_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_format_entry_text<F: Fn(&Self, &str) -> String + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn format_entry_text_trampoline<
P: IsA<ComboBox>,
F: Fn(&P, &str) -> String + 'static,
>(
this: *mut ffi::GtkComboBox,
path: *mut libc::c_char,
f: glib::ffi::gpointer,
) -> *mut libc::c_char {
let f: &F = &*(f as *const F);
f(
ComboBox::from_glib_borrow(this).unsafe_cast_ref(),
&glib::GString::from_glib_borrow(path),
)
.to_glib_full()
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"format-entry-text\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
format_entry_text_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_move_active<F: Fn(&Self, ScrollType) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn move_active_trampoline<
P: IsA<ComboBox>,
F: Fn(&P, ScrollType) + 'static,
>(
this: *mut ffi::GtkComboBox,
scroll_type: ffi::GtkScrollType,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(
ComboBox::from_glib_borrow(this).unsafe_cast_ref(),
from_glib(scroll_type),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"move-active\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
move_active_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn emit_move_active(&self, scroll_type: ScrollType) {
let _ = unsafe {
glib::Object::from_glib_borrow(self.as_ptr() as *mut glib::gobject_ffi::GObject)
.emit_by_name("move-active", &[&scroll_type])
.unwrap()
};
}
fn connect_popdown<F: Fn(&Self) -> bool + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn popdown_trampoline<P: IsA<ComboBox>, F: Fn(&P) -> bool + 'static>(
this: *mut ffi::GtkComboBox,
f: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref()).into_glib()
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"popdown\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
popdown_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn emit_popdown(&self) -> bool {
let res = unsafe {
glib::Object::from_glib_borrow(self.as_ptr() as *mut glib::gobject_ffi::GObject)
.emit_by_name("popdown", &[])
.unwrap()
};
res.unwrap().get().expect("Return Value for `emit_popdown`")
}
fn connect_popup<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn popup_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"popup\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
popup_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn emit_popup(&self) {
let _ = unsafe {
glib::Object::from_glib_borrow(self.as_ptr() as *mut glib::gobject_ffi::GObject)
.emit_by_name("popup", &[])
.unwrap()
};
}
fn connect_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_active_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::active\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_active_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_active_id_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_active_id_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::active-id\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_active_id_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_button_sensitivity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_button_sensitivity_trampoline<
P: IsA<ComboBox>,
F: Fn(&P) + 'static,
>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::button-sensitivity\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_button_sensitivity_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_child_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_child_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::child\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_child_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_entry_text_column_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_entry_text_column_trampoline<
P: IsA<ComboBox>,
F: Fn(&P) + 'static,
>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::entry-text-column\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_entry_text_column_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_has_frame_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_has_frame_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::has-frame\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_has_frame_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_id_column_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_id_column_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::id-column\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_id_column_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_model_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_model_trampoline<P: IsA<ComboBox>, F: Fn(&P) + 'static>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::model\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_model_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_popup_fixed_width_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_popup_fixed_width_trampoline<
P: IsA<ComboBox>,
F: Fn(&P) + 'static,
>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::popup-fixed-width\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_popup_fixed_width_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_popup_shown_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_popup_shown_trampoline<
P: IsA<ComboBox>,
F: Fn(&P) + 'static,
>(
this: *mut ffi::GtkComboBox,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ComboBox::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::popup-shown\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_popup_shown_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for ComboBox {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("ComboBox")
}
}
| is_popup_shown |
file_batch.rs | use dataplane::batch::{Batch, BATCH_FILE_HEADER_SIZE, BATCH_HEADER_SIZE};
use dataplane::Offset;
use std::io::{Error as IoError, ErrorKind, Cursor};
use tracing::{warn, debug};
use std::os::unix::io::RawFd;
use nix::sys::uio::pread;
use fluvio_future::file_slice::AsyncFileSlice;
// only encode information necessary to decode batches efficiently
pub struct FileBatch {
pub(crate) batch: Batch,
pub(crate) records: Vec<u8>,
}
impl FileBatch {
pub(crate) fn base_offset(&self) -> Offset {
self.batch.base_offset
}
pub(crate) fn offset_delta(&self) -> i32 {
self.batch.header.last_offset_delta
}
}
/// Iterator that returns batch from file
pub struct FileBatchIterator {
fd: RawFd,
offset: i64,
end: i64,
}
impl FileBatchIterator {
#[allow(unused)]
pub fn new(fd: RawFd, offset: i64, len: i64) -> Self {
Self {
fd,
offset,
end: offset + len,
}
}
pub fn from_raw_slice(slice: AsyncFileSlice) -> Self {
use std::os::unix::io::AsRawFd;
let offset = slice.position() as i64;
Self {
fd: slice.as_raw_fd(),
offset,
end: offset + slice.len() as i64,
}
}
}
impl Iterator for FileBatchIterator {
type Item = Result<FileBatch, IoError>;
fn next(&mut self) -> Option<Self::Item> {
if self.offset >= self.end {
return None;
}
let mut header = vec![0u8; BATCH_FILE_HEADER_SIZE];
let bytes_read = match pread(self.fd, &mut header, self.offset)
.map_err(|err| IoError::new(ErrorKind::Other, format!("pread error {}", err)))
{
Ok(bytes) => bytes,
Err(err) => return Some(Err(err)),
};
if bytes_read < header.len() {
warn!(bytes_read, header_len = header.len());
return Some(Err(IoError::new(
ErrorKind::UnexpectedEof,
format!(
"not eough for batch header {} out of {}",
bytes_read,
header.len()
),
)));
}
let mut batch = Batch::default();
if let Err(err) = batch.decode_from_file_buf(&mut Cursor::new(header), 0) {
return Some(Err(IoError::new(
ErrorKind::Other,
format!("decodinge batch header error {}", err),
)));
}
let remainder = batch.batch_len as usize - BATCH_HEADER_SIZE as usize;
debug!(
file_offset = self.offset,
base_offset = batch.base_offset,
"fbatch header"
);
let mut records = vec![0u8; remainder];
self.offset += BATCH_FILE_HEADER_SIZE as i64;
let bytes_read = match pread(self.fd, &mut records, self.offset)
.map_err(|err| IoError::new(ErrorKind::Other, format!("pread error {}", err)))
{
Ok(bytes) => bytes,
Err(err) => return Some(Err(err)),
};
if bytes_read < records.len() {
warn!(bytes_read, record_len = records.len());
return Some(Err(IoError::new(
ErrorKind::UnexpectedEof,
format!(
"not enough for batch records {} out of {}",
bytes_read,
records.len()
),
)));
}
self.offset += bytes_read as i64;
debug!(file_offset = self.offset, "fbatch end");
Some(Ok(FileBatch { batch, records }))
}
}
#[cfg(test)]
mod test {
use std::{fs::File, io::Write}; |
use fluvio_types::defaults::STORAGE_MAX_BATCH_SIZE;
use super::*;
#[test]
fn test_file() {
let path = temp_dir().join("pread.txt");
let mut file = File::create(&path).expect("create");
file.write_all(b"Hello, world!").expect("write");
file.sync_all().expect("flush");
drop(file);
let read_only = File::open(path).expect("open");
let fd = read_only.as_raw_fd();
let mut buf = vec![0; STORAGE_MAX_BATCH_SIZE as usize];
// let mut buf = BytesMut::with_capacity(64);
let bytes_read = pread(fd, &mut buf, 1).expect("");
println!("bytes read: {}", bytes_read);
assert!(bytes_read > 2);
}
} | use std::env::temp_dir;
use std::os::unix::io::AsRawFd; |
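# Illustrative sketch (standalone, not part of file_batch.rs above): the
# FileBatchIterator follows a common length-prefixed record pattern -- pread a
# fixed-size header at the current offset, decode the total length, pread the
# remainder, then advance. Generic Python version of the same idea; the header
# layout (8-byte offset + 4-byte payload length) is made up, not Fluvio's.
import os
import struct

HEADER_SIZE = 12  # hypothetical: big-endian i64 base offset + i32 payload length

def iter_records(fd, offset, end):
    while offset < end:
        header = os.pread(fd, HEADER_SIZE, offset)
        if len(header) < HEADER_SIZE:
            raise EOFError("not enough bytes for record header")
        base_offset, payload_len = struct.unpack(">qi", header)
        payload = os.pread(fd, payload_len, offset + HEADER_SIZE)
        if len(payload) < payload_len:
            raise EOFError("not enough bytes for record payload")
        offset += HEADER_SIZE + payload_len
        yield base_offset, payload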
alarm.py | """Special handling for exceptions, for the UI.
"""
def alarm(e):
| """
Write traceback into PBFALCON_ERRFILE (until we stop using pbfalcon).
Write a special JSON object expected by pbcommand.models.common.
"""
import datetime
import os
import traceback
import uuid
from ..io import serialize
tb = traceback.format_exc()
# pbfalcon wants us to write errs here.
errfile = os.environ.get('PBFALCON_ERRFILE')
if errfile:
with open(errfile, 'w') as ofs:
ofs.write(tb) # in python3, this will include the entire chain of exceptions
# this is propagated to SMRT Link UI
# see PacBioAlarm class in pbcommand.models.common for details -- nat
special = [
{
"exception": e.__class__.__name__,
"info": tb,
"message": str(e) + "\n" + str(e.__cause__),
"name": e.__class__.__name__,
"severity": "ERROR",
"owner": "python3",
"createdAt": datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
"id": str(uuid.uuid4())
}
]
# Technically, we should add "causes" recursively, but "info" will include the full chain anyway.
serialize('alarms.json', special) |
|
tool_test.go | // Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package base
import (
"net/url"
"testing"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
)
func TestEncodeMD5(t *testing.T) {
assert.Equal(t,
"3858f62230ac3c915f300c664312c63f",
EncodeMD5("foobar"),
)
}
func TestEncodeSha1(t *testing.T) {
assert.Equal(t,
"8843d7f92416211de9ebb963ff4ce28125932878",
EncodeSha1("foobar"),
)
}
func TestEncodeSha256(t *testing.T) {
assert.Equal(t,
"c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2",
EncodeSha256("foobar"),
)
}
func TestShortSha(t *testing.T) {
assert.Equal(t, "veryverylo", ShortSha("veryverylong"))
}
func TestBasicAuthDecode(t *testing.T) {
_, _, err := BasicAuthDecode("?")
assert.Equal(t, "illegal base64 data at input byte 0", err.Error())
user, pass, err := BasicAuthDecode("Zm9vOmJhcg==")
assert.NoError(t, err)
assert.Equal(t, "foo", user)
assert.Equal(t, "bar", pass)
}
func TestBasicAuthEncode(t *testing.T) {
assert.Equal(t, "Zm9vOmJhcg==", BasicAuthEncode("foo", "bar"))
}
// TODO: Test PBKDF2()
// TODO: Test VerifyTimeLimitCode()
// TODO: Test CreateTimeLimitCode()
func TestHashEmail(t *testing.T) {
assert.Equal(t,
"d41d8cd98f00b204e9800998ecf8427e",
HashEmail(""),
)
assert.Equal(t,
"353cbad9b58e69c96154ad99f92bedc7",
HashEmail("[email protected]"),
)
}
const gravatarSource = "https://secure.gravatar.com/avatar/"
func disableGravatar() {
setting.EnableFederatedAvatar = false
setting.LibravatarService = nil
setting.DisableGravatar = true
}
func enableGravatar(t *testing.T) {
setting.DisableGravatar = false
var err error
setting.GravatarSourceURL, err = url.Parse(gravatarSource)
assert.NoError(t, err)
}
func TestSizedAvatarLink(t *testing.T) {
disableGravatar()
assert.Equal(t, "/img/avatar_default.png",
SizedAvatarLink("[email protected]", 100))
enableGravatar(t)
assert.Equal(t,
"https://secure.gravatar.com/avatar/353cbad9b58e69c96154ad99f92bedc7?d=identicon&s=100",
SizedAvatarLink("[email protected]", 100),
)
}
func TestFileSize(t *testing.T) {
var size int64 = 512
assert.Equal(t, "512 B", FileSize(size))
size *= 1024
assert.Equal(t, "512 KiB", FileSize(size))
size *= 1024
assert.Equal(t, "512 MiB", FileSize(size))
size *= 1024
assert.Equal(t, "512 GiB", FileSize(size))
size *= 1024
assert.Equal(t, "512 TiB", FileSize(size))
size *= 1024
assert.Equal(t, "512 PiB", FileSize(size))
size *= 4
assert.Equal(t, "2.0 EiB", FileSize(size))
}
func TestSubtract(t *testing.T) {
toFloat64 := func(n interface{}) float64 {
switch v := n.(type) {
case int:
return float64(v)
case int8:
return float64(v)
case int16:
return float64(v)
case int32:
return float64(v)
case int64:
return float64(v)
case float32:
return float64(v)
case float64:
return v
default:
return 0.0
}
}
values := []interface{}{
int(-3),
int8(14),
int16(81),
int32(-156),
int64(1528),
float32(3.5),
float64(-15.348),
}
for _, left := range values {
for _, right := range values {
expected := toFloat64(left) - toFloat64(right)
sub := Subtract(left, right)
assert.InDelta(t, expected, sub, 1e-3)
}
}
}
func TestEllipsisString(t *testing.T) {
assert.Equal(t, "...", EllipsisString("foobar", 0))
assert.Equal(t, "...", EllipsisString("foobar", 1))
assert.Equal(t, "...", EllipsisString("foobar", 2))
assert.Equal(t, "...", EllipsisString("foobar", 3))
assert.Equal(t, "f...", EllipsisString("foobar", 4))
assert.Equal(t, "fo...", EllipsisString("foobar", 5))
assert.Equal(t, "foobar", EllipsisString("foobar", 6))
assert.Equal(t, "foobar", EllipsisString("foobar", 10))
}
func TestTruncateString(t *testing.T) {
assert.Equal(t, "", TruncateString("foobar", 0))
assert.Equal(t, "f", TruncateString("foobar", 1))
assert.Equal(t, "fo", TruncateString("foobar", 2))
assert.Equal(t, "foo", TruncateString("foobar", 3))
assert.Equal(t, "foob", TruncateString("foobar", 4))
assert.Equal(t, "fooba", TruncateString("foobar", 5))
assert.Equal(t, "foobar", TruncateString("foobar", 6))
assert.Equal(t, "foobar", TruncateString("foobar", 7))
}
func TestStringsToInt64s(t *testing.T) {
testSuccess := func(input []string, expected []int64) {
result, err := StringsToInt64s(input)
assert.NoError(t, err)
assert.Equal(t, expected, result)
}
testSuccess([]string{}, []int64{})
testSuccess([]string{"-1234"}, []int64{-1234})
testSuccess([]string{"1", "4", "16", "64", "256"},
[]int64{1, 4, 16, 64, 256})
_, err := StringsToInt64s([]string{"-1", "a", "$"})
assert.Error(t, err)
}
func TestInt64sToStrings(t *testing.T) {
assert.Equal(t, []string{}, Int64sToStrings([]int64{}))
assert.Equal(t,
[]string{"1", "4", "16", "64", "256"},
Int64sToStrings([]int64{1, 4, 16, 64, 256}),
)
}
func TestInt64sToMap(t *testing.T) |
func TestIsLetter(t *testing.T) {
assert.True(t, IsLetter('a'))
assert.True(t, IsLetter('e'))
assert.True(t, IsLetter('q'))
assert.True(t, IsLetter('z'))
assert.True(t, IsLetter('A'))
assert.True(t, IsLetter('E'))
assert.True(t, IsLetter('Q'))
assert.True(t, IsLetter('Z'))
assert.True(t, IsLetter('_'))
assert.False(t, IsLetter('-'))
assert.False(t, IsLetter('1'))
assert.False(t, IsLetter('$'))
}
func TestIsTextFile(t *testing.T) {
assert.True(t, IsTextFile([]byte{}))
assert.True(t, IsTextFile([]byte("lorem ipsum")))
}
func TestFormatNumberSI(t *testing.T) {
assert.Equal(t, "125", FormatNumberSI(int(125)))
assert.Equal(t, "1.3k", FormatNumberSI(int64(1317)))
assert.Equal(t, "21.3M", FormatNumberSI(21317675))
assert.Equal(t, "45.7G", FormatNumberSI(45721317675))
assert.Equal(t, "", FormatNumberSI("test"))
}
// TODO: IsImageFile(), currently no idea how to test
// TODO: IsPDFFile(), currently no idea how to test
| {
assert.Equal(t, map[int64]bool{}, Int64sToMap([]int64{}))
assert.Equal(t,
map[int64]bool{1: true, 4: true, 16: true},
Int64sToMap([]int64{1, 4, 16}),
)
} |
JoystickButton.py | # -*- coding: utf-8 -*-
"""
JoystickButton is a button with x/y values. When the button is depressed and the
mouse dragged, the x/y values change to follow the mouse.
When the mouse button is released, the x/y values change to 0,0 (rather like
letting go of the joystick).
"""
import initExample ## Add path to library (just for examples; you do not need this)
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
app = QtGui.QApplication([])
mw = QtGui.QMainWindow()
mw.resize(300,50)
mw.setWindowTitle('pyqtgraph example: JoystickButton')
cw = QtGui.QWidget()
mw.setCentralWidget(cw)
layout = QtGui.QGridLayout()
cw.setLayout(layout)
l1 = pg.ValueLabel(siPrefix=True, suffix='m')
l2 = pg.ValueLabel(siPrefix=True, suffix='m')
jb = pg.JoystickButton()
jb.setFixedWidth(30)
jb.setFixedHeight(30)
layout.addWidget(l1, 0, 0)
layout.addWidget(l2, 0, 1)
layout.addWidget(jb, 0, 2)
x = 0
y = 0
def update():
|
timer = QtCore.QTimer()
timer.timeout.connect(update)
timer.start(30)
#show() moved to end of file to get around this bug:
# https://bugreports.qt-project.org/browse/QTBUG-39019
mw.show()
## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_()
| global x, y, l1, l2, jb
dx, dy = jb.getState()
x += dx * 1e-3
y += dy * 1e-3
l1.setValue(x)
l2.setValue(y) |
test_negation.py | """Tests for the Neg Strategy"""
import axelrod as axl
from .test_player import TestPlayer
C, D = axl.Action.C, axl.Action.D
class TestNegation(TestPlayer):
name = "Negation"
player = axl.Negation
expected_classifier = {
"memory_depth": 1,
"stochastic": True,
"makes_use_of": set(),
"long_run_time": False,
"inspects_source": False,
"manipulates_source": False,
"manipulates_state": False,
}
def test_strategy(self):
# First move is random.
| actions = [(C, C), (D, D), (C, C)]
self.versus_test(
opponent=axl.Alternator(), expected_actions=actions, seed=1
)
actions = [(D, C), (D, D), (C, C)]
self.versus_test(
opponent=axl.Alternator(), expected_actions=actions, seed=2
)
actions = [(C, C), (D, C), (D, C)]
self.versus_test(
opponent=axl.Cooperator(), expected_actions=actions, seed=1
)
actions = [(D, D), (C, D), (C, D)]
self.versus_test(
opponent=axl.Defector(), expected_actions=actions, seed=2
) |
|
onboard_test.go | package app
import (
"bytes"
"path/filepath"
"runtime"
"testing"
log "github.com/sirupsen/logrus"
"github.com/jarcoal/httpmock"
"github.com/layer5io/meshery/mesheryctl/pkg/utils"
)
func TestOnboardCmd(t *testing.T) {
// setup current context
utils.SetupContextEnv(t)
// initialize mock server for handling requests
utils.StartMockery(t)
// create a test helper |
// get current directory
_, filename, _, ok := runtime.Caller(0)
if !ok {
t.Fatal("Not able to get current working directory")
}
currDir := filepath.Dir(filename)
fixturesDir := filepath.Join(currDir, "fixtures")
// test scenarios for fetching data
tests := []struct {
Name string
Args []string
ExpectedResponse string
URLs []utils.MockURL
Token string
ExpectError bool
}{
{
Name: "Onboard Application",
Args: []string{"onboard", "-f", filepath.Join(fixturesDir, "sampleApp.golden")},
ExpectedResponse: "onboard.output.golden",
URLs: []utils.MockURL{
{
Method: "POST",
URL: testContext.BaseURL + "/api/application",
Response: "onboard.applicationSave.response.golden",
ResponseCode: 200,
},
{
Method: "POST",
URL: testContext.BaseURL + "/api/application/deploy",
Response: "onboard.applicationdeploy.response.golden",
ResponseCode: 200,
},
},
Token: filepath.Join(fixturesDir, "token.golden"),
ExpectError: false,
},
{
Name: "Onboard Application with --skip-save",
Args: []string{"onboard", "-f", filepath.Join(fixturesDir, "sampleApp.golden"), "--skip-save"},
ExpectedResponse: "onboard.output.golden",
URLs: []utils.MockURL{
{
Method: "POST",
URL: testContext.BaseURL + "/api/application/deploy",
Response: "onboard.applicationdeploy.response.golden",
ResponseCode: 200,
},
},
Token: filepath.Join(fixturesDir, "token.golden"),
ExpectError: false,
},
}
// Run tests
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
for _, url := range tt.URLs {
// View api response from golden files
apiResponse := utils.NewGoldenFile(t, url.Response, fixturesDir).Load()
// mock response
httpmock.RegisterResponder(url.Method, url.URL,
httpmock.NewStringResponder(url.ResponseCode, apiResponse))
}
// set token
utils.TokenFlag = tt.Token
// Expected response
testdataDir := filepath.Join(currDir, "testdata")
golden := utils.NewGoldenFile(t, tt.ExpectedResponse, testdataDir)
// setting up log to grab logs
var buf bytes.Buffer
log.SetOutput(&buf)
utils.SetupLogrusFormatter()
AppCmd.SetArgs(tt.Args)
err := AppCmd.Execute()
if err != nil {
// if we're supposed to get an error
if tt.ExpectError {
// write it in file
if *update {
golden.Write(err.Error())
}
expectedResponse := golden.Load()
utils.Equals(t, expectedResponse, err.Error())
return
}
t.Error(err)
}
// response being printed in console
actualResponse := buf.String()
// write it in file
if *update {
golden.Write(actualResponse)
}
expectedResponse := golden.Load()
utils.Equals(t, expectedResponse, actualResponse)
})
}
// stop mock server
utils.StopMockery(t)
} | testContext := utils.NewTestHelper(t) |
index.js | const orm = require("./config/orm.js");
const inquirer = require("inquirer");
// View employees, view departments, view roles, add employee, add department, add role, update role, update manager,
// view employees by manager, delete employee, delete role, delete department, quit
// This function generates the top-level choices for the user. Upon selecting any of them, a new function is executed
// specific to that choice. Upon completion of the selected task, this function is called once again.
function | () {
console.log("Welcome to the Employee Tracker!\n")
inquirer.prompt({
type: "list",
message: "Choose what you would like to do",
choices: [
"View employees",
"View departments",
"View roles",
"Add employee",
"Add department",
"Add role",
"Update role",
"Update manager",
"Display employees by manager",
"Delete an employee",
"Delete a role",
"Delete a department",
"View utilized budget for a department",
"Quit"
],
name: "choice"
}).then(function ({ choice }) {
if (choice === "View employees") {
orm.viewEmployees()
.then(function () {
console.log("\n");
mainMenu();
});
} else if (choice === "View departments") {
orm.viewDepartments()
.then(function () {
console.log("\n");
mainMenu();
});
} else if (choice === "View roles") {
orm.viewRoles()
.then(function () {
console.log("\n");
mainMenu();
});
} else if (choice === "Add employee") {
addEmployeePrompt();
} else if (choice === "Add department") {
addDepartmentPrompt();
} else if (choice === "Add role") {
addRolePrompt();
} else if (choice === "Update role") {
updateRolePrompt();
} else if (choice === "Update manager") {
updateManagerPrompt();
} else if (choice === "Display employees by manager") {
displayByMgrPrompt();
} else if (choice === "Delete an employee") {
deleteEmployeePrompt();
} else if (choice === "Delete a role") {
deleteRolePrompt();
} else if (choice === "Delete a department") {
deleteDepartmentPrompt();
} else if (choice === "View utilized budget for a department") {
displayUtilizedBudgetPrompt();
} else {
orm.endConnection();
}
});
}
// Prompt user for information about new employee, calls ORM function to add it to the database
function addEmployeePrompt() {
orm.getEmployees()
.then(function (res) {
const managerArray = [];
for (let i = 0; i < res.length; i++) {
managerArray.push(res[i].name);
}
managerArray.push("none");
orm.getRoles()
.then(function (response) {
const roleTitleArray = [];
for (let i = 0; i < response.length; i++) {
roleTitleArray.push(response[i].title);
}
inquirer.prompt([{
type: "input",
message: "Enter employee's first name",
name: "firstName"
},
{
type: "input",
message: "Enter employee's last name",
name: "lastName"
},
{
type: "list",
message: "Select employee's role",
choices: roleTitleArray,
name: "role"
},
{
type: "list",
message: "Select employee's manager",
choices: managerArray,
name: "manager"
}]).then(function ({ firstName, lastName, role, manager }) {
const roleId = response[roleTitleArray.indexOf(role)].id;
if (manager === "none") {
orm.addEmployee(firstName, lastName, roleId)
.then(function () {
console.log("\n");
mainMenu();
});
} else {
const managerId = res[managerArray.indexOf(manager)].id;
orm.addEmployee(firstName, lastName, roleId, managerId)
.then(function () {
console.log("\n");
mainMenu();
});
}
});
});
});
}
// Prompts user for information needed to make new department, then calls ORM function to add it to the database
function addDepartmentPrompt() {
orm.getDepartments()
.then(function (response) {
const deptArray = [];
for (let i = 0; i < response.length; i++) {
deptArray.push(response[i].name);
}
inquirer.prompt({
type: "input",
message: "Enter the name of new department you'd like to add",
name: "deptName"
}).then(function ({ deptName }) {
if (deptArray.includes(deptName)) {
console.log("There is already a department with that name!\n");
mainMenu();
} else {
orm.addDepartment(deptName)
.then(function () {
console.log("\n");
mainMenu();
});
}
});
});
}
// Prompts user for information needed to make a new role, then calls ORM function to add it to the database
function addRolePrompt() {
orm.getRoles()
.then(function (roles) {
const roleArray = [];
for (let i = 0; i < roles.length; i++) {
roleArray.push(roles[i].title);
}
orm.getDepartments()
.then(function (deptArray) {
const deptNames = [];
for (let i = 0; i < deptArray.length; i++) {
deptNames.push(deptArray[i].name);
}
inquirer.prompt([{
type: "input",
message: "Enter the name of the role you would like to add",
name: "title"
},
{
type: "input",
message: "Enter the annual salary of the new role",
name: "salary"
},
{
type: "list",
message: "Select the department in which the new role will work",
choices: deptNames,
name: "department"
}]).then(function ({ title, salary, department }) {
const deptId = deptArray[deptNames.indexOf(department)].id;
if (roleArray.includes(title)) {
console.log("Error - that title already exists!\n");
mainMenu();
} else {
orm.addRole(title, salary, deptId)
.then(function () {
console.log("\n");
mainMenu();
});
}
});
});
});
}
// Grabs all employees, asks user which one they want to update, asks what role the employee should have, then calls ORM function to update the database
function updateRolePrompt() {
orm.getEmployees()
.then(function (res) {
const empArray = [];
for (let i = 0; i < res.length; i++) {
empArray.push(res[i].name);
}
orm.getRoles()
.then(function (response) {
const roleArray = [];
for (let i = 0; i < response.length; i++) {
roleArray.push(response[i].title);
}
inquirer.prompt([{
type: "list",
message: "Choose the employee whose role you'd like to update",
choices: empArray,
name: "employee"
},
{
type: "list",
message: "Select the employee's new role",
choices: roleArray,
name: "role"
}]).then(function ({ employee, role }) {
const empId = res[empArray.indexOf(employee)].id;
orm.updateRole(empId, role)
.then(function () {
console.log("\n");
mainMenu();
})
})
})
})
}
// Grabs all employees, asks user which one they want to update, asks what manager the employee should have, then calls ORM function to update the database
function updateManagerPrompt() {
orm.getEmployees()
.then(function (employees) {
const empArray = [];
for (let i = 0; i < employees.length; i++) {
empArray.push(employees[i].name);
}
inquirer.prompt([{
type: "list",
message: "Select the employee whose manager you would like to update",
choices: empArray,
name: "employee"
},
{
type: "list",
message: "Select the employee's new manager",
choices: empArray,
name: "manager"
}]).then(function ({ employee, manager }) {
if (employee === manager) {
console.log("Error - you cannot assign an employee to manage him/herself!");
mainMenu();
} else {
const empId = employees[empArray.indexOf(employee)].id;
const mgrId = employees[empArray.indexOf(manager)].id;
orm.updateManager(empId, mgrId)
.then(function () {
console.log("\n");
mainMenu();
});
}
});
});
}
// Grabs all employees, asks the user which one they want to see direct reports for, then calls ORM function to query database and display results
function displayByMgrPrompt() {
orm.getEmployees()
.then(function (employees) {
const empArray = [];
for (let i = 0; i < employees.length; i++) {
empArray.push(employees[i].name);
}
inquirer.prompt({
type: "list",
message: "Select the manager whose employees you would like to view",
choices: empArray,
name: "manager"
}).then(function ({ manager }) {
const mgrId = employees[empArray.indexOf(manager)].id;
orm.viewEmpsByMgr(mgrId)
.then(function () {
console.log("\n");
mainMenu();
});
});
});
}
// Grabs all employees, asks user which one they want to delete, then calls ORM function to delete it from the database
function deleteEmployeePrompt() {
orm.getEmployees()
.then(function (employees) {
const empArray = [];
for (let i = 0; i < employees.length; i++) {
empArray.push(employees[i].name);
}
inquirer.prompt({
type: "list",
message: "Which employee would you like to delete?",
choices: empArray,
name: "employee"
}).then(function ({ employee }) {
const empId = employees[empArray.indexOf(employee)].id;
orm.deleteRecord("employees", empId)
.then(function () {
console.log("\n");
mainMenu();
});
});
});
}
// Grabs all roles, asks user which one they want to delete, then calls ORM function to delete it from the database
function deleteRolePrompt() {
orm.getRoles()
.then(function (roles) {
const roleArray = [];
for (let i = 0; i < roles.length; i++) {
roleArray.push(roles[i].title);
}
inquirer.prompt({
type: "list",
message: "Which role would you like to delete?",
choices: roleArray,
name: "role"
}).then(function ({ role }) {
const roleId = roles[roleArray.indexOf(role)].id;
orm.deleteRecord("roles", roleId)
.then(function () {
console.log("\n");
mainMenu();
});
});
});
}
// Grabs all departments, asks user which one they want to delete, then calls ORM function to delete it from the database
function deleteDepartmentPrompt() {
orm.getDepartments()
.then(function (depts) {
const deptArray = [];
for (let i = 0; i < depts.length; i++) {
deptArray.push(depts[i].name);
}
inquirer.prompt({
type: "list",
message: "Which department would you like to delete?",
choices: deptArray,
name: "dept"
}).then(function ({ dept }) {
const deptId = depts[deptArray.indexOf(dept)].id;
orm.deleteRecord("departments", deptId)
.then(function () {
console.log("\n");
mainMenu();
});
});
});
}
// Grabs all departments, asks the user which one they want to see the sum of salaries for, then calls ORM function to query database and display results
function displayUtilizedBudgetPrompt() {
orm.getDepartments()
.then(function (depts) {
const deptArray = [];
for (let i = 0; i < depts.length; i++) {
deptArray.push(depts[i].name);
}
inquirer.prompt({
type: "list",
message: "For which department would you like to view the utilized budget?",
choices: deptArray,
name: "dept"
}).then(function ({ dept }) {
const deptId = depts[deptArray.indexOf(dept)].id;
orm.viewUtilizedBudget(deptId)
.then(function () {
console.log("\n");
mainMenu();
});
});
});
}
mainMenu(); | mainMenu |
other_hkdf.rs | // Testing against test vectors from https://www.kullo.net/blog/hkdf-sha-512-test-vectors/
#[cfg(test)]
mod other_hkdf {
extern crate hex;
extern crate orion;
use self::hex::decode;
use crate::kdf::hkdf_test_runner;
#[test]
fn test_case_1() {
let ikm = decode("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap();
let salt = decode("000102030405060708090a0b0c").unwrap();
let info = decode("f0f1f2f3f4f5f6f7f8f9").unwrap();
let expected_prk = decode(
"665799823737ded04a88e47e54a5890bb2c3d247c7a4254a8e61350723590a26c36238127d8661b88cf80ef802d57e2f7cebcf1e00e083848be19929c61b4237",
).unwrap();
let expected_okm = decode(
"832390086cda71fb47625bb5ceb168e4c8e26a1a16ed34d9fc7fe92c1481579338da362cb8d9f925d7cb",
)
.unwrap();
hkdf_test_runner(
Some(&expected_prk),
&expected_okm,
&salt,
&ikm,
&info,
expected_okm.len(),
true,
);
}
#[test]
fn test_case_2() {
let ikm = decode("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f").unwrap();
let salt = decode("606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeaf").unwrap();
let info = decode("b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff").unwrap();
let expected_prk = decode(
"35672542907d4e142c00e84499e74e1de08be86535f924e022804ad775dde27ec86cd1e5b7d178c74489bdbeb30712beb82d4f97416c5a94ea81ebdf3e629e4a",
).unwrap();
let expected_okm = decode(
"ce6c97192805b346e6161e821ed165673b84f400a2b514b2fe23d84cd189ddf1b695b48cbd1c8388441137b3ce28f16aa64ba33ba466b24df6cfcb021ecff235f6a2056ce3af1de44d572097a8505d9e7a93",
).unwrap();
hkdf_test_runner(
Some(&expected_prk),
&expected_okm,
&salt,
&ikm,
&info,
expected_okm.len(),
true,
);
}
#[test]
fn test_case_3() {
let ikm = decode("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap();
let salt = decode("").unwrap();
let info = decode("").unwrap();
let expected_prk = decode(
"fd200c4987ac491313bd4a2a13287121247239e11c9ef82802044b66ef357e5b194498d0682611382348572a7b1611de54764094286320578a863f36562b0df6",
).unwrap();
let expected_okm = decode(
"f5fa02b18298a72a8c23898a8703472c6eb179dc204c03425c970e3b164bf90fff22d04836d0e2343bac",
)
.unwrap();
hkdf_test_runner(
Some(&expected_prk),
&expected_okm,
&salt,
&ikm,
&info,
expected_okm.len(),
true,
);
}
#[test]
fn | () {
let ikm = decode("0b0b0b0b0b0b0b0b0b0b0b").unwrap();
let salt = decode("000102030405060708090a0b0c").unwrap();
let info = decode("f0f1f2f3f4f5f6f7f8f9").unwrap();
let expected_prk = decode(
"67409c9cac28b52ee9fad91c2fda999f7ca22e3434f0ae772863836568ad6a7f10cf113bfddd560129a594a8f52385c2d661d785d29ce93a11400c920683181d",
).unwrap();
let expected_okm = decode(
"7413e8997e020610fbf6823f2ce14bff01875db1ca55f68cfcf3954dc8aff53559bd5e3028b080f7c068",
)
.unwrap();
hkdf_test_runner(
Some(&expected_prk),
&expected_okm,
&salt,
&ikm,
&info,
expected_okm.len(),
true,
);
}
#[test]
fn test_case_5() {
let ikm = decode("0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c").unwrap();
let salt = decode("").unwrap();
let info = decode("").unwrap();
let expected_prk = decode(
"5346b376bf3aa9f84f8f6ed5b1c4f489172e244dac303d12f68ecc766ea600aa88495e7fb605803122fa136924a840b1f0719d2d5f68e29b242299d758ed680c",
).unwrap();
let expected_okm = decode(
"1407d46013d98bc6decefcfee55f0f90b0c7f63d68eb1a80eaf07e953cfc0a3a5240a155d6e4daa965bb",
)
.unwrap();
hkdf_test_runner(
Some(&expected_prk),
&expected_okm,
&salt,
&ikm,
&info,
expected_okm.len(),
true,
);
}
}
| test_case_4 |
perspective.unit.js | var expect = require('chai').expect;
var sinon = require('sinon');
var proxyquire = require('proxyquire');
var https = {
request: sinon.stub(),
};
var utils = require('../../../utils/chapi');
var Perspective = proxyquire('../../../components/perspective', {
'https': https,
'../utils/chapi.js': utils,
});
var EventEmitter = require('events');
describe('Perspective', function() {
var p;
describe('constructor', function() {
it('should call set_api_key if called with api_key', function() {
var spy = sinon.spy(Perspective.prototype, 'set_api_key');
var p = new Perspective('my-test-key');
expect(spy.callCount).to.equal(1);
expect(spy.args[0][0]).to.equal('my-test-key');
spy.restore();
});
});
describe('#_lookup_id', function() {
var list;
before(function() {
p = new Perspective();
list = sinon.stub(p, 'list');
});
beforeEach(function() {
list.reset();
});
it('should call #list with any given flags', function(done) {
var test_flags = {cache: true};
var test_name = 'test_name';
list.yields(null, {});
p._lookup_id(test_flags, test_name, (err, id) => {
expect(list.called).to.be.true;
expect(list.calledWith(test_flags)).to.be.true;
done();
});
});
it('should call the callback with an id on success', function(done) {
var test_flags = {cache: true};
var test_name = 'test_name';
list.yields(null, {
'1': {
name: 'one',
},
'2': {
name: 'two',
},
'3': {
name: test_name,
}
});
p._lookup_id(test_flags, test_name, (err, id) => {
expect(list.called).to.be.true;
expect(err).to.not.be.ok;
expect(id).to.equal('3');
done();
});
});
it('should call the callback with an error on failure', function(done) {
var test_flags = {cache: true};
var test_name = 'test_name';
var error = new Error();
list.yields(error);
p._lookup_id(test_flags, test_name, (err, id) => {
expect(list.called).to.be.true;
expect(err).to.equal(error);
done();
});
});
after(function() {
list.restore();
});
});
describe('#_lookup_group_id', function() {
var list_groups;
before(function() {
p = new Perspective();
list_groups = sinon.stub(p, 'list_groups');
});
beforeEach(function() {
list_groups.reset();
});
it('should call the callback with an id when a matching group is found', function(done) {
var pers = {};
var group_name = 'test';
list_groups.yields(null, [
{
name: 'test',
ref_id: '1234',
},
]);
p._lookup_group_id(pers, group_name, (err, id) => {
expect(err).to.not.be.ok;
expect(id).to.equal('1234');
done();
});
});
it('matching should be case insensitive', function(done) {
var pers = {};
var group_name = 'TEST';
list_groups.yields(null, [
{
name: 'test',
ref_id: '1234',
},
]);
p._lookup_group_id(pers, group_name, (err, id) => {
expect(err).to.not.be.ok;
expect(id).to.equal('1234');
done();
});
});
it('should call the callback with the group_name if no matching group was found', function(done) {
var pers = {};
var group_name = 'test';
list_groups.yields(null, []);
p._lookup_group_id(pers, group_name, (err, id) => {
expect(err).to.not.be.ok;
expect(id).to.equal(group_name);
done();
});
});
it('should call the callback with an error on failure', function(done) {
var pers = {};
var group_name = 'test';
var error = new Error();
list_groups.yields(error);
p._lookup_group_id(pers, group_name, (err, id) => {
expect(err).to.equal(error);
done();
});
});
after(function() {
list_groups.restore();
});
});
describe('#list_groups', function() {
var get;
before(function() {
p = new Perspective();
get = sinon.stub(p, 'get');
});
beforeEach(function() {
get.reset();
});
it('should call the callback with an array on success', function(done) {
var test_groups = [];
var pers = {
constants: [
{
type: 'Static Group',
list: test_groups,
},
],
};
p.list_groups(pers, (err, groups) => {
expect(err).to.not.be.ok;
expect(groups).to.equal(test_groups);
done();
});
});
it('matching of "type: group" should be case insensitive', function(done) {
var test_groups = [];
var pers = {
constants: [
{
type: 'STATIC GROUP',
list: test_groups,
},
],
};
p.list_groups(pers, (err, groups) => {
expect(err).to.not.be.ok;
expect(groups).to.equal(test_groups);
done();
});
});
it('should get the perspective if an id is given in place of the perspective', function(done) {
var test_groups = [];
var pers = {
constants: [
{
type: 'Static Group',
list: test_groups,
},
],
};
get.yields(null, pers);
p.list_groups('1234', (err, groups) => {
expect(err).to.not.be.ok;
expect(groups).to.equal(test_groups);
done();
});
});
it('should call the callback with any errors on failure', function(done) {
var test_groups = [];
var error = new Error();
get.yields(error);
p.list_groups('1234', (err, groups) => {
expect(err).to.equal(error);
done();
});
});
after(function() {
get.restore();
});
});
describe('#add_to_group', function() {
var get, _lookup_group_id, _get_rule, update;
before(function() {
p = new Perspective();
get = sinon.stub(p, 'get');
_lookup_group_id = sinon.stub(p, '_lookup_group_id');
_get_rule = sinon.stub(p, '_get_rule');
update = sinon.stub(p, 'update');
});
beforeEach(function() {
get.reset();
_lookup_group_id.reset();
_get_rule.reset();
update.reset();
});
it('should call the callback with an updated perspective object', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = ['1234'];
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(null, pers);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.not.be.ok;
expect(perspective).to.eql({
constants: [
{
type: 'group',
list: [
{
name: group_name,
ref_id: group_id,
}
],
},
],
rules: [
{
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [
{
asset_ref: accts[0],
op: '=',
val: accts[0],
},
],
},
},
],
});
done();
});
});
it('should call the callback with an error if #get fails', function(done) {
var group_name = 'test';
var accts = ['1234'];
var error = new Error();
var pers = '2345';
get.yields(error);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.equal(error);
done();
});
});
it('should call the callback with an error if #_lookup_group_id fails', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = ['1234'];
var error = new Error();
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(error);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.equal(error);
done();
});
});
it('should call the callback with an error if #update fails', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = ['1234'];
var error = new Error();
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(error);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.equal(error);
done();
});
});
it('should accept an id instead of a perspective object', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = ['1234'];
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
get.yields(null, pers);
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(null, pers);
p.add_to_group('2345', accts, group_name, (err, perspective) => {
expect(err).to.not.be.ok;
done();
});
});
it('should accept an account id instead of an array of account ids', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = '1234';
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(null, pers);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.not.be.ok;
expect(perspective).to.eql({
constants: [
{
type: 'group',
list: [
{
name: group_name,
ref_id: group_id,
}
],
},
],
rules: [
{
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [
{
asset_ref: accts,
op: '=',
val: accts,
},
],
},
},
],
});
done();
});
});
it('should accept an account instead of an array of account ids', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = {
id: '1234',
};
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(null, pers);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.not.be.ok;
expect(perspective).to.eql({
constants: [
{
type: 'group',
list: [
{
name: group_name,
ref_id: group_id,
}
],
},
],
rules: [
{
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [
{
asset_ref: accts.id,
op: '=',
val: accts.id,
},
],
},
},
],
});
done();
});
});
it('should accept an array of accounts instead of an array of account ids', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = [
{
id: '1234',
},
];
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(null, pers);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.not.be.ok;
expect(perspective).to.eql({
constants: [
{
type: 'group',
list: [
{
name: group_name,
ref_id: group_id,
}
],
},
],
rules: [
{
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [
{
asset_ref: accts[0].id,
op: '=',
val: accts[0].id,
},
],
},
},
],
});
done();
});
});
it('should accept an array containing multiple accounts/ids', function(done) {
var group_name = 'test';
var group_id = '5678';
var accts = ['1234', '7890'];
var test_groups = [
{
name: group_name,
ref_id: group_id,
}
];
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
constants: [
{
type: 'group',
list: test_groups,
},
],
rules: [
rule,
],
};
_lookup_group_id.yields(null, group_id);
_get_rule.returns(rule);
update.yields(null, pers);
p.add_to_group(pers, accts, group_name, (err, perspective) => {
expect(err).to.not.be.ok;
expect(perspective).to.eql({
constants: [
{
type: 'group',
list: [
{
name: group_name,
ref_id: group_id,
}
],
},
],
rules: [
{
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [
{
asset_ref: accts[0],
op: '=',
val: accts[0],
},
{
asset_ref: accts[1],
op: '=',
val: accts[1],
},
],
combine_with: 'OR',
},
},
],
});
done();
});
});
after(function() {
get.restore();
_lookup_group_id.restore();
_get_rule.restore();
update.restore();
});
});
describe('#_get_rule', function() {
before(function() {
p = new Perspective();
});
it('should return the appropriate rule when it exists', function() {
var group_id = '1234';
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
rules: [
rule,
{
asset: 'AwsAccount',
to: '5678',
type: 'filter',
condition: {
clauses: [],
},
},
],
};
var test_rule = p._get_rule(pers, group_id);
expect(test_rule).to.equal(rule);
});
it('should NOT return a rule if the rule has a defined from field', function() {
var group_id = '1234';
var rule = {
asset: 'AwsAccount',
to: group_id,
from: '4567',
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
rules: [
rule,
{
asset: 'AwsAccount',
to: '5678',
type: 'filter',
condition: {
clauses: [],
},
},
],
};
var test_rule = p._get_rule(pers, group_id);
expect(test_rule).to.not.equal(rule);
});
it('should return a new rule if the perspective has no rules', function() {
var group_id = '1234';
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
rules: [],
};
| expect(pers.rules).to.contain(rule);
});
it('should return a new rule if no matching rule is found', function() {
var group_id = '1234';
var rule = {
asset: 'AwsAccount',
to: group_id,
type: 'filter',
condition: {
clauses: [],
},
};
var pers = {
rules: [
{
asset: 'AwsAccount',
to: '5678',
type: 'filter',
condition: {
clauses: [],
},
},
],
};
var test_rule = p._get_rule(pers, group_id);
expect(test_rule).to.eql(rule);
expect(pers.rules).to.contain(rule);
});
});
describe('#set_api_key', function() {
it('should set the api key', function() {
var p = new Perspective();
var str = 'my-new-api-key';
p.set_api_key(str);
expect(p._api_key).to.equal(str);
});
});
describe('#remove_prev_refs', () => {
const account_ref_id = '1234';
const pers = {
rules: [
{
asset: 'AwsAccount',
to: '5678',
type: 'filter',
condition: {
clauses: [
{
asset_ref: '1234',
},
{
asset_ref: '2345',
}
],
},
},
{
asset: 'AwsAccount',
to: '5678',
type: 'filter',
condition: {
clauses: [
{
asset_ref: '1234',
}
],
},
},
]
};
const persOutput = {
rules: [
{
asset: 'AwsAccount',
to: '5678',
type: 'filter',
condition: {
clauses: [
{
asset_ref: '2345',
}
],
},
},
]
};
before(function() {
p = new Perspective();
});
it('should remove any clauses that contain refs to the account and then remove empty rules', (done) => {
p.remove_prev_refs({ pers, account_ref_id }, (err, data) => {
expect(data).to.be.eql(persOutput);
done();
});
});
})
describe('#list', function() {
before(function() {
p = new Perspective();
});
it('should call #_send_request with an options object at least once', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(typeof options).to.equal('object');
done();
});
p.list(function(err, json) {});
request.restore();
});
it('should use the GET http method', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.method).to.equal('GET');
done();
});
p.list(function(err, json) {});
request.restore();
});
it('should call the callback with an error if #_send_request fails', function(done) {
var error = new Error('test');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(error);
});
p.list(function(err, json) {
expect(err).to.equal(error);
done();
});
request.restore();
});
it('should call the callback with an error if utils.set_cache fails', function(done) {
var error = new Error('test');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, {});
});
var set_cache = sinon.stub(utils, 'set_cache', function(cache_name, cache, cb) {
cb(error);
});
p.list(function(err, json) {
expect(err).to.equal(error);
done();
});
set_cache.restore();
request.restore();
});
it('should call utils.find_cache if cache flag is true', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb();
});
var find_cache = sinon.stub(utils, 'find_cache', function(cache_name, cb) {
cb(null, 'test');
});
p.list({cache: true}, function(err, json) {
expect(find_cache.called).to.be.true;
find_cache.restore();
request.restore();
done();
});
});
it('should return any errors given by utils.find_cache', function(done) {
var error = new Error('err');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb();
});
var find_cache = sinon.stub(utils, 'find_cache', function(cache_name, cb) {
cb(error);
});
p.list({cache: true}, function(err, json) {
expect(err).to.equal(error);
find_cache.restore();
request.restore();
done();
});
});
it('should call itself without flags if utils.find_cache finds nothing', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb();
});
var find_cache = sinon.stub(utils, 'find_cache', function(cache_name, cb) {
cb(null, null);
});
var set_cache = sinon.stub(utils, 'set_cache', function(cache_name, cache, cb) {
cb(null, cache);
});
var callback = function(err, json) {
expect(spy.callCount).to.equal(2);
expect(spy.calledWithExactly(callback)).to.be.true;
find_cache.restore();
set_cache.restore();
request.restore();
spy.restore();
done();
};
var spy = sinon.spy(p, 'list');
p.list({cache: true}, callback);
});
it('should call utils.set_cache if cache flag is not true', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb();
});
var set_cache = sinon.stub(utils, 'set_cache', function(cache_name, cache, cb) {
cb(null, cache);
});
p.list(function(err, json) {
expect(set_cache.called).to.be.true;
set_cache.restore();
request.restore();
done();
});
});
});
describe('#get', function() {
before(function() {
p = new Perspective();
});
it('should call #_send_request with an options object at least once', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(typeof options).to.equal('object');
done();
});
p.get(1, function(err, json) {});
request.restore();
});
it('should call the callback with the schema object from the https request', function(done) {
var test_obj = {
schema: {
test: 'test',
},
};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, test_obj);
});
p.get(1, function(err, json) {
expect(json).to.equal(test_obj.schema);
done();
});
request.restore();
});
it('should include the id in the URL', function(done) {
var id = 14562347657632460435;
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.path).to.match(new RegExp('/' + id));
done();
});
p.get(id, function(err, json) {});
request.restore();
});
it('should use the GET http method', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.method).to.equal('GET');
done();
});
p.get(1, function(err, json) {});
request.restore();
});
it('should call the callback with an error if #_send_request fails', function(done) {
var error = new Error('test');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(error);
});
p.get(1, function(err, json) {
expect(err).to.equal(error);
done();
});
request.restore();
});
it('should call #_lookup_id if the id given is an account name', function(done) {
var _lookup_id = sinon.stub(p, '_lookup_id', function(flags, id, cb) {
cb(null, '1234');
});
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, {schema: {}});
});
p.get('test', function(err, json) {
expect(_lookup_id.called).to.be.true;
request.restore();
_lookup_id.restore();
done();
});
});
it('should return any errors from #_lookup_id', function(done) {
var error = new Error('err');
var _lookup_id = sinon.stub(p, '_lookup_id', function(flags, id, cb) {
cb(error);
});
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, {schema: {}});
});
p.get('test', function(err, json) {
expect(_lookup_id.called).to.be.true;
expect(err).to.equal(error);
request.restore();
_lookup_id.restore();
done();
});
});
it('should pass any flags to #_lookup_id when #_lookup_id gets called', function(done) {
var test_flags = {cache: true};
var _lookup_id = sinon.stub(p, '_lookup_id', function(flags, id, cb) {
expect(test_flags).to.eql(flags);
cb(null, '1234');
});
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, {schema: {}});
});
p.get(test_flags, 'test', function(err, json) {
expect(_lookup_id.called).to.be.true;
request.restore();
_lookup_id.restore();
done();
});
});
});
describe('#create', function() {
before(function() {
p = new Perspective();
});
it('should call #_send_request with an options object at least once', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(typeof options).to.equal('object');
done();
});
p.create({}, function(err, json) {});
request.restore();
});
it('should use the POST http method', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.method).to.equal('POST');
done();
});
p.create({}, function(err, json) {});
request.restore();
});
it('should call the callback with an error if #_send_request fails', function(done) {
var error = new Error('test');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(error);
});
p.create({}, function(err, json) {
expect(err).to.equal(error);
done();
});
request.restore();
});
it('should wrap the new perspective in an object under the "schema" field', function(done) {
var obj = {test: 'test'};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(JSON.parse(send_data).schema).to.eql(obj);
done();
});
p.create(obj, function(err, json) {});
request.restore();
});
it('should not wrap the new perspective in "schema" again if it contains the schema field already', function(done) {
var obj = {schema: {test: 'test'}};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(JSON.parse(send_data)).to.eql(obj);
done();
});
p.create(obj, function(err, json) {});
request.restore();
});
it('should return any object returned by #_send_request', function(done) {
var test_json = {
test: 'test',
};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, test_json);
});
p.create({}, function(err, json) {
expect(json).to.equal(test_json);
done();
});
request.restore();
});
});
describe('#update', function() {
before(function() {
p = new Perspective();
});
it('should call #_send_request with an options object at least once', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(typeof options).to.equal('object');
done();
});
p.update({id: 1, constants: [{type: 'test'}]}, function(err, json) {});
request.restore();
});
it('should use the PUT http method', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.method).to.equal('PUT');
done();
});
p.update({id: 1, constants: [{type: 'test'}]}, function(err, json) {});
request.restore();
});
it('should include the id in the URL', function(done) {
var id = 14562347657632460435;
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.path).to.match(new RegExp('/' + id));
done();
});
p.update({id: id, constants: [{type: 'test'}]}, function(err, json) {});
request.restore();
});
it('should parse out the constant type "Version" from all schemas', function() {
var obj = {schema: {constants: [{type: 'Version'},{type: 'Static Group'}]}};
var parsedObj = {schema: {constants: [{type: 'Static Group'}]}};
var request = sinon.stub(utils, 'send_request')
p.update(obj, function(err, json) {});
expect(request.args[0][1]).to.equal(JSON.stringify(parsedObj));
request.restore();
});
it('should call the callback with an error if #_send_request fails', function(done) {
var error = new Error('test');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(error);
});
p.update({id: 1, constants: [{type: 'test'}]}, function(err, json) {
expect(err).to.equal(error);
done();
});
request.restore();
});
it('should wrap the new perspective in an object under the "schema" field', function(done) {
var obj = {test: 'test', constants: [{type: 'test'}]};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(JSON.parse(send_data).schema).to.eql(obj);
done();
});
p.update(obj, function(err, json) {});
request.restore();
});
it('should not wrap the new perspective in "schema" again if it contains the schema field already', function(done) {
var obj = {schema: {constants: [{type: 'test'}]}};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(JSON.parse(send_data)).to.eql(obj);
done();
});
p.update(obj, function(err, json) {});
request.restore();
});
it('should return any object returned by #_send_request', function(done) {
var test_json = {
test: 'test',
};
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, test_json);
});
p.update({id: 1, constants: [{type: 'test'}]}, function(err, json) {
expect(json).to.equal(test_json);
done();
});
request.restore();
});
});
describe('#destroy', function() {
before(function() {
p = new Perspective();
});
it('should call #_send_request with an options object at least once', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(typeof options).to.equal('object');
done();
});
p.destroy(1, function(err, json) {});
request.restore();
});
it('should use the DELETE http method', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.method).to.equal('DELETE');
done();
});
p.destroy(1, function(err, json) {});
request.restore();
});
it('should include the id in the URL', function(done) {
var id = 14562347657632460435;
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.path).to.match(new RegExp('/' + id));
done();
});
p.destroy(id, function(err, json) {});
request.restore();
});
it('should call the callback with an error if #_send_request fails', function(done) {
var error = new Error('test');
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(error);
});
p.destroy(1, function(err, json) {
expect(err).to.equal(error);
done();
});
request.restore();
});
it('should return "perspective destroyed" on success', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null);
});
p.destroy(1, function(err, str) {
expect(str).to.equal('perspective destroyed');
done();
});
request.restore();
});
it('should force delete if the force flag is set true', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.path).to.match(/.+force=true/);
done();
});
p.destroy({force: true}, 1, function(err, json) {});
request.restore();
});
it('should hard delete if the hard_delete flag is set true', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
expect(options.path).to.match(/.+force=true/);
expect(options.path).to.match(/.+hard_delete=true/);
done();
});
p.destroy({hard_delete: true}, 1, function(err, json) {});
request.restore();
});
it('should treat the second arg as callback if it\'s a function', function(done) {
var request = sinon.stub(utils, 'send_request', function(options, send_data, cb) {
cb(null, {});
});
p.destroy(1, function(err, json) {
done();
});
request.restore();
});
});
}); | var test_rule = p._get_rule(pers, group_id);
expect(test_rule).to.eql(rule); |
connectivity_information.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectivityInformation(Model):
"""Information on the connectivity status.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar hops: List of hops between the source and the destination.
:vartype hops:
list[~azure.mgmt.network.v2017_03_01.models.ConnectivityHop]
:ivar connection_status: The connection status. Possible values include:
'Unknown', 'Connected', 'Disconnected', 'Degraded'
:vartype connection_status: str or
~azure.mgmt.network.v2017_03_01.models.ConnectionStatus
:ivar avg_latency_in_ms: Average latency in milliseconds.
:vartype avg_latency_in_ms: int
:ivar min_latency_in_ms: Minimum latency in milliseconds.
:vartype min_latency_in_ms: int
:ivar max_latency_in_ms: Maximum latency in milliseconds.
:vartype max_latency_in_ms: int
:ivar probes_sent: Total number of probes sent.
:vartype probes_sent: int
:ivar probes_failed: Number of failed probes.
:vartype probes_failed: int
"""
_validation = {
'hops': {'readonly': True},
'connection_status': {'readonly': True},
'avg_latency_in_ms': {'readonly': True},
'min_latency_in_ms': {'readonly': True},
'max_latency_in_ms': {'readonly': True},
'probes_sent': {'readonly': True},
'probes_failed': {'readonly': True},
}
_attribute_map = {
'hops': {'key': 'hops', 'type': '[ConnectivityHop]'},
'connection_status': {'key': 'connectionStatus', 'type': 'str'},
'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'},
'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'},
'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'},
'probes_sent': {'key': 'probesSent', 'type': 'int'},
'probes_failed': {'key': 'probesFailed', 'type': 'int'},
}
def __init__(self):
| self.hops = None
self.connection_status = None
self.avg_latency_in_ms = None
self.min_latency_in_ms = None
self.max_latency_in_ms = None
self.probes_sent = None
self.probes_failed = None |
|
database.go | // Copyright 2017 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package database defines the Clair's models and a common interface for
// database implementations.
package database
import (
"errors"
"fmt"
"time"
"github.com/coreos/clair/pkg/pagination"
)
var (
// ErrBackendException is an error that occurs when the database backend
// does not work properly (ie. unreachable).
ErrBackendException = errors.New("database: an error occurred when querying the backend")
// ErrInconsistent is an error that occurs when a database consistency check
// fails (i.e. when an entity which is supposed to be unique is detected
// twice)
ErrInconsistent = errors.New("database: inconsistent database")
// ErrInvalidParameters is an error that occurs when the parameters are not valid.
ErrInvalidParameters = errors.New("database: parameters are not valid")
// ErrMissingEntities is an error that occurs when an associated immutable
// entity doesn't exist in the database. This error can indicate a wrong
// implementation or corrupted database.
ErrMissingEntities = errors.New("database: associated immutable entities are missing in the database")
)
// RegistrableComponentConfig is a configuration block that can be used to
// determine which registrable component should be initialized and pass custom
// configuration to it.
type RegistrableComponentConfig struct {
Type string
Options map[string]interface{}
}
var drivers = make(map[string]Driver)
// Driver is a function that opens a Datastore specified by its database driver
// type and specific configuration.
type Driver func(RegistrableComponentConfig) (Datastore, error)
// Register makes a Driver available by the provided name.
//
// If this function is called twice with the same name or if the Driver is
// nil, it panics.
func Register(name string, driver Driver) {
if driver == nil {
panic("database: could not register nil Driver")
}
if _, dup := drivers[name]; dup {
panic("database: could not register duplicate Driver: " + name)
}
drivers[name] = driver
}
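// A minimal usage sketch; "pgsql" and openPgSQL are hypothetical names, not
// part of this package. A backend registers itself (typically in an init
// function) and a caller then opens it through Open with a config block:
//
//	func init() {
//		database.Register("pgsql", openPgSQL) // openPgSQL has the Driver signature
//	}
//
//	store, err := database.Open(database.RegistrableComponentConfig{
//		Type:    "pgsql",
//		Options: map[string]interface{}{"source": "postgres://..."},
//	})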
// Open opens a Datastore specified by a configuration.
func Open(cfg RegistrableComponentConfig) (Datastore, error) |
// Session contains the required operations on a persistent data store for a
// Clair deployment.
//
// Session is started by Datastore.Begin and terminated with Commit or Rollback.
// Besides Commit and Rollback, other functions cannot be called after the
// session is terminated.
// Any function is not guaranteed to be called successfully if there's a session
// failure.
type Session interface {
// Commit commits changes to datastore.
//
// Commit call after Rollback does no-op.
Commit() error
// Rollback drops changes to datastore.
//
// Rollback call after Commit does no-op.
Rollback() error
// UpsertAncestry inserts or replaces an ancestry and its namespaced
// features and processors used to scan the ancestry.
UpsertAncestry(Ancestry) error
// FindAncestry retrieves an ancestry with all detected
// namespaced features. If the ancestry is not found, return false.
FindAncestry(name string) (ancestry Ancestry, found bool, err error)
// PersistDetector inserts a slice of detectors if not in the database.
PersistDetectors(detectors []Detector) error
// PersistFeatures inserts a set of features if not in the database.
PersistFeatures(features []Feature) error
// PersistNamespacedFeatures inserts a set of namespaced features if not in
// the database.
PersistNamespacedFeatures([]NamespacedFeature) error
// CacheAffectedNamespacedFeatures relates the namespaced features with the
// vulnerabilities affecting these features.
//
// NOTE(Sida): it's not necessary for every database implementation and so
// this function may have a better home.
CacheAffectedNamespacedFeatures([]NamespacedFeature) error
// FindAffectedNamespacedFeatures retrieves a set of namespaced features
// with affecting vulnerabilities.
FindAffectedNamespacedFeatures(features []NamespacedFeature) ([]NullableAffectedNamespacedFeature, error)
// PersistNamespaces inserts a set of namespaces if not in the database.
PersistNamespaces([]Namespace) error
// PersistLayer appends a layer's content in the database.
//
// If any feature, namespace, or detector is not in the database, it returns not found error.
PersistLayer(hash string, features []LayerFeature, namespaces []LayerNamespace, detectedBy []Detector) error
// FindLayer returns a layer with all detected features and
// namespaces.
FindLayer(hash string) (layer Layer, found bool, err error)
// InsertVulnerabilities inserts a set of UNIQUE vulnerabilities with
// affected features into database, assuming that all vulnerabilities
// provided are NOT in database and all vulnerabilities' namespaces are
// already in the database.
InsertVulnerabilities([]VulnerabilityWithAffected) error
// FindVulnerability retrieves a set of Vulnerabilities with affected
// features.
FindVulnerabilities([]VulnerabilityID) ([]NullableVulnerability, error)
// DeleteVulnerability removes a set of Vulnerabilities assuming that the
// requested vulnerabilities are in the database.
DeleteVulnerabilities([]VulnerabilityID) error
// InsertVulnerabilityNotifications inserts a set of unique vulnerability
// notifications into datastore, assuming that they are not in the database.
InsertVulnerabilityNotifications([]VulnerabilityNotification) error
// FindNewNotification retrieves a notification, which has never been
// notified or notified before a certain time.
FindNewNotification(notifiedBefore time.Time) (hook NotificationHook, found bool, err error)
// FindVulnerabilityNotification retrieves a vulnerability notification with
// affected ancestries affected by old or new vulnerability.
//
// Because the number of affected ancestries may be large, they are paginated
// and their pages are specified by the pagination token, which should be
// considered first page when it's empty.
FindVulnerabilityNotification(name string, limit int, oldVulnerabilityPage pagination.Token, newVulnerabilityPage pagination.Token) (noti VulnerabilityNotificationWithVulnerable, found bool, err error)
// MarkNotificationAsRead marks a Notification as notified now, assuming
// the requested notification is in the database.
MarkNotificationAsRead(name string) error
// DeleteNotification removes a Notification in the database.
DeleteNotification(name string) error
// UpdateKeyValue stores or updates a simple key/value pair.
UpdateKeyValue(key, value string) error
// FindKeyValue retrieves a value from the given key.
FindKeyValue(key string) (value string, found bool, err error)
// Lock creates or renews a Lock in the database with the given name, owner
// and duration.
//
// After the specified duration, the Lock expires by itself if it hasn't been
// unlocked, and thus, let other users create a Lock with the same name.
// However, the owner can renew its Lock by setting renew to true.
// Lock should not block; it should instead return whether the Lock has been
// successfully acquired/renewed. If it's the case, the expiration time of
// that Lock is returned as well.
Lock(name string, owner string, duration time.Duration, renew bool) (success bool, expiration time.Time, err error)
// Unlock releases an existing Lock.
Unlock(name, owner string) error
// FindLock returns the owner of a Lock specified by the name, and its
// expiration time if it exists.
FindLock(name string) (owner string, expiration time.Time, found bool, err error)
}
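// A minimal sketch of how a caller might use Lock/Unlock for exclusive work;
// the lock name "updater" and owner "worker-1" are hypothetical values, not
// part of this package:
//
//	acquired, expiration, err := session.Lock("updater", "worker-1", 10*time.Minute, false)
//	if err == nil && acquired {
//		defer session.Unlock("updater", "worker-1")
//		// do exclusive work, calling Lock again with renew=true before expiration
//		_ = expiration
//	}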
// Datastore represents a persistent data store
type Datastore interface {
// Begin starts a session to change.
Begin() (Session, error)
// Ping returns the health status of the database.
Ping() bool
// Close closes the database and frees any allocated resource.
Close()
}
| {
driver, ok := drivers[cfg.Type]
if !ok {
return nil, fmt.Errorf("database: unknown Driver %q (forgotten configuration or import?)", cfg.Type)
}
return driver(cfg)
} |
run_sequence.py | """Defines the Ewa object which interfaces with Ewald"""
import os
import subprocess
import time
import sys
import fromage.io.edit_file as ef
import fromage.io.read_file as rf
from fromage.scripts.fro_assign_charges import assign_charges
class RunSeq(object):
"""
Class which sets up the order of operations for preparing the calculation
Attributes
----------
region_1 : Mol object
The atoms in the central molecule
cell : Mol object
The atoms in the unit cell
inputs : dict
The input keywords
mode : str
        String summarising the kind of run sequence required. Options are:
noew_nosc : EC
noew_sc : SC-EC
ew_nosc : EEC
ew_sc : SC-EEC
"""
def __init__(self, region_1, cell, inputs):
self.region_1 = region_1
self.cell = cell
self.inputs = inputs
if self.inputs["ewald"]:
pref = "ew_"
else:
pref = "noew_"
if self.inputs["self_consistent"]:
post = "sc"
else:
post = "nosc"
self.mode = pref + post
# dirs
self.here = os.getcwd()
self.ewald_path = os.path.join(self.here,"ewald/")
self.out_file = open("prep.out","a")
return
def write_out(self,string):
self.out_file.write(string)
self.out_file.flush()
return
def make_region_2(self):
"""
Get region 2 Mols with different charges
Returns
-------
        shell_low : Mol object
            Region 2 molecules with low level of theory charges
        shell_high : Mol object
            Region 2 molecules with high level of theory charges
"""
if self.inputs["target_shell"]:
shell_high = rf.mol_from_file(self.inputs["target_shell"])
self.write_out("Outer region read in with " + str(len(shell_high)) + " atoms.\n")
high_level_pop_mol = rf.mol_from_gauss(self.inputs["high_pop_file"], pop=self.inputs["high_pop_method"])
shell_high.populate(high_level_pop_mol)
else:
shell_high = self.cell.make_cluster(self.inputs["clust_rad"], central_mol = self.region_1, mode = self.inputs["clust_mode"])
for atom_i in self.region_1:
for atom_j in shell_high:
if atom_i.very_close(atom_j):
shell_high.remove(atom_j)
break
self.write_out("Outer region generated with " + str(len(shell_high)) + " atoms.\n")
low_level_pop_mol = rf.mol_from_gauss(self.inputs["low_pop_file"], pop=self.inputs["low_pop_method"])
shell_low = shell_high.copy()
shell_low.populate(low_level_pop_mol)
return shell_low, shell_high
def run_ewald(self, calc_name=None):
if calc_name is None:
calc_name = self.inputs["name"]
if not os.path.exists(self.ewald_path):
os.makedirs(self.ewald_path)
os.chdir(self.ewald_path)
# no stdout
FNULL = open(os.devnull, 'w')
ef.write_uc(calc_name + ".uc", self.inputs["vectors"], self.inputs["an"], self.inputs["bn"], self.inputs["cn"], self.cell)
ef.write_qc(calc_name + ".qc", self.region_1)
ef.write_ew_in(calc_name, "ewald.in." + calc_name, self.inputs["nchk"], self.inputs["nat"])
ef.write_seed()
# run Ewald
self.write_out("Ewald calculation started\n")
ew_start = time.time()
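        # the FRO_EWALD environment variable should point to the Ewald executable
        # (it is expanded by the shell in the call below)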
subprocess.call("${FRO_EWALD} < ewald.in." + calc_name, stdout=FNULL, shell=True)
ew_end = time.time()
self.write_out("Ewald calculation finished after "+str(round(ew_end - ew_start,3))+" s\n")
points = rf.read_points(calc_name + ".pts-fro")
if len(points) == 0:
self.write_out("Something went wrong with the Ewald calculation, stopping...\n")
sys.exit()
os.chdir(self.here)
return points
def run(self):
"""
Run the calculation for the corresponding self.mode
Returns
-------
region_2 : Mol object
Region 2 atoms with low level of theory charges
high_points : Mol object
Points that will embed mh, regardless of self.mode
"""
run_types = {"noew_nosc":self.run_ec,
"noew_sc":self.run_scec,
"ew_nosc":self.run_eec,
"ew_sc":self.run_sceec}
# execute the appropriate run type
region_2, high_points = run_types[self.mode]()
self.out_file.close()
return region_2, high_points
def run_ec(self):
        region_2_low, region_2_high = self.make_region_2()
return region_2_low, region_2_high
def run_scec(self):
        region_2_low, region_2_high = self.make_region_2()
self.self_consistent(region_2_high)
return region_2_low, region_2_high
def run_eec(self):
        region_2_low, region_2_high = self.make_region_2()
ew_points = self.run_ewald()
return region_2_low, ew_points
def run_sceec(self):
|
def single_sc_loop(self, sc_loop, initial_bg):
"""Run a single iteration of the sc loop, with or without Ewald"""
sc_name = "sc_" + self.inputs["name"]
# Initial charges in mol
old_charges = self.region_1.charges()
# if sc_eec then there is no initial_bg so it needs to be computed
if self.mode == "ew_sc":
points = self.run_ewald(calc_name = sc_name)
initial_bg = points
ef.write_gauss(sc_name + ".com", self.region_1, initial_bg, self.inputs["sc_temp"])
subprocess.call("${FRO_GAUSS} " + sc_name + ".com", shell=True)
# Calculate new charges
intact_charges, new_energy, char_self, char_int = rf.read_g_char(sc_name + ".log", self.inputs["high_pop_method"], debug=True)
# Correct charges if they are not perfectly neutral
if sum(intact_charges) != 0.0:
temp_correct = sum(intact_charges) / len(intact_charges)
intact_charges = [i - temp_correct for i in intact_charges]
dummy_mol = self.region_1.copy()
dummy_mol.raw_assign_charges(intact_charges)
self.region_1.populate(dummy_mol)
# Damp the change in charges
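        # damping = 0 keeps only the freshly computed charges; damping = 1 keeps the old charges unchanged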
new_charges = [new * (1 - self.inputs["damping"]) + old * self.inputs["damping"] for new, old in zip(self.region_1.charges(), old_charges)]
# Correct charges again (due to damping)
if sum(new_charges) != 0.0:
temp_correct = sum(new_charges) / len(new_charges)
new_charges = [i - temp_correct for i in new_charges]
# assign damped charges
self.region_1.raw_assign_charges(new_charges)
self.cell.populate(self.region_1)
if self.mode == "noew_sc":
assign_charges(self.region_1, initial_bg)
# Calculate deviation between initial and new charges
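        # i.e. the mean absolute change in charge per atom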
deviation = sum([abs(i - j)
for (i, j) in zip(self.region_1.charges(), old_charges)]) / len(self.region_1)
out_str = ("Iteration:", sc_loop, "Deviation:",
deviation, "Energy:", new_energy, "Charge self energy:", char_self, "Total - charge self:", new_energy - char_self)
self.write_out("{:<6} {:<5} {:<6} {:10.6f} {:<6} {:10.6f} {:<6} {:10.6f} {:<6} {:10.6f}\n".format(*out_str))
return deviation
def self_consistent(self, initial_bg):
"""Run single iterations until the charge deviation is below the tol"""
sc_iter = 0
dev = float("inf")
while dev > self.inputs["dev_tol"]:
sc_iter += 1
dev = self.single_sc_loop(sc_iter, initial_bg)
self.write_out("Tolerance reached: " + str(dev) + " < " + str(self.inputs["dev_tol"]) + "\n")
return
| region_2_low , region_2_high = self.make_region_2()
self.self_consistent(None) # here, the None argument means that the initial background has yet to be computed
ew_points = self.run_ewald()
return region_2_low, ew_points |
HistoryAction.ts | import { Dispatch } from "redux";
import { ipcRenderer } from 'electron';
import { GO_BACK_EVENT, GO_FORWARD_EVENT } from "../constants/Events";
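// Each action creator returns a thunk-style function (the dispatch argument is
// currently unused); the actual navigation is delegated to the Electron main
// process via IPC.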
export class | {
public static back() {
return (dispatch: Dispatch) => {
ipcRenderer.send(GO_BACK_EVENT);
}
}
public static go() {
return (dispatch: Dispatch) => {
ipcRenderer.send(GO_FORWARD_EVENT);
}
}
}
| HistoryAction |
component.js | const Base = require('./base');
class | extends Base {
  // Configuration
getDefaultCfg() {
return {
      // top-level flags
      _id: null, // used for animation
      // containers
      canvas: null,
      container: null, // HTML container, optional
      group: null, // G Group, optional
      // interaction properties
capture: false,
// props
coord: null,
offset: [ 0, 0 ],
plotRange: null, // BBox
position: [ 0, 0 ],
visible: true,
zIndex: 1
};
}
  // basic lifecycle methods
_init() { }
clear() { }
destroy() { }
  // drawing
beforeRender() { }
  render() { } // initialize, bind events and draw
afterRender() { }
beforeDraw() { }
  draw() { } // only update the view
afterDraw() { }
// visibility
show() { }
hide() { }
  // syntactic sugar for setting props
setOffset() { }
setPosition() { }
setVisible() { }
setZIndex() { }
}
module.exports = Component;
| Component |
rest_handler.py | # Copyright 2020 The SODA Authors.
# Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
import six
from oslo_log import log as logging
from delfin import cryptor
from delfin import exception
from delfin.drivers.hpe.hpe_3par import consts
from delfin.drivers.utils.tools import Tools
LOG = logging.getLogger(__name__)
class RestHandler(object):
"""Common class for Hpe 3parStor storage system."""
REST_AUTH_URL = '/api/v1/credentials'
REST_LOGOUT_URL = '/api/v1/credentials/'
REST_STORAGE_URL = '/api/v1/system'
REST_CAPACITY_URL = '/api/v1/capacity'
REST_POOLS_URL = '/api/v1/cpgs'
REST_VOLUMES_URL = '/api/v1/volumes'
REST_ALERTS_URL = '/api/v1/eventlog?query="category EQ 2"'
REST_HOSTS_URL = '/api/v1/hosts'
REST_AUTH_KEY = 'X-HP3PAR-WSAPI-SessionKey'
REST_CPGSTATISTICS_URL = '/api/v1/systemreporter' \
'/attime/cpgstatistics/hires?' \
'query="sampleTime GE %s AND sampleTime LE %s"'
session_lock = None
def __init__(self, rest_client):
self.rest_client = rest_client
self.session_lock = threading.Lock()
def call(self, url, data=None, method=None):
"""Send requests to server.
If fail, try another RestURL.
Increase the judgment of token invalidation
"""
try:
res = self.rest_client.do_call(url, data, method,
calltimeout=consts.SOCKET_TIMEOUT)
            # Check whether the failure was caused by an invalid session
            # token. If so, acquire a new token and retry the request.
if res is not None:
# 403 The client request has an invalid session key.
# The request came from a different IP address
# 409 Session key is being used.
if (res.status_code == consts.ERROR_SESSION_INVALID_CODE
or res.status_code ==
consts.ERROR_SESSION_IS_BEING_USED_CODE):
LOG.error(
"Failed to get token=={0}=={1}".format(res.status_code,
res.text))
LOG.error("Failed to get token,relogin,Get token again")
# if method is logout,return immediately
if method == 'DELETE' and RestHandler.\
REST_LOGOUT_URL in url:
return res
self.rest_client.rest_auth_token = None
access_session = self.login()
                    # if a new token was obtained, retry the request
if access_session is not None:
res = self.rest_client. \
do_call(url, data, method,
calltimeout=consts.SOCKET_TIMEOUT)
else:
LOG.error('Login res is None')
elif res.status_code == 503:
raise exception.InvalidResults(res.text)
else:
LOG.error('Rest exec failed')
return res
except exception.DelfinException as e:
err_msg = "Call failed: %s" % (six.text_type(e))
LOG.error(err_msg)
raise e
except Exception as e:
err_msg = "Get RestHandler.call failed: %s" % (six.text_type(e))
LOG.error(err_msg)
raise exception.InvalidResults(err_msg)
def get_resinfo_call(self, url, data=None, method=None):
rejson = None
res = self.call(url, data, method)
if res is not None:
if res.status_code == consts.SUCCESS_STATUS_CODES:
rejson = res.json()
else:
if res.text and 'unsupported' in res.text:
LOG.warning('rest api error: {}'.format(res.text))
else:
raise exception.StorageBackendException(res.text)
return rejson
def login(self):
"""Login Hpe3par storage array."""
try:
access_session = self.rest_client.rest_auth_token
if self.rest_client.san_address:
url = RestHandler.REST_AUTH_URL
data = {"user": self.rest_client.rest_username,
"password": cryptor.decode(
self.rest_client.rest_password)
}
self.session_lock.acquire()
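                # double-check under the lock: another thread may have logged
                # in and stored a token while we were waiting to acquire it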
if self.rest_client.rest_auth_token is not None:
return self.rest_client.rest_auth_token
self.rest_client.init_http_head()
res = self.rest_client. \
do_call(url, data, 'POST',
calltimeout=consts.SOCKET_TIMEOUT)
if res is None:
LOG.error('Login res is None')
raise exception.InvalidResults('res is None')
if res.status_code == consts. \
LOGIN_SUCCESS_STATUS_CODES:
result = res.json()
access_session = result.get('key')
self.rest_client.rest_auth_token = access_session
self.rest_client.session.headers[
RestHandler.REST_AUTH_KEY] = access_session
else:
LOG.error("Login error. URL: %(url)s\n"
"Reason: %(reason)s.",
{"url": url, "reason": res.text})
if 'invalid username or password' in res.text:
raise exception.InvalidUsernameOrPassword()
else:
raise exception.StorageBackendException(
six.text_type(res.text))
else:
LOG.error('Login Parameter error')
return access_session
except Exception as e:
LOG.error("Login error: %s", six.text_type(e))
raise e
finally:
self.session_lock.release()
def logout(self):
"""Logout the session."""
try:
url = RestHandler.REST_LOGOUT_URL
if self.rest_client.rest_auth_token is not None:
url = '%s%s' % (url, self.rest_client.rest_auth_token)
self.rest_client.rest_auth_token = None
if self.rest_client.san_address:
self.call(url, method='DELETE')
if self.rest_client.session:
self.rest_client.session.close()
except exception.DelfinException as e:
err_msg = "Logout error: %s" % (e.msg)
LOG.error(err_msg)
raise e
except Exception as e:
err_msg = "Logout error: %s" % (six.text_type(e))
LOG.error(err_msg)
raise exception.InvalidResults(err_msg)
def get_storage(self):
rejson = self.get_resinfo_call(RestHandler.REST_STORAGE_URL,
method='GET')
return rejson
def get_capacity(self):
rejson = self.get_resinfo_call(RestHandler.REST_CAPACITY_URL,
method='GET')
return rejson
def get_all_pools(self):
rejson = self.get_resinfo_call(RestHandler.REST_POOLS_URL,
method='GET')
return rejson
def ge | elf):
rejson = self.get_resinfo_call(RestHandler.REST_VOLUMES_URL,
method='GET')
return rejson
def get_pool_metrics(self, start_time, end_time):
start_time_str = Tools.timestamp_to_utc_time_str(
start_time, consts.REST_COLLEC_TTIME_PATTERN)
end_time_str = Tools.timestamp_to_utc_time_str(
end_time, consts.REST_COLLEC_TTIME_PATTERN)
url = RestHandler.REST_CPGSTATISTICS_URL % (
start_time_str, end_time_str)
rejson = self.get_resinfo_call(url, method='GET')
return rejson
def list_storage_host(self):
rejson = self.get_resinfo_call(RestHandler.REST_HOSTS_URL,
method='GET')
return rejson
| t_all_volumes(s |
editor_plugin_src.js | /**
* WordPress View plugin.
*/
(function() {
var VK = tinymce.VK,
TreeWalker = tinymce.dom.TreeWalker,
selected;
tinymce.create('tinymce.plugins.wpView', {
init : function( editor, url ) {
var wpView = this;
// Check if the `wp.mce` API exists.
if ( typeof wp === 'undefined' || ! wp.mce )
return;
editor.onPreInit.add( function( editor ) {
// Add elements so we can set `contenteditable` to false.
editor.schema.addValidElements('div[*],span[*]');
});
// When the editor's content changes, scan the new content for
// matching view patterns, and transform the matches into
// view wrappers. Since the editor's DOM is outdated at this point,
// we'll wait to render the views.
editor.onBeforeSetContent.add( function( editor, o ) {
if ( ! o.content )
return;
o.content = wp.mce.view.toViews( o.content );
});
// When the editor's content has been updated and the DOM has been
// processed, render the views in the document.
editor.onSetContent.add( function( editor, o ) {
wp.mce.view.render( editor.getDoc() );
});
editor.onInit.add( function( editor ) {
// When a view is selected, ensure content that is being pasted
// or inserted is added to a text node (instead of the view).
editor.selection.onBeforeSetContent.add( function( selection, o ) {
var view = wpView.getParentView( selection.getNode() ),
walker, target;
// If the selection is not within a view, bail.
if ( ! view )
return;
// If there are no additional nodes or the next node is a
// view, create a text node after the current view.
if ( ! view.nextSibling || wpView.isView( view.nextSibling ) ) {
target = editor.getDoc().createTextNode('');
editor.dom.insertAfter( target, view );
// Otherwise, find the next text node.
} else {
walker = new TreeWalker( view.nextSibling, view.nextSibling );
target = walker.next();
}
// Select the `target` text node.
selection.select( target );
selection.collapse( true );
});
// When the selection's content changes, scan any new content
// for matching views and immediately render them.
//
// Runs on paste and on inserting nodes/html.
editor.selection.onSetContent.add( function( selection, o ) {
if ( ! o.context )
return;
var node = selection.getNode();
if ( ! node.innerHTML )
return;
node.innerHTML = wp.mce.view.toViews( node.innerHTML );
wp.mce.view.render( node );
});
});
// When the editor's contents are being accessed as a string,
// transform any views back to their text representations.
editor.onPostProcess.add( function( editor, o ) {
if ( ( ! o.get && ! o.save ) || ! o.content )
return;
o.content = wp.mce.view.toText( o.content );
});
// Triggers when the selection is changed.
// Add the event handler to the top of the stack.
editor.onNodeChange.addToTop( function( editor, controlManager, node, collapsed, o ) {
var view = wpView.getParentView( node );
// Update the selected view.
if ( view ) {
wpView.select( view );
// Prevent the selection from propagating to other plugins.
return false;
// If we've clicked off of the selected view, deselect it.
} else {
wpView.deselect();
}
});
editor.onKeyDown.addToTop( function( editor, event ) {
var keyCode = event.keyCode,
view, instance;
// If a view isn't selected, let the event go on its merry way.
if ( ! selected )
return;
// If the caret is not within the selected view, deselect the
// view and bail.
view = wpView.getParentView( editor.selection.getNode() );
if ( view !== selected ) {
wpView.deselect();
return;
}
// If delete or backspace is pressed, delete the view.
if ( keyCode === VK.DELETE || keyCode === VK.BACKSPACE ) {
if ( (instance = wp.mce.view.instance( selected )) ) {
instance.remove();
wpView.deselect();
}
}
// Let keypresses that involve the command or control keys through.
// Also, let any of the F# keys through.
if ( event.metaKey || event.ctrlKey || ( keyCode >= 112 && keyCode <= 123 ) )
return;
event.preventDefault(); | while ( node ) {
if ( this.isView( node ) )
return node;
node = node.parentNode;
}
},
isView : function( node ) {
return (/(?:^|\s)wp-view-wrap(?:\s|$)/).test( node.className );
},
select : function( view ) {
if ( view === selected )
return;
this.deselect();
selected = view;
wp.mce.view.select( selected );
},
deselect : function() {
if ( selected )
wp.mce.view.deselect( selected );
selected = null;
},
getInfo : function() {
return {
longname : 'WordPress Views',
author : 'WordPress',
authorurl : 'http://wordpress.org',
infourl : 'http://wordpress.org',
version : '1.0'
};
}
});
// Register plugin
tinymce.PluginManager.add( 'wpview', tinymce.plugins.wpView );
})(); | });
},
getParentView : function( node ) { |