prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>TestDataFormatterAdv.py<|end_file_name|><|fim▁begin|>"""
Test lldb data formatter subsystem.
"""
from __future__ import print_function
import os
import time
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
class AdvDataFormatterTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break at.
self.line = line_number('main.cpp', '// Set break point at this line.')
def test_with_run_command(self):
"""Test that that file and class static variables display correctly."""
self.build()
self.runCmd("file " + self.getBuildArtifact("a.out"), CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", self.line, num_expected_locations=1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# This is the function to remove the custom formats in order to have a
# clean slate for the next test case.
def cleanup():
self.runCmd('type format clear', check=False)
self.runCmd('type summary clear', check=False)
self.runCmd(
"settings set target.max-children-count 256",
check=False)
# Execute the cleanup function during test case tear down.
self.addTearDownHook(cleanup)
self.runCmd("type summary add --summary-string \"pippo\" \"i_am_cool\"")
self.runCmd(
"type summary add --summary-string \"pluto\" -x \"i_am_cool[a-z]*\"")
self.expect("frame variable cool_boy",
substrs=['pippo'])
self.expect("frame variable cooler_boy",
substrs=['pluto'])
self.runCmd("type summary delete i_am_cool")
self.expect("frame variable cool_boy",
substrs=['pluto'])
self.runCmd("type summary clear")
self.runCmd(
"type summary add --summary-string \"${var[]}\" -x \"int \\[[0-9]\\]")
self.expect("frame variable int_array",
substrs=['1,2,3,4,5'])
# this will fail if we don't do [] as regex correctly
self.runCmd(
'type summary add --summary-string "${var[].integer}" "i_am_cool[]')
self.expect("frame variable cool_array",
substrs=['1,1,1,1,6'])
self.runCmd("type summary clear")
self.runCmd(
"type summary add --summary-string \"${var[1-0]%x}\" \"int\"")
self.expect("frame variable iAmInt",
substrs=['01'])
self.runCmd(
"type summary add --summary-string \"${var[0-1]%x}\" \"int\"")
self.expect("frame variable iAmInt",
substrs=['01'])
self.runCmd("type summary clear")
self.runCmd("type summary add --summary-string \"${var[0-1]%x}\" int")
self.runCmd(
"type summary add --summary-string \"${var[0-31]%x}\" float")
self.expect("frame variable *pointer",
substrs=['0x',
'2'])
# check fix for <rdar://problem/11338654> LLDB crashes when using a
# "type summary" that uses bitfields with no format
self.runCmd("type summary add --summary-string \"${var[0-1]}\" int")
self.expect("frame variable iAmInt",
substrs=['9 1'])
self.expect("frame variable cool_array[3].floating",
substrs=['0x'])
self.runCmd(
"type summary add --summary-string \"low bits are ${*var[0-1]} tgt is ${*var}\" \"int *\"")
self.expect("frame variable pointer",
substrs=['low bits are',
'tgt is 6'])
self.expect(
"frame variable int_array --summary-string \"${*var[0-1]}\"",
substrs=['3'])
self.runCmd("type summary clear")
self.runCmd(
'type summary add --summary-string \"${var[0-1]}\" -x \"int \[[0-9]\]\"')
self.expect("frame variable int_array",
substrs=['1,2'])<|fim▁hole|>
self.runCmd(
'type summary add --summary-string \"${var[0-1]}\" "int []"')
self.expect("frame variable int_array",
substrs=['1,2'])
self.runCmd("type summary clear")
self.runCmd("type summary add -c -x \"i_am_cool \[[0-9]\]\"")
self.runCmd("type summary add -c i_am_cool")
self.expect("frame variable cool_array",
substrs=['[0]',
'[1]',
'[2]',
'[3]',
'[4]',
'integer',
'character',
'floating'])
self.runCmd(
"type summary add --summary-string \"int = ${*var.int_pointer}, float = ${*var.float_pointer}\" IWrapPointers")
self.expect("frame variable wrapper",
substrs=['int = 4',
'float = 1.1'])
self.runCmd(
"type summary add --summary-string \"low bits = ${*var.int_pointer[2]}\" IWrapPointers -p")
self.expect("frame variable wrapper",
substrs=['low bits = 1'])
self.expect("frame variable *wrap_pointer",
substrs=['low bits = 1'])
self.runCmd("type summary clear")
self.expect(
"frame variable int_array --summary-string \"${var[0][0-2]%hex}\"",
substrs=[
'0x',
'7'])
self.runCmd("type summary clear")
self.runCmd(
"type summary add --summary-string \"${*var[].x[0-3]%hex} is a bitfield on a set of integers\" -x \"SimpleWithPointers \[[0-9]\]\"")
self.expect(
"frame variable couple --summary-string \"${*var.sp.x[0-2]} are low bits of integer ${*var.sp.x}. If I pretend it is an array I get ${var.sp.x[0-5]}\"",
substrs=[
'1 are low bits of integer 9.',
'If I pretend it is an array I get [9,'])
# if the summary has an error, we still display the value
self.expect(
"frame variable couple --summary-string \"${*var.sp.foo[0-2]\"",
substrs=[
'(Couple) couple = {',
'x = 0x',
'y = 0x',
'z = 0x',
's = 0x'])
self.runCmd(
"type summary add --summary-string \"${*var.sp.x[0-2]} are low bits of integer ${*var.sp.x}. If I pretend it is an array I get ${var.sp.x[0-5]}\" Couple")
self.expect("frame variable sparray",
substrs=['[0x0000000f,0x0000000c,0x00000009]'])
# check that we can format a variable in a summary even if a format is
# defined for its datatype
self.runCmd("type format add -f hex int")
self.runCmd(
"type summary add --summary-string \"x=${var.x%d}\" Simple")
self.expect("frame variable a_simple_object",
substrs=['x=3'])
self.expect("frame variable a_simple_object", matching=False,
substrs=['0x0'])
# now check that the default is applied if we do not hand out a format
self.runCmd("type summary add --summary-string \"x=${var.x}\" Simple")
self.expect("frame variable a_simple_object", matching=False,
substrs=['x=3'])
self.expect("frame variable a_simple_object", matching=True,
substrs=['x=0x00000003'])
# check that we can correctly cap the number of children shown
self.runCmd("settings set target.max-children-count 5")
self.expect('frame variable a_long_guy', matching=True,
substrs=['a_1',
'b_1',
'c_1',
'd_1',
'e_1',
'...'])
# check that no further stuff is printed (not ALL values are checked!)
self.expect('frame variable a_long_guy', matching=False,
substrs=['f_1',
'g_1',
'h_1',
'i_1',
'j_1',
'q_1',
'a_2',
'f_2',
't_2',
'w_2'])
self.runCmd("settings set target.max-children-count 1")
self.expect('frame variable a_long_guy', matching=True,
substrs=['a_1',
'...'])
self.expect('frame variable a_long_guy', matching=False,
substrs=['b_1',
'c_1',
'd_1',
'e_1'])
self.expect('frame variable a_long_guy', matching=False,
substrs=['f_1',
'g_1',
'h_1',
'i_1',
'j_1',
'q_1',
'a_2',
'f_2',
't_2',
'w_2'])
self.runCmd("settings set target.max-children-count 30")
self.expect('frame variable a_long_guy', matching=True,
substrs=['a_1',
'b_1',
'c_1',
'd_1',
'e_1',
'z_1',
'a_2',
'b_2',
'c_2',
'd_2',
'...'])
self.expect('frame variable a_long_guy', matching=False,
substrs=['e_2',
'n_2',
'r_2',
'i_2',
'k_2',
'o_2'])
# override the cap
self.expect(
'frame variable a_long_guy --show-all-children',
matching=True,
substrs=[
'a_1',
'b_1',
'c_1',
'd_1',
'e_1',
'z_1',
'a_2',
'b_2',
'c_2',
'd_2'])
self.expect(
'frame variable a_long_guy --show-all-children',
matching=True,
substrs=[
'e_2',
'n_2',
'r_2',
'i_2',
'k_2',
'o_2'])
self.expect(
'frame variable a_long_guy --show-all-children',
matching=False,
substrs=['...'])<|fim▁end|>
| |
<|file_name|>ast.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::from_str::FromStr;
use operators::{Operator, Sub, Skip, Loop};
/**
The internal parsed representation of a program source.
*/
pub struct Ast(~[Operator]);
impl Ast {
/**
Produce an AST from a source string.
This is the most commod method to generate an Ast.
*/
pub fn parse_str(source: &str) -> Result<Ast, ~str> {
/*
We parse loops by making a context to group its operators,
pushing on it until the matching loop end. As we create the
context, we push the previous one onto a stack. After the
nest has been collected, we pop the context and replace it
with the subprocess operator.
*/
let mut stack:~[ ~[Operator] ] = ~[];
let mut ops: ~[Operator] = ~[];
for token in source.chars() {<|fim▁hole|> to push operators, and push the old one on the
stack.
*/
Some(Skip) => {
stack.push(ops);
ops = ~[];
}
/*
End of a loop. Make a subprocess operator out of
the just-collected context, and push that on the
previous context.
*/
Some(Loop) => {
let sub_ast = Sub(Ast( ops ));
// Try to pop the previous context from the stack.
// If this does not work, it's an unmatched `]`.
ops = match stack.pop() {
Some(ops) => ops,
_ => return Err(~"Unmatched `]`."),
};
ops.push(sub_ast);
}
// Push the operator onto the context.
Some(op) => ops.push(op),
// Unknown. Probably comments. Nop.
_ => continue
}
}
// If we still have things on the stack, then we have one or
// more unmatched `[`.
if ! stack.is_empty() {
return Err(~"Unmatched `[`.");
}
// Everything went well.
return Ok(Ast(ops));
}
}
impl FromStr for Ast {
fn from_str(source: &str) -> Option<Ast> {
Ast::parse_str(source).ok()
}
}
impl fmt::Show for Ast {
/**
Parses a string into the matching operator.
*/
fn fmt(&self, f:&mut fmt::Formatter) -> fmt::Result {
let &Ast(ref ops) = self;
let display = |op: &Operator| -> ~str { format!("{}", op) };
let repr: ~[~str] = ops.iter().map(display).collect();
f.buf.write(format!("{}", repr.concat()).as_bytes()
)
}
}<|fim▁end|>
|
match from_str::<Operator>(token.to_str()) {
/*
Start of a loop. Produce a new context in which
|
<|file_name|>example.go<|end_file_name|><|fim▁begin|>package main
import (
"flag"
"html/template"
"log"
"net/http"
"os"
"github.com/zenazn/goji/bind"
"github.com/zenazn/goji/graceful"
"github.com/zenazn/goji/web"
"github.com/zenazn/goji/web/middleware"
"github.com/philpearl/tt_goji_middleware/base"
"github.com/philpearl/tt_goji_middleware/redis"
"github.com/philpearl/tt_goji_oauth"
)
var (
// The template for /
index *template.Template
)
func init() {
index = template.Must(template.ParseFiles("index.html"))
}
/*
Callbacks implements the tt_goji_oauth callbacks.
*/
type Callbacks struct {
}
/*
GetOrCreateUser is called by tt_goji_oauth
*/
func (cbk Callbacks) GetOrCreateUser(c web.C, providerName string, user map[string]interface{}) (string, error) {
// Here is where we should ensure the user info is stored in the DB, but we can cheat somewhat by
// just adding the user info to the session
session, _ := base.SessionFromEnv(&c)
session.Put("user", user)
return "", nil
}
/*
IndexView is the handler for /
*/
func IndexView(c web.C, w http.ResponseWriter, r *http.Request) {
var user map[string]interface{}
session, ok := base.SessionFromEnv(&c)
if ok {<|fim▁hole|> usr, ok = session.Get("user")
if ok {
log.Printf("Have user")
user = usr.(map[string]interface{})
} else {
ok = false
}
}
// Render the template
index.Execute(w, struct {
LoggedIn bool
User map[string]interface{}
}{
LoggedIn: ok,
User: user,
})
}
func main() {
defer func() {
r := recover()
if r != nil {
log.Printf("caught a panic: %v\n", r)
os.Exit(1)
}
}()
if !flag.Parsed() {
flag.Parse()
}
// Build a mux for the site.
m := web.New()
m.Use(base.LoggingMiddleWare)
m.Use(middleware.Recoverer)
m.Use(middleware.EnvInit)
// tt_goji_oauth requires redis and sessions middleware.
m.Use(redis.BuildRedis(":6379"))
sh := redis.NewSessionHolder()
m.Use(base.BuildSessionMiddleware(sh))
// Add the oauth mux under /login/oauth/
callbacks := Callbacks{}
oauthm, _ := tt_goji_oauth.Build("http://localhost:8000/login/oauth/", "/login/oauth", sh, callbacks)
m.Handle("/login/oauth/*", oauthm)
// Add our main page in the root
m.Get("/", IndexView)
m.Compile()
// Now add goji boilerplate to run the site
http.Handle("/", m)
listener := bind.Default()
log.Println("Starting Goji on", listener.Addr())
bind.Ready()
err := graceful.Serve(listener, http.DefaultServeMux)
if err != nil {
log.Fatal(err)
}
graceful.Wait()
}<|fim▁end|>
|
// Our user is stored in the session if we're logged in
var usr interface{}
|
<|file_name|>panda-12b.py<|end_file_name|><|fim▁begin|>import dragonfly<|fim▁hole|>import bee
from bee import connect
import math, functools
from panda3d.core import NodePath
import dragonfly.scene.unbound, dragonfly.scene.bound
import dragonfly.std
import dragonfly.io
import dragonfly.canvas
import Spyder
# ## random matrix generator
from random import random
def random_matrix_generator():
while 1:
a = Spyder.AxisSystem()
a.rotateZ(360 * random())
a.origin = Spyder.Coordinate(15 * random() - 7.5, 15 * random() - 7.5, 0)
yield dragonfly.scene.matrix(a, "AxisSystem")
def id_generator():
n = 0
while 1:
n += 1
yield "spawnedpanda" + str(n)
from dragonfly.canvas import box2d
from bee.mstr import mstr
class parameters: pass
class myscene(dragonfly.pandahive.spyderframe):
a = Spyder.AxisSystem()
a *= 0.25
a.origin += (-8, 42, 0)
env = Spyder.Model3D("models/environment", "egg", a)
a = Spyder.AxisSystem()
a *= 0.005
pandaclass = Spyder.ActorClass3D("models/panda-model", "egg", [("walk", "models/panda-walk4", "egg")], a,
actorclassname="pandaclass")
box = Spyder.Box2D(50, 470, 96, 96)
icon = Spyder.Icon("pandaicon.png", "pandaicon", box, transparency=True)
camcenter = Spyder.Entity3D(
"camcenter",
(
Spyder.NewMaterial("white", color=(255, 255, 255)),
Spyder.Block3D((1, 1, 1), material="white"),
)
)
del a, box
class pandawalkhive(bee.inithive):
animation = dragonfly.scene.bound.animation()
walk = dragonfly.std.variable("str")("walk")
connect(walk, animation.animation_name)
key_w = dragonfly.io.keyboardsensor_trigger("W")
connect(key_w, animation.loop)
key_s = dragonfly.io.keyboardsensor_trigger("S")
connect(key_s, animation.stop)
setPos = dragonfly.scene.bound.setPos()
setHpr = dragonfly.scene.bound.setHpr()
interval = dragonfly.time.interval_time(18)
connect(key_w, interval.start)
connect(key_s, interval.pause)
sequence = dragonfly.time.sequence(4)(8, 1, 8, 1)
connect(interval.value, sequence.inp)
ip1 = dragonfly.time.interpolation("Coordinate")((0, 0, 0), (0, -10, 0))
connect(sequence.outp1, ip1)
connect(ip1, setPos)
connect(key_w, ip1.start)
connect(key_s, ip1.stop)
ip2 = dragonfly.time.interpolation("Coordinate")((0, 0, 0), (180, 0, 0))
connect(sequence.outp2, ip2)
connect(ip2, setHpr)
connect(key_w, ip2.start)
connect(key_s, ip2.stop)
ip3 = dragonfly.time.interpolation("Coordinate")((0, -10, 0), (0, 0, 0))
connect(sequence.outp3, ip3)
connect(ip3, setPos)
connect(key_w, ip3.start)
connect(key_s, ip3.stop)
ip4 = dragonfly.time.interpolation("Coordinate")((180, 0, 0), (0, 0, 0))
connect(sequence.outp4, ip4)
connect(ip4, setHpr)
connect(key_w, ip4.start)
connect(key_s, ip4.stop)
connect(ip4.reach_end, interval.start)
from bee.staticbind import staticbind_baseclass
class pandawalkbind(dragonfly.event.bind,
dragonfly.io.bind,
dragonfly.sys.bind,
dragonfly.scene.bind,
dragonfly.time.bind):
hive = pandawalkhive
bind_entity = "relative"
bind_keyboard = "indirect"
class camerabindhive(bee.inithive):
interval = dragonfly.time.interval_time(30)
sequence = dragonfly.time.sequence(2)(1, 1)
connect(interval.value, sequence.inp)
startsensor = dragonfly.sys.startsensor()
ip1 = dragonfly.time.interpolation("Coordinate")((180, -20, 0), (360, -20, 0))
ip2 = dragonfly.time.interpolation("Coordinate")((0, -20, 0), (180, -20, 0))
connect(sequence.outp1, ip1.inp)
connect(sequence.outp2, ip2.inp)
connect(startsensor, interval.start)
connect(startsensor, ip1.start)
connect(ip1.reach_end, ip1.stop)
connect(ip1.reach_end, ip2.start)
connect(ip2.reach_end, ip2.stop)
connect(ip2.reach_end, ip1.start)
connect(ip2.reach_end, interval.start)
sethpr = dragonfly.scene.bound.setHpr()
connect(ip1, sethpr)
connect(ip2, sethpr)
class camerabind(staticbind_baseclass,
dragonfly.event.bind,
dragonfly.io.bind,
dragonfly.sys.bind,
dragonfly.scene.bind,
dragonfly.time.bind):
hive = camerabindhive
class myhive(dragonfly.pandahive.pandahive):
pandaclassname = "pandaclass"
pandaclassname_ = bee.attribute("pandaclassname")
canvas = dragonfly.pandahive.pandacanvas()
mousearea = dragonfly.canvas.mousearea()
raiser = bee.raiser()
connect("evexc", raiser)
camerabind = camerabind().worker()
camcenter = dragonfly.std.variable("id")("camcenter")
connect(camcenter, camerabind.bindname)
startsensor = dragonfly.sys.startsensor()
cam = dragonfly.scene.get_camera()
camparent = dragonfly.scene.unbound.parent()
connect(cam, camparent.entityname)
connect(camcenter, camparent.entityparentname)
connect(startsensor, camparent)
cphide = dragonfly.scene.unbound.hide()
connect(camcenter, cphide)
connect(startsensor, cphide)
pandaspawn = dragonfly.scene.spawn_actor()
v_panda = dragonfly.std.variable("id")(pandaclassname_)
connect(v_panda, pandaspawn)
panda_id_gen = dragonfly.std.generator("id", id_generator)()
panda_id = dragonfly.std.variable("id")("")
t_panda_id_gen = dragonfly.std.transistor("id")()
connect(panda_id_gen, t_panda_id_gen)
connect(t_panda_id_gen, panda_id)
random_matrix = dragonfly.std.generator(("object", "matrix"), random_matrix_generator)()
w_spawn = dragonfly.std.weaver(("id", ("object", "matrix")))()
connect(panda_id, w_spawn.inp1)
connect(random_matrix, w_spawn.inp2)
z_pandawalk = pandawalkbind().worker()
t_bind = dragonfly.std.transistor("id")()
connect(panda_id, t_bind)
connect(t_bind, z_pandawalk.bind)
do_spawn = dragonfly.std.transistor(("id", ("object", "matrix")))()
connect(w_spawn, do_spawn)
connect(do_spawn, pandaspawn.spawn_matrix)
trig_spawn = dragonfly.std.pushconnector("trigger")()
connect(trig_spawn, t_panda_id_gen)
connect(trig_spawn, do_spawn)
connect(trig_spawn, t_bind)
key_z = dragonfly.io.keyboardsensor_trigger("Z")
connect(key_z, trig_spawn)
pandaicon_click = dragonfly.io.mouseareasensor("pandaicon")
connect(pandaicon_click, trig_spawn)
myscene = myscene(
scene="scene",
canvas=canvas,
mousearea=mousearea,
)
wininit = bee.init("window")
wininit.camera.setPos(0, 45, 25)
wininit.camera.setHpr(180, -20, 0)
keyboardevents = dragonfly.event.sensor_match_leader("keyboard")
add_head = dragonfly.event.add_head()
head = dragonfly.std.variable("event")("spawnedpanda3")
connect(keyboardevents, add_head)
connect(head, add_head)
connect(add_head, z_pandawalk.event)
main = myhive().getinstance()
main.build("main")
main.place()
main.close()
main.init()
main.run()<|fim▁end|>
|
import dragonfly.pandahive
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>let datafire = require('datafire');
let openapi = require('./openapi.json');
module.exports = datafire.Integration.fromOpenAPI(openapi, "billbee");<|fim▁end|>
|
"use strict";
|
<|file_name|>sortChunks.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _from = require('babel-runtime/core-js/array/from');
var _from2 = _interopRequireDefault(_from);
exports.default = sortChunks;
var _toposort = require('toposort');
var _toposort2 = _interopRequireDefault(_toposort);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// see https://github.com/jantimon/html-webpack-plugin/blob/8131d8bb1dc9b185b3c1709264a3baf32ef799bc/lib/chunksorter.js
function sortChunks(chunks, chunkGroups) {
// We build a map (chunk-id -> chunk) for faster access during graph building.
var nodeMap = {};
chunks.forEach(function (chunk) {
nodeMap[chunk.id] = chunk;
});<|fim▁hole|> return [parentGroup, chunkGroup];
}));
}, []);
var sortedGroups = _toposort2.default.array(chunkGroups, edges);
// flatten chunkGroup into chunks
var sortedChunks = sortedGroups.reduce(function (result, chunkGroup) {
return result.concat(chunkGroup.chunks);
}, []).map(function (chunk) {
return (// use the chunk from the list passed in, since it may be a filtered list
nodeMap[chunk.id]
);
}).filter(function (chunk, index, self) {
// make sure exists (ie excluded chunks not in nodeMap)
var exists = !!chunk;
// make sure we have a unique list
var unique = self.indexOf(chunk) === index;
return exists && unique;
});
return sortedChunks;
}<|fim▁end|>
|
// Add an edge for each parent (parent -> child)
var edges = chunkGroups.reduce(function (result, chunkGroup) {
return result.concat((0, _from2.default)(chunkGroup.parentsIterable, function (parentGroup) {
|
<|file_name|>whitespaceRuleTests.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>
import * as Lint from "../lint";
describe("<whitespace>", () => {
const fileName = "rules/whitespace.test.ts";
const WhitespaceRule = Lint.Test.getRule("whitespace");
const createFailure = Lint.Test.createFailuresOnFile(fileName, WhitespaceRule.FAILURE_STRING);
let actualFailures: Lint.RuleFailure[];
before(() => {
const options = [true,
"check-branch",
"check-decl",
"check-operator",
"check-module",
"check-separator",
"check-type",
"check-typecast"
];
actualFailures = Lint.Test.applyRuleOnFile(fileName, WhitespaceRule, options);
assert.lengthOf(actualFailures, 39);
});
it("enforces rules only when enabled", () => {
const failures = Lint.Test.applyRuleOnFile(fileName, WhitespaceRule);
assert.equal(failures.length, 0);
});
it("enforces whitespace in import statements", () => {
const expectedFailures = [
createFailure([1, 11], [1, 12]),
createFailure([1, 12], [1, 13]),
createFailure([57, 19], [57, 20]),
createFailure([58, 7], [58, 8]),
createFailure([58, 16], [58, 17]),
createFailure([58, 20], [58, 21]),
createFailure([59, 26], [59, 27])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in export statements", () => {
const expectedFailures = [
createFailure([3, 19], [3, 20]),
createFailure([3, 20], [3, 21]),
createFailure([42, 7], [42, 8]),
createFailure([42, 8], [42, 9]),
createFailure([64, 7], [64, 8])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in type declarations", () => {
const expectedFailure = createFailure([5, 11], [5, 12]);
Lint.Test.assertContainsFailure(actualFailures, expectedFailure);
});
it("enforces whitespace in conditional statements", () => {
const expectedFailures = [
createFailure([7, 23], [7, 24]),
createFailure([7, 24], [7, 25]),
createFailure([7, 25], [7, 26]),
createFailure([7, 26], [7, 27])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in binary expressions", () => {
const expectedFailures = [
createFailure([9, 16], [9, 17]),
createFailure([9, 19], [9, 20])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in variable definitions", () => {
const expectedFailures = [
createFailure([11, 10], [11, 11]),
createFailure([11, 11], [11, 12]),
createFailure([13, 11], [13, 12])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in switch statements", () => {
const expectedFailures = [
createFailure([15, 11], [15, 12]),
createFailure([16, 16], [16, 17]),
createFailure([17, 17], [17, 18])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in for statements", () => {
const expectedFailures = [
createFailure([20, 8], [20, 9]),
createFailure([20, 15], [20, 16]),
createFailure([20, 18], [20, 19])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace in while statements", () => {
const expectedFailure = createFailure([24, 10], [24, 11]);
Lint.Test.assertContainsFailure(actualFailures, expectedFailure);
});
it("enforces whitespace in label definitions", () => {
const expectedFailure = createFailure([21, 14], [21, 15]);
Lint.Test.assertContainsFailure(actualFailures, expectedFailure);
});
it("enforces whitespace around the => token", () => {
const expectedFailures = [
createFailure([29, 17], [29, 18]),
createFailure([29, 19], [29, 20]),
createFailure([30, 17], [30, 18]),
createFailure([30, 19], [30, 20]),
createFailure([34, 14], [34, 15]),
createFailure([34, 16], [34, 17]),
createFailure([35, 18], [35, 19]),
createFailure([35, 20], [35, 21])
];
expectedFailures.forEach((failure) => {
Lint.Test.assertContainsFailure(actualFailures, failure);
});
});
it("enforces whitespace around typecasts", () => {
Lint.Test.assertContainsFailure(actualFailures, createFailure([36, 21], [36, 22]));
});
});<|fim▁end|>
|
* See the License for the specific language governing permissions and
* limitations under the License.
*/
|
<|file_name|>str.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn check_contains_all_substrings(s: &str) {
assert!(s.contains(""));
for i in range(0, s.len()) {
for j in range(i+1, s.len() + 1) {
assert!(s.contains(s.slice(i, j)));
}
}
}
#[test]
fn strslice_issue_16589() {
assert!("bananas".contains("nana"));
// prior to the fix for #16589, x.contains("abcdabcd") returned false
// test all substrings for good measure
check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd");
}
#[test]
fn strslice_issue_16878() {
assert!(!"1234567ah012345678901ah".contains("hah"));
assert!(!"00abc01234567890123456789abc".contains("bcabc"));
}
#[test]
fn test_strslice_contains() {
let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'";
check_contains_all_substrings(x);
}
#[test]
fn test_rsplitn_char_iterator() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
let mut split: Vec<&str> = data.rsplitn(3, ' ').collect();
split.reverse();
assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
let mut split: Vec<&str> = data.rsplitn(3, |c: char| c == ' ').collect();
split.reverse();
assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
// Unicode
let mut split: Vec<&str> = data.rsplitn(3, 'ä').collect();
split.reverse();
assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
let mut split: Vec<&str> = data.rsplitn(3, |c: char| c == 'ä').collect();
split.reverse();
assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
}
#[test]
fn test_split_char_iterator() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
let split: Vec<&str> = data.split(' ').collect();
assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
rsplit.reverse();
assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
rsplit.reverse();
assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
// Unicode
let split: Vec<&str> = data.split('ä').collect();
assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
rsplit.reverse();
assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
rsplit.reverse();
assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
}
#[test]
fn test_rev_split_char_iterator_no_trailing() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
let mut split: Vec<&str> = data.split('\n').rev().collect();
split.reverse();
assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb", ""]);
let mut split: Vec<&str> = data.split_terminator('\n').rev().collect();
split.reverse();<|fim▁hole|> assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb"]);
}<|fim▁end|>
| |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django import forms
from djwed.wedding.models import *
from djwed.wedding.admin_actions import *
from django.contrib import admin
class RequireOneFormSet(forms.models.BaseInlineFormSet):
"""Require at least one form in the formset to be completed."""
def clean(self):
"""Check that at least one form has been completed."""
super(RequireOneFormSet, self).clean()
if not self.is_valid():
return
for cleaned_data in self.cleaned_data:
# form has data and we aren't deleting it.
if cleaned_data and not cleaned_data.get('DELETE', False):
# we can break out after the first complete form
return
raise forms.ValidationError("At least one %s is required." %
(self.model._meta.verbose_name,))
class InviteeNotesInline(admin.TabularInline):
model = InviteeNotes
extra = 0
verbose_name_plural = "invitee notes"
class RSVPInline(admin.TabularInline):
model = RSVP
extra = 2
class GuestInline(admin.StackedInline):
model = Guest
extra = 1
class FoodOptionInline(admin.StackedInline):
model = FoodOption
extra = 3
class CommentInline(admin.StackedInline):
model = Comment
extra = 0
exclude = ('rsvp',)
readonly_fields = ('text',)
verbose_name_plural = "comments from invitees"
class GiftThankYouInline(admin.TabularInline):
model = ThankYou
extra = 0
verbose_name = "Source"
verbose_name_plural = "Sources"
formset = RequireOneFormSet
class InviteeThankYouInline(admin.TabularInline):
model = ThankYou
extra = 0
class InviteeAdmin(admin.ModelAdmin):
#fieldsets = [
# (None, {'fields': ['question']}),
# ('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
#]
inlines = [GuestInline,InviteeNotesInline,CommentInline,InviteeThankYouInline]
list_display = ('full_name', 'tags', 'full_address', 'state','country')
list_editable = ('tags',)
list_filter = ['side', 'association','country','state']
search_fields = ['full_name_override','invite_code','guest__first_name', 'guest__last_name', 'guest__nickname']
actions = [
export_as_csv_action("Export addresses as CSV",
fields=['full_name', 'full_address']),
]
#date_hierarchy = 'pub_date'
class LongFoodChoiceField(forms.ModelChoiceField):
#widget = forms.widgets.RadioSelect()
def label_from_instance(self, obj):
return obj.long_desc
class GuestAdmin(admin.ModelAdmin):
inlines = [RSVPInline,]
list_display = ('full_name', 'email', 'tags')
list_filter = ['rsvp__status', 'role', 'invitee__side', 'invitee__association']
search_fields = ['first_name', 'last_name']
list_editable = ('email', 'tags')
class RSVPAdminForm(forms.ModelForm):
class Meta: model = RSVP
def clean(self, *args, **kwargs):
sret = super(RSVPAdminForm, self).clean(*args,**kwargs)
if self.cleaned_data['food_selection'] and self.cleaned_data['food_selection'].venue != self.cleaned_data['venue']:
raise ValidationError('Food selected from another venue')
if self.cleaned_data['venue'].site != u'MA' and self.cleaned_data['bus_selection']:
raise ValidationError('Bus selection for a site with no bus')
rsvp_filter = RSVP.objects.filter(venue = self.cleaned_data['venue'],
guest = self.cleaned_data['guest'])
if rsvp_filter.count()>1 or (rsvp_filter.count() == 1
and rsvp_filter.all()[0] != self.instance):
raise ValidationError('Only one RSVP allowed per person')
return sret
class RSVPAdmin(admin.ModelAdmin):
#inlines = [GuestInline,]
#food_selection = LongFoodChoiceField([], required=False, empty_label = "--- Please choose from a dinner selection below ---")
list_display = (
'guest_site',
'venue',
'status',
'food_selection',
'bus_selection',
'last_updated',
'prelim',
'guest_invitee',
'last_update_source',
'guest',
'table_assign',
)
search_fields = [
'guest__first_name',
'guest__last_name',
'guest__invitee__guest__last_name',
'guest__invitee__invite_code',
]
list_editable = (
'status',
'food_selection',
'bus_selection',
'prelim',
'last_update_source',
'table_assign',
)
form = RSVPAdminForm
list_filter = ('venue','status', 'guest__invitee__side',
'guest__invitee__association', 'guest__invitee__country',
'guest__invitee__state',
)
def guest_site(self,rsvp):
return u"%s (%s)"%(rsvp.guest.full_name(), unicode(rsvp.venue.site))
guest_site.short_description = "Guest (Site)"
def guest_invitee(self,rsvp):
return rsvp.guest.invitee
guest_invitee.short_description = "Invitee"
def guest_invitee_association(self,rsvp):
return rsvp.guest.invitee.association
guest_invitee_association.short_description = "Association"
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "guest":
kwargs["queryset"] = Guest.objects.all().order_by('last_name','first_name')
return db_field.formfield(**kwargs)
return super(RSVPAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
class InviteeNotesAdmin(admin.ModelAdmin):
search_fields = ['invitee__guest__first_name',
'invitee__guest__last_name','invitee__guest__nickname']
list_display = [ 'invitee',
'likely_site',
'ma_likelihood',
'ca_likelihood',
'or_likelihood',
'savedate',
'batch',
'invitee_rsvp_count',
'adults',
'children',
'invitee_country',
]
list_editable = ['ma_likelihood',
'ca_likelihood',
'savedate',
'batch',
]
def invitee_rsvp_count(self,inote):
counts = inote.invitee.rsvp_yes_counts()
return ', '.join('%s: %d' % (venue, counts[venue])
for venue in sorted(counts.keys()))
invitee_rsvp_count.short_description = "RSVP Yes"
def invitee_country(self,inote):
return str(inote.invitee.country)
invitee_country.short_description = "Country"
class CommentAdmin(admin.ModelAdmin):
list_filter = ['type']
search_fields = ['invitee__guest__first_name','text',
'invitee__guest__last_name','invitee__guest__nickname']
list_display = ['id','invitee','type','last_updated','text']
class VenueAdmin(admin.ModelAdmin):
inlines = [FoodOptionInline,]
class PageSnippetAdmin(admin.ModelAdmin):
list_display = ['key','title','last_updated']
class GiftAdmin(admin.ModelAdmin):
search_fields = [
'sources__guest__first_name',
'sources__guest__nickname',
'sources__guest__last_name',
'notes',
'description',
]<|fim▁hole|> 'assignment','registry','status']
list_editable = ('status', 'assignment')
inlines = [GiftThankYouInline,]
radio_fields = {
'assignment': admin.HORIZONTAL,
'registry': admin.VERTICAL,
'status': admin.HORIZONTAL,
}
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "source" and request.META['REQUEST_METHOD'] != 'POST':
kwargs["queryset"] = Invitee.objects.all().order_by('guest__last_name','guest__first_name')
return db_field.formfield(**kwargs)
return super(GiftAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
def source_names(self, gift):
return u"; ".join(unicode(inv) for inv in gift.sources.all())
source_names.short_description = "Sources"
class ThankYouAdmin(admin.ModelAdmin):
list_display = [
'invitee',
'status',
'sent',
]
list_editable = ['status', 'sent']
list_filter = [
'status',
'sent',
'gift__assignment',
'gift__received',
'invitee__side',
]
search_fields = [
'invitee__guest__first_name',
'invitee__guest__last_name',
'invitee__guest__nickname',
'gift__description',
'gift__notes',
]
class TableAdmin(admin.ModelAdmin):
search_fields = ['rsvp__guest__first_name','name','number','notes',
'rsvp__guest__last_name','invitee__guest__nickname']
list_display = ['number','name','venue','table_count','table_guests','notes','position']
list_editable = ('name','notes')
list_filter = ['venue',]
def table_count(self,table):
return str(table.rsvp_set.count())
table_count.short_description = "# people"
def table_guests(self,table):
guests = []
for r in table.rsvp_set.all():
guests.append(unicode(r.guest))
guests.sort()
return u" , \n".join(guests)
table_guests.short_description = "guests"
class RSVPOptionAdmin(admin.ModelAdmin):
list_display = ['short_desc', 'likelihood', 'rsvp_count', 'long_desc']
def rsvp_count(self, option):
return str(option.rsvp_set.count())
rsvp_count.short_description = "# people"
admin.site.register(Invitee, InviteeAdmin)
admin.site.register(InviteeNotes, InviteeNotesAdmin)
admin.site.register(Guest, GuestAdmin)
admin.site.register(Venue, VenueAdmin)
admin.site.register(PageSnippet, PageSnippetAdmin)
admin.site.register(RSVP, RSVPAdmin)
admin.site.register(RSVPOption, RSVPOptionAdmin)
admin.site.register(Comment, CommentAdmin)
admin.site.register(Gift, GiftAdmin)
admin.site.register(ThankYou, ThankYouAdmin)
admin.site.register(Table, TableAdmin)<|fim▁end|>
|
list_filter = ['status','registry','assignment']
list_display = ['source_names','received','description','notes',
|
<|file_name|>access_group.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc import db
from ggrc.models.mixins import BusinessObject, Timeboxed, CustomAttributable
from ggrc.models.object_document import Documentable
from ggrc.models.object_owner import Ownable
from ggrc.models.object_person import Personable
from ggrc.models.relationship import Relatable
from ggrc.models.track_object_state import HasObjectState, track_state_for_class
class AccessGroup(HasObjectState,<|fim▁hole|> _aliases = {"url": "Access Group URL"}
track_state_for_class(AccessGroup)<|fim▁end|>
|
CustomAttributable, Personable, Documentable, Relatable,
Timeboxed, Ownable, BusinessObject, db.Model):
__tablename__ = 'access_groups'
|
<|file_name|>collapse.directive.ts<|end_file_name|><|fim▁begin|>import {
AnimationBuilder,
AnimationFactory,
AnimationPlayer
} from '@angular/animations';
import {
AfterViewChecked,
Directive,
ElementRef,
EventEmitter,
HostBinding,
Input,
Output,
Renderer2
} from '@angular/core';
import {
collapseAnimation,
expandAnimation
} from './collapse-animations';
@Directive({
selector: '[collapse]',
exportAs: 'bs-collapse',
// eslint-disable-next-line @angular-eslint/no-host-metadata-property
host: {
'[class.collapse]': 'true'
}
})
export class CollapseDirective implements AfterViewChecked {
/** This event fires as soon as content collapses */
@Output() collapsed: EventEmitter<CollapseDirective> = new EventEmitter();
/** This event fires when collapsing is started */
@Output() collapses: EventEmitter<CollapseDirective> = new EventEmitter();
/** This event fires as soon as content becomes visible */
@Output() expanded: EventEmitter<CollapseDirective> = new EventEmitter();
/** This event fires when expansion is started */
@Output() expands: EventEmitter<CollapseDirective> = new EventEmitter();
// shown
@HostBinding('class.in')
@HostBinding('class.show')
@HostBinding('attr.aria-expanded')
isExpanded = true;
collapseNewValue = true;
// hidden
@HostBinding('attr.aria-hidden') isCollapsed = false;
// stale state
@HostBinding('class.collapse') isCollapse = true;
// animation state
@HostBinding('class.collapsing') isCollapsing = false;
@Input()
set display(value: string) {
this._display = value;
if (value === 'none') {
this.hide();
return;
}
this.isAnimated ? this.toggle() : this.show();
}
/** turn on/off animation */
@Input() isAnimated = false;
/** A flag indicating visibility of content (shown or hidden) */
@Input()
set collapse(value: boolean) {
this.collapseNewValue = value;
if (!this._player || this._isAnimationDone) {
this.isExpanded = value;
this.toggle();
}
}
get collapse(): boolean {
return this.isExpanded;
}
private _display = 'block';<|fim▁hole|>
private _COLLAPSE_ACTION_NAME = 'collapse';
private _EXPAND_ACTION_NAME = 'expand';
private readonly _factoryCollapseAnimation: AnimationFactory;
private readonly _factoryExpandAnimation: AnimationFactory;
constructor(
private _el: ElementRef,
private _renderer: Renderer2,
_builder: AnimationBuilder
) {
this._factoryCollapseAnimation = _builder.build(collapseAnimation);
this._factoryExpandAnimation = _builder.build(expandAnimation);
}
ngAfterViewChecked(): void {
this._stylesLoaded = true;
if (!this._player || !this._isAnimationDone) {
return;
}
this._player.reset();
this._renderer.setStyle(this._el.nativeElement, 'height', '*');
}
/** allows to manually toggle content visibility */
toggle(): void {
if (this.isExpanded) {
this.hide();
} else {
this.show();
}
}
/** allows to manually hide content */
hide(): void {
this.isCollapsing = true;
this.isExpanded = false;
this.isCollapsed = true;
this.isCollapsing = false;
this.collapses.emit(this);
this._isAnimationDone = false;
this.animationRun(this.isAnimated, this._COLLAPSE_ACTION_NAME)(() => {
this._isAnimationDone = true;
if (this.collapseNewValue !== this.isCollapsed && this.isAnimated) {
this.show();
return;
}
this.collapsed.emit(this);
this._renderer.setStyle(this._el.nativeElement, 'display', 'none');
});
}
/** allows to manually show collapsed content */
show(): void {
this._renderer.setStyle(this._el.nativeElement, 'display', this._display);
this.isCollapsing = true;
this.isExpanded = true;
this.isCollapsed = false;
this.isCollapsing = false;
this.expands.emit(this);
this._isAnimationDone = false;
this.animationRun(this.isAnimated, this._EXPAND_ACTION_NAME)(() => {
this._isAnimationDone = true;
if (this.collapseNewValue !== this.isCollapsed && this.isAnimated) {
this.hide();
return;
}
this.expanded.emit(this);
this._renderer.removeStyle(this._el.nativeElement, 'overflow');
});
}
animationRun(isAnimated: boolean, action: string) {
if (!isAnimated || !this._stylesLoaded) {
return (callback: () => void) => callback();
}
this._renderer.setStyle(this._el.nativeElement, 'overflow', 'hidden');
this._renderer.addClass(this._el.nativeElement, 'collapse');
const factoryAnimation = (action === this._EXPAND_ACTION_NAME)
? this._factoryExpandAnimation
: this._factoryCollapseAnimation;
if (this._player) {
this._player.destroy();
}
this._player = factoryAnimation.create(this._el.nativeElement);
this._player.play();
return (callback: () => void) => this._player?.onDone(callback);
}
}<|fim▁end|>
|
private _isAnimationDone?: boolean;
private _player?: AnimationPlayer;
private _stylesLoaded = false;
|
<|file_name|>ParamVarArgsTest.java<|end_file_name|><|fim▁begin|>//
// $Id$
package org.ductilej.tests;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Tests handling of varags with parameterized variable argument. Edge case extraordinaire!
*/
public class ParamVarArgsTest
{
public static interface Predicate<T> {
boolean apply (T arg);
}
public static <T> Predicate<T> or (final Predicate<? super T>... preds) {
return new Predicate<T>() {
public boolean apply (T arg) {
for (Predicate<? super T> pred : preds) {
if (pred.apply(arg)) {
return true;<|fim▁hole|> }
return false;
}
};
}
@SuppressWarnings("unchecked") // this use of parameterized varargs is safe
@Test public void testParamVarArgs () {
Predicate<Integer> test = or(FALSE);
assertEquals(false, test.apply(1));
}
protected static final Predicate<Integer> FALSE = new Predicate<Integer>() {
public boolean apply (Integer arg) {
return false;
}
};
}<|fim▁end|>
|
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># __version__ = "1.0"
from . import data_processing
from . import plotly_usmap
from . import UI_setup
# from .data_processing import *
# from .plotly_usmap import *<|fim▁hole|># from .UI_setup import *<|fim▁end|>
| |
<|file_name|>clear.go<|end_file_name|><|fim▁begin|>package role
import (
"github.com/watermint/toolbox/domain/dropbox/api/dbx_auth"
"github.com/watermint/toolbox/domain/dropbox/api/dbx_conn"<|fim▁hole|> "github.com/watermint/toolbox/infra/recipe/rc_exec"
"github.com/watermint/toolbox/infra/recipe/rc_recipe"
)
type Clear struct {
Peer dbx_conn.ConnScopedTeam
Email string
}
func (z *Clear) Preset() {
z.Peer.SetScopes(
dbx_auth.ScopeMembersRead,
dbx_auth.ScopeMembersWrite,
)
}
func (z *Clear) Exec(c app_control.Control) error {
member, err := sv_member.New(z.Peer.Context()).ResolveByEmail(z.Email)
if err != nil {
return err
}
_, err = sv_adminrole.New(z.Peer.Context()).UpdateRole(mo_user.NewUserSelectorByTeamMemberId(member.TeamMemberId), []string{})
if err != nil {
return err
}
return nil
}
func (z *Clear) Test(c app_control.Control) error {
return rc_exec.ExecMock(c, &Clear{}, func(r rc_recipe.Recipe) {
m := r.(*Clear)
m.Email = "[email protected]"
})
}<|fim▁end|>
|
"github.com/watermint/toolbox/domain/dropbox/model/mo_user"
"github.com/watermint/toolbox/domain/dropbox/service/sv_adminrole"
"github.com/watermint/toolbox/domain/dropbox/service/sv_member"
"github.com/watermint/toolbox/infra/control/app_control"
|
<|file_name|>dart.js<|end_file_name|><|fim▁begin|>var which = require('which');
var spawnSync = require('child_process').spawnSync;
module.exports.detect = function(gulp) {
var DART_SDK = false;
try {
which.sync('dart');
if (process.platform === 'win32') {
DART_SDK = {
ANALYZER: 'dartanalyzer.bat',
DARTDOCGEN: 'dartdoc.bat',
DARTFMT: 'dartfmt.bat',
PUB: 'pub.bat',<|fim▁hole|> DART_SDK = {
ANALYZER: 'dartanalyzer',
DARTDOCGEN: 'dartdoc',
DARTFMT: 'dartfmt',
PUB: 'pub',
VM: 'dart'
};
}
console.log('Dart SDK detected:');
} catch (e) {
console.log('Dart SDK is not available, Dart tasks will be skipped.');
var gulpTaskFn = gulp.task.bind(gulp);
gulp.task = function (name, deps, fn) {
if (name.indexOf('.dart') === -1) {
return gulpTaskFn(name, deps, fn);
} else {
return gulpTaskFn(name, function() {
console.log('Dart SDK is not available. Skipping task: ' + name);
});
}
};
}
return DART_SDK;
}
module.exports.logVersion = function(dartSdk) {
console.log('DART SDK:') ;
console.log('- dart: ' + spawnSync(dartSdk.VM, ['--version']).stderr.toString().replace(/\n/g, ''));
console.log('- pub: ' + spawnSync(dartSdk.PUB, ['--version']).stdout.toString().replace(/\n/g, ''));
}<|fim▁end|>
|
VM: 'dart.exe'
};
} else {
|
<|file_name|>generated.rs<|end_file_name|><|fim▁begin|>// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
use std::error::Error;
use std::fmt;
use std::io;
#[allow(warnings)]
use futures::future;
use futures::Future;
use rusoto_core::region;
use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest};
use rusoto_core::{Client, RusotoFuture};
use rusoto_core::credential::{CredentialsError, ProvideAwsCredentials};
use rusoto_core::request::HttpDispatchError;
use rusoto_core::param::{Params, ServiceParams};
use rusoto_core::signature::SignedRequest;
use serde_json;
use serde_json::from_slice;
use serde_json::Value as SerdeJsonValue;
/// <p>Placeholder documentation for AacSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AacSettings {
/// <p>Average bitrate in bits/second. Valid values depend on rate control mode and profile.</p>
#[serde(rename = "Bitrate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub bitrate: Option<f64>,
/// <p>Mono, Stereo, or 5.1 channel layout. Valid values depend on rate control mode and profile. The adReceiverMix setting receives a stereo description plus control track and emits a mono AAC encode of the description track, with control data emitted in the PES header as per ETSI TS 101 154 Annex E.</p>
#[serde(rename = "CodingMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub coding_mode: Option<String>,
/// <p>Set to "broadcasterMixedAd" when input contains pre-mixed main audio + AD (narration) as a stereo pair. The Audio Type field (audioType) will be set to 3, which signals to downstream systems that this stream contains "broadcaster mixed AD". Note that the input received by the encoder must contain pre-mixed audio; the encoder does not perform the mixing. The values in audioTypeControl and audioType (in AudioDescription) are ignored when set to broadcasterMixedAd.</p>
///
/// <p>Leave set to "normal" when input does not contain pre-mixed audio + AD.</p>
#[serde(rename = "InputType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_type: Option<String>,
/// <p>AAC Profile.</p>
#[serde(rename = "Profile")]
#[serde(skip_serializing_if = "Option::is_none")]
pub profile: Option<String>,
/// <p>Rate Control Mode.</p>
#[serde(rename = "RateControlMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rate_control_mode: Option<String>,
/// <p>Sets LATM / LOAS AAC output for raw containers.</p>
#[serde(rename = "RawFormat")]
#[serde(skip_serializing_if = "Option::is_none")]
pub raw_format: Option<String>,
/// <p>Sample rate in Hz. Valid values depend on rate control mode and profile.</p>
#[serde(rename = "SampleRate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sample_rate: Option<f64>,
/// <p>Use MPEG-2 AAC audio instead of MPEG-4 AAC audio for raw or MPEG-2 Transport Stream containers.</p>
#[serde(rename = "Spec")]
#[serde(skip_serializing_if = "Option::is_none")]
pub spec: Option<String>,
/// <p>VBR Quality Level - Only used if rateControlMode is VBR.</p>
#[serde(rename = "VbrQuality")]
#[serde(skip_serializing_if = "Option::is_none")]
pub vbr_quality: Option<String>,
}
/// <p>Placeholder documentation for Ac3Settings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Ac3Settings {
/// <p>Average bitrate in bits/second. Valid bitrates depend on the coding mode.</p>
#[serde(rename = "Bitrate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub bitrate: Option<f64>,
/// <p>Specifies the bitstream mode (bsmod) for the emitted AC-3 stream. See ATSC A/52-2012 for background on these values.</p>
#[serde(rename = "BitstreamMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub bitstream_mode: Option<String>,
/// <p>Dolby Digital coding mode. Determines number of channels.</p>
#[serde(rename = "CodingMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub coding_mode: Option<String>,
/// <p>Sets the dialnorm for the output. If excluded and input audio is Dolby Digital, dialnorm will be passed through.</p>
#[serde(rename = "Dialnorm")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dialnorm: Option<i64>,
/// <p>If set to filmStandard, adds dynamic range compression signaling to the output bitstream as defined in the Dolby Digital specification.</p>
#[serde(rename = "DrcProfile")]
#[serde(skip_serializing_if = "Option::is_none")]
pub drc_profile: Option<String>,
/// <p>When set to enabled, applies a 120Hz lowpass filter to the LFE channel prior to encoding. Only valid in codingMode32Lfe mode.</p>
#[serde(rename = "LfeFilter")]
#[serde(skip_serializing_if = "Option::is_none")]
pub lfe_filter: Option<String>,
/// <p>When set to "followInput", encoder metadata will be sourced from the DD, DD+, or DolbyE decoder that supplied this audio data. If audio was not supplied from one of these streams, then the static metadata settings will be used.</p>
#[serde(rename = "MetadataControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub metadata_control: Option<String>,
}
/// <p>Placeholder documentation for AccessDenied</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct AccessDenied {
pub message: Option<String>,
}
/// <p>Placeholder documentation for ArchiveContainerSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ArchiveContainerSettings {
#[serde(rename = "M2tsSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub m_2ts_settings: Option<M2tsSettings>,
}
/// <p>Placeholder documentation for ArchiveGroupSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ArchiveGroupSettings {
/// <p>A directory and base filename where archive files should be written. If the base filename portion of the URI is left blank, the base filename of the first input will be automatically inserted.</p>
#[serde(rename = "Destination")]
pub destination: OutputLocationRef,
/// <p>Number of seconds to write to archive file before closing and starting a new one.</p>
#[serde(rename = "RolloverInterval")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rollover_interval: Option<i64>,
}
/// <p>Placeholder documentation for ArchiveOutputSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ArchiveOutputSettings {
/// <p>Settings specific to the container type of the file.</p>
#[serde(rename = "ContainerSettings")]
pub container_settings: ArchiveContainerSettings,
/// <p>Output file extension. If excluded, this will be auto-selected from the container type.</p>
#[serde(rename = "Extension")]
#[serde(skip_serializing_if = "Option::is_none")]
pub extension: Option<String>,
/// <p>String concatenated to the end of the destination filename. Required for multiple outputs of the same type.</p>
#[serde(rename = "NameModifier")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name_modifier: Option<String>,
}
/// <p>Placeholder documentation for AribDestinationSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AribDestinationSettings {}
/// <p>Placeholder documentation for AribSourceSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AribSourceSettings {}
/// <p>Placeholder documentation for AudioChannelMapping</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioChannelMapping {
/// <p>Indices and gain values for each input channel that should be remixed into this output channel.</p>
#[serde(rename = "InputChannelLevels")]
pub input_channel_levels: Vec<InputChannelLevel>,
/// <p>The index of the output channel being produced.</p>
#[serde(rename = "OutputChannel")]
pub output_channel: i64,
}
/// <p>Placeholder documentation for AudioCodecSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioCodecSettings {
#[serde(rename = "AacSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub aac_settings: Option<AacSettings>,
#[serde(rename = "Ac3Settings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ac_3_settings: Option<Ac3Settings>,
#[serde(rename = "Eac3Settings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub eac_3_settings: Option<Eac3Settings>,
#[serde(rename = "Mp2Settings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub mp_2_settings: Option<Mp2Settings>,
#[serde(rename = "PassThroughSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pass_through_settings: Option<PassThroughSettings>,
}
/// <p>Placeholder documentation for AudioDescription</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioDescription {
/// <p>Advanced audio normalization settings.</p>
#[serde(rename = "AudioNormalizationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_normalization_settings: Option<AudioNormalizationSettings>,
/// <p>The name of the AudioSelector used as the source for this AudioDescription.</p>
#[serde(rename = "AudioSelectorName")]
pub audio_selector_name: String,
/// <p>Applies only if audioTypeControl is useConfigured. The values for audioType are defined in ISO-IEC 13818-1.</p>
#[serde(rename = "AudioType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_type: Option<String>,
/// <p>Determines how audio type is determined.
/// followInput: If the input contains an ISO 639 audioType, then that value is passed through to the output. If the input contains no ISO 639 audioType, the value in Audio Type is included in the output.
/// useConfigured: The value in Audio Type is included in the output.
/// Note that this field and audioType are both ignored if inputType is broadcasterMixedAd.</p>
#[serde(rename = "AudioTypeControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_type_control: Option<String>,
/// <p>Audio codec settings.</p>
#[serde(rename = "CodecSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub codec_settings: Option<AudioCodecSettings>,
/// <p>Indicates the language of the audio output track. Only used if languageControlMode is useConfigured, or there is no ISO 639 language code specified in the input.</p>
#[serde(rename = "LanguageCode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub language_code: Option<String>,
/// <p>Choosing followInput will cause the ISO 639 language code of the output to follow the ISO 639 language code of the input. The languageCode will be used when useConfigured is set, or when followInput is selected but there is no ISO 639 language code specified by the input.</p>
#[serde(rename = "LanguageCodeControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub language_code_control: Option<String>,
/// <p>The name of this AudioDescription. Outputs will use this name to uniquely identify this AudioDescription. Description names should be unique within this Live Event.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>Settings that control how input audio channels are remixed into the output audio channels.</p>
#[serde(rename = "RemixSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub remix_settings: Option<RemixSettings>,
/// <p>Used for MS Smooth and Apple HLS outputs. Indicates the name displayed by the player (eg. English, or Director Commentary).</p>
#[serde(rename = "StreamName")]
#[serde(skip_serializing_if = "Option::is_none")]
pub stream_name: Option<String>,
}
/// <p>Placeholder documentation for AudioLanguageSelection</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioLanguageSelection {
/// <p>Selects a specific three-letter language code from within an audio source.</p>
#[serde(rename = "LanguageCode")]
pub language_code: String,
/// <p>When set to "strict", the transport stream demux strictly identifies audio streams by their language descriptor. If a PMT update occurs such that an audio stream matching the initially selected language is no longer present then mute will be encoded until the language returns. If "loose", then on a PMT update the demux will choose another audio stream in the program with the same stream type if it can't find one with the same language.</p>
#[serde(rename = "LanguageSelectionPolicy")]
#[serde(skip_serializing_if = "Option::is_none")]
pub language_selection_policy: Option<String>,
}
/// <p>Placeholder documentation for AudioNormalizationSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioNormalizationSettings {
/// <p>Audio normalization algorithm to use. itu17701 conforms to the CALM Act specification, itu17702 conforms to the EBU R-128 specification.</p>
#[serde(rename = "Algorithm")]
#[serde(skip_serializing_if = "Option::is_none")]
pub algorithm: Option<String>,
/// <p>When set to correctAudio the output audio is corrected using the chosen algorithm. If set to measureOnly, the audio will be measured but not adjusted.</p>
#[serde(rename = "AlgorithmControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub algorithm_control: Option<String>,
/// <p>Target LKFS(loudness) to adjust volume to. If no value is entered, a default value will be used according to the chosen algorithm. The CALM Act (1770-1) recommends a target of -24 LKFS. The EBU R-128 specification (1770-2) recommends a target of -23 LKFS.</p>
#[serde(rename = "TargetLkfs")]
#[serde(skip_serializing_if = "Option::is_none")]
pub target_lkfs: Option<f64>,
}
/// <p>Placeholder documentation for AudioOnlyHlsSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioOnlyHlsSettings {
/// <p>Specifies the group to which the audio Rendition belongs.</p>
#[serde(rename = "AudioGroupId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_group_id: Option<String>,
/// <p>For use with an audio only Stream. Must be a .jpg or .png file. If given, this image will be used as the cover-art for the audio only output. Ideally, it should be formatted for an iPhone screen for two reasons. The iPhone does not resize the image, it crops a centered image on the top/bottom and left/right. Additionally, this image file gets saved bit-for-bit into every 10-second segment file, so will increase bandwidth by {image file size} * {segment count} * {user count.}.</p>
#[serde(rename = "AudioOnlyImage")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_only_image: Option<InputLocation>,
/// <p>Four types of audio-only tracks are supported:</p>
///
/// <p>Audio-Only Variant Stream
/// The client can play back this audio-only stream instead of video in low-bandwidth scenarios. Represented as an EXT-X-STREAM-INF in the HLS manifest.</p>
///
/// <p>Alternate Audio, Auto Select, Default
/// Alternate rendition that the client should try to play back by default. Represented as an EXT-X-MEDIA in the HLS manifest with DEFAULT=YES, AUTOSELECT=YES</p>
///
/// <p>Alternate Audio, Auto Select, Not Default
/// Alternate rendition that the client may try to play back by default. Represented as an EXT-X-MEDIA in the HLS manifest with DEFAULT=NO, AUTOSELECT=YES</p>
///
/// <p>Alternate Audio, not Auto Select
/// Alternate rendition that the client will not try to play back by default. Represented as an EXT-X-MEDIA in the HLS manifest with DEFAULT=NO, AUTOSELECT=NO</p>
#[serde(rename = "AudioTrackType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_track_type: Option<String>,
}
/// <p>Placeholder documentation for AudioPidSelection</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioPidSelection {
/// <p>Selects a specific PID from within a source.</p>
#[serde(rename = "Pid")]
pub pid: i64,
}
/// <p>Placeholder documentation for AudioSelector</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioSelector {
/// <p>The name of this AudioSelector. AudioDescriptions will use this name to uniquely identify this Selector. Selector names should be unique per input.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p>The audio selector settings.</p>
#[serde(rename = "SelectorSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub selector_settings: Option<AudioSelectorSettings>,
}
/// <p>Placeholder documentation for AudioSelectorSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AudioSelectorSettings {
#[serde(rename = "AudioLanguageSelection")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_language_selection: Option<AudioLanguageSelection>,
#[serde(rename = "AudioPidSelection")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_pid_selection: Option<AudioPidSelection>,
}
/// <p>Placeholder documentation for AvailBlanking</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AvailBlanking {
/// <p>Blanking image to be used. Leave empty for solid black. Only bmp and png images are supported.</p>
#[serde(rename = "AvailBlankingImage")]
#[serde(skip_serializing_if = "Option::is_none")]
pub avail_blanking_image: Option<InputLocation>,
/// <p>When set to enabled, causes video, audio and captions to be blanked when insertion metadata is added.</p>
#[serde(rename = "State")]
#[serde(skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
}
/// <p>Placeholder documentation for AvailConfiguration</p>
// Thin wrapper around the optional `AvailSettings` payload.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AvailConfiguration {
/// <p>Ad avail settings.</p>
#[serde(rename = "AvailSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub avail_settings: Option<AvailSettings>,
}
/// <p>Placeholder documentation for AvailSettings</p>
// Union-style settings object: presumably only one SCTE-35 handling mode is populated
// at a time — TODO confirm against the MediaLive API model.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AvailSettings {
/// SCTE-35 splice-insert avail settings, if that mode is used.
#[serde(rename = "Scte35SpliceInsert")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_35_splice_insert: Option<Scte35SpliceInsert>,
/// SCTE-35 time-signal APOS avail settings, if that mode is used.
#[serde(rename = "Scte35TimeSignalApos")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_35_time_signal_apos: Option<Scte35TimeSignalApos>,
}
/// <p>Placeholder documentation for BlackoutSlate</p>
// All fields are optional and omitted from the serialized JSON when `None`.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct BlackoutSlate {
/// <p>Blackout slate image to be used. Leave empty for solid black. Only bmp and png images are supported.</p>
#[serde(rename = "BlackoutSlateImage")]
#[serde(skip_serializing_if = "Option::is_none")]
pub blackout_slate_image: Option<InputLocation>,
/// <p>Setting to enabled causes the encoder to blackout the video, audio, and captions, and raise the "Network Blackout Image" slate when an SCTE104/35 Network End Segmentation Descriptor is encountered. The blackout will be lifted when the Network Start Segmentation Descriptor is encountered. The Network End and Network Start descriptors must contain a network ID that matches the value entered in "Network ID".</p>
#[serde(rename = "NetworkEndBlackout")]
#[serde(skip_serializing_if = "Option::is_none")]
pub network_end_blackout: Option<String>,
/// <p>Path to local file to use as Network End Blackout image. Image will be scaled to fill the entire output raster.</p>
#[serde(rename = "NetworkEndBlackoutImage")]
#[serde(skip_serializing_if = "Option::is_none")]
pub network_end_blackout_image: Option<InputLocation>,
/// <p>Provides Network ID that matches EIDR ID format (e.g., "10.XXXX/XXXX-XXXX-XXXX-XXXX-XXXX-C").</p>
#[serde(rename = "NetworkId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub network_id: Option<String>,
/// <p>When set to enabled, causes video, audio and captions to be blanked when indicated by program metadata.</p>
#[serde(rename = "State")]
#[serde(skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
}
/// <p>Placeholder documentation for BurnInDestinationSettings</p>
// Styling options for captions burned into the video raster. Every field is optional
// and omitted from the serialized JSON when `None`. Per the field docs below, all
// burn-in and DVB-Sub font settings must match across the configuration.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct BurnInDestinationSettings {
/// <p>If no explicit xPosition or yPosition is provided, setting alignment to centered will place the captions at the bottom center of the output. Similarly, setting a left alignment will align captions to the bottom left of the output. If x and y positions are given in conjunction with the alignment parameter, the font will be justified (either left or centered) relative to those coordinates. Selecting "smart" justification will left-justify live subtitles and center-justify pre-recorded subtitles. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "Alignment")]
#[serde(skip_serializing_if = "Option::is_none")]
pub alignment: Option<String>,
/// <p>Specifies the color of the rectangle behind the captions. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "BackgroundColor")]
#[serde(skip_serializing_if = "Option::is_none")]
pub background_color: Option<String>,
/// <p>Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent. Leaving this parameter out is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "BackgroundOpacity")]
#[serde(skip_serializing_if = "Option::is_none")]
pub background_opacity: Option<i64>,
/// <p>External font file used for caption burn-in. File extension must be 'ttf' or 'tte'. Although the user can select output fonts for many different types of input captions, embedded, STL and teletext sources use a strict grid system. Using external fonts with these caption sources could cause unexpected display of proportional fonts. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "Font")]
#[serde(skip_serializing_if = "Option::is_none")]
pub font: Option<InputLocation>,
/// <p>Specifies the color of the burned-in captions. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "FontColor")]
#[serde(skip_serializing_if = "Option::is_none")]
pub font_color: Option<String>,
/// <p>Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "FontOpacity")]
#[serde(skip_serializing_if = "Option::is_none")]
pub font_opacity: Option<i64>,
/// <p>Font resolution in DPI (dots per inch); default is 96 dpi. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "FontResolution")]
#[serde(skip_serializing_if = "Option::is_none")]
pub font_resolution: Option<i64>,
/// <p>When set to 'auto' fontSize will scale depending on the size of the output. Giving a positive integer will specify the exact font size in points. All burn-in and DVB-Sub font settings must match.</p>
// String (not integer) because it also accepts the literal 'auto'.
#[serde(rename = "FontSize")]
#[serde(skip_serializing_if = "Option::is_none")]
pub font_size: Option<String>,
/// <p>Specifies font outline color. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "OutlineColor")]
#[serde(skip_serializing_if = "Option::is_none")]
pub outline_color: Option<String>,
/// <p>Specifies font outline size in pixels. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "OutlineSize")]
#[serde(skip_serializing_if = "Option::is_none")]
pub outline_size: Option<i64>,
/// <p>Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "ShadowColor")]
#[serde(skip_serializing_if = "Option::is_none")]
pub shadow_color: Option<String>,
/// <p>Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving this parameter out is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "ShadowOpacity")]
#[serde(skip_serializing_if = "Option::is_none")]
pub shadow_opacity: Option<i64>,
/// <p>Specifies the horizontal offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels to the left. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "ShadowXOffset")]
#[serde(skip_serializing_if = "Option::is_none")]
pub shadow_x_offset: Option<i64>,
/// <p>Specifies the vertical offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels above the text. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "ShadowYOffset")]
#[serde(skip_serializing_if = "Option::is_none")]
pub shadow_y_offset: Option<i64>,
/// <p>Controls whether a fixed grid size will be used to generate the output subtitles bitmap. Only applicable for Teletext inputs and DVB-Sub/Burn-in outputs.</p>
#[serde(rename = "TeletextGridControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub teletext_grid_control: Option<String>,
/// <p>Specifies the horizontal position of the caption relative to the left side of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the left of the output. If no explicit xPosition is provided, the horizontal caption position will be determined by the alignment parameter. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "XPosition")]
#[serde(skip_serializing_if = "Option::is_none")]
pub x_position: Option<i64>,
/// <p>Specifies the vertical position of the caption relative to the top of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the top of the output. If no explicit yPosition is provided, the caption will be positioned towards the bottom of the output. All burn-in and DVB-Sub font settings must match.</p>
#[serde(rename = "YPosition")]
#[serde(skip_serializing_if = "Option::is_none")]
pub y_position: Option<i64>,
}
/// <p>Caption description settings: associates an input caption selector with an output caption stream.</p>
// NOTE(review): the generated doc on this struct previously read "Output groups for this
// Live Event..." — that text describes output groups, not caption descriptions, and looks
// like a codegen mix-up in the upstream API model.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CaptionDescription {
/// <p>Specifies which input caption selector to use as a caption source when generating output captions. This field should match a captionSelector name.</p>
#[serde(rename = "CaptionSelectorName")]
// Required: always serialized.
pub caption_selector_name: String,
/// <p>Additional settings for captions destination that depend on the destination type.</p>
#[serde(rename = "DestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub destination_settings: Option<CaptionDestinationSettings>,
/// <p>ISO 639-2 three-digit code: http://www.loc.gov/standards/iso639-2/</p>
#[serde(rename = "LanguageCode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub language_code: Option<String>,
/// <p>Human readable information to indicate captions available for players (eg. English, or Spanish).</p>
#[serde(rename = "LanguageDescription")]
#[serde(skip_serializing_if = "Option::is_none")]
pub language_description: Option<String>,
/// <p>Name of the caption description. Used to associate a caption description with an output. Names must be unique within an event.</p>
#[serde(rename = "Name")]
// Required: always serialized.
pub name: String,
}
/// <p>Placeholder documentation for CaptionDestinationSettings</p>
// Union-style settings object with one field per supported caption output format
// (ARIB, burn-in, DVB-Sub, embedded, SCTE-20/27, SMPTE-TT, teletext, TTML, WebVTT).
// Presumably only the field matching the chosen destination type is populated; the
// unset variants are omitted from the serialized JSON.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CaptionDestinationSettings {
#[serde(rename = "AribDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arib_destination_settings: Option<AribDestinationSettings>,
#[serde(rename = "BurnInDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub burn_in_destination_settings: Option<BurnInDestinationSettings>,
#[serde(rename = "DvbSubDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_sub_destination_settings: Option<DvbSubDestinationSettings>,
#[serde(rename = "EmbeddedDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub embedded_destination_settings: Option<EmbeddedDestinationSettings>,
#[serde(rename = "EmbeddedPlusScte20DestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub embedded_plus_scte_20_destination_settings: Option<EmbeddedPlusScte20DestinationSettings>,
#[serde(rename = "RtmpCaptionInfoDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rtmp_caption_info_destination_settings: Option<RtmpCaptionInfoDestinationSettings>,
#[serde(rename = "Scte20PlusEmbeddedDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_20_plus_embedded_destination_settings: Option<Scte20PlusEmbeddedDestinationSettings>,
#[serde(rename = "Scte27DestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_27_destination_settings: Option<Scte27DestinationSettings>,
#[serde(rename = "SmpteTtDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub smpte_tt_destination_settings: Option<SmpteTtDestinationSettings>,
#[serde(rename = "TeletextDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub teletext_destination_settings: Option<TeletextDestinationSettings>,
#[serde(rename = "TtmlDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ttml_destination_settings: Option<TtmlDestinationSettings>,
#[serde(rename = "WebvttDestinationSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub webvtt_destination_settings: Option<WebvttDestinationSettings>,
}
/// <p>Maps a caption channel to an ISO 693-2 language code (http://www.loc.gov/standards/iso639-2), with an optional description.</p>
// All three fields are required and always serialized (no `skip_serializing_if`).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CaptionLanguageMapping {
/// <p>The closed caption channel being described by this CaptionLanguageMapping. Each channel mapping must have a unique channel number (maximum of 4)</p>
#[serde(rename = "CaptionChannel")]
pub caption_channel: i64,
/// <p>Three character ISO 639-2 language code (see http://www.loc.gov/standards/iso639-2)</p>
#[serde(rename = "LanguageCode")]
pub language_code: String,
/// <p>Textual description of language</p>
#[serde(rename = "LanguageDescription")]
pub language_description: String,
}
/// <p>Caption selector: identifies a caption track to extract from an input.</p>
// NOTE(review): the generated doc on this struct previously read "Output groups for this
// Live Event..." — that text describes output groups, not caption selectors, and looks
// like a codegen mix-up in the upstream API model.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CaptionSelector {
/// <p>When specified this field indicates the three letter language code of the caption track to extract from the source.</p>
#[serde(rename = "LanguageCode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub language_code: Option<String>,
/// <p>Name identifier for a caption selector. This name is used to associate this caption selector with one or more caption descriptions. Names must be unique within an event.</p>
#[serde(rename = "Name")]
// Required: always serialized.
pub name: String,
/// <p>Caption selector settings.</p>
#[serde(rename = "SelectorSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub selector_settings: Option<CaptionSelectorSettings>,
}
/// <p>Placeholder documentation for CaptionSelectorSettings</p>
// Union-style settings object with one field per supported caption source format.
// Presumably only the field matching the input caption format is populated; unset
// variants are omitted from the serialized JSON.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CaptionSelectorSettings {
#[serde(rename = "AribSourceSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arib_source_settings: Option<AribSourceSettings>,
#[serde(rename = "DvbSubSourceSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_sub_source_settings: Option<DvbSubSourceSettings>,
#[serde(rename = "EmbeddedSourceSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub embedded_source_settings: Option<EmbeddedSourceSettings>,
#[serde(rename = "Scte20SourceSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_20_source_settings: Option<Scte20SourceSettings>,
#[serde(rename = "Scte27SourceSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_27_source_settings: Option<Scte27SourceSettings>,
#[serde(rename = "TeletextSourceSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub teletext_source_settings: Option<TeletextSourceSettings>,
}
/// <p>Placeholder documentation for Channel</p>
// Response-side model: only `Deserialize` is derived for normal builds; `Serialize`
// is added in test builds only (via `cfg_attr`), which is when the
// `skip_serializing_if` attributes below take effect.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Channel {
/// <p>The unique arn of the channel.</p>
#[serde(rename = "Arn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arn: Option<String>,
/// <p>A list of destinations of the channel. For UDP outputs, there is one
/// destination per output. For other types (HLS, for example), there is
/// one destination per packager.</p>
#[serde(rename = "Destinations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub destinations: Option<Vec<OutputDestination>>,
/// <p>The endpoints where outgoing connections initiate from</p>
#[serde(rename = "EgressEndpoints")]
#[serde(skip_serializing_if = "Option::is_none")]
pub egress_endpoints: Option<Vec<ChannelEgressEndpoint>>,
/// Encoder settings for the channel, if returned by the service.
#[serde(rename = "EncoderSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub encoder_settings: Option<EncoderSettings>,
/// <p>The unique id of the channel.</p>
#[serde(rename = "Id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
/// <p>List of input attachments for channel.</p>
#[serde(rename = "InputAttachments")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_attachments: Option<Vec<InputAttachment>>,
/// Input specification for the channel, if returned by the service.
#[serde(rename = "InputSpecification")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_specification: Option<InputSpecification>,
/// <p>The log level being written to CloudWatch Logs.</p>
#[serde(rename = "LogLevel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub log_level: Option<String>,
/// <p>The name of the channel. (user-mutable)</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The number of currently healthy pipelines.</p>
#[serde(rename = "PipelinesRunningCount")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pipelines_running_count: Option<i64>,
/// <p>The Amazon Resource Name (ARN) of the role assumed when running the Channel.</p>
#[serde(rename = "RoleArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub role_arn: Option<String>,
/// Channel state string, if returned by the service (exact values defined by the API model).
#[serde(rename = "State")]
#[serde(skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
}
/// <p>Placeholder documentation for ChannelConfigurationValidationError</p>
// Plain in-memory model: no serde derives, so this type is not (de)serialized directly.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ChannelConfigurationValidationError {
/// Top-level error message, if any.
pub message: Option<String>,
/// <p>A collection of validation error responses from attempting to create a channel with a bouquet of settings.</p>
pub validation_errors: Option<Vec<ValidationError>>,
}
/// <p>Placeholder documentation for ChannelEgressEndpoint</p>
// Response-side model: `Serialize` is derived only in test builds (via `cfg_attr`).
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ChannelEgressEndpoint {
/// <p>Public IP of where a channel's output comes from</p>
#[serde(rename = "SourceIp")]
#[serde(skip_serializing_if = "Option::is_none")]
pub source_ip: Option<String>,
}
/// <p>Placeholder documentation for ChannelSummary</p>
// Summary variant of `Channel` (same fields minus the encoder settings).
// Response-side model: `Serialize` is derived only in test builds (via `cfg_attr`).
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ChannelSummary {
/// <p>The unique arn of the channel.</p>
#[serde(rename = "Arn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arn: Option<String>,
/// <p>A list of destinations of the channel. For UDP outputs, there is one
/// destination per output. For other types (HLS, for example), there is
/// one destination per packager.</p>
#[serde(rename = "Destinations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub destinations: Option<Vec<OutputDestination>>,
/// <p>The endpoints where outgoing connections initiate from</p>
#[serde(rename = "EgressEndpoints")]
#[serde(skip_serializing_if = "Option::is_none")]
pub egress_endpoints: Option<Vec<ChannelEgressEndpoint>>,
/// <p>The unique id of the channel.</p>
#[serde(rename = "Id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
/// <p>List of input attachments for channel.</p>
#[serde(rename = "InputAttachments")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_attachments: Option<Vec<InputAttachment>>,
/// Input specification for the channel, if returned by the service.
#[serde(rename = "InputSpecification")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_specification: Option<InputSpecification>,
/// <p>The log level being written to CloudWatch Logs.</p>
#[serde(rename = "LogLevel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub log_level: Option<String>,
/// <p>The name of the channel. (user-mutable)</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The number of currently healthy pipelines.</p>
#[serde(rename = "PipelinesRunningCount")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pipelines_running_count: Option<i64>,
/// <p>The Amazon Resource Name (ARN) of the role assumed when running the Channel.</p>
#[serde(rename = "RoleArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub role_arn: Option<String>,
/// Channel state string, if returned by the service (exact values defined by the API model).
#[serde(rename = "State")]
#[serde(skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
}
/// <p>Placeholder documentation for CreateChannel</p>
// Plain in-memory model: no serde derives, so this type is not (de)serialized directly
// (the wire shape is `CreateChannelRequest`, which carries the same fields plus serde
// attributes).
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CreateChannel {
/// Destinations for the new channel.
pub destinations: Option<Vec<OutputDestination>>,
/// Encoder settings for the new channel.
pub encoder_settings: Option<EncoderSettings>,
/// <p>List of input attachments for channel.</p>
pub input_attachments: Option<Vec<InputAttachment>>,
/// <p>Specification of input for this channel (max. bitrate, resolution, codec, etc.)</p>
pub input_specification: Option<InputSpecification>,
/// <p>The log level to write to CloudWatch Logs.</p>
pub log_level: Option<String>,
/// <p>Name of channel.</p>
pub name: Option<String>,
/// <p>Unique request ID to be specified. This is needed to prevent retries from
/// creating multiple resources.</p>
pub request_id: Option<String>,
/// <p>An optional Amazon Resource Name (ARN) of the role to assume when running the Channel.</p>
pub role_arn: Option<String>,
}
/// <p>A request to create a channel</p>
// Request-side model: only `Serialize` is derived; unset `Option` fields are omitted
// from the serialized JSON.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateChannelRequest {
/// Destinations for the new channel.
#[serde(rename = "Destinations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub destinations: Option<Vec<OutputDestination>>,
/// Encoder settings for the new channel.
#[serde(rename = "EncoderSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub encoder_settings: Option<EncoderSettings>,
/// <p>List of input attachments for channel.</p>
#[serde(rename = "InputAttachments")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_attachments: Option<Vec<InputAttachment>>,
/// <p>Specification of input for this channel (max. bitrate, resolution, codec, etc.)</p>
#[serde(rename = "InputSpecification")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_specification: Option<InputSpecification>,
/// <p>The log level to write to CloudWatch Logs.</p>
#[serde(rename = "LogLevel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub log_level: Option<String>,
/// <p>Name of channel.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>Unique request ID to be specified. This is needed to prevent retries from
/// creating multiple resources.</p>
#[serde(rename = "RequestId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub request_id: Option<String>,
/// <p>An optional Amazon Resource Name (ARN) of the role to assume when running the Channel.</p>
#[serde(rename = "RoleArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub role_arn: Option<String>,
}
/// <p>Placeholder documentation for CreateChannelResponse</p>
// Response-side model: `Serialize` is derived only in test builds (via `cfg_attr`).
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateChannelResponse {
/// The created channel, if returned by the service.
#[serde(rename = "Channel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub channel: Option<Channel>,
}
/// <p>Placeholder documentation for CreateChannelResultModel</p>
// Plain in-memory model: no serde derives, so this type is not (de)serialized directly.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CreateChannelResultModel {
/// The created channel, if present.
pub channel: Option<Channel>,
}
/// <p>Placeholder documentation for CreateInput</p>
// Plain in-memory model: no serde derives (the wire shape is `CreateInputRequest`).
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CreateInput {
/// <p>Destination settings for PUSH type inputs.</p>
pub destinations: Option<Vec<InputDestinationRequest>>,
/// <p>A list of security groups referenced by IDs to attach to the input.</p>
pub input_security_groups: Option<Vec<String>>,
/// <p>Name of the input.</p>
pub name: Option<String>,
/// <p>Unique identifier of the request to ensure the request is handled
/// exactly once in case of retries.</p>
pub request_id: Option<String>,
/// <p>The source URLs for a PULL-type input. Every PULL type input needs
/// exactly two source URLs for redundancy.
/// Only specify sources for PULL type Inputs. Leave Destinations empty.</p>
pub sources: Option<Vec<InputSourceRequest>>,
/// Input type; trailing underscore avoids the Rust `type` keyword.
pub type_: Option<String>,
}
/// <p>The name of the input</p>
// Request-side model: only `Serialize` is derived; unset `Option` fields are omitted
// from the serialized JSON.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateInputRequest {
/// <p>Destination settings for PUSH type inputs.</p>
#[serde(rename = "Destinations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub destinations: Option<Vec<InputDestinationRequest>>,
/// <p>A list of security groups referenced by IDs to attach to the input.</p>
#[serde(rename = "InputSecurityGroups")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_security_groups: Option<Vec<String>>,
/// <p>Name of the input.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>Unique identifier of the request to ensure the request is handled
/// exactly once in case of retries.</p>
#[serde(rename = "RequestId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub request_id: Option<String>,
/// <p>The source URLs for a PULL-type input. Every PULL type input needs
/// exactly two source URLs for redundancy.
/// Only specify sources for PULL type Inputs. Leave Destinations empty.</p>
#[serde(rename = "Sources")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sources: Option<Vec<InputSourceRequest>>,
/// Input type, serialized as "Type"; trailing underscore avoids the Rust `type` keyword.
#[serde(rename = "Type")]
#[serde(skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
/// <p>Placeholder documentation for CreateInputResponse</p>
// Response-side model: `Serialize` is derived only in test builds (via `cfg_attr`).
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateInputResponse {
/// The created input, if returned by the service.
#[serde(rename = "Input")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input: Option<Input>,
}
/// <p>Placeholder documentation for CreateInputResultModel</p>
// Plain in-memory model: no serde derives, so this type is not (de)serialized directly.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CreateInputResultModel {
/// The created input, if present.
pub input: Option<Input>,
}
/// <p>The IPv4 CIDRs to whitelist for this Input Security Group</p>
// Request-side model: only `Serialize` is derived.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateInputSecurityGroupRequest {
/// <p>List of IPv4 CIDR addresses to whitelist</p>
#[serde(rename = "WhitelistRules")]
#[serde(skip_serializing_if = "Option::is_none")]
pub whitelist_rules: Option<Vec<InputWhitelistRuleCidr>>,
}
/// <p>Placeholder documentation for CreateInputSecurityGroupResponse</p>
// Response-side model: `Serialize` is derived only in test builds (via `cfg_attr`).
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateInputSecurityGroupResponse {
/// The created input security group, if returned by the service.
#[serde(rename = "SecurityGroup")]
#[serde(skip_serializing_if = "Option::is_none")]
pub security_group: Option<InputSecurityGroup>,
}
/// <p>Placeholder documentation for CreateInputSecurityGroupResultModel</p>
// Plain in-memory model: no serde derives, so this type is not (de)serialized directly.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CreateInputSecurityGroupResultModel {
/// The created input security group, if present.
pub security_group: Option<InputSecurityGroup>,
}
/// <p>Placeholder documentation for DeleteChannelRequest</p>
// Request-side model: only `Serialize` is derived; the channel id is required.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteChannelRequest {
/// <p>Unique ID of the channel.</p>
#[serde(rename = "ChannelId")]
pub channel_id: String,
}
/// <p>Placeholder documentation for DeleteChannelResponse</p>
// Carries the same channel fields as `Channel`/`DescribeChannelResponse`.
// Response-side model: `Serialize` is derived only in test builds (via `cfg_attr`).
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteChannelResponse {
/// <p>The unique arn of the channel.</p>
#[serde(rename = "Arn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arn: Option<String>,
/// <p>A list of destinations of the channel. For UDP outputs, there is one
/// destination per output. For other types (HLS, for example), there is
/// one destination per packager.</p>
#[serde(rename = "Destinations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub destinations: Option<Vec<OutputDestination>>,
/// <p>The endpoints where outgoing connections initiate from</p>
#[serde(rename = "EgressEndpoints")]
#[serde(skip_serializing_if = "Option::is_none")]
pub egress_endpoints: Option<Vec<ChannelEgressEndpoint>>,
/// Encoder settings for the channel, if returned by the service.
#[serde(rename = "EncoderSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub encoder_settings: Option<EncoderSettings>,
/// <p>The unique id of the channel.</p>
#[serde(rename = "Id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
/// <p>List of input attachments for channel.</p>
#[serde(rename = "InputAttachments")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_attachments: Option<Vec<InputAttachment>>,
/// Input specification for the channel, if returned by the service.
#[serde(rename = "InputSpecification")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_specification: Option<InputSpecification>,
/// <p>The log level being written to CloudWatch Logs.</p>
#[serde(rename = "LogLevel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub log_level: Option<String>,
/// <p>The name of the channel. (user-mutable)</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The number of currently healthy pipelines.</p>
#[serde(rename = "PipelinesRunningCount")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pipelines_running_count: Option<i64>,
/// <p>The Amazon Resource Name (ARN) of the role assumed when running the Channel.</p>
#[serde(rename = "RoleArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub role_arn: Option<String>,
/// Channel state string, if returned by the service (exact values defined by the API model).
#[serde(rename = "State")]
#[serde(skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
}
/// <p>Placeholder documentation for DeleteInputRequest</p>
// Request-side model: only `Serialize` is derived; the input id is required.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteInputRequest {
/// <p>Unique ID of the input</p>
#[serde(rename = "InputId")]
pub input_id: String,
}
/// <p>Placeholder documentation for DeleteInputResponse</p>
// Empty response body: the delete operation returns no payload fields.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteInputResponse {}
/// <p>Placeholder documentation for DeleteInputSecurityGroupRequest</p>
// Request-side model: only `Serialize` is derived; the group id is required.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteInputSecurityGroupRequest {
/// <p>The Input Security Group to delete</p>
#[serde(rename = "InputSecurityGroupId")]
pub input_security_group_id: String,
}
/// <p>Placeholder documentation for DeleteInputSecurityGroupResponse</p>
// Empty response body: the delete operation returns no payload fields.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteInputSecurityGroupResponse {}
/// <p>Placeholder documentation for DeleteReservationRequest</p>
// Request-side model: only `Serialize` is derived; the reservation id is required.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteReservationRequest {
/// <p>Unique reservation ID, e.g. '1234567'</p>
#[serde(rename = "ReservationId")]
pub reservation_id: String,
}
/// <p>Placeholder documentation for DeleteReservationResponse</p>
// Returns the full reservation record for the deleted reservation. Every field is
// optional. Response-side model: `Serialize` is derived only in test builds.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteReservationResponse {
/// <p>Unique reservation ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:reservation:1234567'</p>
#[serde(rename = "Arn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arn: Option<String>,
/// <p>Number of reserved resources</p>
#[serde(rename = "Count")]
#[serde(skip_serializing_if = "Option::is_none")]
pub count: Option<i64>,
/// <p>Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'</p>
#[serde(rename = "CurrencyCode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub currency_code: Option<String>,
/// <p>Lease duration, e.g. '12'</p>
#[serde(rename = "Duration")]
#[serde(skip_serializing_if = "Option::is_none")]
pub duration: Option<i64>,
/// <p>Units for duration, e.g. 'MONTHS'</p>
#[serde(rename = "DurationUnits")]
#[serde(skip_serializing_if = "Option::is_none")]
pub duration_units: Option<String>,
/// <p>Reservation UTC end date and time in ISO-8601 format, e.g. '2019-03-01T00:00:00'</p>
#[serde(rename = "End")]
#[serde(skip_serializing_if = "Option::is_none")]
pub end: Option<String>,
/// <p>One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering</p>
#[serde(rename = "FixedPrice")]
#[serde(skip_serializing_if = "Option::is_none")]
pub fixed_price: Option<f64>,
/// <p>User specified reservation name</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'</p>
#[serde(rename = "OfferingDescription")]
#[serde(skip_serializing_if = "Option::is_none")]
pub offering_description: Option<String>,
/// <p>Unique offering ID, e.g. '87654321'</p>
#[serde(rename = "OfferingId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub offering_id: Option<String>,
/// <p>Offering type, e.g. 'NO_UPFRONT'</p>
#[serde(rename = "OfferingType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub offering_type: Option<String>,
/// <p>AWS region, e.g. 'us-west-2'</p>
#[serde(rename = "Region")]
#[serde(skip_serializing_if = "Option::is_none")]
pub region: Option<String>,
/// <p>Unique reservation ID, e.g. '1234567'</p>
#[serde(rename = "ReservationId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub reservation_id: Option<String>,
/// <p>Resource configuration details</p>
#[serde(rename = "ResourceSpecification")]
#[serde(skip_serializing_if = "Option::is_none")]
pub resource_specification: Option<ReservationResourceSpecification>,
/// <p>Reservation UTC start date and time in ISO-8601 format, e.g. '2018-03-01T00:00:00'</p>
#[serde(rename = "Start")]
#[serde(skip_serializing_if = "Option::is_none")]
pub start: Option<String>,
/// <p>Current state of reservation, e.g. 'ACTIVE'</p>
#[serde(rename = "State")]
#[serde(skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
/// <p>Recurring usage charge for each reserved resource, e.g. '157.0'</p>
#[serde(rename = "UsagePrice")]
#[serde(skip_serializing_if = "Option::is_none")]
pub usage_price: Option<f64>,
}
/// <p>Placeholder documentation for DescribeChannelRequest</p>
// Request payload: Serialize-only — sent to the service, never deserialized.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeChannelRequest {
    /// <p>channel ID</p>
    #[serde(rename = "ChannelId")]
    pub channel_id: String,
}
/// <p>Placeholder documentation for DescribeChannelResponse</p>
// Response payload: Deserialize-only in production; Serialize is additionally
// derived under cfg(test) so tests can serialize instances.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeChannelResponse {
    /// <p>The unique arn of the channel.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A list of destinations of the channel. For UDP outputs, there is one
    /// destination per output. For other types (HLS, for example), there is
    /// one destination per packager.</p>
    #[serde(rename = "Destinations")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<OutputDestination>>,
    /// <p>The endpoints where outgoing connections initiate from</p>
    #[serde(rename = "EgressEndpoints")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub egress_endpoints: Option<Vec<ChannelEgressEndpoint>>,
    /// Encoder configuration for the channel.
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "EncoderSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoder_settings: Option<EncoderSettings>,
    /// <p>The unique id of the channel.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>List of input attachments for channel.</p>
    #[serde(rename = "InputAttachments")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_attachments: Option<Vec<InputAttachment>>,
    /// Input specification for the channel.
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "InputSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_specification: Option<InputSpecification>,
    /// <p>The log level being written to CloudWatch Logs.</p>
    #[serde(rename = "LogLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub log_level: Option<String>,
    /// <p>The name of the channel. (user-mutable)</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>The number of currently healthy pipelines.</p>
    #[serde(rename = "PipelinesRunningCount")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pipelines_running_count: Option<i64>,
    /// <p>The Amazon Resource Name (ARN) of the role assumed when running the Channel.</p>
    #[serde(rename = "RoleArn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub role_arn: Option<String>,
    /// Current state of the channel.
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
}
/// <p>Placeholder documentation for DescribeInputRequest</p>
// Request payload: Serialize-only — sent to the service, never deserialized.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeInputRequest {
    /// <p>Unique ID of the input</p>
    #[serde(rename = "InputId")]
    pub input_id: String,
}
/// <p>Placeholder documentation for DescribeInputResponse</p>
// Response payload: Deserialize-only in production; Serialize is additionally
// derived under cfg(test) so tests can serialize instances.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeInputResponse {
    /// <p>The Unique ARN of the input (generated, immutable).</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A list of channel IDs that the input is attached to (currently an input can only be attached to one channel).</p>
    #[serde(rename = "AttachedChannels")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub attached_channels: Option<Vec<String>>,
    /// <p>A list of the destinations of the input (PUSH-type).</p>
    #[serde(rename = "Destinations")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<InputDestination>>,
    /// <p>The generated ID of the input (unique for user account, immutable).</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>The user-assigned name (This is a mutable value).</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>A list of IDs for all the security groups attached to the input.</p>
    #[serde(rename = "SecurityGroups")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub security_groups: Option<Vec<String>>,
    /// <p>A list of the sources of the input (PULL-type).</p>
    #[serde(rename = "Sources")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sources: Option<Vec<InputSource>>,
    /// Current state of the input.
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    /// Input type. Field is named `type_` because `type` is a Rust keyword;
    /// the serde rename restores the wire name "Type".
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "Type")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// <p>Placeholder documentation for DescribeInputSecurityGroupRequest</p>
// Request payload: Serialize-only — sent to the service, never deserialized.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeInputSecurityGroupRequest {
    /// <p>The id of the Input Security Group to describe</p>
    #[serde(rename = "InputSecurityGroupId")]
    pub input_security_group_id: String,
}
/// <p>Placeholder documentation for DescribeInputSecurityGroupResponse</p>
// Response payload: Deserialize-only in production; Serialize is additionally
// derived under cfg(test) so tests can serialize instances.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeInputSecurityGroupResponse {
    /// <p>Unique ARN of Input Security Group</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>The Id of the Input Security Group</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>The list of inputs currently using this Input Security Group.</p>
    #[serde(rename = "Inputs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inputs: Option<Vec<String>>,
    /// <p>The current state of the Input Security Group.</p>
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    /// <p>Whitelist rules and their sync status</p>
    #[serde(rename = "WhitelistRules")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub whitelist_rules: Option<Vec<InputWhitelistRule>>,
}
/// <p>Placeholder documentation for DescribeOfferingRequest</p>
// Request payload: Serialize-only — sent to the service, never deserialized.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeOfferingRequest {
    /// <p>Unique offering ID, e.g. '87654321'</p>
    #[serde(rename = "OfferingId")]
    pub offering_id: String,
}
/// <p>Placeholder documentation for DescribeOfferingResponse</p>
// Response payload: Deserialize-only in production; Serialize is additionally
// derived under cfg(test) so tests can serialize instances.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeOfferingResponse {
    /// <p>Unique offering ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:offering:87654321'</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'</p>
    #[serde(rename = "CurrencyCode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub currency_code: Option<String>,
    /// <p>Lease duration, e.g. '12'</p>
    #[serde(rename = "Duration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<i64>,
    /// <p>Units for duration, e.g. 'MONTHS'</p>
    #[serde(rename = "DurationUnits")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_units: Option<String>,
    /// <p>One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering</p>
    #[serde(rename = "FixedPrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fixed_price: Option<f64>,
    /// <p>Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'</p>
    #[serde(rename = "OfferingDescription")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_description: Option<String>,
    /// <p>Unique offering ID, e.g. '87654321'</p>
    #[serde(rename = "OfferingId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_id: Option<String>,
    /// <p>Offering type, e.g. 'NO_UPFRONT'</p>
    #[serde(rename = "OfferingType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_type: Option<String>,
    /// <p>AWS region, e.g. 'us-west-2'</p>
    #[serde(rename = "Region")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    /// <p>Resource configuration details</p>
    #[serde(rename = "ResourceSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resource_specification: Option<ReservationResourceSpecification>,
    /// <p>Recurring usage charge for each reserved resource, e.g. '157.0'</p>
    #[serde(rename = "UsagePrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage_price: Option<f64>,
}
/// <p>Placeholder documentation for DescribeReservationRequest</p>
// Request payload: Serialize-only — sent to the service, never deserialized.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeReservationRequest {
    /// <p>Unique reservation ID, e.g. '1234567'</p>
    #[serde(rename = "ReservationId")]
    pub reservation_id: String,
}
/// <p>Placeholder documentation for DescribeReservationResponse</p>
// Response payload: Deserialize-only in production; Serialize is additionally
// derived under cfg(test) so tests can serialize instances.
// NOTE(review): field set is identical to DeleteReservationResponse — both
// mirror the service's Reservation shape.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeReservationResponse {
    /// <p>Unique reservation ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:reservation:1234567'</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>Number of reserved resources</p>
    #[serde(rename = "Count")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub count: Option<i64>,
    /// <p>Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'</p>
    #[serde(rename = "CurrencyCode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub currency_code: Option<String>,
    /// <p>Lease duration, e.g. '12'</p>
    #[serde(rename = "Duration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<i64>,
    /// <p>Units for duration, e.g. 'MONTHS'</p>
    #[serde(rename = "DurationUnits")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_units: Option<String>,
    /// <p>Reservation UTC end date and time in ISO-8601 format, e.g. '2019-03-01T00:00:00'</p>
    #[serde(rename = "End")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub end: Option<String>,
    /// <p>One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering</p>
    #[serde(rename = "FixedPrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fixed_price: Option<f64>,
    /// <p>User specified reservation name</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'</p>
    #[serde(rename = "OfferingDescription")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_description: Option<String>,
    /// <p>Unique offering ID, e.g. '87654321'</p>
    #[serde(rename = "OfferingId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_id: Option<String>,
    /// <p>Offering type, e.g. 'NO_UPFRONT'</p>
    #[serde(rename = "OfferingType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_type: Option<String>,
    /// <p>AWS region, e.g. 'us-west-2'</p>
    #[serde(rename = "Region")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    /// <p>Unique reservation ID, e.g. '1234567'</p>
    #[serde(rename = "ReservationId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reservation_id: Option<String>,
    /// <p>Resource configuration details</p>
    #[serde(rename = "ResourceSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resource_specification: Option<ReservationResourceSpecification>,
    /// <p>Reservation UTC start date and time in ISO-8601 format, e.g. '2018-03-01T00:00:00'</p>
    #[serde(rename = "Start")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub start: Option<String>,
    /// <p>Current state of reservation, e.g. 'ACTIVE'</p>
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    /// <p>Recurring usage charge for each reserved resource, e.g. '157.0'</p>
    #[serde(rename = "UsagePrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage_price: Option<f64>,
}
/// <p>DVB Network Information Table (NIT)</p>
// Settings type used in both requests and responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DvbNitSettings {
    /// <p>The numeric value placed in the Network Information Table (NIT).</p>
    #[serde(rename = "NetworkId")]
    pub network_id: i64,
    /// <p>The network name text placed in the networkNameDescriptor inside the Network Information Table. Maximum length is 256 characters.</p>
    #[serde(rename = "NetworkName")]
    pub network_name: String,
    /// <p>The number of milliseconds between instances of this table in the output transport stream.</p>
    #[serde(rename = "RepInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rep_interval: Option<i64>,
}
/// <p>DVB Service Description Table (SDT)</p>
// Settings type used in both requests and responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DvbSdtSettings {
    /// <p>Selects method of inserting SDT information into output stream. The sdtFollow setting copies SDT information from input stream to output stream. The sdtFollowIfPresent setting copies SDT information from input stream to output stream if SDT information is present in the input, otherwise it will fall back on the user-defined values. The sdtManual setting means user will enter the SDT information. The sdtNone setting means output stream will not contain SDT information.</p>
    #[serde(rename = "OutputSdt")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_sdt: Option<String>,
    /// <p>The number of milliseconds between instances of this table in the output transport stream.</p>
    #[serde(rename = "RepInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rep_interval: Option<i64>,
    /// <p>The service name placed in the serviceDescriptor in the Service Description Table. Maximum length is 256 characters.</p>
    #[serde(rename = "ServiceName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_name: Option<String>,
    /// <p>The service provider name placed in the serviceDescriptor in the Service Description Table. Maximum length is 256 characters.</p>
    #[serde(rename = "ServiceProviderName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_provider_name: Option<String>,
}
/// <p>Placeholder documentation for DvbSubDestinationSettings</p>
// Burn-in / DVB-Sub caption rendering options. Used in both requests and
// responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DvbSubDestinationSettings {
    /// <p>If no explicit xPosition or yPosition is provided, setting alignment to centered will place the captions at the bottom center of the output. Similarly, setting a left alignment will align captions to the bottom left of the output. If x and y positions are given in conjunction with the alignment parameter, the font will be justified (either left or centered) relative to those coordinates. Selecting "smart" justification will left-justify live subtitles and center-justify pre-recorded subtitles. This option is not valid for source captions that are STL or 608/embedded. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "Alignment")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub alignment: Option<String>,
    /// <p>Specifies the color of the rectangle behind the captions. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "BackgroundColor")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub background_color: Option<String>,
    /// <p>Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent. Leaving this parameter blank is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "BackgroundOpacity")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub background_opacity: Option<i64>,
    /// <p>External font file used for caption burn-in. File extension must be 'ttf' or 'tte'. Although the user can select output fonts for many different types of input captions, embedded, STL and teletext sources use a strict grid system. Using external fonts with these caption sources could cause unexpected display of proportional fonts. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "Font")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub font: Option<InputLocation>,
    /// <p>Specifies the color of the burned-in captions. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "FontColor")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub font_color: Option<String>,
    /// <p>Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "FontOpacity")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub font_opacity: Option<i64>,
    /// <p>Font resolution in DPI (dots per inch); default is 96 dpi. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "FontResolution")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub font_resolution: Option<i64>,
    /// <p>When set to auto fontSize will scale depending on the size of the output. Giving a positive integer will specify the exact font size in points. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "FontSize")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub font_size: Option<String>,
    /// <p>Specifies font outline color. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "OutlineColor")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub outline_color: Option<String>,
    /// <p>Specifies font outline size in pixels. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "OutlineSize")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub outline_size: Option<i64>,
    /// <p>Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "ShadowColor")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shadow_color: Option<String>,
    /// <p>Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving this parameter blank is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "ShadowOpacity")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shadow_opacity: Option<i64>,
    /// <p>Specifies the horizontal offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels to the left. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "ShadowXOffset")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shadow_x_offset: Option<i64>,
    /// <p>Specifies the vertical offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels above the text. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "ShadowYOffset")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shadow_y_offset: Option<i64>,
    /// <p>Controls whether a fixed grid size will be used to generate the output subtitles bitmap. Only applicable for Teletext inputs and DVB-Sub/Burn-in outputs.</p>
    #[serde(rename = "TeletextGridControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub teletext_grid_control: Option<String>,
    /// <p>Specifies the horizontal position of the caption relative to the left side of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the left of the output. If no explicit xPosition is provided, the horizontal caption position will be determined by the alignment parameter. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "XPosition")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub x_position: Option<i64>,
    /// <p>Specifies the vertical position of the caption relative to the top of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the top of the output. If no explicit yPosition is provided, the caption will be positioned towards the bottom of the output. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match.</p>
    #[serde(rename = "YPosition")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub y_position: Option<i64>,
}
/// <p>Placeholder documentation for DvbSubSourceSettings</p>
// Settings type used in both requests and responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DvbSubSourceSettings {
    /// <p>When using DVB-Sub with Burn-In or SMPTE-TT, use this PID for the source content. Unused for DVB-Sub passthrough. All DVB-Sub content is passed through, regardless of selectors.</p>
    #[serde(rename = "Pid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pid: Option<i64>,
}
/// <p>DVB Time and Date Table (TDT)</p>
// NOTE(review): the generated doc said "(SDT)", but this struct is the Time
// and Date Table (TDT) per its name; corrected the abbreviation.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DvbTdtSettings {
    /// <p>The number of milliseconds between instances of this table in the output transport stream.</p>
    #[serde(rename = "RepInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rep_interval: Option<i64>,
}
/// <p>Placeholder documentation for Eac3Settings</p>
// E-AC-3 (Dolby Digital Plus) audio codec options. Used in both requests and
// responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Eac3Settings {
    /// <p>When set to attenuate3Db, applies a 3 dB attenuation to the surround channels. Only used for 3/2 coding mode.</p>
    #[serde(rename = "AttenuationControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub attenuation_control: Option<String>,
    /// <p>Average bitrate in bits/second. Valid bitrates depend on the coding mode.</p>
    #[serde(rename = "Bitrate")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bitrate: Option<f64>,
    /// <p>Specifies the bitstream mode (bsmod) for the emitted E-AC-3 stream. See ATSC A/52-2012 (Annex E) for background on these values.</p>
    #[serde(rename = "BitstreamMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bitstream_mode: Option<String>,
    /// <p>Dolby Digital Plus coding mode. Determines number of channels.</p>
    #[serde(rename = "CodingMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub coding_mode: Option<String>,
    /// <p>When set to enabled, activates a DC highpass filter for all input channels.</p>
    #[serde(rename = "DcFilter")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dc_filter: Option<String>,
    /// <p>Sets the dialnorm for the output. If blank and input audio is Dolby Digital Plus, dialnorm will be passed through.</p>
    #[serde(rename = "Dialnorm")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dialnorm: Option<i64>,
    /// <p>Sets the Dolby dynamic range compression profile.</p>
    #[serde(rename = "DrcLine")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub drc_line: Option<String>,
    /// <p>Sets the profile for heavy Dolby dynamic range compression, ensures that the instantaneous signal peaks do not exceed specified levels.</p>
    #[serde(rename = "DrcRf")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub drc_rf: Option<String>,
    /// <p>When encoding 3/2 audio, setting to lfe enables the LFE channel</p>
    #[serde(rename = "LfeControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lfe_control: Option<String>,
    /// <p>When set to enabled, applies a 120Hz lowpass filter to the LFE channel prior to encoding. Only valid with codingMode32 coding mode.</p>
    #[serde(rename = "LfeFilter")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lfe_filter: Option<String>,
    /// <p>Left only/Right only center mix level. Only used for 3/2 coding mode.</p>
    #[serde(rename = "LoRoCenterMixLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lo_ro_center_mix_level: Option<f64>,
    /// <p>Left only/Right only surround mix level. Only used for 3/2 coding mode.</p>
    #[serde(rename = "LoRoSurroundMixLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lo_ro_surround_mix_level: Option<f64>,
    /// <p>Left total/Right total center mix level. Only used for 3/2 coding mode.</p>
    #[serde(rename = "LtRtCenterMixLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lt_rt_center_mix_level: Option<f64>,
    /// <p>Left total/Right total surround mix level. Only used for 3/2 coding mode.</p>
    #[serde(rename = "LtRtSurroundMixLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lt_rt_surround_mix_level: Option<f64>,
    /// <p>When set to followInput, encoder metadata will be sourced from the DD, DD+, or DolbyE decoder that supplied this audio data. If audio was not supplied from one of these streams, then the static metadata settings will be used.</p>
    #[serde(rename = "MetadataControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata_control: Option<String>,
    /// <p>When set to whenPossible, input DD+ audio will be passed through if it is present on the input. This detection is dynamic over the life of the transcode. Inputs that alternate between DD+ and non-DD+ content will have a consistent DD+ output as the system alternates between passthrough and encoding.</p>
    #[serde(rename = "PassthroughControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub passthrough_control: Option<String>,
    /// <p>When set to shift90Degrees, applies a 90-degree phase shift to the surround channels. Only used for 3/2 coding mode.</p>
    #[serde(rename = "PhaseControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub phase_control: Option<String>,
    /// <p>Stereo downmix preference. Only used for 3/2 coding mode.</p>
    #[serde(rename = "StereoDownmix")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stereo_downmix: Option<String>,
    /// <p>When encoding 3/2 audio, sets whether an extra center back surround channel is matrix encoded into the left and right surround channels.</p>
    #[serde(rename = "SurroundExMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub surround_ex_mode: Option<String>,
    /// <p>When encoding 2/0 audio, sets whether Dolby Surround is matrix encoded into the two channels.</p>
    #[serde(rename = "SurroundMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub surround_mode: Option<String>,
}
/// <p>Placeholder documentation for EmbeddedDestinationSettings</p>
// Empty marker struct: selects the embedded-captions destination; it carries
// no configuration fields.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EmbeddedDestinationSettings {}
/// <p>Placeholder documentation for EmbeddedPlusScte20DestinationSettings</p>
// Empty marker struct: selects the embedded-plus-SCTE-20 destination; it
// carries no configuration fields.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EmbeddedPlusScte20DestinationSettings {}
/// <p>Placeholder documentation for EmbeddedSourceSettings</p>
// Settings type used in both requests and responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EmbeddedSourceSettings {
    /// <p>If upconvert, 608 data is both passed through via the "608 compatibility bytes" fields of the 708 wrapper as well as translated into 708. 708 data present in the source content will be discarded.</p>
    #[serde(rename = "Convert608To708")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub convert_608_to_708: Option<String>,
    /// <p>Set to "auto" to handle streams with intermittent and/or non-aligned SCTE-20 and Embedded captions.</p>
    #[serde(rename = "Scte20Detection")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scte_20_detection: Option<String>,
    /// <p>Specifies the 608/708 channel number within the video track from which to extract captions. Unused for passthrough.</p>
    #[serde(rename = "Source608ChannelNumber")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub source_608_channel_number: Option<i64>,
    /// <p>This field is unused and deprecated.</p>
    #[serde(rename = "Source608TrackNumber")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub source_608_track_number: Option<i64>,
}
/// <p>Placeholder documentation for Empty</p>
// NOTE(review): unlike the other empty structs here, this one derives neither
// Serialize nor Deserialize — it never crosses the wire.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct Empty {}
/// <p>Placeholder documentation for EncoderSettings</p>
// Top-level encoder configuration for a channel. Fields without
// skip_serializing_if are required by the service. Used in both requests and
// responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EncoderSettings {
    /// Audio descriptions for the event (required; always serialized).
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "AudioDescriptions")]
    pub audio_descriptions: Vec<AudioDescription>,
    /// <p>Settings for ad avail blanking.</p>
    #[serde(rename = "AvailBlanking")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avail_blanking: Option<AvailBlanking>,
    /// <p>Event-wide configuration settings for ad avail insertion.</p>
    #[serde(rename = "AvailConfiguration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avail_configuration: Option<AvailConfiguration>,
    /// <p>Settings for blackout slate.</p>
    #[serde(rename = "BlackoutSlate")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub blackout_slate: Option<BlackoutSlate>,
    /// <p>Settings for caption descriptions</p>
    #[serde(rename = "CaptionDescriptions")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub caption_descriptions: Option<Vec<CaptionDescription>>,
    /// <p>Configuration settings that apply to the event as a whole.</p>
    #[serde(rename = "GlobalConfiguration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub global_configuration: Option<GlobalConfiguration>,
    /// Output groups for the event (required; always serialized).
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "OutputGroups")]
    pub output_groups: Vec<OutputGroup>,
    /// <p>Contains settings used to acquire and adjust timecode information from inputs.</p>
    #[serde(rename = "TimecodeConfig")]
    pub timecode_config: TimecodeConfig,
    /// Video descriptions for the event (required; always serialized).
    /// NOTE(review): undocumented in the upstream service model.
    #[serde(rename = "VideoDescriptions")]
    pub video_descriptions: Vec<VideoDescription>,
}
/// <p>Placeholder documentation for FecOutputSettings</p>
// SMPTE 2022-1 forward-error-correction options. Used in both requests and
// responses (Serialize + Deserialize).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct FecOutputSettings {
    /// <p>Parameter D from SMPTE 2022-1. The height of the FEC protection matrix. The number of transport stream packets per column error correction packet. Must be between 4 and 20, inclusive.</p>
    #[serde(rename = "ColumnDepth")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub column_depth: Option<i64>,
    /// <p>Enables column only or column and row based FEC</p>
    #[serde(rename = "IncludeFec")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_fec: Option<String>,
    /// <p>Parameter L from SMPTE 2022-1. The width of the FEC protection matrix. Must be between 1 and 20, inclusive. If only Column FEC is used, then larger values increase robustness. If Row FEC is used, then this is the number of transport stream packets per row error correction packet, and the value must be between 4 and 20, inclusive, if includeFec is columnAndRow. If includeFec is column, this value must be 1 to 20, inclusive.</p>
    #[serde(rename = "RowLength")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub row_length: Option<i64>,
}
/// <p>Placeholder documentation for GlobalConfiguration</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct GlobalConfiguration {
    /// <p>Value to set the initial audio gain for the Live Event.</p>
    #[serde(rename = "InitialAudioGain", skip_serializing_if = "Option::is_none")]
    pub initial_audio_gain: Option<i64>,
    /// <p>Indicates the action to take when an input completes (e.g. end-of-file.) Options include immediately switching to the next sequential input (via "switchInput"), switching to the next input and looping back to the first input when last input ends (via "switchAndLoopInputs") or not switching inputs and instead transcoding black / color / slate images per the "Input Loss Behavior" configuration until an activateInput REST command is received (via "none").</p>
    #[serde(rename = "InputEndAction", skip_serializing_if = "Option::is_none")]
    pub input_end_action: Option<String>,
    /// <p>Settings for system actions when input is lost.</p>
    #[serde(rename = "InputLossBehavior", skip_serializing_if = "Option::is_none")]
    pub input_loss_behavior: Option<InputLossBehavior>,
    /// <p>Indicates whether the rate of frames emitted by the Live encoder should be paced by its system clock (which optionally may be locked to another source via NTP) or should be locked to the clock of the source that is providing the input stream.</p>
    #[serde(rename = "OutputTimingSource", skip_serializing_if = "Option::is_none")]
    pub output_timing_source: Option<String>,
    /// <p>Adjusts video input buffer for streams with very low video framerates. This is commonly set to enabled for music channels with less than one video frame per second.</p>
    #[serde(rename = "SupportLowFramerateInputs", skip_serializing_if = "Option::is_none")]
    pub support_low_framerate_inputs: Option<String>,
}
/// <p>Placeholder documentation for H264Settings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct H264Settings {
/// <p>Adaptive quantization. Allows intra-frame quantizers to vary to improve visual quality.</p>
#[serde(rename = "AdaptiveQuantization")]
#[serde(skip_serializing_if = "Option::is_none")]
pub adaptive_quantization: Option<String>,
/// <p>Indicates that AFD values will be written into the output stream. If afdSignaling is "auto", the system will try to preserve the input AFD value (in cases where multiple AFD values are valid). If set to "fixed", the AFD value will be the value configured in the fixedAfd parameter.</p>
#[serde(rename = "AfdSignaling")]
#[serde(skip_serializing_if = "Option::is_none")]
pub afd_signaling: Option<String>,
/// <p>Average bitrate in bits/second. Required for VBR, CBR, and ABR. For MS Smooth outputs, bitrates must be unique when rounded down to the nearest multiple of 1000.</p>
#[serde(rename = "Bitrate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub bitrate: Option<i64>,
/// <p>Percentage of the buffer that should initially be filled (HRD buffer model).</p>
#[serde(rename = "BufFillPct")]
#[serde(skip_serializing_if = "Option::is_none")]
pub buf_fill_pct: Option<i64>,
/// <p>Size of buffer (HRD buffer model) in bits/second.</p>
#[serde(rename = "BufSize")]
#[serde(skip_serializing_if = "Option::is_none")]
pub buf_size: Option<i64>,
/// <p>Includes colorspace metadata in the output.</p>
#[serde(rename = "ColorMetadata")]
#[serde(skip_serializing_if = "Option::is_none")]
pub color_metadata: Option<String>,
/// <p>Entropy encoding mode. Use cabac (must be in Main or High profile) or cavlc.</p>
#[serde(rename = "EntropyEncoding")]
#[serde(skip_serializing_if = "Option::is_none")]
pub entropy_encoding: Option<String>,
/// <p>Four bit AFD value to write on all frames of video in the output stream. Only valid when afdSignaling is set to 'Fixed'.</p>
#[serde(rename = "FixedAfd")]
#[serde(skip_serializing_if = "Option::is_none")]
pub fixed_afd: Option<String>,
/// <p>If set to enabled, adjust quantization within each frame to reduce flicker or 'pop' on I-frames.</p>
#[serde(rename = "FlickerAq")]
#[serde(skip_serializing_if = "Option::is_none")]
pub flicker_aq: Option<String>,
/// <p>This field indicates how the output video frame rate is specified. If "specified" is selected then the output video frame rate is determined by framerateNumerator and framerateDenominator, else if "initializeFromSource" is selected then the output video frame rate will be set equal to the input video frame rate of the first input.</p>
#[serde(rename = "FramerateControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub framerate_control: Option<String>,
/// <p>Framerate denominator.</p>
#[serde(rename = "FramerateDenominator")]
#[serde(skip_serializing_if = "Option::is_none")]
pub framerate_denominator: Option<i64>,
/// <p>Framerate numerator - framerate is a fraction, e.g. 24000 / 1001 = 23.976 fps.</p>
#[serde(rename = "FramerateNumerator")]
#[serde(skip_serializing_if = "Option::is_none")]
pub framerate_numerator: Option<i64>,
/// <p>Documentation update needed</p>
#[serde(rename = "GopBReference")]
#[serde(skip_serializing_if = "Option::is_none")]
pub gop_b_reference: Option<String>,
/// <p>Frequency of closed GOPs. In streaming applications, it is recommended that this be set to 1 so a decoder joining mid-stream will receive an IDR frame as quickly as possible. Setting this value to 0 will break output segmenting.</p>
#[serde(rename = "GopClosedCadence")]
#[serde(skip_serializing_if = "Option::is_none")]
pub gop_closed_cadence: Option<i64>,
/// <p>Number of B-frames between reference frames.</p>
#[serde(rename = "GopNumBFrames")]
#[serde(skip_serializing_if = "Option::is_none")]
pub gop_num_b_frames: Option<i64>,
/// <p>GOP size (keyframe interval) in units of either frames or seconds per gopSizeUnits. Must be greater than zero.</p>
#[serde(rename = "GopSize")]
#[serde(skip_serializing_if = "Option::is_none")]
pub gop_size: Option<f64>,
/// <p>Indicates if the gopSize is specified in frames or seconds. If seconds the system will convert the gopSize into a frame count at run time.</p>
#[serde(rename = "GopSizeUnits")]
#[serde(skip_serializing_if = "Option::is_none")]
pub gop_size_units: Option<String>,
/// <p>H.264 Level.</p>
#[serde(rename = "Level")]
#[serde(skip_serializing_if = "Option::is_none")]
pub level: Option<String>,
/// <p>Amount of lookahead. A value of low can decrease latency and memory usage, while high can produce better quality for certain content.</p>
#[serde(rename = "LookAheadRateControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub look_ahead_rate_control: Option<String>,
/// <p>Maximum bitrate in bits/second (for VBR mode only).</p>
#[serde(rename = "MaxBitrate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub max_bitrate: Option<i64>,
/// <p>Only meaningful if sceneChangeDetect is set to enabled. Enforces separation between repeated (cadence) I-frames and I-frames inserted by Scene Change Detection. If a scene change I-frame is within I-interval frames of a cadence I-frame, the GOP is shrunk and/or stretched to the scene change I-frame. GOP stretch requires enabling lookahead as well as setting I-interval. The normal cadence resumes for the next GOP. Note: Maximum GOP stretch = GOP size + Min-I-interval - 1</p>
#[serde(rename = "MinIInterval")]
#[serde(skip_serializing_if = "Option::is_none")]
pub min_i_interval: Option<i64>,
/// <p>Number of reference frames to use. The encoder may use more than requested if using B-frames and/or interlaced encoding.</p>
#[serde(rename = "NumRefFrames")]
#[serde(skip_serializing_if = "Option::is_none")]
pub num_ref_frames: Option<i64>,
/// <p>This field indicates how the output pixel aspect ratio is specified. If "specified" is selected then the output video pixel aspect ratio is determined by parNumerator and parDenominator, else if "initializeFromSource" is selected then the output pixsel aspect ratio will be set equal to the input video pixel aspect ratio of the first input.</p>
#[serde(rename = "ParControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub par_control: Option<String>,
/// <p>Pixel Aspect Ratio denominator.</p>
#[serde(rename = "ParDenominator")]
#[serde(skip_serializing_if = "Option::is_none")]
pub par_denominator: Option<i64>,
/// <p>Pixel Aspect Ratio numerator.</p>
#[serde(rename = "ParNumerator")]
#[serde(skip_serializing_if = "Option::is_none")]
pub par_numerator: Option<i64>,
/// <p>H.264 Profile.</p>
#[serde(rename = "Profile")]
#[serde(skip_serializing_if = "Option::is_none")]
pub profile: Option<String>,
/// <p>Rate control mode.</p>
#[serde(rename = "RateControlMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rate_control_mode: Option<String>,
/// <p>Sets the scan type of the output to progressive or top-field-first interlaced.</p>
#[serde(rename = "ScanType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scan_type: Option<String>,
/// <p>Scene change detection. Inserts I-frames on scene changes when enabled.</p>
#[serde(rename = "SceneChangeDetect")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scene_change_detect: Option<String>,
/// <p>Number of slices per picture. Must be less than or equal to the number of macroblock rows for progressive pictures, and less than or equal to half the number of macroblock rows for interlaced pictures.
/// This field is optional; when no value is specified the encoder will choose the number of slices based on encode resolution.</p>
#[serde(rename = "Slices")]
#[serde(skip_serializing_if = "Option::is_none")]
pub slices: Option<i64>,
/// <p>Softness. Selects quantizer matrix, larger values reduce high-frequency content in the encoded image.</p>
#[serde(rename = "Softness")]
#[serde(skip_serializing_if = "Option::is_none")]
pub softness: Option<i64>,
/// <p>If set to enabled, adjust quantization within each frame based on spatial variation of content complexity.</p>
#[serde(rename = "SpatialAq")]
#[serde(skip_serializing_if = "Option::is_none")]
pub spatial_aq: Option<String>,
/// <p>Produces a bitstream compliant with SMPTE RP-2027.</p>
#[serde(rename = "Syntax")]
#[serde(skip_serializing_if = "Option::is_none")]
pub syntax: Option<String>,
/// <p>If set to enabled, adjust quantization within each frame based on temporal variation of content complexity.</p>
#[serde(rename = "TemporalAq")]
#[serde(skip_serializing_if = "Option::is_none")]
pub temporal_aq: Option<String>,
/// <p>Determines how timecodes should be inserted into the video elementary stream.
/// - 'disabled': Do not include timecodes
/// - 'picTimingSei': Pass through picture timing SEI messages from the source specified in Timecode Config</p>
#[serde(rename = "TimecodeInsertion")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timecode_insertion: Option<String>,
}
/// <p>Placeholder documentation for HlsAkamaiSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsAkamaiSettings {
    /// <p>Number of seconds to wait before retrying connection to the CDN if the connection is lost.</p>
    #[serde(rename = "ConnectionRetryInterval", skip_serializing_if = "Option::is_none")]
    pub connection_retry_interval: Option<i64>,
    /// <p>Size in seconds of file cache for streaming outputs.</p>
    #[serde(rename = "FilecacheDuration", skip_serializing_if = "Option::is_none")]
    pub filecache_duration: Option<i64>,
    /// <p>Specify whether or not to use chunked transfer encoding to Akamai. User should contact Akamai to enable this feature.</p>
    #[serde(rename = "HttpTransferMode", skip_serializing_if = "Option::is_none")]
    pub http_transfer_mode: Option<String>,
    /// <p>Number of retry attempts that will be made before the Live Event is put into an error state.</p>
    #[serde(rename = "NumRetries", skip_serializing_if = "Option::is_none")]
    pub num_retries: Option<i64>,
    /// <p>If a streaming output fails, number of seconds to wait until a restart is initiated. A value of 0 means never restart.</p>
    #[serde(rename = "RestartDelay", skip_serializing_if = "Option::is_none")]
    pub restart_delay: Option<i64>,
    /// <p>Salt for authenticated Akamai.</p>
    #[serde(rename = "Salt", skip_serializing_if = "Option::is_none")]
    pub salt: Option<String>,
    /// <p>Token parameter for authenticated akamai. If not specified, <em>gda</em> is used.</p>
    #[serde(rename = "Token", skip_serializing_if = "Option::is_none")]
    pub token: Option<String>,
}
/// <p>Placeholder documentation for HlsBasicPutSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsBasicPutSettings {
    /// <p>Number of seconds to wait before retrying connection to the CDN if the connection is lost.</p>
    #[serde(rename = "ConnectionRetryInterval", skip_serializing_if = "Option::is_none")]
    pub connection_retry_interval: Option<i64>,
    /// <p>Size in seconds of file cache for streaming outputs.</p>
    #[serde(rename = "FilecacheDuration", skip_serializing_if = "Option::is_none")]
    pub filecache_duration: Option<i64>,
    /// <p>Number of retry attempts that will be made before the Live Event is put into an error state.</p>
    #[serde(rename = "NumRetries", skip_serializing_if = "Option::is_none")]
    pub num_retries: Option<i64>,
    /// <p>If a streaming output fails, number of seconds to wait until a restart is initiated. A value of 0 means never restart.</p>
    #[serde(rename = "RestartDelay", skip_serializing_if = "Option::is_none")]
    pub restart_delay: Option<i64>,
}
/// <p>Placeholder documentation for HlsCdnSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsCdnSettings {
    /// Optional Akamai CDN connection settings.
    #[serde(rename = "HlsAkamaiSettings", skip_serializing_if = "Option::is_none")]
    pub hls_akamai_settings: Option<HlsAkamaiSettings>,
    /// Optional basic HTTP PUT connection settings.
    #[serde(rename = "HlsBasicPutSettings", skip_serializing_if = "Option::is_none")]
    pub hls_basic_put_settings: Option<HlsBasicPutSettings>,
    /// Optional AWS MediaStore connection settings.
    #[serde(rename = "HlsMediaStoreSettings", skip_serializing_if = "Option::is_none")]
    pub hls_media_store_settings: Option<HlsMediaStoreSettings>,
    /// Optional WebDAV connection settings.
    #[serde(rename = "HlsWebdavSettings", skip_serializing_if = "Option::is_none")]
    pub hls_webdav_settings: Option<HlsWebdavSettings>,
}
/// <p>Placeholder documentation for HlsGroupSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsGroupSettings {
/// <p>Choose one or more ad marker types to pass SCTE35 signals through to this group of Apple HLS outputs.</p>
#[serde(rename = "AdMarkers")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ad_markers: Option<Vec<String>>,
/// <p>A partial URI prefix that will be prepended to each output in the media .m3u8 file. Can be used if base manifest is delivered from a different URL than the main .m3u8 file.</p>
#[serde(rename = "BaseUrlContent")]
#[serde(skip_serializing_if = "Option::is_none")]
pub base_url_content: Option<String>,
/// <p>A partial URI prefix that will be prepended to each output in the media .m3u8 file. Can be used if base manifest is delivered from a different URL than the main .m3u8 file.</p>
#[serde(rename = "BaseUrlManifest")]
#[serde(skip_serializing_if = "Option::is_none")]
pub base_url_manifest: Option<String>,
/// <p>Mapping of up to 4 caption channels to caption languages. Is only meaningful if captionLanguageSetting is set to "insert".</p>
#[serde(rename = "CaptionLanguageMappings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub caption_language_mappings: Option<Vec<CaptionLanguageMapping>>,
/// <p>Applies only to 608 Embedded output captions.
/// insert: Include CLOSED-CAPTIONS lines in the manifest. Specify at least one language in the CC1 Language Code field. One CLOSED-CAPTION line is added for each Language Code you specify. Make sure to specify the languages in the order in which they appear in the original source (if the source is embedded format) or the order of the caption selectors (if the source is other than embedded). Otherwise, languages in the manifest will not match up properly with the output captions.
/// none: Include CLOSED-CAPTIONS=NONE line in the manifest.
/// omit: Omit any CLOSED-CAPTIONS line from the manifest.</p>
#[serde(rename = "CaptionLanguageSetting")]
#[serde(skip_serializing_if = "Option::is_none")]
pub caption_language_setting: Option<String>,
/// <p>When set to "disabled", sets the #EXT-X-ALLOW-CACHE:no tag in the manifest, which prevents clients from saving media segments for later replay.</p>
#[serde(rename = "ClientCache")]
#[serde(skip_serializing_if = "Option::is_none")]
pub client_cache: Option<String>,
/// <p>Specification to use (RFC-6381 or the default RFC-4281) during m3u8 playlist generation.</p>
#[serde(rename = "CodecSpecification")]
#[serde(skip_serializing_if = "Option::is_none")]
pub codec_specification: Option<String>,
/// <p>For use with encryptionType. This is a 128-bit, 16-byte hex value represented by a 32-character text string. If ivSource is set to "explicit" then this parameter is required and is used as the IV for encryption.</p>
#[serde(rename = "ConstantIv")]
#[serde(skip_serializing_if = "Option::is_none")]
pub constant_iv: Option<String>,
/// <p>A directory or HTTP destination for the HLS segments, manifest files, and encryption keys (if enabled).</p>
#[serde(rename = "Destination")]
pub destination: OutputLocationRef,
/// <p>Place segments in subdirectories.</p>
#[serde(rename = "DirectoryStructure")]
#[serde(skip_serializing_if = "Option::is_none")]
pub directory_structure: Option<String>,
/// <p>Encrypts the segments with the given encryption scheme. Exclude this parameter if no encryption is desired.</p>
#[serde(rename = "EncryptionType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub encryption_type: Option<String>,
/// <p>Parameters that control interactions with the CDN.</p>
#[serde(rename = "HlsCdnSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub hls_cdn_settings: Option<HlsCdnSettings>,
/// <p>If mode is "live", the number of segments to retain in the manifest (.m3u8) file. This number must be less than or equal to keepSegments. If mode is "vod", this parameter has no effect.</p>
#[serde(rename = "IndexNSegments")]
#[serde(skip_serializing_if = "Option::is_none")]
pub index_n_segments: Option<i64>,
/// <p>Parameter that control output group behavior on input loss.</p>
#[serde(rename = "InputLossAction")]
#[serde(skip_serializing_if = "Option::is_none")]
pub input_loss_action: Option<String>,
/// <p>For use with encryptionType. The IV (Initialization Vector) is a 128-bit number used in conjunction with the key for encrypting blocks. If set to "include", IV is listed in the manifest, otherwise the IV is not in the manifest.</p>
#[serde(rename = "IvInManifest")]
#[serde(skip_serializing_if = "Option::is_none")]
pub iv_in_manifest: Option<String>,
/// <p>For use with encryptionType. The IV (Initialization Vector) is a 128-bit number used in conjunction with the key for encrypting blocks. If this setting is "followsSegmentNumber", it will cause the IV to change every segment (to match the segment number). If this is set to "explicit", you must enter a constantIv value.</p>
#[serde(rename = "IvSource")]
#[serde(skip_serializing_if = "Option::is_none")]
pub iv_source: Option<String>,
/// <p>If mode is "live", the number of TS segments to retain in the destination directory. If mode is "vod", this parameter has no effect.</p>
#[serde(rename = "KeepSegments")]
#[serde(skip_serializing_if = "Option::is_none")]
pub keep_segments: Option<i64>,
/// <p>The value specifies how the key is represented in the resource identified by the URI. If parameter is absent, an implicit value of "identity" is used. A reverse DNS string can also be given.</p>
#[serde(rename = "KeyFormat")]
#[serde(skip_serializing_if = "Option::is_none")]
pub key_format: Option<String>,
/// <p>Either a single positive integer version value or a slash delimited list of version values (1/2/3).</p>
#[serde(rename = "KeyFormatVersions")]
#[serde(skip_serializing_if = "Option::is_none")]
pub key_format_versions: Option<String>,
/// <p>The key provider settings.</p>
#[serde(rename = "KeyProviderSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub key_provider_settings: Option<KeyProviderSettings>,
/// <p>When set to gzip, compresses HLS playlist.</p>
#[serde(rename = "ManifestCompression")]
#[serde(skip_serializing_if = "Option::is_none")]
pub manifest_compression: Option<String>,
/// <p>Indicates whether the output manifest should use floating point or integer values for segment duration.</p>
#[serde(rename = "ManifestDurationFormat")]
#[serde(skip_serializing_if = "Option::is_none")]
pub manifest_duration_format: Option<String>,
/// <p>When set, minimumSegmentLength is enforced by looking ahead and back within the specified range for a nearby avail and extending the segment size if needed.</p>
#[serde(rename = "MinSegmentLength")]
#[serde(skip_serializing_if = "Option::is_none")]
pub min_segment_length: Option<i64>,
/// <p>If "vod", all segments are indexed and kept permanently in the destination and manifest. If "live", only the number segments specified in keepSegments and indexNSegments are kept; newer segments replace older segments, which may prevent players from rewinding all the way to the beginning of the event.</p>
///
/// <p>VOD mode uses HLS EXT-X-PLAYLIST-TYPE of EVENT while the channel is running, converting it to a "VOD" type manifest on completion of the stream.</p>
#[serde(rename = "Mode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub mode: Option<String>,
/// <p>Generates the .m3u8 playlist file for this HLS output group. The segmentsOnly option will output segments without the .m3u8 file.</p>
#[serde(rename = "OutputSelection")]
#[serde(skip_serializing_if = "Option::is_none")]
pub output_selection: Option<String>,
/// <p>Includes or excludes EXT-X-PROGRAM-DATE-TIME tag in .m3u8 manifest files. The value is calculated as follows: either the program date and time are initialized using the input timecode source, or the time is initialized using the input timecode source and the date is initialized using the timestampOffset.</p>
#[serde(rename = "ProgramDateTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub program_date_time: Option<String>,
/// <p>Period of insertion of EXT-X-PROGRAM-DATE-TIME entry, in seconds.</p>
#[serde(rename = "ProgramDateTimePeriod")]
#[serde(skip_serializing_if = "Option::is_none")]
pub program_date_time_period: Option<i64>,
/// <p>Length of MPEG-2 Transport Stream segments to create (in seconds). Note that segments will end on the next keyframe after this number of seconds, so actual segment length may be longer.</p>
#[serde(rename = "SegmentLength")]
#[serde(skip_serializing_if = "Option::is_none")]
pub segment_length: Option<i64>,
/// <p>When set to useInputSegmentation, the output segment or fragment points are set by the RAI markers from the input streams.</p>
#[serde(rename = "SegmentationMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub segmentation_mode: Option<String>,
/// <p>Number of segments to write to a subdirectory before starting a new one. directoryStructure must be subdirectoryPerStream for this setting to have an effect.</p>
#[serde(rename = "SegmentsPerSubdirectory")]
#[serde(skip_serializing_if = "Option::is_none")]
pub segments_per_subdirectory: Option<i64>,
/// <p>Include or exclude RESOLUTION attribute for video in EXT-X-STREAM-INF tag of variant manifest.</p>
#[serde(rename = "StreamInfResolution")]
#[serde(skip_serializing_if = "Option::is_none")]
pub stream_inf_resolution: Option<String>,
/// <p>Indicates ID3 frame that has the timecode.</p>
#[serde(rename = "TimedMetadataId3Frame")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timed_metadata_id_3_frame: Option<String>,
/// <p>Timed Metadata interval in seconds.</p>
#[serde(rename = "TimedMetadataId3Period")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timed_metadata_id_3_period: Option<i64>,
/// <p>Provides an extra millisecond delta offset to fine tune the timestamps.</p>
#[serde(rename = "TimestampDeltaMilliseconds")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timestamp_delta_milliseconds: Option<i64>,
/// <p>When set to "singleFile", emits the program as a single media resource (.ts) file, and uses #EXT-X-BYTERANGE tags to index segment for playback. Playback of VOD mode content during event is not guaranteed due to HTTP server caching.</p>
#[serde(rename = "TsFileMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ts_file_mode: Option<String>,
}
/// <p>Placeholder documentation for HlsInputSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsInputSettings {
    /// <p>When specified the HLS stream with the m3u8 BANDWIDTH that most closely matches this value will be chosen, otherwise the highest bandwidth stream in the m3u8 will be chosen. The bitrate is specified in bits per second, as in an HLS manifest.</p>
    #[serde(rename = "Bandwidth", skip_serializing_if = "Option::is_none")]
    pub bandwidth: Option<i64>,
    /// <p>When specified, reading of the HLS input will begin this many buffer segments from the end (most recently written segment). When not specified, the HLS input will begin with the first segment specified in the m3u8.</p>
    #[serde(rename = "BufferSegments", skip_serializing_if = "Option::is_none")]
    pub buffer_segments: Option<i64>,
    /// <p>The number of consecutive times that attempts to read a manifest or segment must fail before the input is considered unavailable.</p>
    #[serde(rename = "Retries", skip_serializing_if = "Option::is_none")]
    pub retries: Option<i64>,
    /// <p>The number of seconds between retries when an attempt to read a manifest or segment fails.</p>
    #[serde(rename = "RetryInterval", skip_serializing_if = "Option::is_none")]
    pub retry_interval: Option<i64>,
}
/// <p>Placeholder documentation for HlsMediaStoreSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsMediaStoreSettings {
    /// <p>Number of seconds to wait before retrying connection to the CDN if the connection is lost.</p>
    #[serde(rename = "ConnectionRetryInterval", skip_serializing_if = "Option::is_none")]
    pub connection_retry_interval: Option<i64>,
    /// <p>Size in seconds of file cache for streaming outputs.</p>
    #[serde(rename = "FilecacheDuration", skip_serializing_if = "Option::is_none")]
    pub filecache_duration: Option<i64>,
    /// <p>When set to temporal, output files are stored in non-persistent memory for faster reading and writing.</p>
    #[serde(rename = "MediaStoreStorageClass", skip_serializing_if = "Option::is_none")]
    pub media_store_storage_class: Option<String>,
    /// <p>Number of retry attempts that will be made before the Live Event is put into an error state.</p>
    #[serde(rename = "NumRetries", skip_serializing_if = "Option::is_none")]
    pub num_retries: Option<i64>,
    /// <p>If a streaming output fails, number of seconds to wait until a restart is initiated. A value of 0 means never restart.</p>
    #[serde(rename = "RestartDelay", skip_serializing_if = "Option::is_none")]
    pub restart_delay: Option<i64>,
}
/// <p>Placeholder documentation for HlsOutputSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsOutputSettings {
    /// <p>Settings regarding the underlying stream. These settings are different for audio-only outputs.</p>
    #[serde(rename = "HlsSettings")]
    pub hls_settings: HlsSettings,
    /// <p>String concatenated to the end of the destination filename. Accepts "Format Identifiers":#formatIdentifierParameters.</p>
    #[serde(rename = "NameModifier", skip_serializing_if = "Option::is_none")]
    pub name_modifier: Option<String>,
    /// <p>String concatenated to end of segment filenames.</p>
    #[serde(rename = "SegmentModifier", skip_serializing_if = "Option::is_none")]
    pub segment_modifier: Option<String>,
}
/// <p>Placeholder documentation for HlsSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsSettings {
    /// Optional settings for audio-only HLS outputs.
    #[serde(rename = "AudioOnlyHlsSettings", skip_serializing_if = "Option::is_none")]
    pub audio_only_hls_settings: Option<AudioOnlyHlsSettings>,
    /// Optional settings for standard (audio+video) HLS outputs.
    #[serde(rename = "StandardHlsSettings", skip_serializing_if = "Option::is_none")]
    pub standard_hls_settings: Option<StandardHlsSettings>,
}
/// <p>Placeholder documentation for HlsWebdavSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct HlsWebdavSettings {
    /// <p>Number of seconds to wait before retrying connection to the CDN if the connection is lost.</p>
    #[serde(rename = "ConnectionRetryInterval", skip_serializing_if = "Option::is_none")]
    pub connection_retry_interval: Option<i64>,
    /// <p>Size in seconds of file cache for streaming outputs.</p>
    #[serde(rename = "FilecacheDuration", skip_serializing_if = "Option::is_none")]
    pub filecache_duration: Option<i64>,
    /// <p>Specify whether or not to use chunked transfer encoding to WebDAV.</p>
    #[serde(rename = "HttpTransferMode", skip_serializing_if = "Option::is_none")]
    pub http_transfer_mode: Option<String>,
    /// <p>Number of retry attempts that will be made before the Live Event is put into an error state.</p>
    #[serde(rename = "NumRetries", skip_serializing_if = "Option::is_none")]
    pub num_retries: Option<i64>,
    /// <p>If a streaming output fails, number of seconds to wait until a restart is initiated. A value of 0 means never restart.</p>
    #[serde(rename = "RestartDelay", skip_serializing_if = "Option::is_none")]
    pub restart_delay: Option<i64>,
}
/// <p>Placeholder documentation for Input</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Input {
    /// <p>The Unique ARN of the input (generated, immutable).</p>
    #[serde(rename = "Arn", skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A list of channel IDs that that input is attached to (currently an input can only be attached to one channel).</p>
    #[serde(rename = "AttachedChannels", skip_serializing_if = "Option::is_none")]
    pub attached_channels: Option<Vec<String>>,
    /// <p>A list of the destinations of the input (PUSH-type).</p>
    #[serde(rename = "Destinations", skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<InputDestination>>,
    /// <p>The generated ID of the input (unique for user account, immutable).</p>
    #[serde(rename = "Id", skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>The user-assigned name (This is a mutable value).</p>
    #[serde(rename = "Name", skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>A list of IDs for all the security groups attached to the input.</p>
    #[serde(rename = "SecurityGroups", skip_serializing_if = "Option::is_none")]
    pub security_groups: Option<Vec<String>>,
    /// <p>A list of the sources of the input (PULL-type).</p>
    #[serde(rename = "Sources", skip_serializing_if = "Option::is_none")]
    pub sources: Option<Vec<InputSource>>,
    // NOTE(review): undocumented in the service model; presumably the input's lifecycle state — confirm against the API docs.
    #[serde(rename = "State", skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    // NOTE(review): undocumented in the service model; trailing underscore avoids the Rust `type` keyword.
    #[serde(rename = "Type", skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// <p>Placeholder documentation for InputAttachment</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct InputAttachment {
    /// <p>The ID of the input</p>
    #[serde(rename = "InputId", skip_serializing_if = "Option::is_none")]
    pub input_id: Option<String>,
    /// <p>Settings of an input (caption selector, etc.)</p>
    #[serde(rename = "InputSettings", skip_serializing_if = "Option::is_none")]
    pub input_settings: Option<InputSettings>,
}
/// <p>Placeholder documentation for InputChannelLevel</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct InputChannelLevel {
/// <p>Remixing value. Units are in dB and acceptable values are within the range from -60 (mute) to 6 dB.</p>
#[serde(rename = "Gain")]
pub gain: i64,
/// <p>The index of the input channel used as a source.</p>
#[serde(rename = "InputChannel")]
pub input_channel: i64,
}
/// <p>The settings for a PUSH type input.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct InputDestination {
    /// <p>The system-generated static IP address of endpoint.
    /// It remains fixed for the lifetime of the input.</p>
    #[serde(rename = "Ip", skip_serializing_if = "Option::is_none")]
    pub ip: Option<String>,
    /// <p>The port number for the input.</p>
    #[serde(rename = "Port", skip_serializing_if = "Option::is_none")]
    pub port: Option<String>,
    /// <p>This represents the endpoint that the customer stream will be
    /// pushed to.</p>
    #[serde(rename = "Url", skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
}
/// <p>Endpoint settings for a PUSH type input.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct InputDestinationRequest {
    /// <p>A unique name for the location the RTMP stream is being pushed
    /// to.</p>
    #[serde(rename = "StreamName", skip_serializing_if = "Option::is_none")]
    pub stream_name: Option<String>,
}
/// <p>Placeholder documentation for InputLocation</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct InputLocation {
    /// <p>key used to extract the password from EC2 Parameter store</p>
    #[serde(rename = "PasswordParam", skip_serializing_if = "Option::is_none")]
    pub password_param: Option<String>,
    /// <p>Uniform Resource Identifier - This should be a path to a file accessible to the Live system (e.g. a http:// URI) depending on the output type. For example, a RTMP destination should have a uri similar to: "rtmp://fmsserver/live".</p>
    #[serde(rename = "Uri")]
    pub uri: String,
    /// <p>Documentation update needed</p>
    #[serde(rename = "Username", skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
}
/// <p>Placeholder documentation for InputLossBehavior</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct InputLossBehavior {
    /// <p>Documentation update needed</p>
    #[serde(rename = "BlackFrameMsec", skip_serializing_if = "Option::is_none")]
    pub black_frame_msec: Option<i64>,
    /// <p>When input loss image type is "color" this field specifies the color to use. Value: 6 hex characters representing the values of RGB.</p>
    #[serde(rename = "InputLossImageColor", skip_serializing_if = "Option::is_none")]
    pub input_loss_image_color: Option<String>,
    /// <p>When input loss image type is "slate" these fields specify the parameters for accessing the slate.</p>
    #[serde(rename = "InputLossImageSlate", skip_serializing_if = "Option::is_none")]
    pub input_loss_image_slate: Option<InputLocation>,
    /// <p>Indicates whether to substitute a solid color or a slate into the output after input loss exceeds blackFrameMsec.</p>
    #[serde(rename = "InputLossImageType", skip_serializing_if = "Option::is_none")]
    pub input_loss_image_type: Option<String>,
    /// <p>Documentation update needed</p>
    #[serde(rename = "RepeatFrameMsec", skip_serializing_if = "Option::is_none")]
    pub repeat_frame_msec: Option<i64>,
}
/// <p>An Input Security Group</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct InputSecurityGroup {
    /// <p>Unique ARN of Input Security Group</p>
    #[serde(rename = "Arn", skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>The Id of the Input Security Group</p>
    #[serde(rename = "Id", skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>The list of inputs currently using this Input Security Group.</p>
    #[serde(rename = "Inputs", skip_serializing_if = "Option::is_none")]
    pub inputs: Option<Vec<String>>,
    /// <p>The current state of the Input Security Group.</p>
    #[serde(rename = "State", skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    /// <p>Whitelist rules and their sync status</p>
    #[serde(rename = "WhitelistRules", skip_serializing_if = "Option::is_none")]
    pub whitelist_rules: Option<Vec<InputWhitelistRule>>,
}
/// <p>Request of IPv4 CIDR addresses to whitelist in a security group.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct InputSecurityGroupWhitelistRequest {
/// <p>List of IPv4 CIDR addresses to whitelist; each element wraps a single CIDR string.</p>
pub whitelist_rules: Option<Vec<InputWhitelistRuleCidr>>,
}
/// <p>Live Event input parameters. There can be multiple inputs in a single Live Event.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct InputSettings {
    /// <p>Used to select the audio stream to decode for inputs that have multiple available.</p>
    #[serde(rename = "AudioSelectors", skip_serializing_if = "Option::is_none")]
    pub audio_selectors: Option<Vec<AudioSelector>>,
    /// <p>Used to select the caption input to use for inputs that have multiple available.</p>
    #[serde(rename = "CaptionSelectors", skip_serializing_if = "Option::is_none")]
    pub caption_selectors: Option<Vec<CaptionSelector>>,
    /// <p>Enable or disable the deblock filter when filtering.</p>
    #[serde(rename = "DeblockFilter", skip_serializing_if = "Option::is_none")]
    pub deblock_filter: Option<String>,
    /// <p>Enable or disable the denoise filter when filtering.</p>
    #[serde(rename = "DenoiseFilter", skip_serializing_if = "Option::is_none")]
    pub denoise_filter: Option<String>,
    /// <p>Adjusts the magnitude of filtering from 1 (minimal) to 5 (strongest).</p>
    #[serde(rename = "FilterStrength", skip_serializing_if = "Option::is_none")]
    pub filter_strength: Option<i64>,
    /// <p>Turns on the filter for this input. MPEG-2 inputs have the deblocking filter enabled by default.
    /// 1) auto - filtering will be applied depending on input type/quality
    /// 2) disabled - no filtering will be applied to the input
    /// 3) forced - filtering will be applied regardless of input type</p>
    #[serde(rename = "InputFilter", skip_serializing_if = "Option::is_none")]
    pub input_filter: Option<String>,
    /// <p>Input settings.</p>
    #[serde(rename = "NetworkInputSettings", skip_serializing_if = "Option::is_none")]
    pub network_input_settings: Option<NetworkInputSettings>,
    /// <p>Loop input if it is a file. This allows a file input to be streamed indefinitely.</p>
    #[serde(rename = "SourceEndBehavior", skip_serializing_if = "Option::is_none")]
    pub source_end_behavior: Option<String>,
    /// <p>Informs which video elementary stream to decode for input types that have multiple available.</p>
    #[serde(rename = "VideoSelector", skip_serializing_if = "Option::is_none")]
    pub video_selector: Option<VideoSelector>,
}
/// <p>The settings for a PULL type input.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct InputSource {
    /// <p>The key used to extract the password from EC2 Parameter store.</p>
    #[serde(rename = "PasswordParam", skip_serializing_if = "Option::is_none")]
    pub password_param: Option<String>,
    /// <p>This represents the customer's source URL where stream is
    /// pulled from.</p>
    #[serde(rename = "Url", skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    /// <p>The username for the input source.</p>
    #[serde(rename = "Username", skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
}
/// <p>Settings for a PULL type input.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct InputSourceRequest {
    /// <p>The key used to extract the password from EC2 Parameter store.</p>
    #[serde(rename = "PasswordParam", skip_serializing_if = "Option::is_none")]
    pub password_param: Option<String>,
    /// <p>This represents the customer's source URL where stream is
    /// pulled from.</p>
    #[serde(rename = "Url", skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    /// <p>The username for the input source.</p>
    #[serde(rename = "Username", skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
}
/// <p>Placeholder documentation for InputSpecification</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct InputSpecification {
    /// <p>Input codec</p>
    #[serde(rename = "Codec", skip_serializing_if = "Option::is_none")]
    pub codec: Option<String>,
    /// <p>Maximum input bitrate, categorized coarsely</p>
    #[serde(rename = "MaximumBitrate", skip_serializing_if = "Option::is_none")]
    pub maximum_bitrate: Option<String>,
    /// <p>Input resolution, categorized coarsely</p>
    #[serde(rename = "Resolution", skip_serializing_if = "Option::is_none")]
    pub resolution: Option<String>,
}
/// <p>Whitelist rule</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct InputWhitelistRule {
    /// <p>The IPv4 CIDR that's whitelisted.</p>
    #[serde(rename = "Cidr", skip_serializing_if = "Option::is_none")]
    pub cidr: Option<String>,
}
/// <p>An IPv4 CIDR to whitelist.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct InputWhitelistRuleCidr {
    /// <p>The IPv4 CIDR to whitelist.</p>
    #[serde(rename = "Cidr", skip_serializing_if = "Option::is_none")]
    pub cidr: Option<String>,
}
/// <p>Placeholder documentation for InternalServiceError</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct InternalServiceError {
/// <p>The error message, if any.</p>
pub message: Option<String>,
}
/// <p>Placeholder documentation for InvalidRequest</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct InvalidRequest {
/// <p>The error message, if any.</p>
pub message: Option<String>,
}
/// <p>Placeholder documentation for KeyProviderSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct KeyProviderSettings {
    /// <p>Settings for a static key provider.</p>
    #[serde(rename = "StaticKeySettings", skip_serializing_if = "Option::is_none")]
    pub static_key_settings: Option<StaticKeySettings>,
}
/// <p>Placeholder documentation for LimitExceeded</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct LimitExceeded {
/// <p>The error message, if any.</p>
pub message: Option<String>,
}
/// <p>Placeholder documentation for ListChannelsRequest</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListChannelsRequest {
    /// <p>The maximum number of results to return.</p>
    #[serde(rename = "MaxResults", skip_serializing_if = "Option::is_none")]
    pub max_results: Option<i64>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListChannelsResponse</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListChannelsResponse {
    /// <p>List of channel summaries.</p>
    #[serde(rename = "Channels", skip_serializing_if = "Option::is_none")]
    pub channels: Option<Vec<ChannelSummary>>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListChannelsResultModel</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ListChannelsResultModel {
/// <p>List of channel summaries.</p>
pub channels: Option<Vec<ChannelSummary>>,
/// <p>Token to retrieve the next page of results.</p>
pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListInputSecurityGroupsRequest</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListInputSecurityGroupsRequest {
    /// <p>The maximum number of results to return.</p>
    #[serde(rename = "MaxResults", skip_serializing_if = "Option::is_none")]
    pub max_results: Option<i64>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListInputSecurityGroupsResponse</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListInputSecurityGroupsResponse {
    /// <p>List of input security groups</p>
    #[serde(rename = "InputSecurityGroups", skip_serializing_if = "Option::is_none")]
    pub input_security_groups: Option<Vec<InputSecurityGroup>>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
}
/// <p>Result of input security group list request</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ListInputSecurityGroupsResultModel {
/// <p>List of input security groups</p>
pub input_security_groups: Option<Vec<InputSecurityGroup>>,
/// <p>Token to retrieve the next page of results.</p>
pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListInputsRequest</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListInputsRequest {
    /// <p>The maximum number of results to return.</p>
    #[serde(rename = "MaxResults", skip_serializing_if = "Option::is_none")]
    pub max_results: Option<i64>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListInputsResponse</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListInputsResponse {
    /// <p>List of inputs.</p>
    #[serde(rename = "Inputs", skip_serializing_if = "Option::is_none")]
    pub inputs: Option<Vec<Input>>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListInputsResultModel</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ListInputsResultModel {
/// <p>List of inputs.</p>
pub inputs: Option<Vec<Input>>,
/// <p>Token to retrieve the next page of results.</p>
pub next_token: Option<String>,
}
/// <p>Placeholder documentation for ListOfferingsRequest</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListOfferingsRequest {
    /// <p>Filter to offerings that match the configuration of an existing channel, e.g. '2345678' (a channel ID)</p>
    #[serde(rename = "ChannelConfiguration", skip_serializing_if = "Option::is_none")]
    pub channel_configuration: Option<String>,
    /// <p>Filter by codec, 'AVC', 'HEVC', 'MPEG2', or 'AUDIO'</p>
    #[serde(rename = "Codec", skip_serializing_if = "Option::is_none")]
    pub codec: Option<String>,
    /// <p>The maximum number of results to return.</p>
    #[serde(rename = "MaxResults", skip_serializing_if = "Option::is_none")]
    pub max_results: Option<i64>,
    /// <p>Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'</p>
    #[serde(rename = "MaximumBitrate", skip_serializing_if = "Option::is_none")]
    pub maximum_bitrate: Option<String>,
    /// <p>Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'</p>
    #[serde(rename = "MaximumFramerate", skip_serializing_if = "Option::is_none")]
    pub maximum_framerate: Option<String>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
    /// <p>Filter by resolution, 'SD', 'HD', or 'UHD'</p>
    #[serde(rename = "Resolution", skip_serializing_if = "Option::is_none")]
    pub resolution: Option<String>,
    /// <p>Filter by resource type, 'INPUT', 'OUTPUT', or 'CHANNEL'</p>
    #[serde(rename = "ResourceType", skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    /// <p>Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'</p>
    #[serde(rename = "SpecialFeature", skip_serializing_if = "Option::is_none")]
    pub special_feature: Option<String>,
    /// <p>Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'</p>
    #[serde(rename = "VideoQuality", skip_serializing_if = "Option::is_none")]
    pub video_quality: Option<String>,
}
/// <p>Placeholder documentation for ListOfferingsResponse</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListOfferingsResponse {
    /// <p>Token to retrieve the next page of results</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
    /// <p>List of offerings</p>
    #[serde(rename = "Offerings", skip_serializing_if = "Option::is_none")]
    pub offerings: Option<Vec<Offering>>,
}
/// <p>ListOfferings response</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ListOfferingsResultModel {
/// <p>Token to retrieve the next page of results</p>
pub next_token: Option<String>,
/// <p>List of offerings</p>
pub offerings: Option<Vec<Offering>>,
}
/// <p>Placeholder documentation for ListReservationsRequest</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListReservationsRequest {
    /// <p>Filter by codec, 'AVC', 'HEVC', 'MPEG2', or 'AUDIO'</p>
    #[serde(rename = "Codec", skip_serializing_if = "Option::is_none")]
    pub codec: Option<String>,
    /// <p>The maximum number of results to return.</p>
    #[serde(rename = "MaxResults", skip_serializing_if = "Option::is_none")]
    pub max_results: Option<i64>,
    /// <p>Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'</p>
    #[serde(rename = "MaximumBitrate", skip_serializing_if = "Option::is_none")]
    pub maximum_bitrate: Option<String>,
    /// <p>Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'</p>
    #[serde(rename = "MaximumFramerate", skip_serializing_if = "Option::is_none")]
    pub maximum_framerate: Option<String>,
    /// <p>Token to retrieve the next page of results.</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
    /// <p>Filter by resolution, 'SD', 'HD', or 'UHD'</p>
    #[serde(rename = "Resolution", skip_serializing_if = "Option::is_none")]
    pub resolution: Option<String>,
    /// <p>Filter by resource type, 'INPUT', 'OUTPUT', or 'CHANNEL'</p>
    #[serde(rename = "ResourceType", skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    /// <p>Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'</p>
    #[serde(rename = "SpecialFeature", skip_serializing_if = "Option::is_none")]
    pub special_feature: Option<String>,
    /// <p>Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'</p>
    #[serde(rename = "VideoQuality", skip_serializing_if = "Option::is_none")]
    pub video_quality: Option<String>,
}
/// <p>Placeholder documentation for ListReservationsResponse</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListReservationsResponse {
    /// <p>Token to retrieve the next page of results</p>
    #[serde(rename = "NextToken", skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
    /// <p>List of reservations</p>
    #[serde(rename = "Reservations", skip_serializing_if = "Option::is_none")]
    pub reservations: Option<Vec<Reservation>>,
}
/// <p>ListReservations response</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ListReservationsResultModel {
/// <p>Token to retrieve the next page of results</p>
pub next_token: Option<String>,
/// <p>List of reservations</p>
pub reservations: Option<Vec<Reservation>>,
}
/// <p>Placeholder documentation for M2tsSettings</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct M2tsSettings {
/// <p>When set to drop, output audio streams will be removed from the program if the selected input audio stream is removed from the input. This allows the output audio configuration to dynamically change based on input configuration. If this is set to encodeSilence, all output audio streams will output encoded silence when not connected to an active input stream.</p>
#[serde(rename = "AbsentInputAudioBehavior")]
#[serde(skip_serializing_if = "Option::is_none")]
pub absent_input_audio_behavior: Option<String>,
/// <p>When set to enabled, uses ARIB-compliant field muxing and removes video descriptor.</p>
#[serde(rename = "Arib")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arib: Option<String>,
/// <p>Packet Identifier (PID) for ARIB Captions in the transport stream. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "AribCaptionsPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arib_captions_pid: Option<String>,
/// <p>If set to auto, pid number used for ARIB Captions will be auto-selected from unused pids. If set to useConfigured, ARIB Captions will be on the configured pid number.</p>
#[serde(rename = "AribCaptionsPidControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub arib_captions_pid_control: Option<String>,
/// <p>When set to dvb, uses DVB buffer model for Dolby Digital audio. When set to atsc, the ATSC model is used.</p>
#[serde(rename = "AudioBufferModel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_buffer_model: Option<String>,
/// <p>The number of audio frames to insert for each PES packet.</p>
#[serde(rename = "AudioFramesPerPes")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_frames_per_pes: Option<i64>,
/// <p>Packet Identifier (PID) of the elementary audio stream(s) in the transport stream. Multiple values are accepted, and can be entered in ranges and/or by comma separation. Can be entered as decimal or hexadecimal values. Each PID specified must be in the range of 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "AudioPids")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_pids: Option<String>,
/// <p>When set to atsc, uses stream type = 0x81 for AC3 and stream type = 0x87 for EAC3. When set to dvb, uses stream type = 0x06.</p>
#[serde(rename = "AudioStreamType")]
#[serde(skip_serializing_if = "Option::is_none")]
pub audio_stream_type: Option<String>,
/// <p>The output bitrate of the transport stream in bits per second. Setting to 0 lets the muxer automatically determine the appropriate bitrate.</p>
#[serde(rename = "Bitrate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub bitrate: Option<i64>,
/// <p>If set to multiplex, use multiplex buffer model for accurate interleaving. Setting to bufferModel to none can lead to lower latency, but low-memory devices may not be able to play back the stream without interruptions.</p>
#[serde(rename = "BufferModel")]
#[serde(skip_serializing_if = "Option::is_none")]
pub buffer_model: Option<String>,
/// <p>When set to enabled, generates captionServiceDescriptor in PMT.</p>
#[serde(rename = "CcDescriptor")]
#[serde(skip_serializing_if = "Option::is_none")]
pub cc_descriptor: Option<String>,
/// <p>Inserts DVB Network Information Table (NIT) at the specified table repetition interval.</p>
#[serde(rename = "DvbNitSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_nit_settings: Option<DvbNitSettings>,
/// <p>Inserts DVB Service Description Table (SDT) at the specified table repetition interval.</p>
#[serde(rename = "DvbSdtSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_sdt_settings: Option<DvbSdtSettings>,
/// <p>Packet Identifier (PID) for input source DVB Subtitle data to this output. Multiple values are accepted, and can be entered in ranges and/or by comma separation. Can be entered as decimal or hexadecimal values. Each PID specified must be in the range of 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "DvbSubPids")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_sub_pids: Option<String>,
/// <p>Inserts DVB Time and Date Table (TDT) at the specified table repetition interval.</p>
#[serde(rename = "DvbTdtSettings")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_tdt_settings: Option<DvbTdtSettings>,
/// <p>Packet Identifier (PID) for input source DVB Teletext data to this output. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "DvbTeletextPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub dvb_teletext_pid: Option<String>,
/// <p>If set to passthrough, passes any EBIF data from the input source to this output.</p>
#[serde(rename = "Ebif")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ebif: Option<String>,
/// <p>When videoAndFixedIntervals is selected, audio EBP markers will be added to partitions 3 and 4. The interval between these additional markers will be fixed, and will be slightly shorter than the video EBP marker interval. Only available when EBP Cablelabs segmentation markers are selected. Partitions 1 and 2 will always follow the video interval.</p>
#[serde(rename = "EbpAudioInterval")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ebp_audio_interval: Option<String>,
/// <p>When set, enforces that Encoder Boundary Points do not come within the specified time interval of each other by looking ahead at input video. If another EBP is going to come in within the specified time interval, the current EBP is not emitted, and the segment is "stretched" to the next marker. The lookahead value does not add latency to the system. The Live Event must be configured elsewhere to create sufficient latency to make the lookahead accurate.</p>
#[serde(rename = "EbpLookaheadMs")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ebp_lookahead_ms: Option<i64>,
/// <p>Controls placement of EBP on Audio PIDs. If set to videoAndAudioPids, EBP markers will be placed on the video PID and all audio PIDs. If set to videoPid, EBP markers will be placed on only the video PID.</p>
#[serde(rename = "EbpPlacement")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ebp_placement: Option<String>,
/// <p>This field is unused and deprecated.</p>
#[serde(rename = "EcmPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub ecm_pid: Option<String>,
/// <p>Include or exclude the ES Rate field in the PES header.</p>
#[serde(rename = "EsRateInPes")]
#[serde(skip_serializing_if = "Option::is_none")]
pub es_rate_in_pes: Option<String>,
/// <p>Packet Identifier (PID) for input source ETV Platform data to this output. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "EtvPlatformPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub etv_platform_pid: Option<String>,
/// <p>Packet Identifier (PID) for input source ETV Signal data to this output. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "EtvSignalPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub etv_signal_pid: Option<String>,
/// <p>The length in seconds of each fragment. Only used with EBP markers.</p>
#[serde(rename = "FragmentTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub fragment_time: Option<f64>,
/// <p>If set to passthrough, passes any KLV data from the input source to this output.</p>
#[serde(rename = "Klv")]
#[serde(skip_serializing_if = "Option::is_none")]
pub klv: Option<String>,
/// <p>Packet Identifier (PID) for input source KLV data to this output. Multiple values are accepted, and can be entered in ranges and/or by comma separation. Can be entered as decimal or hexadecimal values. Each PID specified must be in the range of 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "KlvDataPids")]
#[serde(skip_serializing_if = "Option::is_none")]
pub klv_data_pids: Option<String>,
/// <p>Value in bits per second of extra null packets to insert into the transport stream. This can be used if a downstream encryption system requires periodic null packets.</p>
#[serde(rename = "NullPacketBitrate")]
#[serde(skip_serializing_if = "Option::is_none")]
pub null_packet_bitrate: Option<f64>,
/// <p>The number of milliseconds between instances of this table in the output transport stream. Valid values are 0, 10..1000.</p>
#[serde(rename = "PatInterval")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pat_interval: Option<i64>,
/// <p>When set to pcrEveryPesPacket, a Program Clock Reference value is inserted for every Packetized Elementary Stream (PES) header. This parameter is effective only when the PCR PID is the same as the video or audio elementary stream.</p>
#[serde(rename = "PcrControl")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pcr_control: Option<String>,
/// <p>Maximum time in milliseconds between Program Clock Reference (PCRs) inserted into the transport stream.</p>
#[serde(rename = "PcrPeriod")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pcr_period: Option<i64>,
/// <p>Packet Identifier (PID) of the Program Clock Reference (PCR) in the transport stream. When no value is given, the encoder will assign the same value as the Video PID. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "PcrPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pcr_pid: Option<String>,
/// <p>The number of milliseconds between instances of this table in the output transport stream. Valid values are 0, 10..1000.</p>
#[serde(rename = "PmtInterval")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pmt_interval: Option<i64>,
/// <p>Packet Identifier (PID) for the Program Map Table (PMT) in the transport stream. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "PmtPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub pmt_pid: Option<String>,
/// <p>The value of the program number field in the Program Map Table.</p>
#[serde(rename = "ProgramNum")]
#[serde(skip_serializing_if = "Option::is_none")]
pub program_num: Option<i64>,
/// <p>When vbr, does not insert null packets into transport stream to fill specified bitrate. The bitrate setting acts as the maximum bitrate when vbr is set.</p>
#[serde(rename = "RateMode")]
#[serde(skip_serializing_if = "Option::is_none")]
pub rate_mode: Option<String>,
/// <p>Packet Identifier (PID) for input source SCTE-27 data to this output. Multiple values are accepted, and can be entered in ranges and/or by comma separation. Can be entered as decimal or hexadecimal values. Each PID specified must be in the range of 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "Scte27Pids")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_27_pids: Option<String>,
/// <p>Optionally pass SCTE-35 signals from the input source to this output.</p>
#[serde(rename = "Scte35Control")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_35_control: Option<String>,
/// <p>Packet Identifier (PID) of the SCTE-35 stream in the transport stream. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "Scte35Pid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scte_35_pid: Option<String>,
/// <p>Inserts segmentation markers at each segmentationTime period. raiSegstart sets the Random Access Indicator bit in the adaptation field. raiAdapt sets the RAI bit and adds the current timecode in the private data bytes. psiSegstart inserts PAT and PMT tables at the start of segments. ebp adds Encoder Boundary Point information to the adaptation field as per OpenCable specification OC-SP-EBP-I01-130118. ebpLegacy adds Encoder Boundary Point information to the adaptation field using a legacy proprietary format.</p>
#[serde(rename = "SegmentationMarkers")]
#[serde(skip_serializing_if = "Option::is_none")]
pub segmentation_markers: Option<String>,
/// <p>The segmentation style parameter controls how segmentation markers are inserted into the transport stream. With avails, it is possible that segments may be truncated, which can influence where future segmentation markers are inserted.</p>
///
/// <p>When a segmentation style of "resetCadence" is selected and a segment is truncated due to an avail, we will reset the segmentation cadence. This means the subsequent segment will have a duration of $segmentationTime seconds.</p>
///
/// <p>When a segmentation style of "maintainCadence" is selected and a segment is truncated due to an avail, we will not reset the segmentation cadence. This means the subsequent segment will likely be truncated as well. However, all segments after that will have a duration of $segmentationTime seconds. Note that EBP lookahead is a slight exception to this rule.</p>
#[serde(rename = "SegmentationStyle")]
#[serde(skip_serializing_if = "Option::is_none")]
pub segmentation_style: Option<String>,
/// <p>The length in seconds of each segment. Required unless markers is set to None_.</p>
#[serde(rename = "SegmentationTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub segmentation_time: Option<f64>,
/// <p>When set to passthrough, timed metadata will be passed through from input to output.</p>
#[serde(rename = "TimedMetadataBehavior")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timed_metadata_behavior: Option<String>,
/// <p>Packet Identifier (PID) of the timed metadata stream in the transport stream. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "TimedMetadataPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timed_metadata_pid: Option<String>,
/// <p>The value of the transport stream ID field in the Program Map Table.</p>
#[serde(rename = "TransportStreamId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub transport_stream_id: Option<i64>,
/// <p>Packet Identifier (PID) of the elementary video stream in the transport stream. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
#[serde(rename = "VideoPid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub video_pid: Option<String>,
}
/// <p>Settings information for the .m3u8 container</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
// NOTE: generated AWS MediaLive API shape — `serde(rename)` maps each snake_case
// field to the service's PascalCase JSON key; optional fields are omitted when None.
pub struct M3u8Settings {
    /// <p>The number of audio frames to insert for each PES packet.</p>
    #[serde(rename = "AudioFramesPerPes")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio_frames_per_pes: Option<i64>,
    /// <p>Packet Identifier (PID) of the elementary audio stream(s) in the transport stream. Multiple values are accepted, and can be entered in ranges and/or by comma separation. Can be entered as decimal or hexadecimal values.</p>
    #[serde(rename = "AudioPids")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio_pids: Option<String>,
    /// <p>This parameter is unused and deprecated.</p>
    #[serde(rename = "EcmPid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ecm_pid: Option<String>,
    /// <p>The number of milliseconds between instances of this table in the output transport stream. A value of "0" writes out the PMT once per segment file.</p>
    #[serde(rename = "PatInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pat_interval: Option<i64>,
    /// <p>When set to pcrEveryPesPacket, a Program Clock Reference value is inserted for every Packetized Elementary Stream (PES) header. This parameter is effective only when the PCR PID is the same as the video or audio elementary stream.</p>
    #[serde(rename = "PcrControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pcr_control: Option<String>,
    /// <p>Maximum time in milliseconds between Program Clock References (PCRs) inserted into the transport stream.</p>
    #[serde(rename = "PcrPeriod")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pcr_period: Option<i64>,
    /// <p>Packet Identifier (PID) of the Program Clock Reference (PCR) in the transport stream. When no value is given, the encoder will assign the same value as the Video PID. Can be entered as a decimal or hexadecimal value.</p>
    #[serde(rename = "PcrPid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pcr_pid: Option<String>,
    /// <p>The number of milliseconds between instances of this table in the output transport stream. A value of "0" writes out the PMT once per segment file.</p>
    #[serde(rename = "PmtInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pmt_interval: Option<i64>,
    /// <p>Packet Identifier (PID) for the Program Map Table (PMT) in the transport stream. Can be entered as a decimal or hexadecimal value.</p>
    #[serde(rename = "PmtPid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pmt_pid: Option<String>,
    /// <p>The value of the program number field in the Program Map Table.</p>
    #[serde(rename = "ProgramNum")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub program_num: Option<i64>,
    /// <p>If set to passthrough, passes any SCTE-35 signals from the input source to this output.</p>
    #[serde(rename = "Scte35Behavior")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scte_35_behavior: Option<String>,
    /// <p>Packet Identifier (PID) of the SCTE-35 stream in the transport stream. Can be entered as a decimal or hexadecimal value.</p>
    #[serde(rename = "Scte35Pid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scte_35_pid: Option<String>,
    /// <p>When set to passthrough, timed metadata is passed through from input to output.</p>
    #[serde(rename = "TimedMetadataBehavior")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timed_metadata_behavior: Option<String>,
    /// <p>Packet Identifier (PID) of the timed metadata stream in the transport stream. Can be entered as a decimal or hexadecimal value. Valid values are 32 (or 0x20)..8182 (or 0x1ff6).</p>
    #[serde(rename = "TimedMetadataPid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timed_metadata_pid: Option<String>,
    /// <p>The value of the transport stream ID field in the Program Map Table.</p>
    #[serde(rename = "TransportStreamId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub transport_stream_id: Option<i64>,
    /// <p>Packet Identifier (PID) of the elementary video stream in the transport stream. Can be entered as a decimal or hexadecimal value.</p>
    #[serde(rename = "VideoPid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video_pid: Option<String>,
}
/// <p>Placeholder documentation for Mp2Settings</p>
// MPEG-2 audio codec settings for an audio encode.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Mp2Settings {
    /// <p>Average bitrate in bits/second.</p>
    #[serde(rename = "Bitrate")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bitrate: Option<f64>,
    /// <p>The MPEG2 Audio coding mode. Valid values are codingMode10 (for mono) or codingMode20 (for stereo).</p>
    #[serde(rename = "CodingMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub coding_mode: Option<String>,
    /// <p>Sample rate in Hz.</p>
    #[serde(rename = "SampleRate")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sample_rate: Option<f64>,
}
/// <p>Placeholder documentation for MsSmoothGroupSettings</p>
// Group-level settings for a Microsoft Smooth Streaming output group.
// Note: `destination` is the only required field (no `skip_serializing_if`).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct MsSmoothGroupSettings {
    /// <p>The value of the "Acquisition Point Identity" element used in each message placed in the sparse track. Only enabled if sparseTrackType is not "none".</p>
    #[serde(rename = "AcquisitionPointId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub acquisition_point_id: Option<String>,
    /// <p>If set to passthrough for an audio-only MS Smooth output, the fragment absolute time will be set to the current timecode. This option does not write timecodes to the audio elementary stream.</p>
    #[serde(rename = "AudioOnlyTimecodeControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio_only_timecode_control: Option<String>,
    /// <p>If set to verifyAuthenticity, verify the https certificate chain to a trusted Certificate Authority (CA). This will cause https outputs to self-signed certificates to fail.</p>
    #[serde(rename = "CertificateMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub certificate_mode: Option<String>,
    /// <p>Number of seconds to wait before retrying connection to the IIS server if the connection is lost. Content will be cached during this time and the cache will be be delivered to the IIS server once the connection is re-established.</p>
    #[serde(rename = "ConnectionRetryInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub connection_retry_interval: Option<i64>,
    /// <p>Smooth Streaming publish point on an IIS server. Elemental Live acts as a "Push" encoder to IIS.</p>
    #[serde(rename = "Destination")]
    pub destination: OutputLocationRef,
    /// <p>MS Smooth event ID to be sent to the IIS server.</p>
    ///
    /// <p>Should only be specified if eventIdMode is set to useConfigured.</p>
    #[serde(rename = "EventId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub event_id: Option<String>,
    /// <p>Specifies whether or not to send an event ID to the IIS server. If no event ID is sent and the same Live Event is used without changing the publishing point, clients might see cached video from the previous run.</p>
    ///
    /// <p>Options:
    /// - "useConfigured" - use the value provided in eventId
    /// - "useTimestamp" - generate and send an event ID based on the current timestamp
    /// - "noEventId" - do not send an event ID to the IIS server.</p>
    #[serde(rename = "EventIdMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub event_id_mode: Option<String>,
    /// <p>When set to sendEos, send EOS signal to IIS server when stopping the event</p>
    #[serde(rename = "EventStopBehavior")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub event_stop_behavior: Option<String>,
    /// <p>Size in seconds of file cache for streaming outputs.</p>
    #[serde(rename = "FilecacheDuration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub filecache_duration: Option<i64>,
    /// <p>Length of mp4 fragments to generate (in seconds). Fragment length must be compatible with GOP size and framerate.</p>
    #[serde(rename = "FragmentLength")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fragment_length: Option<i64>,
    /// <p>Parameter that control output group behavior on input loss.</p>
    #[serde(rename = "InputLossAction")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_loss_action: Option<String>,
    /// <p>Number of retry attempts.</p>
    #[serde(rename = "NumRetries")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub num_retries: Option<i64>,
    /// <p>Number of seconds before initiating a restart due to output failure, due to exhausting the numRetries on one segment, or exceeding filecacheDuration.</p>
    #[serde(rename = "RestartDelay")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub restart_delay: Option<i64>,
    /// <p>When set to useInputSegmentation, the output segment or fragment points are set by the RAI markers from the input streams.</p>
    #[serde(rename = "SegmentationMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub segmentation_mode: Option<String>,
    /// <p>Outputs that are "output locked" can use this delay. Assign a delay to the output that is "secondary". Do not assign a delay to the "primary" output. The delay means that the primary output will always reach the downstream system before the secondary, which helps ensure that the downstream system always uses the primary output. (If there were no delay, the downstream system might flip-flop between whichever output happens to arrive first.) If the primary fails, the downstream system will switch to the secondary output. When the primary is restarted, the downstream system will switch back to the primary (because once again it is always arriving first)</p>
    #[serde(rename = "SendDelayMs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub send_delay_ms: Option<i64>,
    /// <p>If set to scte35, use incoming SCTE-35 messages to generate a sparse track in this group of MS-Smooth outputs.</p>
    #[serde(rename = "SparseTrackType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sparse_track_type: Option<String>,
    /// <p>When set to send, send stream manifest so publishing point doesn't start until all streams start.</p>
    #[serde(rename = "StreamManifestBehavior")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_manifest_behavior: Option<String>,
    /// <p>Timestamp offset for the event. Only used if timestampOffsetMode is set to useConfiguredOffset.</p>
    #[serde(rename = "TimestampOffset")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timestamp_offset: Option<String>,
    /// <p>Type of timestamp date offset to use.
    /// - useEventStartDate: Use the date the event was started as the offset
    /// - useConfiguredOffset: Use an explicitly configured date as the offset</p>
    #[serde(rename = "TimestampOffsetMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timestamp_offset_mode: Option<String>,
}
/// <p>Placeholder documentation for MsSmoothOutputSettings</p>
// Per-output settings within an MS Smooth output group.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct MsSmoothOutputSettings {
    /// <p>String concatenated to the end of the destination filename. Required for multiple outputs of the same type.</p>
    #[serde(rename = "NameModifier")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name_modifier: Option<String>,
}
/// <p>Network source to transcode. Must be accessible to the Elemental Live node that is running the live event through a network connection.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct NetworkInputSettings {
    /// <p>Specifies HLS input settings when the uri is for a HLS manifest.</p>
    #[serde(rename = "HlsInputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_input_settings: Option<HlsInputSettings>,
    /// <p>Check HTTPS server certificates. When set to checkCryptographyOnly, cryptography in the certificate will be checked, but not the server's name. Certain subdomains (notably S3 buckets that use dots in the bucket name) do not strictly match the corresponding certificate's wildcard pattern and would otherwise cause the event to error. This setting is ignored for protocols that do not use https.</p>
    #[serde(rename = "ServerValidation")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_validation: Option<String>,
}
/// <p>Reserved resources available for purchase</p>
// Response-only shape: deserialized from the API; Serialize is derived only
// for tests (round-trip checks), hence the cfg_attr below.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Offering {
    /// <p>Unique offering ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:offering:87654321'</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'</p>
    #[serde(rename = "CurrencyCode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub currency_code: Option<String>,
    /// <p>Lease duration, e.g. '12'</p>
    #[serde(rename = "Duration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<i64>,
    /// <p>Units for duration, e.g. 'MONTHS'</p>
    #[serde(rename = "DurationUnits")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_units: Option<String>,
    /// <p>One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering</p>
    #[serde(rename = "FixedPrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fixed_price: Option<f64>,
    /// <p>Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'</p>
    #[serde(rename = "OfferingDescription")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_description: Option<String>,
    /// <p>Unique offering ID, e.g. '87654321'</p>
    #[serde(rename = "OfferingId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_id: Option<String>,
    /// <p>Offering type, e.g. 'NO_UPFRONT'</p>
    #[serde(rename = "OfferingType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_type: Option<String>,
    /// <p>AWS region, e.g. 'us-west-2'</p>
    #[serde(rename = "Region")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    /// <p>Resource configuration details</p>
    #[serde(rename = "ResourceSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resource_specification: Option<ReservationResourceSpecification>,
    /// <p>Recurring usage charge for each reserved resource, e.g. '157.0'</p>
    #[serde(rename = "UsagePrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage_price: Option<f64>,
}
/// <p>Output settings. There can be multiple outputs within a group.</p>
// `output_settings` is the only required field; all others are optional.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Output {
    /// <p>The names of the AudioDescriptions used as audio sources for this output.</p>
    #[serde(rename = "AudioDescriptionNames")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio_description_names: Option<Vec<String>>,
    /// <p>The names of the CaptionDescriptions used as caption sources for this output.</p>
    #[serde(rename = "CaptionDescriptionNames")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub caption_description_names: Option<Vec<String>>,
    /// <p>The name used to identify an output.</p>
    #[serde(rename = "OutputName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_name: Option<String>,
    /// <p>Output type-specific settings.</p>
    #[serde(rename = "OutputSettings")]
    pub output_settings: OutputSettings,
    /// <p>The name of the VideoDescription used as the source for this output.</p>
    #[serde(rename = "VideoDescriptionName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video_description_name: Option<String>,
}
/// <p>Placeholder documentation for OutputDestination</p>
// A channel-level destination, referenced from outputs via its `id`
// (see OutputLocationRef::destination_ref_id).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OutputDestination {
    /// <p>User-specified id. This is used in an output group or an output.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>Destination settings for output; one for each redundant encoder.</p>
    #[serde(rename = "Settings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub settings: Option<Vec<OutputDestinationSettings>>,
}
/// <p>Placeholder documentation for OutputDestinationSettings</p>
// Connection details (URL plus optional credentials) for one redundant encoder.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OutputDestinationSettings {
    /// <p>key used to extract the password from EC2 Parameter store</p>
    #[serde(rename = "PasswordParam")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub password_param: Option<String>,
    /// <p>Stream name for RTMP destinations (URLs of type rtmp://)</p>
    #[serde(rename = "StreamName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_name: Option<String>,
    /// <p>A URL specifying a destination</p>
    #[serde(rename = "Url")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    /// <p>username for destination</p>
    #[serde(rename = "Username")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
}
/// <p>Output groups for this Live Event. Output groups contain information about where streams should be distributed.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OutputGroup {
    /// <p>Custom output group name optionally defined by the user. Only letters, numbers, and the underscore character allowed; only 32 characters allowed.</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>Settings associated with the output group.</p>
    #[serde(rename = "OutputGroupSettings")]
    pub output_group_settings: OutputGroupSettings,
    /// The outputs belonging to this group (required by the API).
    #[serde(rename = "Outputs")]
    pub outputs: Vec<Output>,
}
/// <p>Placeholder documentation for OutputGroupSettings</p>
// Union-like container: exactly one variant's settings is expected to be set,
// selecting the output group type (archive, HLS, MS Smooth, RTMP, or UDP).
// NOTE(review): the one-of constraint is enforced by the service, not this type.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OutputGroupSettings {
    /// Settings for an archive (file-based) output group.
    #[serde(rename = "ArchiveGroupSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub archive_group_settings: Option<ArchiveGroupSettings>,
    /// Settings for an HLS output group.
    #[serde(rename = "HlsGroupSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_group_settings: Option<HlsGroupSettings>,
    /// Settings for a Microsoft Smooth Streaming output group.
    #[serde(rename = "MsSmoothGroupSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ms_smooth_group_settings: Option<MsSmoothGroupSettings>,
    /// Settings for an RTMP output group.
    #[serde(rename = "RtmpGroupSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rtmp_group_settings: Option<RtmpGroupSettings>,
    /// Settings for a UDP output group.
    #[serde(rename = "UdpGroupSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub udp_group_settings: Option<UdpGroupSettings>,
}
/// <p>Reference to an OutputDestination ID defined in the channel</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OutputLocationRef {
    /// The `id` of an OutputDestination declared at the channel level.
    #[serde(rename = "DestinationRefId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destination_ref_id: Option<String>,
}
/// <p>Placeholder documentation for OutputSettings</p>
// Union-like container mirroring OutputGroupSettings: one variant is expected
// to be set, matching the containing group's type.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OutputSettings {
    /// Settings for an output in an archive output group.
    #[serde(rename = "ArchiveOutputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub archive_output_settings: Option<ArchiveOutputSettings>,
    /// Settings for an output in an HLS output group.
    #[serde(rename = "HlsOutputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_output_settings: Option<HlsOutputSettings>,
    /// Settings for an output in an MS Smooth output group.
    #[serde(rename = "MsSmoothOutputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ms_smooth_output_settings: Option<MsSmoothOutputSettings>,
    /// Settings for an output in an RTMP output group.
    #[serde(rename = "RtmpOutputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rtmp_output_settings: Option<RtmpOutputSettings>,
    /// Settings for an output in a UDP output group.
    #[serde(rename = "UdpOutputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub udp_output_settings: Option<UdpOutputSettings>,
}
/// <p>Placeholder documentation for PassThroughSettings</p>
// Empty marker type: selecting it signals pass-through; it carries no options.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PassThroughSettings {}
/// <p>PurchaseOffering request</p>
// Plain domain model (no serde): the wire shape is PurchaseOfferingRequest below.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct PurchaseOffering {
    /// <p>Number of resources</p>
    pub count: Option<i64>,
    /// <p>Name for the new reservation</p>
    pub name: Option<String>,
    /// <p>Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.</p>
    pub request_id: Option<String>,
}
/// <p>Placeholder documentation for PurchaseOfferingRequest</p>
// Request-only shape (Serialize only). `offering_id` is required; it is sent
// in the request body/path, never skipped.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PurchaseOfferingRequest {
    /// <p>Number of resources</p>
    #[serde(rename = "Count")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub count: Option<i64>,
    /// <p>Name for the new reservation</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>Offering to purchase, e.g. '87654321'</p>
    #[serde(rename = "OfferingId")]
    pub offering_id: String,
    /// <p>Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.</p>
    #[serde(rename = "RequestId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub request_id: Option<String>,
}
/// <p>Placeholder documentation for PurchaseOfferingResponse</p>
// Response-only shape; Serialize is derived only under `cfg(test)`.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct PurchaseOfferingResponse {
    /// The reservation created by the purchase, when returned by the service.
    #[serde(rename = "Reservation")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reservation: Option<Reservation>,
}
/// <p>PurchaseOffering response</p>
// Plain domain model (no serde) mirroring PurchaseOfferingResponse.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct PurchaseOfferingResultModel {
    /// The reservation resulting from the purchase, if any.
    pub reservation: Option<Reservation>,
}
/// <p>Placeholder documentation for RemixSettings</p>
// Audio channel remix configuration; `channel_mappings` is required.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RemixSettings {
    /// <p>Mapping of input channels to output channels, with appropriate gain adjustments.</p>
    #[serde(rename = "ChannelMappings")]
    pub channel_mappings: Vec<AudioChannelMapping>,
    /// <p>Number of input channels to be used.</p>
    #[serde(rename = "ChannelsIn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub channels_in: Option<i64>,
    /// <p>Number of output channels to be produced.
    /// Valid values: 1, 2, 4, 6, 8</p>
    #[serde(rename = "ChannelsOut")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub channels_out: Option<i64>,
}
/// <p>Reserved resources available to use</p>
// Response-only shape; Serialize is derived only under `cfg(test)`.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Reservation {
    /// <p>Unique reservation ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:reservation:1234567'</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>Number of reserved resources</p>
    #[serde(rename = "Count")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub count: Option<i64>,
    /// <p>Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'</p>
    #[serde(rename = "CurrencyCode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub currency_code: Option<String>,
    /// <p>Lease duration, e.g. '12'</p>
    #[serde(rename = "Duration")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<i64>,
    /// <p>Units for duration, e.g. 'MONTHS'</p>
    #[serde(rename = "DurationUnits")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_units: Option<String>,
    /// <p>Reservation UTC end date and time in ISO-8601 format, e.g. '2019-03-01T00:00:00'</p>
    #[serde(rename = "End")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub end: Option<String>,
    /// <p>One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering</p>
    #[serde(rename = "FixedPrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fixed_price: Option<f64>,
    /// <p>User specified reservation name</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'</p>
    #[serde(rename = "OfferingDescription")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_description: Option<String>,
    /// <p>Unique offering ID, e.g. '87654321'</p>
    #[serde(rename = "OfferingId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_id: Option<String>,
    /// <p>Offering type, e.g. 'NO_UPFRONT'</p>
    #[serde(rename = "OfferingType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offering_type: Option<String>,
    /// <p>AWS region, e.g. 'us-west-2'</p>
    #[serde(rename = "Region")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    /// <p>Unique reservation ID, e.g. '1234567'</p>
    #[serde(rename = "ReservationId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reservation_id: Option<String>,
    /// <p>Resource configuration details</p>
    #[serde(rename = "ResourceSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resource_specification: Option<ReservationResourceSpecification>,
    /// <p>Reservation UTC start date and time in ISO-8601 format, e.g. '2018-03-01T00:00:00'</p>
    #[serde(rename = "Start")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub start: Option<String>,
    /// <p>Current state of reservation, e.g. 'ACTIVE'</p>
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    /// <p>Recurring usage charge for each reserved resource, e.g. '157.0'</p>
    #[serde(rename = "UsagePrice")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage_price: Option<f64>,
}
/// <p>Resource configuration (codec, resolution, bitrate, ...)</p>
// Response-only shape; Serialize is derived only under `cfg(test)`.
// The <em> tags in some field docs below are codegen artifacts of
// underscore-containing values like 'MAX_20_MBPS'.
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ReservationResourceSpecification {
    /// <p>Codec, e.g. 'AVC'</p>
    #[serde(rename = "Codec")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub codec: Option<String>,
    /// <p>Maximum bitrate, e.g. 'MAX<em>20</em>MBPS'</p>
    #[serde(rename = "MaximumBitrate")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub maximum_bitrate: Option<String>,
    /// <p>Maximum framerate, e.g. 'MAX<em>30</em>FPS' (Outputs only)</p>
    #[serde(rename = "MaximumFramerate")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub maximum_framerate: Option<String>,
    /// <p>Resolution, e.g. 'HD'</p>
    #[serde(rename = "Resolution")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resolution: Option<String>,
    /// <p>Resource type, 'INPUT', 'OUTPUT', or 'CHANNEL'</p>
    #[serde(rename = "ResourceType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    /// <p>Special feature, e.g. 'AUDIO_NORMALIZATION' (Channels only)</p>
    #[serde(rename = "SpecialFeature")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub special_feature: Option<String>,
    /// <p>Video quality, e.g. 'STANDARD' (Outputs only)</p>
    #[serde(rename = "VideoQuality")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video_quality: Option<String>,
}
/// <p>Placeholder documentation for ResourceConflict</p>
// Error payload model (no serde derives on this plain shape).
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ResourceConflict {
    /// Human-readable error message from the service, if provided.
    pub message: Option<String>,
}
/// <p>Placeholder documentation for ResourceNotFound</p>
// Error payload model (no serde derives on this plain shape).
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ResourceNotFound {
    /// Human-readable error message from the service, if provided.
    pub message: Option<String>,
}
/// <p>Placeholder documentation for RtmpCaptionInfoDestinationSettings</p>
// Empty marker type: selects RTMP onCaptionInfo as the caption destination.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RtmpCaptionInfoDestinationSettings {}
/// <p>Placeholder documentation for RtmpGroupSettings</p>
// Group-level settings for an RTMP output group.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RtmpGroupSettings {
    /// <p>Authentication scheme to use when connecting with CDN</p>
    #[serde(rename = "AuthenticationScheme")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub authentication_scheme: Option<String>,
    /// <p>Controls behavior when content cache fills up. If remote origin server stalls the RTMP connection and does not accept content fast enough the 'Media Cache' will fill up. When the cache reaches the duration specified by cacheLength the cache will stop accepting new content. If set to disconnectImmediately, the RTMP output will force a disconnect. Clear the media cache, and reconnect after restartDelay seconds. If set to waitForServer, the RTMP output will wait up to 5 minutes to allow the origin server to begin accepting data again.</p>
    #[serde(rename = "CacheFullBehavior")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_full_behavior: Option<String>,
    /// <p>Cache length, in seconds, is used to calculate buffer size.</p>
    #[serde(rename = "CacheLength")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_length: Option<i64>,
    /// <p>Controls the types of data that passes to onCaptionInfo outputs. If set to 'all' then 608 and 708 carried DTVCC data will be passed. If set to 'field1AndField2608' then DTVCC data will be stripped out, but 608 data from both fields will be passed. If set to 'field1608' then only the data carried in 608 from field 1 video will be passed.</p>
    #[serde(rename = "CaptionData")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub caption_data: Option<String>,
    /// <p>If a streaming output fails, number of seconds to wait until a restart is initiated. A value of 0 means never restart.</p>
    #[serde(rename = "RestartDelay")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub restart_delay: Option<i64>,
}
/// <p>Placeholder documentation for RtmpOutputSettings</p>
// Per-output settings within an RTMP output group; `destination` is required.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RtmpOutputSettings {
    /// <p>If set to verifyAuthenticity, verify the tls certificate chain to a trusted Certificate Authority (CA). This will cause rtmps outputs with self-signed certificates to fail.</p>
    #[serde(rename = "CertificateMode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub certificate_mode: Option<String>,
    /// <p>Number of seconds to wait before retrying a connection to the Flash Media server if the connection is lost.</p>
    #[serde(rename = "ConnectionRetryInterval")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub connection_retry_interval: Option<i64>,
    /// <p>The RTMP endpoint excluding the stream name (eg. rtmp://host/appname). For connection to Akamai, a username and password must be supplied. URI fields accept format identifiers.</p>
    #[serde(rename = "Destination")]
    pub destination: OutputLocationRef,
    /// <p>Number of retry attempts.</p>
    #[serde(rename = "NumRetries")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub num_retries: Option<i64>,
}
/// <p>Destination settings for SCTE-20 plus embedded captions; this output type has no configurable fields.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Scte20PlusEmbeddedDestinationSettings {}
/// <p>Settings for extracting SCTE-20 captions from an input source.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Scte20SourceSettings {
    /// <p>If upconvert, 608 data is both passed through via the "608 compatibility bytes" fields of the 708 wrapper as well as translated into 708. 708 data present in the source content will be discarded.</p>
    #[serde(rename = "Convert608To708")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub convert_608_to_708: Option<String>,
    /// <p>Specifies the 608/708 channel number within the video track from which to extract captions. Unused for passthrough.</p>
    #[serde(rename = "Source608ChannelNumber")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub source_608_channel_number: Option<i64>,
}
/// <p>Destination settings for SCTE-27 captions; this output type has no configurable fields.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Scte27DestinationSettings {}
/// <p>Settings for extracting SCTE-27 captions from an input source by PID.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Scte27SourceSettings {
    /// <p>The pid field is used in conjunction with the caption selector languageCode field as follows:
    /// - Specify PID and Language: Extracts captions from that PID; the language is "informational".
    /// - Specify PID and omit Language: Extracts the specified PID.
    /// - Omit PID and specify Language: Extracts the specified language, whichever PID that happens to be.
    /// - Omit PID and omit Language: Valid only if source is DVB-Sub that is being passed through; all languages will be passed through.</p>
    #[serde(rename = "Pid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pid: Option<i64>,
}
/// <p>SCTE-35 spliceInsert mode settings: ad avail PTS offset and handling of the regional blackout / web delivery flags.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Scte35SpliceInsert {
    /// <p>When specified, this offset (in milliseconds) is added to the input Ad Avail PTS time. This only applies to embedded SCTE 104/35 messages and does not apply to OOB messages.</p>
    #[serde(rename = "AdAvailOffset")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ad_avail_offset: Option<i64>,
    /// <p>When set to ignore, Segment Descriptors with noRegionalBlackoutFlag set to 0 will no longer trigger blackouts or Ad Avail slates</p>
    #[serde(rename = "NoRegionalBlackoutFlag")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub no_regional_blackout_flag: Option<String>,
    /// <p>When set to ignore, Segment Descriptors with webDeliveryAllowedFlag set to 0 will no longer trigger blackouts or Ad Avail slates</p>
    #[serde(rename = "WebDeliveryAllowedFlag")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_delivery_allowed_flag: Option<String>,
}
/// <p>SCTE-35 timeSignal APOS mode settings: ad avail PTS offset and handling of the regional blackout / web delivery flags.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Scte35TimeSignalApos {
    /// <p>When specified, this offset (in milliseconds) is added to the input Ad Avail PTS time. This only applies to embedded SCTE 104/35 messages and does not apply to OOB messages.</p>
    #[serde(rename = "AdAvailOffset")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ad_avail_offset: Option<i64>,
    /// <p>When set to ignore, Segment Descriptors with noRegionalBlackoutFlag set to 0 will no longer trigger blackouts or Ad Avail slates</p>
    #[serde(rename = "NoRegionalBlackoutFlag")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub no_regional_blackout_flag: Option<String>,
    /// <p>When set to ignore, Segment Descriptors with webDeliveryAllowedFlag set to 0 will no longer trigger blackouts or Ad Avail slates</p>
    #[serde(rename = "WebDeliveryAllowedFlag")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_delivery_allowed_flag: Option<String>,
}
/// <p>Destination settings for SMPTE-TT captions; this output type has no configurable fields.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SmpteTtDestinationSettings {}
/// <p>Settings for a standard HLS output: associated audio rendition groups and the M3U8 container settings.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct StandardHlsSettings {
    /// <p>List all the audio groups that are used with the video output stream. Input all the audio GROUP-IDs that are associated to the video, separate by ','.</p>
    #[serde(rename = "AudioRenditionSets")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio_rendition_sets: Option<String>,
    /// <p>Settings for the M3U8 container.</p>
    #[serde(rename = "M3u8Settings")]
    pub m_3u_8_settings: M3u8Settings,
}
/// <p>Request parameters for the StartChannel operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct StartChannelRequest {
    /// <p>The unique ID of the channel to start.</p>
    #[serde(rename = "ChannelId")]
    pub channel_id: String,
}
/// <p>Response from the StartChannel operation, describing the channel.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct StartChannelResponse {
    /// <p>The unique arn of the channel.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A list of destinations of the channel. For UDP outputs, there is one
    /// destination per output. For other types (HLS, for example), there is
    /// one destination per packager.</p>
    #[serde(rename = "Destinations")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<OutputDestination>>,
    /// <p>The endpoints where outgoing connections initiate from</p>
    #[serde(rename = "EgressEndpoints")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub egress_endpoints: Option<Vec<ChannelEgressEndpoint>>,
    /// <p>The encoder settings for the channel.</p>
    #[serde(rename = "EncoderSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoder_settings: Option<EncoderSettings>,
    /// <p>The unique id of the channel.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>List of input attachments for channel.</p>
    #[serde(rename = "InputAttachments")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_attachments: Option<Vec<InputAttachment>>,
    /// <p>Specification of input for the channel.</p>
    #[serde(rename = "InputSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_specification: Option<InputSpecification>,
    /// <p>The log level being written to CloudWatch Logs.</p>
    #[serde(rename = "LogLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub log_level: Option<String>,
    /// <p>The name of the channel. (user-mutable)</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>The number of currently healthy pipelines.</p>
    #[serde(rename = "PipelinesRunningCount")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pipelines_running_count: Option<i64>,
    /// <p>The Amazon Resource Name (ARN) of the role assumed when running the Channel.</p>
    #[serde(rename = "RoleArn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub role_arn: Option<String>,
    /// <p>The current state of the channel.</p>
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
}
/// <p>Static key encryption settings: the license server location and the static key value.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct StaticKeySettings {
    /// <p>The URL of the license server used for protecting content.</p>
    #[serde(rename = "KeyProviderServer")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub key_provider_server: Option<InputLocation>,
    /// <p>Static key value as a 32 character hexadecimal string.</p>
    #[serde(rename = "StaticKeyValue")]
    pub static_key_value: String,
}
/// <p>Request parameters for the StopChannel operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct StopChannelRequest {
    /// <p>The unique ID of the channel to stop.</p>
    #[serde(rename = "ChannelId")]
    pub channel_id: String,
}
/// <p>Response from the StopChannel operation, describing the channel.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct StopChannelResponse {
    /// <p>The unique arn of the channel.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A list of destinations of the channel. For UDP outputs, there is one
    /// destination per output. For other types (HLS, for example), there is
    /// one destination per packager.</p>
    #[serde(rename = "Destinations")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<OutputDestination>>,
    /// <p>The endpoints where outgoing connections initiate from</p>
    #[serde(rename = "EgressEndpoints")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub egress_endpoints: Option<Vec<ChannelEgressEndpoint>>,
    /// <p>The encoder settings for the channel.</p>
    #[serde(rename = "EncoderSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoder_settings: Option<EncoderSettings>,
    /// <p>The unique id of the channel.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>List of input attachments for channel.</p>
    #[serde(rename = "InputAttachments")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_attachments: Option<Vec<InputAttachment>>,
    /// <p>Specification of input for the channel.</p>
    #[serde(rename = "InputSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_specification: Option<InputSpecification>,
    /// <p>The log level being written to CloudWatch Logs.</p>
    #[serde(rename = "LogLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub log_level: Option<String>,
    /// <p>The name of the channel. (user-mutable)</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>The number of currently healthy pipelines.</p>
    #[serde(rename = "PipelinesRunningCount")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pipelines_running_count: Option<i64>,
    /// <p>The Amazon Resource Name (ARN) of the role assumed when running the Channel.</p>
    #[serde(rename = "RoleArn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub role_arn: Option<String>,
    /// <p>The current state of the channel.</p>
    #[serde(rename = "State")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
}
/// <p>Destination settings for teletext captions; this output type has no configurable fields.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TeletextDestinationSettings {}
/// <p>Settings for extracting teletext captions from an input source.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TeletextSourceSettings {
    /// <p>Specifies the teletext page number within the data stream from which to extract captions. Range of 0x100 (256) to 0x8FF (2303). Unused for passthrough. Should be specified as a hexadecimal string with no "0x" prefix.</p>
    #[serde(rename = "PageNumber")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub page_number: Option<String>,
}
/// <p>Configuration of the output timecode: its source and the resynchronization threshold.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TimecodeConfig {
    /// <p>Identifies the source for the timecode that will be associated with the events outputs.
    /// -Embedded (embedded): Initialize the output timecode with timecode from the the source. If no embedded timecode is detected in the source, the system falls back to using "Start at 0" (zerobased).
    /// -System Clock (systemclock): Use the UTC time.
    /// -Start at 0 (zerobased): The time of the first frame of the event will be 00:00:00:00.</p>
    #[serde(rename = "Source")]
    pub source: String,
    /// <p>Threshold in frames beyond which output timecode is resynchronized to the input timecode. Discrepancies below this threshold are permitted to avoid unnecessary discontinuities in the output timecode. No timecode sync when this is not specified.</p>
    #[serde(rename = "SyncThreshold")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sync_threshold: Option<i64>,
}
/// <p>Destination settings for TTML captions.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TtmlDestinationSettings {
    /// <p>When set to passthrough, passes through style and position information from a TTML-like input source (TTML, SMPTE-TT, CFF-TT) to the CFF-TT output or TTML output.</p>
    #[serde(rename = "StyleControl")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub style_control: Option<String>,
}
/// <p>Container settings for a UDP output.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct UdpContainerSettings {
    /// <p>M2TS container settings.</p>
    #[serde(rename = "M2tsSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub m_2ts_settings: Option<M2tsSettings>,
}
/// <p>Group settings for UDP outputs: input-loss behavior and timed ID3 metadata.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct UdpGroupSettings {
    /// <p>Specifies behavior of last resort when input video is lost, and no more backup inputs are available. When dropTs is selected the entire transport stream will stop being emitted.  When dropProgram is selected the program can be dropped from the transport stream (and replaced with null packets to meet the TS bitrate requirement).  Or, when emitProgram is chosen the transport stream will continue to be produced normally with repeat frames, black frames, or slate frames substituted for the absent input video.</p>
    #[serde(rename = "InputLossAction")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_loss_action: Option<String>,
    /// <p>Indicates ID3 frame that has the timecode.</p>
    #[serde(rename = "TimedMetadataId3Frame")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timed_metadata_id_3_frame: Option<String>,
    /// <p>Timed Metadata interval in seconds.</p>
    #[serde(rename = "TimedMetadataId3Period")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timed_metadata_id_3_period: Option<i64>,
}
/// <p>Settings for a single UDP output: buffering, container, destination, and forward error correction.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct UdpOutputSettings {
    /// <p>UDP output buffering in milliseconds. Larger values increase latency through the transcoder but simultaneously assist the transcoder in maintaining a constant, low-jitter UDP/RTP output while accommodating clock recovery, input switching, input disruptions, picture reordering, etc.</p>
    #[serde(rename = "BufferMsec")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub buffer_msec: Option<i64>,
    /// <p>Container settings for this UDP output.</p>
    #[serde(rename = "ContainerSettings")]
    pub container_settings: UdpContainerSettings,
    /// <p>Destination address and port number for RTP or UDP packets. Can be unicast or multicast RTP or UDP (eg. rtp://239.10.10.10:5001 or udp://10.100.100.100:5002).</p>
    #[serde(rename = "Destination")]
    pub destination: OutputLocationRef,
    /// <p>Settings for enabling and adjusting Forward Error Correction on UDP outputs.</p>
    #[serde(rename = "FecOutputSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fec_output_settings: Option<FecOutputSettings>,
}
/// <p>The set of channel fields that can be updated.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct UpdateChannel {
    /// <p>A list of output destinations for this channel.</p>
    pub destinations: Option<Vec<OutputDestination>>,
    /// <p>The encoder settings for this channel.</p>
    pub encoder_settings: Option<EncoderSettings>,
    /// <p>List of input attachments for the channel.</p>
    pub input_attachments: Option<Vec<InputAttachment>>,
    /// <p>Specification of input for this channel (max. bitrate, resolution, codec, etc.)</p>
    pub input_specification: Option<InputSpecification>,
    /// <p>The log level to write to CloudWatch Logs.</p>
    pub log_level: Option<String>,
    /// <p>The name of the channel.</p>
    pub name: Option<String>,
    /// <p>An optional Amazon Resource Name (ARN) of the role to assume when running the Channel. If you do not specify this on an update call but the role was previously set that role will be removed.</p>
    pub role_arn: Option<String>,
}
/// <p>A request to update a channel. Optional fields left as None are omitted from the serialized request.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateChannelRequest {
    /// <p>The ID of the channel to update.</p>
    #[serde(rename = "ChannelId")]
    pub channel_id: String,
    /// <p>A list of output destinations for this channel.</p>
    #[serde(rename = "Destinations")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<OutputDestination>>,
    /// <p>The encoder settings for this channel.</p>
    #[serde(rename = "EncoderSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoder_settings: Option<EncoderSettings>,
    /// <p>List of input attachments for the channel.</p>
    #[serde(rename = "InputAttachments")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_attachments: Option<Vec<InputAttachment>>,
    /// <p>Specification of input for this channel (max. bitrate, resolution, codec, etc.)</p>
    #[serde(rename = "InputSpecification")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_specification: Option<InputSpecification>,
    /// <p>The log level to write to CloudWatch Logs.</p>
    #[serde(rename = "LogLevel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub log_level: Option<String>,
    /// <p>The name of the channel.</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>An optional Amazon Resource Name (ARN) of the role to assume when running the Channel. If you do not specify this on an update call but the role was previously set that role will be removed.</p>
    #[serde(rename = "RoleArn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub role_arn: Option<String>,
}
/// <p>Response from the UpdateChannel operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateChannelResponse {
    /// <p>The updated channel's description.</p>
    #[serde(rename = "Channel")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub channel: Option<Channel>,
}
/// <p>The updated channel's description.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct UpdateChannelResultModel {
    /// <p>The updated channel.</p>
    pub channel: Option<Channel>,
}
/// <p>The set of input fields that can be updated.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct UpdateInput {
    /// <p>Destination settings for PUSH type inputs.</p>
    pub destinations: Option<Vec<InputDestinationRequest>>,
    /// <p>A list of security groups referenced by IDs to attach to the input.</p>
    pub input_security_groups: Option<Vec<String>>,
    /// <p>Name of the input.</p>
    pub name: Option<String>,
    /// <p>The source URLs for a PULL-type input. Every PULL type input needs
    /// exactly two source URLs for redundancy.
    /// Only specify sources for PULL type Inputs. Leave Destinations empty.</p>
    pub sources: Option<Vec<InputSourceRequest>>,
}
/// <p>A request to update an input. Optional fields left as None are omitted from the serialized request.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateInputRequest {
    /// <p>Destination settings for PUSH type inputs.</p>
    #[serde(rename = "Destinations")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destinations: Option<Vec<InputDestinationRequest>>,
    /// <p>Unique ID of the input.</p>
    #[serde(rename = "InputId")]
    pub input_id: String,
    /// <p>A list of security groups referenced by IDs to attach to the input.</p>
    #[serde(rename = "InputSecurityGroups")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_security_groups: Option<Vec<String>>,
    /// <p>Name of the input.</p>
    #[serde(rename = "Name")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// <p>The source URLs for a PULL-type input. Every PULL type input needs
    /// exactly two source URLs for redundancy.
    /// Only specify sources for PULL type Inputs. Leave Destinations empty.</p>
    #[serde(rename = "Sources")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sources: Option<Vec<InputSourceRequest>>,
}
/// <p>Response from the UpdateInput operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateInputResponse {
    /// <p>The updated input.</p>
    #[serde(rename = "Input")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input: Option<Input>,
}
/// <p>Result of an input update.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct UpdateInputResultModel {
    /// <p>The updated input.</p>
    pub input: Option<Input>,
}
/// <p>The request to update some combination of the Input Security Group name and the IPv4 CIDRs the Input Security Group should allow.</p>
// NOTE(review): the doc above mentions updating the group name, but no name
// field is present in this struct — verify against the current API model.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateInputSecurityGroupRequest {
    /// <p>The id of the Input Security Group to update.</p>
    #[serde(rename = "InputSecurityGroupId")]
    pub input_security_group_id: String,
    /// <p>List of IPv4 CIDR addresses to whitelist</p>
    #[serde(rename = "WhitelistRules")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub whitelist_rules: Option<Vec<InputWhitelistRuleCidr>>,
}
/// <p>Response from the UpdateInputSecurityGroup operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateInputSecurityGroupResponse {
    /// <p>The updated input security group.</p>
    #[serde(rename = "SecurityGroup")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub security_group: Option<InputSecurityGroup>,
}
/// <p>Result of an input security group update.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct UpdateInputSecurityGroupResultModel {
    /// <p>The updated input security group.</p>
    pub security_group: Option<InputSecurityGroup>,
}
/// <p>A single validation failure: the path of the offending element and the associated message.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ValidationError {
    /// <p>Path to the element that failed validation.</p>
    pub element_path: Option<String>,
    /// <p>The error message describing the failure.</p>
    pub error_message: Option<String>,
}
/// <p>Video codec settings for an output.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VideoCodecSettings {
    /// <p>H.264 codec settings.</p>
    #[serde(rename = "H264Settings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub h264_settings: Option<H264Settings>,
}
/// <p>Video settings for this stream. Only Name is required; optional fields left as None are omitted from serialization.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VideoDescription {
    /// <p>Video codec settings.</p>
    #[serde(rename = "CodecSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub codec_settings: Option<VideoCodecSettings>,
    /// <p>Output video height (in pixels). Leave blank to use source video height. If left blank, width must also be unspecified.</p>
    #[serde(rename = "Height")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub height: Option<i64>,
    /// <p>The name of this VideoDescription. Outputs will use this name to uniquely identify this Description.  Description names should be unique within this Live Event.</p>
    #[serde(rename = "Name")]
    pub name: String,
    /// <p>Indicates how to respond to the AFD values in the input stream. Setting to "respond" causes input video to be clipped, depending on AFD value, input display aspect ratio and output display aspect ratio.</p>
    #[serde(rename = "RespondToAfd")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub respond_to_afd: Option<String>,
    /// <p>When set to "stretchToOutput", automatically configures the output position to stretch the video to the specified output resolution. This option will override any position value.</p>
    #[serde(rename = "ScalingBehavior")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scaling_behavior: Option<String>,
    /// <p>Changes the width of the anti-alias filter kernel used for scaling. Only applies if scaling is being performed and antiAlias is set to true. 0 is the softest setting, 100 the sharpest, and 50 recommended for most content.</p>
    #[serde(rename = "Sharpness")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sharpness: Option<i64>,
    /// <p>Output video width (in pixels). Leave out to use source video width.  If left out, height must also be left out. Display aspect ratio is always preserved by letterboxing or pillarboxing when necessary.</p>
    #[serde(rename = "Width")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub width: Option<i64>,
}
/// <p>Specifies a particular video stream within an input source. An input may have only a single video selector. All fields are optional; None fields are omitted when serialized.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VideoSelector {
    /// <p>Specifies the colorspace of an input. This setting works in tandem with colorSpaceConversion to determine if any conversion will be performed.</p>
    #[serde(rename = "ColorSpace")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub color_space: Option<String>,
    /// <p>Applies only if colorSpace is a value other than follow. This field controls how the value in the colorSpace field will be used. fallback means that when the input does include color space data, that data will be used, but when the input has no color space data, the value in colorSpace will be used. Choose fallback if your input is sometimes missing color space data, but when it does have color space data, that data is correct. force means to always use the value in colorSpace. Choose force if your input usually has no color space data or might have unreliable color space data.</p>
    #[serde(rename = "ColorSpaceUsage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub color_space_usage: Option<String>,
    /// <p>The video selector settings.</p>
    #[serde(rename = "SelectorSettings")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub selector_settings: Option<VideoSelectorSettings>,
}
/// <p>Video selector settings for choosing a stream by PID.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VideoSelectorPid {
    /// <p>Selects a specific PID from within a video source.</p>
    #[serde(rename = "Pid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pid: Option<i64>,
}
/// <p>Video selector settings for choosing a stream by program ID.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VideoSelectorProgramId {
    /// <p>Selects a specific program from within a multi-program transport stream. If the program doesn't exist, the first program within the transport stream will be selected by default.</p>
    #[serde(rename = "ProgramId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub program_id: Option<i64>,
}
/// <p>Video selector settings: select the stream either by PID or by program ID.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VideoSelectorSettings {
    /// <p>Selects the video stream by PID.</p>
    #[serde(rename = "VideoSelectorPid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video_selector_pid: Option<VideoSelectorPid>,
    /// <p>Selects the video stream by program ID.</p>
    #[serde(rename = "VideoSelectorProgramId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video_selector_program_id: Option<VideoSelectorProgramId>,
}
/// <p>Destination settings for WebVTT captions; this output type has no configurable fields.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct WebvttDestinationSettings {}
/// Errors returned by CreateChannel
#[derive(Debug, PartialEq)]
pub enum CreateChannelError {
    /// The service returned a BadGatewayException; the payload is the error message.
    BadGateway(String),
    /// The service returned a BadRequestException; the payload is the error message.
    BadRequest(String),
    /// The service returned a ConflictException; the payload is the error message.
    Conflict(String),
    /// The service returned a ForbiddenException; the payload is the error message.
    Forbidden(String),
    /// The service returned a GatewayTimeoutException; the payload is the error message.
    GatewayTimeout(String),
    /// The service returned an InternalServerErrorException; the payload is the error message.
    InternalServerError(String),
    /// The service returned a TooManyRequestsException; the payload is the error message.
    TooManyRequests(String),
    /// The service returned an UnprocessableEntityException; the payload is the error message.
    UnprocessableEntity(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl CreateChannelError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Map a raw HTTP error response from the CreateChannel operation onto a
    /// typed `CreateChannelError` variant. Falls back to `Unknown` when the
    /// body is not valid JSON or the error type is not recognized.
    pub fn from_response(res: BufferedHttpResponse) -> CreateChannelError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // The error type comes from the `x-amzn-errortype` header when
            // present (anything after a ':' is dropped), otherwise from the
            // body's "code"/"Code" field.
            let error_type = match res.headers.get("x-amzn-errortype") {
                // `unwrap_or` instead of `unwrap_or_else`: the fallback is a
                // constant, so a lazy closure is unnecessary.
                Some(raw_error_type) => raw_error_type.split(':').next().unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return CreateChannelError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return CreateChannelError::BadRequest(String::from(error_message))
                }
                "ConflictException" => {
                    return CreateChannelError::Conflict(String::from(error_message))
                }
                "ForbiddenException" => {
                    return CreateChannelError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return CreateChannelError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return CreateChannelError::InternalServerError(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return CreateChannelError::TooManyRequests(String::from(error_message))
                }
                "UnprocessableEntityException" => {
                    return CreateChannelError::UnprocessableEntity(String::from(error_message))
                }
                "ValidationException" => {
                    return CreateChannelError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        // Idiomatic tail expression instead of a trailing `return`.
        CreateChannelError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for CreateChannelError {
fn from(err: serde_json::error::Error) -> CreateChannelError {
CreateChannelError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for CreateChannelError {
fn from(err: CredentialsError) -> CreateChannelError {
CreateChannelError::Credentials(err)
}
}
impl From<HttpDispatchError> for CreateChannelError {
fn from(err: HttpDispatchError) -> CreateChannelError {
CreateChannelError::HttpDispatch(err)
}
}
impl From<io::Error> for CreateChannelError {
    /// Wrap an I/O failure as an `HttpDispatch` error.
    fn from(e: io::Error) -> Self {
        Self::HttpDispatch(HttpDispatchError::from(e))
    }
}
impl fmt::Display for CreateChannelError {
    /// Render the error as its description string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(self.description())
    }
}
impl Error for CreateChannelError {
    /// Return the carried message for service-side variants, delegate for
    /// wrapped credential/dispatch errors, and a fixed string for `Unknown`.
    fn description(&self) -> &str {
        match *self {
            // All of these variants carry the service-provided message.
            CreateChannelError::BadGateway(ref cause)
            | CreateChannelError::BadRequest(ref cause)
            | CreateChannelError::Conflict(ref cause)
            | CreateChannelError::Forbidden(ref cause)
            | CreateChannelError::GatewayTimeout(ref cause)
            | CreateChannelError::InternalServerError(ref cause)
            | CreateChannelError::TooManyRequests(ref cause)
            | CreateChannelError::UnprocessableEntity(ref cause)
            | CreateChannelError::Validation(ref cause)
            | CreateChannelError::ParseError(ref cause) => cause,
            // Delegate to the wrapped error's own description.
            CreateChannelError::Credentials(ref err) => err.description(),
            CreateChannelError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            CreateChannelError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by CreateInput
#[derive(Debug, PartialEq)]
pub enum CreateInputError {
    /// The service returned a BadGatewayException; the payload is the error message.
    BadGateway(String),
    /// The service returned a BadRequestException; the payload is the error message.
    BadRequest(String),
    /// The service returned a ForbiddenException; the payload is the error message.
    Forbidden(String),
    /// The service returned a GatewayTimeoutException; the payload is the error message.
    GatewayTimeout(String),
    /// The service returned an InternalServerErrorException; the payload is the error message.
    InternalServerError(String),
    /// The service returned a TooManyRequestsException; the payload is the error message.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl CreateInputError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> CreateInputError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"BadGatewayException" => {
return CreateInputError::BadGateway(String::from(error_message))
}
"BadRequestException" => {
return CreateInputError::BadRequest(String::from(error_message))
}
"ForbiddenException" => {
return CreateInputError::Forbidden(String::from(error_message))
}
"GatewayTimeoutException" => {
return CreateInputError::GatewayTimeout(String::from(error_message))
}
"InternalServerErrorException" => {
return CreateInputError::InternalServerError(String::from(error_message))
}
"TooManyRequestsException" => {
return CreateInputError::TooManyRequests(String::from(error_message))
}
"ValidationException" => {
return CreateInputError::Validation(error_message.to_string())
}
_ => {}
}
}
return CreateInputError::Unknown(res);
}
}
impl From<serde_json::error::Error> for CreateInputError {
fn from(err: serde_json::error::Error) -> CreateInputError {
CreateInputError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for CreateInputError {
fn from(err: CredentialsError) -> CreateInputError {
CreateInputError::Credentials(err)
}
}
impl From<HttpDispatchError> for CreateInputError {
fn from(err: HttpDispatchError) -> CreateInputError {
CreateInputError::HttpDispatch(err)
}
}
impl From<io::Error> for CreateInputError {
fn from(err: io::Error) -> CreateInputError {
CreateInputError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for CreateInputError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateInputError {
fn description(&self) -> &str {
match *self {
CreateInputError::BadGateway(ref cause) => cause,
CreateInputError::BadRequest(ref cause) => cause,
CreateInputError::Forbidden(ref cause) => cause,
CreateInputError::GatewayTimeout(ref cause) => cause,
CreateInputError::InternalServerError(ref cause) => cause,
CreateInputError::TooManyRequests(ref cause) => cause,
CreateInputError::Validation(ref cause) => cause,
CreateInputError::Credentials(ref err) => err.description(),
CreateInputError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
CreateInputError::ParseError(ref cause) => cause,
CreateInputError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by CreateInputSecurityGroup
#[derive(Debug, PartialEq)]
pub enum CreateInputSecurityGroupError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl CreateInputSecurityGroupError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Parses a buffered HTTP error response into a typed
    /// `CreateInputSecurityGroupError`. The error code comes from the
    /// `x-amzn-errortype` header (text before the first `:`) or the body's
    /// `code`/`Code` field; the message from `message`/`Message`. Anything
    /// unrecognized falls through to `Unknown(res)`.
    pub fn from_response(res: BufferedHttpResponse) -> CreateInputSecurityGroupError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    .unwrap_or_else(|| "Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or_else(|| "Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return CreateInputSecurityGroupError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return CreateInputSecurityGroupError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return CreateInputSecurityGroupError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return CreateInputSecurityGroupError::GatewayTimeout(String::from(
                        error_message,
                    ))
                }
                "InternalServerErrorException" => {
                    return CreateInputSecurityGroupError::InternalServerError(String::from(
                        error_message,
                    ))
                }
                "TooManyRequestsException" => {
                    return CreateInputSecurityGroupError::TooManyRequests(String::from(
                        error_message,
                    ))
                }
                "ValidationException" => {
                    return CreateInputSecurityGroupError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        return CreateInputSecurityGroupError::Unknown(res);
    }
}
// `From` conversions wrapping lower-level failures in the matching variant.
impl From<serde_json::error::Error> for CreateInputSecurityGroupError {
    fn from(err: serde_json::error::Error) -> CreateInputSecurityGroupError {
        CreateInputSecurityGroupError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for CreateInputSecurityGroupError {
    fn from(err: CredentialsError) -> CreateInputSecurityGroupError {
        CreateInputSecurityGroupError::Credentials(err)
    }
}
impl From<HttpDispatchError> for CreateInputSecurityGroupError {
    fn from(err: HttpDispatchError) -> CreateInputSecurityGroupError {
        CreateInputSecurityGroupError::HttpDispatch(err)
    }
}
impl From<io::Error> for CreateInputSecurityGroupError {
    fn from(err: io::Error) -> CreateInputSecurityGroupError {
        CreateInputSecurityGroupError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// `Display` delegates to `description()`.
impl fmt::Display for CreateInputSecurityGroupError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
// NOTE(review): `Error::description` is deprecated in later Rust releases;
// kept as-is since `Display` above relies on it.
impl Error for CreateInputSecurityGroupError {
    fn description(&self) -> &str {
        match *self {
            CreateInputSecurityGroupError::BadGateway(ref cause) => cause,
            CreateInputSecurityGroupError::BadRequest(ref cause) => cause,
            CreateInputSecurityGroupError::Forbidden(ref cause) => cause,
            CreateInputSecurityGroupError::GatewayTimeout(ref cause) => cause,
            CreateInputSecurityGroupError::InternalServerError(ref cause) => cause,
            CreateInputSecurityGroupError::TooManyRequests(ref cause) => cause,
            CreateInputSecurityGroupError::Validation(ref cause) => cause,
            CreateInputSecurityGroupError::Credentials(ref err) => err.description(),
            CreateInputSecurityGroupError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            CreateInputSecurityGroupError::ParseError(ref cause) => cause,
            CreateInputSecurityGroupError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DeleteChannel
#[derive(Debug, PartialEq)]
pub enum DeleteChannelError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ConflictException</p>
    Conflict(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for NotFoundException</p>
    NotFound(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DeleteChannelError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Parses a buffered HTTP error response into a typed `DeleteChannelError`.
    /// The error code comes from the `x-amzn-errortype` header (text before
    /// the first `:`) or the body's `code`/`Code` field; the message from
    /// `message`/`Message`. Anything unrecognized falls through to
    /// `Unknown(res)`.
    pub fn from_response(res: BufferedHttpResponse) -> DeleteChannelError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    .unwrap_or_else(|| "Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or_else(|| "Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DeleteChannelError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DeleteChannelError::BadRequest(String::from(error_message))
                }
                "ConflictException" => {
                    return DeleteChannelError::Conflict(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DeleteChannelError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DeleteChannelError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return DeleteChannelError::InternalServerError(String::from(error_message))
                }
                "NotFoundException" => {
                    return DeleteChannelError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DeleteChannelError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    return DeleteChannelError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        return DeleteChannelError::Unknown(res);
    }
}
// `From` conversions wrapping lower-level failures in the matching variant.
impl From<serde_json::error::Error> for DeleteChannelError {
    fn from(err: serde_json::error::Error) -> DeleteChannelError {
        DeleteChannelError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DeleteChannelError {
    fn from(err: CredentialsError) -> DeleteChannelError {
        DeleteChannelError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DeleteChannelError {
    fn from(err: HttpDispatchError) -> DeleteChannelError {
        DeleteChannelError::HttpDispatch(err)
    }
}
impl From<io::Error> for DeleteChannelError {
    fn from(err: io::Error) -> DeleteChannelError {
        DeleteChannelError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// `Display` delegates to `description()`.
impl fmt::Display for DeleteChannelError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
// NOTE(review): `Error::description` is deprecated in later Rust releases;
// kept as-is since `Display` above relies on it.
impl Error for DeleteChannelError {
    fn description(&self) -> &str {
        match *self {
            DeleteChannelError::BadGateway(ref cause) => cause,
            DeleteChannelError::BadRequest(ref cause) => cause,
            DeleteChannelError::Conflict(ref cause) => cause,
            DeleteChannelError::Forbidden(ref cause) => cause,
            DeleteChannelError::GatewayTimeout(ref cause) => cause,
            DeleteChannelError::InternalServerError(ref cause) => cause,
            DeleteChannelError::NotFound(ref cause) => cause,
            DeleteChannelError::TooManyRequests(ref cause) => cause,
            DeleteChannelError::Validation(ref cause) => cause,
            DeleteChannelError::Credentials(ref err) => err.description(),
            DeleteChannelError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DeleteChannelError::ParseError(ref cause) => cause,
            DeleteChannelError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DeleteInput
#[derive(Debug, PartialEq)]
pub enum DeleteInputError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ConflictException</p>
    Conflict(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for NotFoundException</p>
    NotFound(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DeleteInputError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Parses a buffered HTTP error response into a typed `DeleteInputError`.
    /// The error code comes from the `x-amzn-errortype` header (text before
    /// the first `:`) or the body's `code`/`Code` field; the message from
    /// `message`/`Message`. Anything unrecognized falls through to
    /// `Unknown(res)`.
    pub fn from_response(res: BufferedHttpResponse) -> DeleteInputError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    .unwrap_or_else(|| "Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or_else(|| "Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DeleteInputError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DeleteInputError::BadRequest(String::from(error_message))
                }
                "ConflictException" => {
                    return DeleteInputError::Conflict(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DeleteInputError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DeleteInputError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return DeleteInputError::InternalServerError(String::from(error_message))
                }
                "NotFoundException" => {
                    return DeleteInputError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DeleteInputError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    return DeleteInputError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        return DeleteInputError::Unknown(res);
    }
}
// `From` conversions wrapping lower-level failures in the matching variant.
impl From<serde_json::error::Error> for DeleteInputError {
    fn from(err: serde_json::error::Error) -> DeleteInputError {
        DeleteInputError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DeleteInputError {
    fn from(err: CredentialsError) -> DeleteInputError {
        DeleteInputError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DeleteInputError {
    fn from(err: HttpDispatchError) -> DeleteInputError {
        DeleteInputError::HttpDispatch(err)
    }
}
impl From<io::Error> for DeleteInputError {
    fn from(err: io::Error) -> DeleteInputError {
        DeleteInputError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// `Display` delegates to `description()`.
impl fmt::Display for DeleteInputError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
// NOTE(review): `Error::description` is deprecated in later Rust releases;
// kept as-is since `Display` above relies on it.
impl Error for DeleteInputError {
    fn description(&self) -> &str {
        match *self {
            DeleteInputError::BadGateway(ref cause) => cause,
            DeleteInputError::BadRequest(ref cause) => cause,
            DeleteInputError::Conflict(ref cause) => cause,
            DeleteInputError::Forbidden(ref cause) => cause,
            DeleteInputError::GatewayTimeout(ref cause) => cause,
            DeleteInputError::InternalServerError(ref cause) => cause,
            DeleteInputError::NotFound(ref cause) => cause,
            DeleteInputError::TooManyRequests(ref cause) => cause,
            DeleteInputError::Validation(ref cause) => cause,
            DeleteInputError::Credentials(ref err) => err.description(),
            DeleteInputError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DeleteInputError::ParseError(ref cause) => cause,
            DeleteInputError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DeleteInputSecurityGroup
#[derive(Debug, PartialEq)]
pub enum DeleteInputSecurityGroupError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for NotFoundException</p>
    NotFound(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DeleteInputSecurityGroupError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Parses a buffered HTTP error response into a typed
    /// `DeleteInputSecurityGroupError`. The error code comes from the
    /// `x-amzn-errortype` header (text before the first `:`) or the body's
    /// `code`/`Code` field; the message from `message`/`Message`. Anything
    /// unrecognized falls through to `Unknown(res)`.
    pub fn from_response(res: BufferedHttpResponse) -> DeleteInputSecurityGroupError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    .unwrap_or_else(|| "Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or_else(|| "Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DeleteInputSecurityGroupError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DeleteInputSecurityGroupError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DeleteInputSecurityGroupError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DeleteInputSecurityGroupError::GatewayTimeout(String::from(
                        error_message,
                    ))
                }
                "InternalServerErrorException" => {
                    return DeleteInputSecurityGroupError::InternalServerError(String::from(
                        error_message,
                    ))
                }
                "NotFoundException" => {
                    return DeleteInputSecurityGroupError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DeleteInputSecurityGroupError::TooManyRequests(String::from(
                        error_message,
                    ))
                }
                "ValidationException" => {
                    return DeleteInputSecurityGroupError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        return DeleteInputSecurityGroupError::Unknown(res);
    }
}
// `From` conversions wrapping lower-level failures in the matching variant.
impl From<serde_json::error::Error> for DeleteInputSecurityGroupError {
    fn from(err: serde_json::error::Error) -> DeleteInputSecurityGroupError {
        DeleteInputSecurityGroupError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DeleteInputSecurityGroupError {
    fn from(err: CredentialsError) -> DeleteInputSecurityGroupError {
        DeleteInputSecurityGroupError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DeleteInputSecurityGroupError {
    fn from(err: HttpDispatchError) -> DeleteInputSecurityGroupError {
        DeleteInputSecurityGroupError::HttpDispatch(err)
    }
}
impl From<io::Error> for DeleteInputSecurityGroupError {
    fn from(err: io::Error) -> DeleteInputSecurityGroupError {
        DeleteInputSecurityGroupError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// `Display` delegates to `description()`.
impl fmt::Display for DeleteInputSecurityGroupError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
// NOTE(review): `Error::description` is deprecated in later Rust releases;
// kept as-is since `Display` above relies on it.
impl Error for DeleteInputSecurityGroupError {
    fn description(&self) -> &str {
        match *self {
            DeleteInputSecurityGroupError::BadGateway(ref cause) => cause,
            DeleteInputSecurityGroupError::BadRequest(ref cause) => cause,
            DeleteInputSecurityGroupError::Forbidden(ref cause) => cause,
            DeleteInputSecurityGroupError::GatewayTimeout(ref cause) => cause,
            DeleteInputSecurityGroupError::InternalServerError(ref cause) => cause,
            DeleteInputSecurityGroupError::NotFound(ref cause) => cause,
            DeleteInputSecurityGroupError::TooManyRequests(ref cause) => cause,
            DeleteInputSecurityGroupError::Validation(ref cause) => cause,
            DeleteInputSecurityGroupError::Credentials(ref err) => err.description(),
            DeleteInputSecurityGroupError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            DeleteInputSecurityGroupError::ParseError(ref cause) => cause,
            DeleteInputSecurityGroupError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DeleteReservation
#[derive(Debug, PartialEq)]
pub enum DeleteReservationError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ConflictException</p>
    Conflict(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for NotFoundException</p>
    NotFound(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DeleteReservationError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Parses a buffered HTTP error response into a typed
    /// `DeleteReservationError`. The error code comes from the
    /// `x-amzn-errortype` header (text before the first `:`) or the body's
    /// `code`/`Code` field; the message from `message`/`Message`. Anything
    /// unrecognized falls through to `Unknown(res)`.
    pub fn from_response(res: BufferedHttpResponse) -> DeleteReservationError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    .unwrap_or_else(|| "Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or_else(|| "Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DeleteReservationError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DeleteReservationError::BadRequest(String::from(error_message))
                }
                "ConflictException" => {
                    return DeleteReservationError::Conflict(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DeleteReservationError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DeleteReservationError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return DeleteReservationError::InternalServerError(String::from(error_message))
                }
                "NotFoundException" => {
                    return DeleteReservationError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DeleteReservationError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    return DeleteReservationError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        return DeleteReservationError::Unknown(res);
    }
}
// `From` conversions wrapping lower-level failures in the matching variant.
impl From<serde_json::error::Error> for DeleteReservationError {
    fn from(err: serde_json::error::Error) -> DeleteReservationError {
        DeleteReservationError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DeleteReservationError {
    fn from(err: CredentialsError) -> DeleteReservationError {
        DeleteReservationError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DeleteReservationError {
    fn from(err: HttpDispatchError) -> DeleteReservationError {
        DeleteReservationError::HttpDispatch(err)
    }
}
impl From<io::Error> for DeleteReservationError {
    fn from(err: io::Error) -> DeleteReservationError {
        DeleteReservationError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// `Display` delegates to `description()`.
impl fmt::Display for DeleteReservationError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
// NOTE(review): `Error::description` is deprecated in later Rust releases;
// kept as-is since `Display` above relies on it.
impl Error for DeleteReservationError {
    fn description(&self) -> &str {
        match *self {
            DeleteReservationError::BadGateway(ref cause) => cause,
            DeleteReservationError::BadRequest(ref cause) => cause,
            DeleteReservationError::Conflict(ref cause) => cause,
            DeleteReservationError::Forbidden(ref cause) => cause,
            DeleteReservationError::GatewayTimeout(ref cause) => cause,
            DeleteReservationError::InternalServerError(ref cause) => cause,
            DeleteReservationError::NotFound(ref cause) => cause,
            DeleteReservationError::TooManyRequests(ref cause) => cause,
            DeleteReservationError::Validation(ref cause) => cause,
            DeleteReservationError::Credentials(ref err) => err.description(),
            DeleteReservationError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            DeleteReservationError::ParseError(ref cause) => cause,
            DeleteReservationError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DescribeChannel
#[derive(Debug, PartialEq)]
pub enum DescribeChannelError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for NotFoundException</p>
    NotFound(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DescribeChannelError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Parses a buffered HTTP error response into a typed
    /// `DescribeChannelError`. The error code comes from the
    /// `x-amzn-errortype` header (text before the first `:`) or the body's
    /// `code`/`Code` field; the message from `message`/`Message`. Anything
    /// unrecognized falls through to `Unknown(res)`.
    pub fn from_response(res: BufferedHttpResponse) -> DescribeChannelError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    .unwrap_or_else(|| "Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or_else(|| "Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DescribeChannelError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DescribeChannelError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DescribeChannelError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DescribeChannelError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return DescribeChannelError::InternalServerError(String::from(error_message))
                }
                "NotFoundException" => {
                    return DescribeChannelError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DescribeChannelError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    return DescribeChannelError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        return DescribeChannelError::Unknown(res);
    }
}
// `From` conversions wrapping lower-level failures in the matching variant.
impl From<serde_json::error::Error> for DescribeChannelError {
    fn from(err: serde_json::error::Error) -> DescribeChannelError {
        DescribeChannelError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DescribeChannelError {
    fn from(err: CredentialsError) -> DescribeChannelError {
        DescribeChannelError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DescribeChannelError {
    fn from(err: HttpDispatchError) -> DescribeChannelError {
        DescribeChannelError::HttpDispatch(err)
    }
}
impl From<io::Error> for DescribeChannelError {
    fn from(err: io::Error) -> DescribeChannelError {
        DescribeChannelError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// `Display` delegates to `description()`.
impl fmt::Display for DescribeChannelError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
// NOTE(review): `Error::description` is deprecated in later Rust releases;
// kept as-is since `Display` above relies on it.
impl Error for DescribeChannelError {
    fn description(&self) -> &str {
        match *self {
            DescribeChannelError::BadGateway(ref cause) => cause,
            DescribeChannelError::BadRequest(ref cause) => cause,
            DescribeChannelError::Forbidden(ref cause) => cause,
            DescribeChannelError::GatewayTimeout(ref cause) => cause,
            DescribeChannelError::InternalServerError(ref cause) => cause,
            DescribeChannelError::NotFound(ref cause) => cause,
            DescribeChannelError::TooManyRequests(ref cause) => cause,
            DescribeChannelError::Validation(ref cause) => cause,
            DescribeChannelError::Credentials(ref err) => err.description(),
            DescribeChannelError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DescribeChannelError::ParseError(ref cause) => cause,
            DescribeChannelError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DescribeInput
///
/// Service-reported variants carry the error message extracted from the
/// response; the remaining variants cover client-side failure modes.
#[derive(Debug, PartialEq)]
pub enum DescribeInputError {
    /// <p>Placeholder documentation for BadGatewayException</p>
    BadGateway(String),
    /// <p>Placeholder documentation for BadRequestException</p>
    BadRequest(String),
    /// <p>Placeholder documentation for ForbiddenException</p>
    Forbidden(String),
    /// <p>Placeholder documentation for GatewayTimeoutException</p>
    GatewayTimeout(String),
    /// <p>Placeholder documentation for InternalServerErrorException</p>
    InternalServerError(String),
    /// <p>Placeholder documentation for NotFoundException</p>
    NotFound(String),
    /// <p>Placeholder documentation for TooManyRequestsException</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DescribeInputError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> DescribeInputError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"BadGatewayException" => {
return DescribeInputError::BadGateway(String::from(error_message))
}
"BadRequestException" => {
return DescribeInputError::BadRequest(String::from(error_message))
}
"ForbiddenException" => {
return DescribeInputError::Forbidden(String::from(error_message))
}
"GatewayTimeoutException" => {
return DescribeInputError::GatewayTimeout(String::from(error_message))
}
"InternalServerErrorException" => {
return DescribeInputError::InternalServerError(String::from(error_message))
}
"NotFoundException" => {
return DescribeInputError::NotFound(String::from(error_message))
}
"TooManyRequestsException" => {
return DescribeInputError::TooManyRequests(String::from(error_message))
}
"ValidationException" => {
return DescribeInputError::Validation(error_message.to_string())
}
_ => {}
}
}
return DescribeInputError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DescribeInputError {
fn from(err: serde_json::error::Error) -> DescribeInputError {
DescribeInputError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DescribeInputError {
fn from(err: CredentialsError) -> DescribeInputError {
DescribeInputError::Credentials(err)
}
}
impl From<HttpDispatchError> for DescribeInputError {
fn from(err: HttpDispatchError) -> DescribeInputError {
DescribeInputError::HttpDispatch(err)
}
}
impl From<io::Error> for DescribeInputError {
fn from(err: io::Error) -> DescribeInputError {
DescribeInputError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DescribeInputError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DescribeInputError {
fn description(&self) -> &str {
match *self {
DescribeInputError::BadGateway(ref cause) => cause,
DescribeInputError::BadRequest(ref cause) => cause,
DescribeInputError::Forbidden(ref cause) => cause,
DescribeInputError::GatewayTimeout(ref cause) => cause,
DescribeInputError::InternalServerError(ref cause) => cause,
DescribeInputError::NotFound(ref cause) => cause,
DescribeInputError::TooManyRequests(ref cause) => cause,
DescribeInputError::Validation(ref cause) => cause,
DescribeInputError::Credentials(ref err) => err.description(),
DescribeInputError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
DescribeInputError::ParseError(ref cause) => cause,
DescribeInputError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DescribeInputSecurityGroup
#[derive(Debug, PartialEq)]
pub enum DescribeInputSecurityGroupError {
    /// The service returned a <code>BadGatewayException</code>.
    BadGateway(String),
    /// The service returned a <code>BadRequestException</code>.
    BadRequest(String),
    /// The service returned a <code>ForbiddenException</code>.
    Forbidden(String),
    /// The service returned a <code>GatewayTimeoutException</code>.
    GatewayTimeout(String),
    /// The service returned an <code>InternalServerErrorException</code>.
    InternalServerError(String),
    /// The service returned a <code>NotFoundException</code>.
    NotFound(String),
    /// The service returned a <code>TooManyRequestsException</code>.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DescribeInputSecurityGroupError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Maps a raw HTTP error response onto the matching
    /// `DescribeInputSecurityGroupError` variant. The error type is taken
    /// from the `x-amzn-errortype` header (truncated at the first `:`) or,
    /// failing that, the body's `code`/`Code` field; unrecognized or
    /// unparseable responses become `Unknown`, carrying the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> DescribeInputSecurityGroupError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    // Fallback is a constant, so `unwrap_or` (no closure)
                    // is the idiomatic form.
                    .unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DescribeInputSecurityGroupError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DescribeInputSecurityGroupError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DescribeInputSecurityGroupError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DescribeInputSecurityGroupError::GatewayTimeout(String::from(
                        error_message,
                    ))
                }
                "InternalServerErrorException" => {
                    return DescribeInputSecurityGroupError::InternalServerError(String::from(
                        error_message,
                    ))
                }
                "NotFoundException" => {
                    return DescribeInputSecurityGroupError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DescribeInputSecurityGroupError::TooManyRequests(String::from(
                        error_message,
                    ))
                }
                "ValidationException" => {
                    // `String::from` for consistency with the arms above.
                    return DescribeInputSecurityGroupError::Validation(String::from(error_message))
                }
                _ => {}
            }
        }
        // Unparseable body or unrecognized error type: keep the raw response.
        DescribeInputSecurityGroupError::Unknown(res)
    }
}
// Conversions from lower-level failures so `?` can promote them.
impl From<serde_json::error::Error> for DescribeInputSecurityGroupError {
    fn from(err: serde_json::error::Error) -> DescribeInputSecurityGroupError {
        DescribeInputSecurityGroupError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DescribeInputSecurityGroupError {
    fn from(err: CredentialsError) -> DescribeInputSecurityGroupError {
        DescribeInputSecurityGroupError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DescribeInputSecurityGroupError {
    fn from(err: HttpDispatchError) -> DescribeInputSecurityGroupError {
        DescribeInputSecurityGroupError::HttpDispatch(err)
    }
}
impl From<io::Error> for DescribeInputSecurityGroupError {
    fn from(err: io::Error) -> DescribeInputSecurityGroupError {
        DescribeInputSecurityGroupError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// Display delegates to `Error::description`, which holds the AWS message.
impl fmt::Display for DescribeInputSecurityGroupError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for DescribeInputSecurityGroupError {
    // Returns the stored error message (static fallback for `Unknown`).
    fn description(&self) -> &str {
        match *self {
            DescribeInputSecurityGroupError::BadGateway(ref cause) => cause,
            DescribeInputSecurityGroupError::BadRequest(ref cause) => cause,
            DescribeInputSecurityGroupError::Forbidden(ref cause) => cause,
            DescribeInputSecurityGroupError::GatewayTimeout(ref cause) => cause,
            DescribeInputSecurityGroupError::InternalServerError(ref cause) => cause,
            DescribeInputSecurityGroupError::NotFound(ref cause) => cause,
            DescribeInputSecurityGroupError::TooManyRequests(ref cause) => cause,
            DescribeInputSecurityGroupError::Validation(ref cause) => cause,
            DescribeInputSecurityGroupError::Credentials(ref err) => err.description(),
            DescribeInputSecurityGroupError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            DescribeInputSecurityGroupError::ParseError(ref cause) => cause,
            DescribeInputSecurityGroupError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DescribeOffering
#[derive(Debug, PartialEq)]
pub enum DescribeOfferingError {
    /// The service returned a <code>BadGatewayException</code>.
    BadGateway(String),
    /// The service returned a <code>BadRequestException</code>.
    BadRequest(String),
    /// The service returned a <code>ForbiddenException</code>.
    Forbidden(String),
    /// The service returned a <code>GatewayTimeoutException</code>.
    GatewayTimeout(String),
    /// The service returned an <code>InternalServerErrorException</code>.
    InternalServerError(String),
    /// The service returned a <code>NotFoundException</code>.
    NotFound(String),
    /// The service returned a <code>TooManyRequestsException</code>.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DescribeOfferingError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Maps a raw HTTP error response onto the matching
    /// `DescribeOfferingError` variant. The error type is taken from the
    /// `x-amzn-errortype` header (truncated at the first `:`) or, failing
    /// that, the body's `code`/`Code` field; unrecognized or unparseable
    /// responses become `Unknown`, carrying the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> DescribeOfferingError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    // Fallback is a constant, so `unwrap_or` (no closure)
                    // is the idiomatic form.
                    .unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DescribeOfferingError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DescribeOfferingError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DescribeOfferingError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DescribeOfferingError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return DescribeOfferingError::InternalServerError(String::from(error_message))
                }
                "NotFoundException" => {
                    return DescribeOfferingError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DescribeOfferingError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    // `String::from` for consistency with the arms above.
                    return DescribeOfferingError::Validation(String::from(error_message))
                }
                _ => {}
            }
        }
        // Unparseable body or unrecognized error type: keep the raw response.
        DescribeOfferingError::Unknown(res)
    }
}
// Conversions from lower-level failures so `?` can promote them.
impl From<serde_json::error::Error> for DescribeOfferingError {
    fn from(err: serde_json::error::Error) -> DescribeOfferingError {
        DescribeOfferingError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DescribeOfferingError {
    fn from(err: CredentialsError) -> DescribeOfferingError {
        DescribeOfferingError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DescribeOfferingError {
    fn from(err: HttpDispatchError) -> DescribeOfferingError {
        DescribeOfferingError::HttpDispatch(err)
    }
}
impl From<io::Error> for DescribeOfferingError {
    fn from(err: io::Error) -> DescribeOfferingError {
        DescribeOfferingError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// Display delegates to `Error::description`, which holds the AWS message.
impl fmt::Display for DescribeOfferingError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for DescribeOfferingError {
    // Returns the stored error message (static fallback for `Unknown`).
    fn description(&self) -> &str {
        match *self {
            DescribeOfferingError::BadGateway(ref cause) => cause,
            DescribeOfferingError::BadRequest(ref cause) => cause,
            DescribeOfferingError::Forbidden(ref cause) => cause,
            DescribeOfferingError::GatewayTimeout(ref cause) => cause,
            DescribeOfferingError::InternalServerError(ref cause) => cause,
            DescribeOfferingError::NotFound(ref cause) => cause,
            DescribeOfferingError::TooManyRequests(ref cause) => cause,
            DescribeOfferingError::Validation(ref cause) => cause,
            DescribeOfferingError::Credentials(ref err) => err.description(),
            DescribeOfferingError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            DescribeOfferingError::ParseError(ref cause) => cause,
            DescribeOfferingError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by DescribeReservation
#[derive(Debug, PartialEq)]
pub enum DescribeReservationError {
    /// The service returned a <code>BadGatewayException</code>.
    BadGateway(String),
    /// The service returned a <code>BadRequestException</code>.
    BadRequest(String),
    /// The service returned a <code>ForbiddenException</code>.
    Forbidden(String),
    /// The service returned a <code>GatewayTimeoutException</code>.
    GatewayTimeout(String),
    /// The service returned an <code>InternalServerErrorException</code>.
    InternalServerError(String),
    /// The service returned a <code>NotFoundException</code>.
    NotFound(String),
    /// The service returned a <code>TooManyRequestsException</code>.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl DescribeReservationError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Maps a raw HTTP error response onto the matching
    /// `DescribeReservationError` variant. The error type is taken from the
    /// `x-amzn-errortype` header (truncated at the first `:`) or, failing
    /// that, the body's `code`/`Code` field; unrecognized or unparseable
    /// responses become `Unknown`, carrying the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> DescribeReservationError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    // Fallback is a constant, so `unwrap_or` (no closure)
                    // is the idiomatic form.
                    .unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return DescribeReservationError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return DescribeReservationError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return DescribeReservationError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return DescribeReservationError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return DescribeReservationError::InternalServerError(String::from(
                        error_message,
                    ))
                }
                "NotFoundException" => {
                    return DescribeReservationError::NotFound(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return DescribeReservationError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    // `String::from` for consistency with the arms above.
                    return DescribeReservationError::Validation(String::from(error_message))
                }
                _ => {}
            }
        }
        // Unparseable body or unrecognized error type: keep the raw response.
        DescribeReservationError::Unknown(res)
    }
}
// Conversions from lower-level failures so `?` can promote them.
impl From<serde_json::error::Error> for DescribeReservationError {
    fn from(err: serde_json::error::Error) -> DescribeReservationError {
        DescribeReservationError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for DescribeReservationError {
    fn from(err: CredentialsError) -> DescribeReservationError {
        DescribeReservationError::Credentials(err)
    }
}
impl From<HttpDispatchError> for DescribeReservationError {
    fn from(err: HttpDispatchError) -> DescribeReservationError {
        DescribeReservationError::HttpDispatch(err)
    }
}
impl From<io::Error> for DescribeReservationError {
    fn from(err: io::Error) -> DescribeReservationError {
        DescribeReservationError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// Display delegates to `Error::description`, which holds the AWS message.
impl fmt::Display for DescribeReservationError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for DescribeReservationError {
    // Returns the stored error message (static fallback for `Unknown`).
    fn description(&self) -> &str {
        match *self {
            DescribeReservationError::BadGateway(ref cause) => cause,
            DescribeReservationError::BadRequest(ref cause) => cause,
            DescribeReservationError::Forbidden(ref cause) => cause,
            DescribeReservationError::GatewayTimeout(ref cause) => cause,
            DescribeReservationError::InternalServerError(ref cause) => cause,
            DescribeReservationError::NotFound(ref cause) => cause,
            DescribeReservationError::TooManyRequests(ref cause) => cause,
            DescribeReservationError::Validation(ref cause) => cause,
            DescribeReservationError::Credentials(ref err) => err.description(),
            DescribeReservationError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            DescribeReservationError::ParseError(ref cause) => cause,
            DescribeReservationError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by ListChannels
#[derive(Debug, PartialEq)]
pub enum ListChannelsError {
    /// The service returned a <code>BadGatewayException</code>.
    BadGateway(String),
    /// The service returned a <code>BadRequestException</code>.
    BadRequest(String),
    /// The service returned a <code>ForbiddenException</code>.
    Forbidden(String),
    /// The service returned a <code>GatewayTimeoutException</code>.
    GatewayTimeout(String),
    /// The service returned an <code>InternalServerErrorException</code>.
    InternalServerError(String),
    /// The service returned a <code>TooManyRequestsException</code>.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl ListChannelsError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Maps a raw HTTP error response onto the matching `ListChannelsError`
    /// variant. The error type is taken from the `x-amzn-errortype` header
    /// (truncated at the first `:`) or, failing that, the body's
    /// `code`/`Code` field; unrecognized or unparseable responses become
    /// `Unknown`, carrying the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> ListChannelsError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    // Fallback is a constant, so `unwrap_or` (no closure)
                    // is the idiomatic form.
                    .unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            // NOTE: ListChannels has no NotFoundException mapping.
            match error_type {
                "BadGatewayException" => {
                    return ListChannelsError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return ListChannelsError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return ListChannelsError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return ListChannelsError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return ListChannelsError::InternalServerError(String::from(error_message))
                }
                "TooManyRequestsException" => {
                    return ListChannelsError::TooManyRequests(String::from(error_message))
                }
                "ValidationException" => {
                    // `String::from` for consistency with the arms above.
                    return ListChannelsError::Validation(String::from(error_message))
                }
                _ => {}
            }
        }
        // Unparseable body or unrecognized error type: keep the raw response.
        ListChannelsError::Unknown(res)
    }
}
// Conversions from lower-level failures so `?` can promote them.
impl From<serde_json::error::Error> for ListChannelsError {
    fn from(err: serde_json::error::Error) -> ListChannelsError {
        ListChannelsError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for ListChannelsError {
    fn from(err: CredentialsError) -> ListChannelsError {
        ListChannelsError::Credentials(err)
    }
}
impl From<HttpDispatchError> for ListChannelsError {
    fn from(err: HttpDispatchError) -> ListChannelsError {
        ListChannelsError::HttpDispatch(err)
    }
}
impl From<io::Error> for ListChannelsError {
    fn from(err: io::Error) -> ListChannelsError {
        ListChannelsError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// Display delegates to `Error::description`, which holds the AWS message.
impl fmt::Display for ListChannelsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for ListChannelsError {
    // Returns the stored error message (static fallback for `Unknown`).
    fn description(&self) -> &str {
        match *self {
            ListChannelsError::BadGateway(ref cause) => cause,
            ListChannelsError::BadRequest(ref cause) => cause,
            ListChannelsError::Forbidden(ref cause) => cause,
            ListChannelsError::GatewayTimeout(ref cause) => cause,
            ListChannelsError::InternalServerError(ref cause) => cause,
            ListChannelsError::TooManyRequests(ref cause) => cause,
            ListChannelsError::Validation(ref cause) => cause,
            ListChannelsError::Credentials(ref err) => err.description(),
            ListChannelsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
            ListChannelsError::ParseError(ref cause) => cause,
            ListChannelsError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by ListInputSecurityGroups
#[derive(Debug, PartialEq)]
pub enum ListInputSecurityGroupsError {
    /// The service returned a <code>BadGatewayException</code>.
    BadGateway(String),
    /// The service returned a <code>BadRequestException</code>.
    BadRequest(String),
    /// The service returned a <code>ForbiddenException</code>.
    Forbidden(String),
    /// The service returned a <code>GatewayTimeoutException</code>.
    GatewayTimeout(String),
    /// The service returned an <code>InternalServerErrorException</code>.
    InternalServerError(String),
    /// The service returned a <code>TooManyRequestsException</code>.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl ListInputSecurityGroupsError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Maps a raw HTTP error response onto the matching
    /// `ListInputSecurityGroupsError` variant. The error type is taken from
    /// the `x-amzn-errortype` header (truncated at the first `:`) or,
    /// failing that, the body's `code`/`Code` field; unrecognized or
    /// unparseable responses become `Unknown`, carrying the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> ListInputSecurityGroupsError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type
                    .split(':')
                    .next()
                    // Fallback is a constant, so `unwrap_or` (no closure)
                    // is the idiomatic form.
                    .unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return ListInputSecurityGroupsError::BadGateway(String::from(error_message))
                }
                "BadRequestException" => {
                    return ListInputSecurityGroupsError::BadRequest(String::from(error_message))
                }
                "ForbiddenException" => {
                    return ListInputSecurityGroupsError::Forbidden(String::from(error_message))
                }
                "GatewayTimeoutException" => {
                    return ListInputSecurityGroupsError::GatewayTimeout(String::from(error_message))
                }
                "InternalServerErrorException" => {
                    return ListInputSecurityGroupsError::InternalServerError(String::from(
                        error_message,
                    ))
                }
                "TooManyRequestsException" => {
                    return ListInputSecurityGroupsError::TooManyRequests(String::from(
                        error_message,
                    ))
                }
                "ValidationException" => {
                    // `String::from` for consistency with the arms above.
                    return ListInputSecurityGroupsError::Validation(String::from(error_message))
                }
                _ => {}
            }
        }
        // Unparseable body or unrecognized error type: keep the raw response.
        ListInputSecurityGroupsError::Unknown(res)
    }
}
// Conversions from lower-level failures so `?` can promote them.
impl From<serde_json::error::Error> for ListInputSecurityGroupsError {
    fn from(err: serde_json::error::Error) -> ListInputSecurityGroupsError {
        ListInputSecurityGroupsError::ParseError(err.description().to_string())
    }
}
impl From<CredentialsError> for ListInputSecurityGroupsError {
    fn from(err: CredentialsError) -> ListInputSecurityGroupsError {
        ListInputSecurityGroupsError::Credentials(err)
    }
}
impl From<HttpDispatchError> for ListInputSecurityGroupsError {
    fn from(err: HttpDispatchError) -> ListInputSecurityGroupsError {
        ListInputSecurityGroupsError::HttpDispatch(err)
    }
}
impl From<io::Error> for ListInputSecurityGroupsError {
    fn from(err: io::Error) -> ListInputSecurityGroupsError {
        ListInputSecurityGroupsError::HttpDispatch(HttpDispatchError::from(err))
    }
}
// Display delegates to `Error::description`, which holds the AWS message.
impl fmt::Display for ListInputSecurityGroupsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for ListInputSecurityGroupsError {
    // Returns the stored error message (static fallback for `Unknown`).
    fn description(&self) -> &str {
        match *self {
            ListInputSecurityGroupsError::BadGateway(ref cause) => cause,
            ListInputSecurityGroupsError::BadRequest(ref cause) => cause,
            ListInputSecurityGroupsError::Forbidden(ref cause) => cause,
            ListInputSecurityGroupsError::GatewayTimeout(ref cause) => cause,
            ListInputSecurityGroupsError::InternalServerError(ref cause) => cause,
            ListInputSecurityGroupsError::TooManyRequests(ref cause) => cause,
            ListInputSecurityGroupsError::Validation(ref cause) => cause,
            ListInputSecurityGroupsError::Credentials(ref err) => err.description(),
            ListInputSecurityGroupsError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            ListInputSecurityGroupsError::ParseError(ref cause) => cause,
            ListInputSecurityGroupsError::Unknown(_) => "unknown error",
        }
    }
}
/// Errors returned by ListInputs
#[derive(Debug, PartialEq)]
pub enum ListInputsError {
/// <p>Placeholder documentation for BadGatewayException</p>
BadGateway(String),
/// <p>Placeholder documentation for BadRequestException</p>
BadRequest(String),
/// <p>Placeholder documentation for ForbiddenException</p>
Forbidden(String),
/// <p>Placeholder documentation for GatewayTimeoutException</p>
GatewayTimeout(String),
/// <p>Placeholder documentation for InternalServerErrorException</p>
InternalServerError(String),
/// <p>Placeholder documentation for TooManyRequestsException</p>
TooManyRequests(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl ListInputsError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> ListInputsError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"BadGatewayException" => {
return ListInputsError::BadGateway(String::from(error_message))
}
"BadRequestException" => {
return ListInputsError::BadRequest(String::from(error_message))
}
"ForbiddenException" => {
return ListInputsError::Forbidden(String::from(error_message))
}
"GatewayTimeoutException" => {
return ListInputsError::GatewayTimeout(String::from(error_message))
}
"InternalServerErrorException" => {
return ListInputsError::InternalServerError(String::from(error_message))
}
"TooManyRequestsException" => {
return ListInputsError::TooManyRequests(String::from(error_message))
}
"ValidationException" => {
return ListInputsError::Validation(error_message.to_string())
}
_ => {}
}
}
return ListInputsError::Unknown(res);
}
}
impl From<serde_json::error::Error> for ListInputsError {
fn from(err: serde_json::error::Error) -> ListInputsError {
ListInputsError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for ListInputsError {
fn from(err: CredentialsError) -> ListInputsError {
ListInputsError::Credentials(err)
}
}
impl From<HttpDispatchError> for ListInputsError {
fn from(err: HttpDispatchError) -> ListInputsError {
ListInputsError::HttpDispatch(err)
}
}
impl From<io::Error> for ListInputsError {
fn from(err: io::Error) -> ListInputsError {
ListInputsError::HttpDispatch(HttpDispatchError::from(err))
}<|fim▁hole|> write!(f, "{}", self.description())
}
}
impl Error for ListInputsError {
fn description(&self) -> &str {
match *self {
ListInputsError::BadGateway(ref cause) => cause,
ListInputsError::BadRequest(ref cause) => cause,
ListInputsError::Forbidden(ref cause) => cause,
ListInputsError::GatewayTimeout(ref cause) => cause,
ListInputsError::InternalServerError(ref cause) => cause,
ListInputsError::TooManyRequests(ref cause) => cause,
ListInputsError::Validation(ref cause) => cause,
ListInputsError::Credentials(ref err) => err.description(),
ListInputsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
ListInputsError::ParseError(ref cause) => cause,
ListInputsError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by ListOfferings
#[derive(Debug, PartialEq)]
pub enum ListOfferingsError {
    /// The service returned a <code>BadGatewayException</code>.
    BadGateway(String),
    /// The service returned a <code>BadRequestException</code>.
    BadRequest(String),
    /// The service returned a <code>ForbiddenException</code>.
    Forbidden(String),
    /// The service returned a <code>GatewayTimeoutException</code>.
    GatewayTimeout(String),
    /// The service returned an <code>InternalServerErrorException</code>.
    InternalServerError(String),
    /// The service returned a <code>TooManyRequestsException</code>.
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl ListOfferingsError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `ListOfferingsError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> ListOfferingsError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return ListOfferingsError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return ListOfferingsError::BadRequest(error_message.to_string())
                }
                "ForbiddenException" => {
                    return ListOfferingsError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return ListOfferingsError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return ListOfferingsError::InternalServerError(error_message.to_string())
                }
                "TooManyRequestsException" => {
                    return ListOfferingsError::TooManyRequests(error_message.to_string())
                }
                "ValidationException" => {
                    return ListOfferingsError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        ListOfferingsError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for ListOfferingsError {
fn from(err: serde_json::error::Error) -> ListOfferingsError {
ListOfferingsError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for ListOfferingsError {
fn from(err: CredentialsError) -> ListOfferingsError {
ListOfferingsError::Credentials(err)
}
}
impl From<HttpDispatchError> for ListOfferingsError {
fn from(err: HttpDispatchError) -> ListOfferingsError {
ListOfferingsError::HttpDispatch(err)
}
}
impl From<io::Error> for ListOfferingsError {
fn from(err: io::Error) -> ListOfferingsError {
ListOfferingsError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for ListOfferingsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListOfferingsError {
fn description(&self) -> &str {
match *self {
ListOfferingsError::BadGateway(ref cause) => cause,
ListOfferingsError::BadRequest(ref cause) => cause,
ListOfferingsError::Forbidden(ref cause) => cause,
ListOfferingsError::GatewayTimeout(ref cause) => cause,
ListOfferingsError::InternalServerError(ref cause) => cause,
ListOfferingsError::TooManyRequests(ref cause) => cause,
ListOfferingsError::Validation(ref cause) => cause,
ListOfferingsError::Credentials(ref err) => err.description(),
ListOfferingsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
ListOfferingsError::ParseError(ref cause) => cause,
ListOfferingsError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by ListReservations
#[derive(Debug, PartialEq)]
pub enum ListReservationsError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with a <code>TooManyRequestsException</code>.</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl ListReservationsError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `ListReservationsError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> ListReservationsError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return ListReservationsError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return ListReservationsError::BadRequest(error_message.to_string())
                }
                "ForbiddenException" => {
                    return ListReservationsError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return ListReservationsError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return ListReservationsError::InternalServerError(error_message.to_string())
                }
                "TooManyRequestsException" => {
                    return ListReservationsError::TooManyRequests(error_message.to_string())
                }
                "ValidationException" => {
                    return ListReservationsError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        ListReservationsError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for ListReservationsError {
fn from(err: serde_json::error::Error) -> ListReservationsError {
ListReservationsError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for ListReservationsError {
fn from(err: CredentialsError) -> ListReservationsError {
ListReservationsError::Credentials(err)
}
}
impl From<HttpDispatchError> for ListReservationsError {
fn from(err: HttpDispatchError) -> ListReservationsError {
ListReservationsError::HttpDispatch(err)
}
}
impl From<io::Error> for ListReservationsError {
fn from(err: io::Error) -> ListReservationsError {
ListReservationsError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for ListReservationsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListReservationsError {
fn description(&self) -> &str {
match *self {
ListReservationsError::BadGateway(ref cause) => cause,
ListReservationsError::BadRequest(ref cause) => cause,
ListReservationsError::Forbidden(ref cause) => cause,
ListReservationsError::GatewayTimeout(ref cause) => cause,
ListReservationsError::InternalServerError(ref cause) => cause,
ListReservationsError::TooManyRequests(ref cause) => cause,
ListReservationsError::Validation(ref cause) => cause,
ListReservationsError::Credentials(ref err) => err.description(),
ListReservationsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
ListReservationsError::ParseError(ref cause) => cause,
ListReservationsError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by PurchaseOffering
#[derive(Debug, PartialEq)]
pub enum PurchaseOfferingError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ConflictException</code>.</p>
    Conflict(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with a <code>NotFoundException</code>.</p>
    NotFound(String),
    /// <p>The service responded with a <code>TooManyRequestsException</code>.</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl PurchaseOfferingError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `PurchaseOfferingError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> PurchaseOfferingError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return PurchaseOfferingError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return PurchaseOfferingError::BadRequest(error_message.to_string())
                }
                "ConflictException" => {
                    return PurchaseOfferingError::Conflict(error_message.to_string())
                }
                "ForbiddenException" => {
                    return PurchaseOfferingError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return PurchaseOfferingError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return PurchaseOfferingError::InternalServerError(error_message.to_string())
                }
                "NotFoundException" => {
                    return PurchaseOfferingError::NotFound(error_message.to_string())
                }
                "TooManyRequestsException" => {
                    return PurchaseOfferingError::TooManyRequests(error_message.to_string())
                }
                "ValidationException" => {
                    return PurchaseOfferingError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        PurchaseOfferingError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for PurchaseOfferingError {
fn from(err: serde_json::error::Error) -> PurchaseOfferingError {
PurchaseOfferingError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for PurchaseOfferingError {
fn from(err: CredentialsError) -> PurchaseOfferingError {
PurchaseOfferingError::Credentials(err)
}
}
impl From<HttpDispatchError> for PurchaseOfferingError {
fn from(err: HttpDispatchError) -> PurchaseOfferingError {
PurchaseOfferingError::HttpDispatch(err)
}
}
impl From<io::Error> for PurchaseOfferingError {
fn from(err: io::Error) -> PurchaseOfferingError {
PurchaseOfferingError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for PurchaseOfferingError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for PurchaseOfferingError {
fn description(&self) -> &str {
match *self {
PurchaseOfferingError::BadGateway(ref cause) => cause,
PurchaseOfferingError::BadRequest(ref cause) => cause,
PurchaseOfferingError::Conflict(ref cause) => cause,
PurchaseOfferingError::Forbidden(ref cause) => cause,
PurchaseOfferingError::GatewayTimeout(ref cause) => cause,
PurchaseOfferingError::InternalServerError(ref cause) => cause,
PurchaseOfferingError::NotFound(ref cause) => cause,
PurchaseOfferingError::TooManyRequests(ref cause) => cause,
PurchaseOfferingError::Validation(ref cause) => cause,
PurchaseOfferingError::Credentials(ref err) => err.description(),
PurchaseOfferingError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
PurchaseOfferingError::ParseError(ref cause) => cause,
PurchaseOfferingError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by StartChannel
#[derive(Debug, PartialEq)]
pub enum StartChannelError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ConflictException</code>.</p>
    Conflict(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with a <code>NotFoundException</code>.</p>
    NotFound(String),
    /// <p>The service responded with a <code>TooManyRequestsException</code>.</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl StartChannelError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `StartChannelError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> StartChannelError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return StartChannelError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return StartChannelError::BadRequest(error_message.to_string())
                }
                "ConflictException" => {
                    return StartChannelError::Conflict(error_message.to_string())
                }
                "ForbiddenException" => {
                    return StartChannelError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return StartChannelError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return StartChannelError::InternalServerError(error_message.to_string())
                }
                "NotFoundException" => {
                    return StartChannelError::NotFound(error_message.to_string())
                }
                "TooManyRequestsException" => {
                    return StartChannelError::TooManyRequests(error_message.to_string())
                }
                "ValidationException" => {
                    return StartChannelError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        StartChannelError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for StartChannelError {
fn from(err: serde_json::error::Error) -> StartChannelError {
StartChannelError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for StartChannelError {
fn from(err: CredentialsError) -> StartChannelError {
StartChannelError::Credentials(err)
}
}
impl From<HttpDispatchError> for StartChannelError {
fn from(err: HttpDispatchError) -> StartChannelError {
StartChannelError::HttpDispatch(err)
}
}
impl From<io::Error> for StartChannelError {
fn from(err: io::Error) -> StartChannelError {
StartChannelError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for StartChannelError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for StartChannelError {
fn description(&self) -> &str {
match *self {
StartChannelError::BadGateway(ref cause) => cause,
StartChannelError::BadRequest(ref cause) => cause,
StartChannelError::Conflict(ref cause) => cause,
StartChannelError::Forbidden(ref cause) => cause,
StartChannelError::GatewayTimeout(ref cause) => cause,
StartChannelError::InternalServerError(ref cause) => cause,
StartChannelError::NotFound(ref cause) => cause,
StartChannelError::TooManyRequests(ref cause) => cause,
StartChannelError::Validation(ref cause) => cause,
StartChannelError::Credentials(ref err) => err.description(),
StartChannelError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
StartChannelError::ParseError(ref cause) => cause,
StartChannelError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by StopChannel
#[derive(Debug, PartialEq)]
pub enum StopChannelError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ConflictException</code>.</p>
    Conflict(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with a <code>NotFoundException</code>.</p>
    NotFound(String),
    /// <p>The service responded with a <code>TooManyRequestsException</code>.</p>
    TooManyRequests(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl StopChannelError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `StopChannelError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> StopChannelError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return StopChannelError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return StopChannelError::BadRequest(error_message.to_string())
                }
                "ConflictException" => {
                    return StopChannelError::Conflict(error_message.to_string())
                }
                "ForbiddenException" => {
                    return StopChannelError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return StopChannelError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return StopChannelError::InternalServerError(error_message.to_string())
                }
                "NotFoundException" => {
                    return StopChannelError::NotFound(error_message.to_string())
                }
                "TooManyRequestsException" => {
                    return StopChannelError::TooManyRequests(error_message.to_string())
                }
                "ValidationException" => {
                    return StopChannelError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        StopChannelError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for StopChannelError {
fn from(err: serde_json::error::Error) -> StopChannelError {
StopChannelError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for StopChannelError {
fn from(err: CredentialsError) -> StopChannelError {
StopChannelError::Credentials(err)
}
}
impl From<HttpDispatchError> for StopChannelError {
fn from(err: HttpDispatchError) -> StopChannelError {
StopChannelError::HttpDispatch(err)
}
}
impl From<io::Error> for StopChannelError {
fn from(err: io::Error) -> StopChannelError {
StopChannelError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for StopChannelError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for StopChannelError {
fn description(&self) -> &str {
match *self {
StopChannelError::BadGateway(ref cause) => cause,
StopChannelError::BadRequest(ref cause) => cause,
StopChannelError::Conflict(ref cause) => cause,
StopChannelError::Forbidden(ref cause) => cause,
StopChannelError::GatewayTimeout(ref cause) => cause,
StopChannelError::InternalServerError(ref cause) => cause,
StopChannelError::NotFound(ref cause) => cause,
StopChannelError::TooManyRequests(ref cause) => cause,
StopChannelError::Validation(ref cause) => cause,
StopChannelError::Credentials(ref err) => err.description(),
StopChannelError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
StopChannelError::ParseError(ref cause) => cause,
StopChannelError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by UpdateChannel
#[derive(Debug, PartialEq)]
pub enum UpdateChannelError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ConflictException</code>.</p>
    Conflict(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with an <code>UnprocessableEntityException</code>.</p>
    UnprocessableEntity(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl UpdateChannelError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `UpdateChannelError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> UpdateChannelError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return UpdateChannelError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return UpdateChannelError::BadRequest(error_message.to_string())
                }
                "ConflictException" => {
                    return UpdateChannelError::Conflict(error_message.to_string())
                }
                "ForbiddenException" => {
                    return UpdateChannelError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return UpdateChannelError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return UpdateChannelError::InternalServerError(error_message.to_string())
                }
                "UnprocessableEntityException" => {
                    return UpdateChannelError::UnprocessableEntity(error_message.to_string())
                }
                "ValidationException" => {
                    return UpdateChannelError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        UpdateChannelError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for UpdateChannelError {
fn from(err: serde_json::error::Error) -> UpdateChannelError {
UpdateChannelError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for UpdateChannelError {
fn from(err: CredentialsError) -> UpdateChannelError {
UpdateChannelError::Credentials(err)
}
}
impl From<HttpDispatchError> for UpdateChannelError {
fn from(err: HttpDispatchError) -> UpdateChannelError {
UpdateChannelError::HttpDispatch(err)
}
}
impl From<io::Error> for UpdateChannelError {
fn from(err: io::Error) -> UpdateChannelError {
UpdateChannelError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for UpdateChannelError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateChannelError {
fn description(&self) -> &str {
match *self {
UpdateChannelError::BadGateway(ref cause) => cause,
UpdateChannelError::BadRequest(ref cause) => cause,
UpdateChannelError::Conflict(ref cause) => cause,
UpdateChannelError::Forbidden(ref cause) => cause,
UpdateChannelError::GatewayTimeout(ref cause) => cause,
UpdateChannelError::InternalServerError(ref cause) => cause,
UpdateChannelError::UnprocessableEntity(ref cause) => cause,
UpdateChannelError::Validation(ref cause) => cause,
UpdateChannelError::Credentials(ref err) => err.description(),
UpdateChannelError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
UpdateChannelError::ParseError(ref cause) => cause,
UpdateChannelError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by UpdateInput
#[derive(Debug, PartialEq)]
pub enum UpdateInputError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ConflictException</code>.</p>
    Conflict(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with a <code>NotFoundException</code>.</p>
    NotFound(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl UpdateInputError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Translates a buffered HTTP error response into the matching
    /// `UpdateInputError` variant; a non-JSON body or an unrecognized
    /// error type falls back to `Unknown` with the raw response.
    pub fn from_response(res: BufferedHttpResponse) -> UpdateInputError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // Prefer the `x-amzn-errortype` header (dropping any `:metadata`
            // suffix); otherwise fall back to the JSON `code`/`Code` field.
            let error_type = res
                .headers
                .get("x-amzn-errortype")
                .map(|raw| raw.split(':').next().unwrap_or("Unknown"))
                .unwrap_or_else(|| {
                    json.get("code")
                        .or_else(|| json.get("Code"))
                        .and_then(|c| c.as_str())
                        .unwrap_or("Unknown")
                });
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return UpdateInputError::BadGateway(error_message.to_string())
                }
                "BadRequestException" => {
                    return UpdateInputError::BadRequest(error_message.to_string())
                }
                "ConflictException" => {
                    return UpdateInputError::Conflict(error_message.to_string())
                }
                "ForbiddenException" => {
                    return UpdateInputError::Forbidden(error_message.to_string())
                }
                "GatewayTimeoutException" => {
                    return UpdateInputError::GatewayTimeout(error_message.to_string())
                }
                "InternalServerErrorException" => {
                    return UpdateInputError::InternalServerError(error_message.to_string())
                }
                "NotFoundException" => {
                    return UpdateInputError::NotFound(error_message.to_string())
                }
                "ValidationException" => {
                    return UpdateInputError::Validation(error_message.to_string())
                }
                _ => {}
            }
        }
        UpdateInputError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for UpdateInputError {
fn from(err: serde_json::error::Error) -> UpdateInputError {
UpdateInputError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for UpdateInputError {
fn from(err: CredentialsError) -> UpdateInputError {
UpdateInputError::Credentials(err)
}
}
impl From<HttpDispatchError> for UpdateInputError {
fn from(err: HttpDispatchError) -> UpdateInputError {
UpdateInputError::HttpDispatch(err)
}
}
impl From<io::Error> for UpdateInputError {
fn from(err: io::Error) -> UpdateInputError {
UpdateInputError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for UpdateInputError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateInputError {
fn description(&self) -> &str {
match *self {
UpdateInputError::BadGateway(ref cause) => cause,
UpdateInputError::BadRequest(ref cause) => cause,
UpdateInputError::Conflict(ref cause) => cause,
UpdateInputError::Forbidden(ref cause) => cause,
UpdateInputError::GatewayTimeout(ref cause) => cause,
UpdateInputError::InternalServerError(ref cause) => cause,
UpdateInputError::NotFound(ref cause) => cause,
UpdateInputError::Validation(ref cause) => cause,
UpdateInputError::Credentials(ref err) => err.description(),
UpdateInputError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
UpdateInputError::ParseError(ref cause) => cause,
UpdateInputError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by UpdateInputSecurityGroup
#[derive(Debug, PartialEq)]
pub enum UpdateInputSecurityGroupError {
    /// <p>The service responded with a <code>BadGatewayException</code>.</p>
    BadGateway(String),
    /// <p>The service responded with a <code>BadRequestException</code>.</p>
    BadRequest(String),
    /// <p>The service responded with a <code>ConflictException</code>.</p>
    Conflict(String),
    /// <p>The service responded with a <code>ForbiddenException</code>.</p>
    Forbidden(String),
    /// <p>The service responded with a <code>GatewayTimeoutException</code>.</p>
    GatewayTimeout(String),
    /// <p>The service responded with an <code>InternalServerErrorException</code>.</p>
    InternalServerError(String),
    /// <p>The service responded with a <code>NotFoundException</code>.</p>
    NotFound(String),
    /// An error occurred dispatching the HTTP request
    HttpDispatch(HttpDispatchError),
    /// An error was encountered with AWS credentials.
    Credentials(CredentialsError),
    /// A validation error occurred. Details from AWS are provided.
    Validation(String),
    /// An error occurred parsing the response payload.
    ParseError(String),
    /// An unknown error occurred. The raw HTTP response is provided.
    Unknown(BufferedHttpResponse),
}
impl UpdateInputSecurityGroupError {
    // see boto RestJSONParser impl for parsing errors
    // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
    /// Builds a typed error from a raw HTTP error response, falling back
    /// to `Unknown` when the body is not JSON or the error type is not
    /// recognized.
    pub fn from_response(res: BufferedHttpResponse) -> UpdateInputSecurityGroupError {
        if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
            // The error type comes from the `x-amzn-errortype` header
            // (everything before the first ':'), or, when the header is
            // absent, from the body's "code"/"Code" field.
            let error_type = match res.headers.get("x-amzn-errortype") {
                Some(raw_error_type) => raw_error_type.split(':').next().unwrap_or("Unknown"),
                _ => json
                    .get("code")
                    .or_else(|| json.get("Code"))
                    .and_then(|c| c.as_str())
                    .unwrap_or("Unknown"),
            };
            // message can come in either "message" or "Message"
            // see boto BaseJSONParser impl for parsing message
            // https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
            let error_message = json
                .get("message")
                .or_else(|| json.get("Message"))
                .and_then(|m| m.as_str())
                .unwrap_or("");
            match error_type {
                "BadGatewayException" => {
                    return UpdateInputSecurityGroupError::BadGateway(error_message.to_string());
                }
                "BadRequestException" => {
                    return UpdateInputSecurityGroupError::BadRequest(error_message.to_string());
                }
                "ConflictException" => {
                    return UpdateInputSecurityGroupError::Conflict(error_message.to_string());
                }
                "ForbiddenException" => {
                    return UpdateInputSecurityGroupError::Forbidden(error_message.to_string());
                }
                "GatewayTimeoutException" => {
                    return UpdateInputSecurityGroupError::GatewayTimeout(
                        error_message.to_string(),
                    );
                }
                "InternalServerErrorException" => {
                    return UpdateInputSecurityGroupError::InternalServerError(
                        error_message.to_string(),
                    );
                }
                "NotFoundException" => {
                    return UpdateInputSecurityGroupError::NotFound(error_message.to_string());
                }
                "ValidationException" => {
                    return UpdateInputSecurityGroupError::Validation(error_message.to_string());
                }
                _ => {}
            }
        }
        // Unrecognized error type or non-JSON body: hand back the raw
        // buffered response for the caller to inspect.
        UpdateInputSecurityGroupError::Unknown(res)
    }
}
impl From<serde_json::error::Error> for UpdateInputSecurityGroupError {
fn from(err: serde_json::error::Error) -> UpdateInputSecurityGroupError {
UpdateInputSecurityGroupError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for UpdateInputSecurityGroupError {
    /// Wraps an AWS credentials failure.
    fn from(e: CredentialsError) -> UpdateInputSecurityGroupError {
        UpdateInputSecurityGroupError::Credentials(e)
    }
}
impl From<HttpDispatchError> for UpdateInputSecurityGroupError {
    /// Wraps an HTTP dispatch failure.
    fn from(e: HttpDispatchError) -> UpdateInputSecurityGroupError {
        UpdateInputSecurityGroupError::HttpDispatch(e)
    }
}
impl From<io::Error> for UpdateInputSecurityGroupError {
fn from(err: io::Error) -> UpdateInputSecurityGroupError {
UpdateInputSecurityGroupError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for UpdateInputSecurityGroupError {
    /// Formats the error using its `Error::description` text.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Same output as `write!(f, "{}", self.description())`, writing
        // the string slice directly.
        f.write_str(self.description())
    }
}
impl Error for UpdateInputSecurityGroupError {
    /// Returns the human-readable message carried by each variant.
    fn description(&self) -> &str {
        match *self {
            // All message-carrying variants hold a `String` cause and can
            // share a single arm via an or-pattern.
            UpdateInputSecurityGroupError::BadGateway(ref cause)
            | UpdateInputSecurityGroupError::BadRequest(ref cause)
            | UpdateInputSecurityGroupError::Conflict(ref cause)
            | UpdateInputSecurityGroupError::Forbidden(ref cause)
            | UpdateInputSecurityGroupError::GatewayTimeout(ref cause)
            | UpdateInputSecurityGroupError::InternalServerError(ref cause)
            | UpdateInputSecurityGroupError::NotFound(ref cause)
            | UpdateInputSecurityGroupError::Validation(ref cause)
            | UpdateInputSecurityGroupError::ParseError(ref cause) => cause,
            // Wrapped errors delegate to their own description.
            UpdateInputSecurityGroupError::Credentials(ref err) => err.description(),
            UpdateInputSecurityGroupError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            UpdateInputSecurityGroupError::Unknown(_) => "unknown error",
        }
    }
}
/// Trait representing the capabilities of the MediaLive API. MediaLive clients implement this trait.
pub trait MediaLive {
    /// <p>Creates a new channel</p>
    fn create_channel(
        &self,
        input: CreateChannelRequest,
    ) -> RusotoFuture<CreateChannelResponse, CreateChannelError>;
    /// <p>Create an input</p>
    fn create_input(
        &self,
        input: CreateInputRequest,
    ) -> RusotoFuture<CreateInputResponse, CreateInputError>;
    /// <p>Creates an Input Security Group</p>
    fn create_input_security_group(
        &self,
        input: CreateInputSecurityGroupRequest,
    ) -> RusotoFuture<CreateInputSecurityGroupResponse, CreateInputSecurityGroupError>;
    /// <p>Starts deletion of channel. The associated outputs are also deleted.</p>
    fn delete_channel(
        &self,
        input: DeleteChannelRequest,
    ) -> RusotoFuture<DeleteChannelResponse, DeleteChannelError>;
    /// <p>Deletes the input end point</p>
    fn delete_input(
        &self,
        input: DeleteInputRequest,
    ) -> RusotoFuture<DeleteInputResponse, DeleteInputError>;
    /// <p>Deletes an Input Security Group</p>
    fn delete_input_security_group(
        &self,
        input: DeleteInputSecurityGroupRequest,
    ) -> RusotoFuture<DeleteInputSecurityGroupResponse, DeleteInputSecurityGroupError>;
    /// <p>Delete an expired reservation.</p>
    fn delete_reservation(
        &self,
        input: DeleteReservationRequest,
    ) -> RusotoFuture<DeleteReservationResponse, DeleteReservationError>;
    /// <p>Gets details about a channel</p>
    fn describe_channel(
        &self,
        input: DescribeChannelRequest,
    ) -> RusotoFuture<DescribeChannelResponse, DescribeChannelError>;
    /// <p>Produces details about an input</p>
    fn describe_input(
        &self,
        input: DescribeInputRequest,
    ) -> RusotoFuture<DescribeInputResponse, DescribeInputError>;
    /// <p>Produces a summary of an Input Security Group</p>
    fn describe_input_security_group(
        &self,
        input: DescribeInputSecurityGroupRequest,
    ) -> RusotoFuture<DescribeInputSecurityGroupResponse, DescribeInputSecurityGroupError>;
    /// <p>Get details for an offering.</p>
    fn describe_offering(
        &self,
        input: DescribeOfferingRequest,
    ) -> RusotoFuture<DescribeOfferingResponse, DescribeOfferingError>;
    /// <p>Get details for a reservation.</p>
    fn describe_reservation(
        &self,
        input: DescribeReservationRequest,
    ) -> RusotoFuture<DescribeReservationResponse, DescribeReservationError>;
    /// <p>Produces list of channels that have been created</p>
    fn list_channels(
        &self,
        input: ListChannelsRequest,
    ) -> RusotoFuture<ListChannelsResponse, ListChannelsError>;
    /// <p>Produces a list of Input Security Groups for an account</p>
    fn list_input_security_groups(
        &self,
        input: ListInputSecurityGroupsRequest,
    ) -> RusotoFuture<ListInputSecurityGroupsResponse, ListInputSecurityGroupsError>;
    /// <p>Produces list of inputs that have been created</p>
    fn list_inputs(
        &self,
        input: ListInputsRequest,
    ) -> RusotoFuture<ListInputsResponse, ListInputsError>;
    /// <p>List offerings available for purchase.</p>
    fn list_offerings(
        &self,
        input: ListOfferingsRequest,
    ) -> RusotoFuture<ListOfferingsResponse, ListOfferingsError>;
    /// <p>List purchased reservations.</p>
    fn list_reservations(
        &self,
        input: ListReservationsRequest,
    ) -> RusotoFuture<ListReservationsResponse, ListReservationsError>;
    /// <p>Purchase an offering and create a reservation.</p>
    fn purchase_offering(
        &self,
        input: PurchaseOfferingRequest,
    ) -> RusotoFuture<PurchaseOfferingResponse, PurchaseOfferingError>;
    /// <p>Starts an existing channel</p>
    fn start_channel(
        &self,
        input: StartChannelRequest,
    ) -> RusotoFuture<StartChannelResponse, StartChannelError>;
    /// <p>Stops a running channel</p>
    fn stop_channel(
        &self,
        input: StopChannelRequest,
    ) -> RusotoFuture<StopChannelResponse, StopChannelError>;
    /// <p>Updates a channel.</p>
    fn update_channel(
        &self,
        input: UpdateChannelRequest,
    ) -> RusotoFuture<UpdateChannelResponse, UpdateChannelError>;
    /// <p>Updates an input.</p>
    fn update_input(
        &self,
        input: UpdateInputRequest,
    ) -> RusotoFuture<UpdateInputResponse, UpdateInputError>;
    /// <p>Update an Input Security Group's Whitelists.</p>
    fn update_input_security_group(
        &self,
        input: UpdateInputSecurityGroupRequest,
    ) -> RusotoFuture<UpdateInputSecurityGroupResponse, UpdateInputSecurityGroupError>;
}
/// A client for the MediaLive API.
pub struct MediaLiveClient {
    // Shared HTTP/signing client used to dispatch signed requests.
    client: Client,
    // AWS region the requests are signed for and sent to.
    region: region::Region,
}
impl MediaLiveClient {
    /// Creates a client backed by the default tokio event loop.
    ///
    /// The client will use the default credentials provider and tls client.
    pub fn new(region: region::Region) -> MediaLiveClient {
        MediaLiveClient {
            client: Client::shared(),
            region,
        }
    }

    /// Creates a client that uses the supplied request dispatcher and
    /// credentials provider instead of the shared defaults.
    pub fn new_with<P, D>(
        request_dispatcher: D,
        credentials_provider: P,
        region: region::Region,
    ) -> MediaLiveClient
    where
        P: ProvideAwsCredentials + Send + Sync + 'static,
        P::Future: Send,
        D: DispatchSignedRequest + Send + Sync + 'static,
        D::Future: Send,
    {
        MediaLiveClient {
            client: Client::new_with(credentials_provider, request_dispatcher),
            region,
        }
    }
}
// Every method below follows the same generated pattern: build a
// `SignedRequest` for the operation's REST endpoint, optionally attach a
// JSON payload and/or query parameters, then sign and dispatch it. On the
// expected HTTP status the JSON body is deserialized into the response
// type (an empty or literal-"null" body is treated as "{}"); any other
// status is converted into the operation's error enum via `from_response`.
impl MediaLive for MediaLiveClient {
    /// <p>Creates a new channel</p>
    fn create_channel(
        &self,
        input: CreateChannelRequest,
    ) -> RusotoFuture<CreateChannelResponse, CreateChannelError> {
        let request_uri = "/prod/channels";
        let mut request = SignedRequest::new("POST", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            // 201 Created is the success status for this operation.
            if response.status.as_u16() == 201 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    // Normalize empty / "null" bodies so deserialization of
                    // the response struct succeeds.
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<CreateChannelResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(CreateChannelError::from_response(response))),
                )
            }
        })
    }
    /// <p>Create an input</p>
    fn create_input(
        &self,
        input: CreateInputRequest,
    ) -> RusotoFuture<CreateInputResponse, CreateInputError> {
        let request_uri = "/prod/inputs";
        let mut request = SignedRequest::new("POST", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 201 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<CreateInputResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(CreateInputError::from_response(response))),
                )
            }
        })
    }
    /// <p>Creates a Input Security Group</p>
    fn create_input_security_group(
        &self,
        input: CreateInputSecurityGroupRequest,
    ) -> RusotoFuture<CreateInputSecurityGroupResponse, CreateInputSecurityGroupError> {
        let request_uri = "/prod/inputSecurityGroups";
        let mut request = SignedRequest::new("POST", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<CreateInputSecurityGroupResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(response.buffer().from_err().and_then(|response| {
                    Err(CreateInputSecurityGroupError::from_response(response))
                }))
            }
        })
    }
    /// <p>Starts deletion of channel. The associated outputs are also deleted.</p>
    fn delete_channel(
        &self,
        input: DeleteChannelRequest,
    ) -> RusotoFuture<DeleteChannelResponse, DeleteChannelError> {
        let request_uri = format!("/prod/channels/{channel_id}", channel_id = input.channel_id);
        let mut request = SignedRequest::new("DELETE", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<DeleteChannelResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(DeleteChannelError::from_response(response))),
                )
            }
        })
    }
    /// <p>Deletes the input end point</p>
    fn delete_input(
        &self,
        input: DeleteInputRequest,
    ) -> RusotoFuture<DeleteInputResponse, DeleteInputError> {
        let request_uri = format!("/prod/inputs/{input_id}", input_id = input.input_id);
        let mut request = SignedRequest::new("DELETE", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<DeleteInputResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(DeleteInputError::from_response(response))),
                )
            }
        })
    }
    /// <p>Deletes an Input Security Group</p>
    fn delete_input_security_group(
        &self,
        input: DeleteInputSecurityGroupRequest,
    ) -> RusotoFuture<DeleteInputSecurityGroupResponse, DeleteInputSecurityGroupError> {
        let request_uri = format!(
            "/prod/inputSecurityGroups/{input_security_group_id}",
            input_security_group_id = input.input_security_group_id
        );
        let mut request = SignedRequest::new("DELETE", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<DeleteInputSecurityGroupResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(response.buffer().from_err().and_then(|response| {
                    Err(DeleteInputSecurityGroupError::from_response(response))
                }))
            }
        })
    }
    /// <p>Delete an expired reservation.</p>
    fn delete_reservation(
        &self,
        input: DeleteReservationRequest,
    ) -> RusotoFuture<DeleteReservationResponse, DeleteReservationError> {
        let request_uri = format!(
            "/prod/reservations/{reservation_id}",
            reservation_id = input.reservation_id
        );
        let mut request = SignedRequest::new("DELETE", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<DeleteReservationResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(DeleteReservationError::from_response(response))),
                )
            }
        })
    }
    /// <p>Gets details about a channel</p>
    fn describe_channel(
        &self,
        input: DescribeChannelRequest,
    ) -> RusotoFuture<DescribeChannelResponse, DescribeChannelError> {
        let request_uri = format!("/prod/channels/{channel_id}", channel_id = input.channel_id);
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<DescribeChannelResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(DescribeChannelError::from_response(response))),
                )
            }
        })
    }
    /// <p>Produces details about an input</p>
    fn describe_input(
        &self,
        input: DescribeInputRequest,
    ) -> RusotoFuture<DescribeInputResponse, DescribeInputError> {
        let request_uri = format!("/prod/inputs/{input_id}", input_id = input.input_id);
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<DescribeInputResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(DescribeInputError::from_response(response))),
                )
            }
        })
    }
    /// <p>Produces a summary of an Input Security Group</p>
    fn describe_input_security_group(
        &self,
        input: DescribeInputSecurityGroupRequest,
    ) -> RusotoFuture<DescribeInputSecurityGroupResponse, DescribeInputSecurityGroupError> {
        let request_uri = format!(
            "/prod/inputSecurityGroups/{input_security_group_id}",
            input_security_group_id = input.input_security_group_id
        );
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<DescribeInputSecurityGroupResponse>(&body)
                            .unwrap();
                    result
                }))
            } else {
                Box::new(response.buffer().from_err().and_then(|response| {
                    Err(DescribeInputSecurityGroupError::from_response(response))
                }))
            }
        })
    }
    /// <p>Get details for an offering.</p>
    fn describe_offering(
        &self,
        input: DescribeOfferingRequest,
    ) -> RusotoFuture<DescribeOfferingResponse, DescribeOfferingError> {
        let request_uri = format!(
            "/prod/offerings/{offering_id}",
            offering_id = input.offering_id
        );
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<DescribeOfferingResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(DescribeOfferingError::from_response(response))),
                )
            }
        })
    }
    /// <p>Get details for a reservation.</p>
    fn describe_reservation(
        &self,
        input: DescribeReservationRequest,
    ) -> RusotoFuture<DescribeReservationResponse, DescribeReservationError> {
        let request_uri = format!(
            "/prod/reservations/{reservation_id}",
            reservation_id = input.reservation_id
        );
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<DescribeReservationResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response.buffer().from_err().and_then(|response| {
                        Err(DescribeReservationError::from_response(response))
                    }),
                )
            }
        })
    }
    /// <p>Produces list of channels that have been created</p>
    fn list_channels(
        &self,
        input: ListChannelsRequest,
    ) -> RusotoFuture<ListChannelsResponse, ListChannelsError> {
        let request_uri = "/prod/channels";
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        // Optional pagination parameters are forwarded as query params.
        let mut params = Params::new();
        if let Some(ref x) = input.max_results {
            params.put("maxResults", x);
        }
        if let Some(ref x) = input.next_token {
            params.put("nextToken", x);
        }
        request.set_params(params);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<ListChannelsResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(ListChannelsError::from_response(response))),
                )
            }
        })
    }
    /// <p>Produces a list of Input Security Groups for an account</p>
    fn list_input_security_groups(
        &self,
        input: ListInputSecurityGroupsRequest,
    ) -> RusotoFuture<ListInputSecurityGroupsResponse, ListInputSecurityGroupsError> {
        let request_uri = "/prod/inputSecurityGroups";
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let mut params = Params::new();
        if let Some(ref x) = input.max_results {
            params.put("maxResults", x);
        }
        if let Some(ref x) = input.next_token {
            params.put("nextToken", x);
        }
        request.set_params(params);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<ListInputSecurityGroupsResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(response.buffer().from_err().and_then(|response| {
                    Err(ListInputSecurityGroupsError::from_response(response))
                }))
            }
        })
    }
    /// <p>Produces list of inputs that have been created</p>
    fn list_inputs(
        &self,
        input: ListInputsRequest,
    ) -> RusotoFuture<ListInputsResponse, ListInputsError> {
        let request_uri = "/prod/inputs";
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let mut params = Params::new();
        if let Some(ref x) = input.max_results {
            params.put("maxResults", x);
        }
        if let Some(ref x) = input.next_token {
            params.put("nextToken", x);
        }
        request.set_params(params);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<ListInputsResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(ListInputsError::from_response(response))),
                )
            }
        })
    }
    /// <p>List offerings available for purchase.</p>
    fn list_offerings(
        &self,
        input: ListOfferingsRequest,
    ) -> RusotoFuture<ListOfferingsResponse, ListOfferingsError> {
        let request_uri = "/prod/offerings";
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        // All filter fields are optional; only set ones become query params.
        let mut params = Params::new();
        if let Some(ref x) = input.channel_configuration {
            params.put("channelConfiguration", x);
        }
        if let Some(ref x) = input.codec {
            params.put("codec", x);
        }
        if let Some(ref x) = input.max_results {
            params.put("maxResults", x);
        }
        if let Some(ref x) = input.maximum_bitrate {
            params.put("maximumBitrate", x);
        }
        if let Some(ref x) = input.maximum_framerate {
            params.put("maximumFramerate", x);
        }
        if let Some(ref x) = input.next_token {
            params.put("nextToken", x);
        }
        if let Some(ref x) = input.resolution {
            params.put("resolution", x);
        }
        if let Some(ref x) = input.resource_type {
            params.put("resourceType", x);
        }
        if let Some(ref x) = input.special_feature {
            params.put("specialFeature", x);
        }
        if let Some(ref x) = input.video_quality {
            params.put("videoQuality", x);
        }
        request.set_params(params);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<ListOfferingsResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(ListOfferingsError::from_response(response))),
                )
            }
        })
    }
    /// <p>List purchased reservations.</p>
    fn list_reservations(
        &self,
        input: ListReservationsRequest,
    ) -> RusotoFuture<ListReservationsResponse, ListReservationsError> {
        let request_uri = "/prod/reservations";
        let mut request = SignedRequest::new("GET", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let mut params = Params::new();
        if let Some(ref x) = input.codec {
            params.put("codec", x);
        }
        if let Some(ref x) = input.max_results {
            params.put("maxResults", x);
        }
        if let Some(ref x) = input.maximum_bitrate {
            params.put("maximumBitrate", x);
        }
        if let Some(ref x) = input.maximum_framerate {
            params.put("maximumFramerate", x);
        }
        if let Some(ref x) = input.next_token {
            params.put("nextToken", x);
        }
        if let Some(ref x) = input.resolution {
            params.put("resolution", x);
        }
        if let Some(ref x) = input.resource_type {
            params.put("resourceType", x);
        }
        if let Some(ref x) = input.special_feature {
            params.put("specialFeature", x);
        }
        if let Some(ref x) = input.video_quality {
            params.put("videoQuality", x);
        }
        request.set_params(params);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<ListReservationsResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(ListReservationsError::from_response(response))),
                )
            }
        })
    }
    /// <p>Purchase an offering and create a reservation.</p>
    fn purchase_offering(
        &self,
        input: PurchaseOfferingRequest,
    ) -> RusotoFuture<PurchaseOfferingResponse, PurchaseOfferingError> {
        let request_uri = format!(
            "/prod/offerings/{offering_id}/purchase",
            offering_id = input.offering_id
        );
        let mut request = SignedRequest::new("POST", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            // 201 Created is the success status for this operation.
            if response.status.as_u16() == 201 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<PurchaseOfferingResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(PurchaseOfferingError::from_response(response))),
                )
            }
        })
    }
    /// <p>Starts an existing channel</p>
    fn start_channel(
        &self,
        input: StartChannelRequest,
    ) -> RusotoFuture<StartChannelResponse, StartChannelError> {
        let request_uri = format!(
            "/prod/channels/{channel_id}/start",
            channel_id = input.channel_id
        );
        let mut request = SignedRequest::new("POST", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<StartChannelResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(StartChannelError::from_response(response))),
                )
            }
        })
    }
    /// <p>Stops a running channel</p>
    fn stop_channel(
        &self,
        input: StopChannelRequest,
    ) -> RusotoFuture<StopChannelResponse, StopChannelError> {
        let request_uri = format!(
            "/prod/channels/{channel_id}/stop",
            channel_id = input.channel_id
        );
        let mut request = SignedRequest::new("POST", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<StopChannelResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(StopChannelError::from_response(response))),
                )
            }
        })
    }
    /// <p>Updates a channel.</p>
    fn update_channel(
        &self,
        input: UpdateChannelRequest,
    ) -> RusotoFuture<UpdateChannelResponse, UpdateChannelError> {
        let request_uri = format!("/prod/channels/{channel_id}", channel_id = input.channel_id);
        let mut request = SignedRequest::new("PUT", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<UpdateChannelResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(UpdateChannelError::from_response(response))),
                )
            }
        })
    }
    /// <p>Updates an input.</p>
    fn update_input(
        &self,
        input: UpdateInputRequest,
    ) -> RusotoFuture<UpdateInputResponse, UpdateInputError> {
        let request_uri = format!("/prod/inputs/{input_id}", input_id = input.input_id);
        let mut request = SignedRequest::new("PUT", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result = serde_json::from_slice::<UpdateInputResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(UpdateInputError::from_response(response))),
                )
            }
        })
    }
    /// <p>Update an Input Security Group's Whitelists.</p>
    fn update_input_security_group(
        &self,
        input: UpdateInputSecurityGroupRequest,
    ) -> RusotoFuture<UpdateInputSecurityGroupResponse, UpdateInputSecurityGroupError> {
        let request_uri = format!(
            "/prod/inputSecurityGroups/{input_security_group_id}",
            input_security_group_id = input.input_security_group_id
        );
        let mut request = SignedRequest::new("PUT", "medialive", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);
        self.client.sign_and_dispatch(request, |response| {
            if response.status.as_u16() == 200 {
                Box::new(response.buffer().from_err().map(|response| {
                    let mut body = response.body;
                    if body == b"null" || body.is_empty() {
                        body = b"{}".to_vec();
                    }
                    debug!("Response body: {:?}", body);
                    debug!("Response status: {}", response.status);
                    let result =
                        serde_json::from_slice::<UpdateInputSecurityGroupResponse>(&body).unwrap();
                    result
                }))
            } else {
                Box::new(response.buffer().from_err().and_then(|response| {
                    Err(UpdateInputSecurityGroupError::from_response(response))
                }))
            }
        })
    }
}
// NOTE(review): empty protocol-test module — presumably a codegen placeholder;
// confirm against the code generator before adding tests here by hand.
#[cfg(test)]
mod protocol_tests {}
|
}
impl fmt::Display for ListInputsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
<|file_name|>server_suite_test.go<|end_file_name|><|fim▁begin|>package run_test
import (
"fmt"
"net/url"
"strings"
"testing"
"time"
)
// tests holds the shared server test cases, keyed by name; populated in init().
var tests Tests
// Load all shared tests
func init() {
tests = make(map[string]Test)
tests["database_commands"] = Test{
queries: []*Query{
&Query{
name: "create database should succeed",
command: `CREATE DATABASE db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "create database with retention duration should succeed",
command: `CREATE DATABASE db0_r WITH DURATION 24h REPLICATION 2 NAME db0_r_policy`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "create database with retention policy should fail with invalid name",
command: `CREATE DATABASE db1 WITH NAME "."`,
exp: `{"results":[{"statement_id":0,"error":"invalid name"}]}`,
once: true,
},
&Query{
name: "create database should error with some unquoted names",
command: `CREATE DATABASE 0xdb0`,
exp: `{"error":"error parsing query: found 0xdb0, expected identifier at line 1, char 17"}`,
},
&Query{
name: "create database should error with invalid characters",
command: `CREATE DATABASE "."`,
exp: `{"results":[{"statement_id":0,"error":"invalid name"}]}`,
},
&Query{
name: "create database with retention duration should error with bad retention duration",
command: `CREATE DATABASE db0 WITH DURATION xyz`,
exp: `{"error":"error parsing query: found xyz, expected duration at line 1, char 35"}`,
},
&Query{
name: "create database with retention replication should error with bad retention replication number",
command: `CREATE DATABASE db0 WITH REPLICATION xyz`,
exp: `{"error":"error parsing query: found xyz, expected integer at line 1, char 38"}`,
},
&Query{
name: "create database with retention name should error with missing retention name",
command: `CREATE DATABASE db0 WITH NAME`,
exp: `{"error":"error parsing query: found EOF, expected identifier at line 1, char 31"}`,
},
&Query{
name: "show database should succeed",
command: `SHOW DATABASES`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"databases","columns":["name"],"values":[["db0"],["db0_r"]]}]}]}`,
},
&Query{
name: "create database should not error with existing database",
command: `CREATE DATABASE db0`,
exp: `{"results":[{"statement_id":0}]}`,
},
&Query{
name: "create database should create non-existing database",
command: `CREATE DATABASE db1`,
exp: `{"results":[{"statement_id":0}]}`,
},
&Query{
name: "create database with retention duration should error if retention policy is different",
command: `CREATE DATABASE db1 WITH DURATION 24h`,
exp: `{"results":[{"statement_id":0,"error":"retention policy conflicts with an existing policy"}]}`,
},
&Query{
name: "create database should error with bad retention duration",
command: `CREATE DATABASE db1 WITH DURATION xyz`,
exp: `{"error":"error parsing query: found xyz, expected duration at line 1, char 35"}`,
},
&Query{
name: "show database should succeed",
command: `SHOW DATABASES`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"databases","columns":["name"],"values":[["db0"],["db0_r"],["db1"]]}]}]}`,
},
&Query{
name: "drop database db0 should succeed",
command: `DROP DATABASE db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "drop database db0_r should succeed",
command: `DROP DATABASE db0_r`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "drop database db1 should succeed",
command: `DROP DATABASE db1`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "drop database should not error if it does not exists",
command: `DROP DATABASE db1`,
exp: `{"results":[{"statement_id":0}]}`,
},
&Query{
name: "drop database should not error with non-existing database db1",
command: `DROP DATABASE db1`,
exp: `{"results":[{"statement_id":0}]}`,
},
&Query{
name: "show database should have no results",
command: `SHOW DATABASES`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"databases","columns":["name"]}]}]}`,
},
&Query{
name: "create database with shard group duration should succeed",
command: `CREATE DATABASE db0 WITH SHARD DURATION 61m`,<|fim▁hole|> },
&Query{
name: "create database with shard group duration and duration should succeed",
command: `CREATE DATABASE db1 WITH DURATION 60m SHARD DURATION 30m`,
exp: `{"results":[{"statement_id":0}]}`,
},
},
}
tests["drop_and_recreate_database"] = Test{
db: "db0",
rp: "rp0",
writes: Writes{
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
},
queries: []*Query{
&Query{
name: "Drop database after data write",
command: `DROP DATABASE db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "Recreate database",
command: `CREATE DATABASE db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "Recreate retention policy",
command: `CREATE RETENTION POLICY rp0 ON db0 DURATION 365d REPLICATION 1 DEFAULT`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "Show measurements after recreate",
command: `SHOW MEASUREMENTS`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Query data after recreate",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
},
},
}
tests["drop_database_isolated"] = Test{
db: "db0",
rp: "rp0",
writes: Writes{
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
},
queries: []*Query{
&Query{
name: "Query data from 1st database",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","host","region","val"],"values":[["2000-01-01T00:00:00Z","serverA","uswest",23.2]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Query data from 1st database with GROUP BY *",
command: `SELECT * FROM cpu GROUP BY *`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","tags":{"host":"serverA","region":"uswest"},"columns":["time","val"],"values":[["2000-01-01T00:00:00Z",23.2]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Drop other database",
command: `DROP DATABASE db1`,
once: true,
exp: `{"results":[{"statement_id":0}]}`,
},
&Query{
name: "Query data from 1st database and ensure it's still there",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","host","region","val"],"values":[["2000-01-01T00:00:00Z","serverA","uswest",23.2]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Query data from 1st database and ensure it's still there with GROUP BY *",
command: `SELECT * FROM cpu GROUP BY *`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","tags":{"host":"serverA","region":"uswest"},"columns":["time","val"],"values":[["2000-01-01T00:00:00Z",23.2]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
},
}
tests["delete_series"] = Test{
db: "db0",
rp: "rp0",
writes: Writes{
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=100 %d`, mustParseTime(time.RFC3339Nano, "2000-01-02T00:00:00Z").UnixNano())},
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=200 %d`, mustParseTime(time.RFC3339Nano, "2000-01-03T00:00:00Z").UnixNano())},
&Write{db: "db1", data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
},
queries: []*Query{
&Query{
name: "Show series is present",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["cpu,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Delete series",
command: `DELETE FROM cpu WHERE time < '2000-01-03T00:00:00Z'`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
once: true,
},
&Query{
name: "Show series still exists",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["cpu,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Make sure last point still exists",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","host","region","val"],"values":[["2000-01-03T00:00:00Z","serverA","uswest",200]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Make sure data wasn't deleted from other database.",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","host","region","val"],"values":[["2000-01-01T00:00:00Z","serverA","uswest",23.2]]}]}]}`,
params: url.Values{"db": []string{"db1"}},
},
},
}
tests["drop_and_recreate_series"] = Test{
db: "db0",
rp: "rp0",
writes: Writes{
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
&Write{db: "db1", data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
},
queries: []*Query{
&Query{
name: "Show series is present",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["cpu,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Drop series after data write",
command: `DROP SERIES FROM cpu`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
once: true,
},
&Query{
name: "Show series is gone",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Make sure data wasn't deleted from other database.",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","host","region","val"],"values":[["2000-01-01T00:00:00Z","serverA","uswest",23.2]]}]}]}`,
params: url.Values{"db": []string{"db1"}},
},
},
}
tests["drop_and_recreate_series_retest"] = Test{
db: "db0",
rp: "rp0",
writes: Writes{
&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano())},
},
queries: []*Query{
&Query{
name: "Show series is present again after re-write",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["cpu,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
},
}
tests["drop_series_from_regex"] = Test{
db: "db0",
rp: "rp0",
writes: Writes{
&Write{data: strings.Join([]string{
fmt.Sprintf(`a,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano()),
fmt.Sprintf(`aa,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano()),
fmt.Sprintf(`b,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano()),
fmt.Sprintf(`c,host=serverA,region=uswest val=30.2 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano()),
}, "\n")},
},
queries: []*Query{
&Query{
name: "Show series is present",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["a,host=serverA,region=uswest"],["aa,host=serverA,region=uswest"],["b,host=serverA,region=uswest"],["c,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Drop series after data write",
command: `DROP SERIES FROM /a.*/`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
once: true,
},
&Query{
name: "Show series is gone",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["b,host=serverA,region=uswest"],["c,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Drop series from regex that matches no measurements",
command: `DROP SERIES FROM /a.*/`,
exp: `{"results":[{"statement_id":0}]}`,
params: url.Values{"db": []string{"db0"}},
once: true,
},
&Query{
name: "make sure DROP SERIES doesn't delete anything when regex doesn't match",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["b,host=serverA,region=uswest"],["c,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Drop series with WHERE field should error",
command: `DROP SERIES FROM c WHERE val > 50.0`,
exp: `{"results":[{"statement_id":0,"error":"fields not supported in WHERE clause during deletion"}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "make sure DROP SERIES with field in WHERE didn't delete data",
command: `SHOW SERIES`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["key"],"values":[["b,host=serverA,region=uswest"],["c,host=serverA,region=uswest"]]}]}]}`,
params: url.Values{"db": []string{"db0"}},
},
&Query{
name: "Drop series with WHERE time should error",
command: `DROP SERIES FROM c WHERE time > now() - 1d`,
exp: `{"results":[{"statement_id":0,"error":"DROP SERIES doesn't support time in WHERE clause"}]}`,
params: url.Values{"db": []string{"db0"}},
},
},
}
tests["retention_policy_commands"] = Test{
db: "db0",
queries: []*Query{
&Query{
name: "create retention policy with invalid name should return an error",
command: `CREATE RETENTION POLICY "." ON db0 DURATION 1d REPLICATION 1`,
exp: `{"results":[{"statement_id":0,"error":"invalid name"}]}`,
once: true,
},
&Query{
name: "create retention policy should succeed",
command: `CREATE RETENTION POLICY rp0 ON db0 DURATION 1h REPLICATION 1`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "show retention policy should succeed",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rp0","1h0m0s","1h0m0s",1,false]]}]}]}`,
},
&Query{
name: "alter retention policy should succeed",
command: `ALTER RETENTION POLICY rp0 ON db0 DURATION 2h REPLICATION 3 DEFAULT`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "show retention policy should have new altered information",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rp0","2h0m0s","1h0m0s",3,true]]}]}]}`,
},
&Query{
name: "show retention policy should still show policy",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rp0","2h0m0s","1h0m0s",3,true]]}]}]}`,
},
&Query{
name: "create a second non-default retention policy",
command: `CREATE RETENTION POLICY rp2 ON db0 DURATION 1h REPLICATION 1`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "show retention policy should show both",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rp0","2h0m0s","1h0m0s",3,true],["rp2","1h0m0s","1h0m0s",1,false]]}]}]}`,
},
&Query{
name: "dropping non-default retention policy succeed",
command: `DROP RETENTION POLICY rp2 ON db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "create a third non-default retention policy",
command: `CREATE RETENTION POLICY rp3 ON db0 DURATION 1h REPLICATION 1 SHARD DURATION 30m`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "create retention policy with default on",
command: `CREATE RETENTION POLICY rp3 ON db0 DURATION 1h REPLICATION 1 SHARD DURATION 30m DEFAULT`,
exp: `{"results":[{"statement_id":0,"error":"retention policy conflicts with an existing policy"}]}`,
once: true,
},
&Query{
name: "show retention policy should show both with custom shard",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rp0","2h0m0s","1h0m0s",3,true],["rp3","1h0m0s","1h0m0s",1,false]]}]}]}`,
},
&Query{
name: "dropping non-default custom shard retention policy succeed",
command: `DROP RETENTION POLICY rp3 ON db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "show retention policy should show just default",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rp0","2h0m0s","1h0m0s",3,true]]}]}]}`,
},
&Query{
name: "Ensure retention policy with unacceptable retention cannot be created",
command: `CREATE RETENTION POLICY rp4 ON db0 DURATION 1s REPLICATION 1`,
exp: `{"results":[{"statement_id":0,"error":"retention policy duration must be at least 1h0m0s"}]}`,
once: true,
},
&Query{
name: "Check error when deleting retention policy on non-existent database",
command: `DROP RETENTION POLICY rp1 ON mydatabase`,
exp: `{"results":[{"statement_id":0}]}`,
},
&Query{
name: "Ensure retention policy for non existing db is not created",
command: `CREATE RETENTION POLICY rp0 ON nodb DURATION 1h REPLICATION 1`,
exp: `{"results":[{"statement_id":0,"error":"database not found: nodb"}]}`,
once: true,
},
&Query{
name: "drop rp0",
command: `DROP RETENTION POLICY rp0 ON db0`,
exp: `{"results":[{"statement_id":0}]}`,
},
// INF Shard Group Duration will normalize to the Retention Policy Duration Default
&Query{
name: "create retention policy with inf shard group duration",
command: `CREATE RETENTION POLICY rpinf ON db0 DURATION INF REPLICATION 1 SHARD DURATION 0s`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
// 0s Shard Group Duration will normalize to the Replication Policy Duration
&Query{
name: "create retention policy with 0s shard group duration",
command: `CREATE RETENTION POLICY rpzero ON db0 DURATION 1h REPLICATION 1 SHARD DURATION 0s`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
// 1s Shard Group Duration will normalize to the MinDefaultRetentionPolicyDuration
&Query{
name: "create retention policy with 1s shard group duration",
command: `CREATE RETENTION POLICY rponesecond ON db0 DURATION 2h REPLICATION 1 SHARD DURATION 1s`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "show retention policy: validate normalized shard group durations are working",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["rpinf","0s","168h0m0s",1,false],["rpzero","1h0m0s","1h0m0s",1,false],["rponesecond","2h0m0s","1h0m0s",1,false]]}]}]}`,
},
},
}
tests["retention_policy_auto_create"] = Test{
queries: []*Query{
&Query{
name: "create database should succeed",
command: `CREATE DATABASE db0`,
exp: `{"results":[{"statement_id":0}]}`,
once: true,
},
&Query{
name: "show retention policies should return auto-created policy",
command: `SHOW RETENTION POLICIES ON db0`,
exp: `{"results":[{"statement_id":0,"series":[{"columns":["name","duration","shardGroupDuration","replicaN","default"],"values":[["autogen","0s","168h0m0s",1,true]]}]}]}`,
},
},
}
}
func (tests Tests) load(t *testing.T, key string) Test {
test, ok := tests[key]
if !ok {
t.Fatalf("no test %q", key)
}
return test.duplicate()
}<|fim▁end|>
|
exp: `{"results":[{"statement_id":0}]}`,
|
<|file_name|>test_run.py<|end_file_name|><|fim▁begin|>from datetime import date, datetime
from textwrap import dedent
from unittest.mock import patch
from freezegun import freeze_time
from mitoc_const import affiliations
import ws.utils.dates as date_utils
from ws import enums, models, settings
from ws.lottery import run
from ws.tests import TestCase, factories
class SingleTripLotteryTests(TestCase):
    """Tests for the lottery runner that handles exactly one trip."""

    def test_fcfs_not_run(self):
        """If a trip's algorithm is not 'lottery', nothing happens."""
        trip = factories.TripFactory.create(
            algorithm='fcfs', program=enums.Program.HIKING.value
        )
        runner = run.SingleTripLotteryRunner(trip)
        with patch.object(models.Trip, 'save', wraps=models.Trip.save) as save_trip:
            runner()  # Early exits because it's not a lottery trip
        save_trip.assert_not_called()  # Trip was not modified
        trip.refresh_from_db()
        self.assertIsNone(trip.lottery_log)  # No lottery was run!
        self.assertEqual(trip.algorithm, 'fcfs')

    def test_run_with_no_signups(self):
        """We still run the lottery when nobody signed up."""
        trip = factories.TripFactory.create(algorithm='lottery')
        runner = run.SingleTripLotteryRunner(trip)
        runner()
        trip.refresh_from_db()
        expected = '\n'.join(
            [
                'Randomly ordering (preference to MIT affiliates)...',
                'No participants signed up.',
                'Converting trip to first-come, first-serve.',
                '',
            ]
        )
        # Even with zero participants, the trip is converted to FCFS
        # and the full log of what happened is stored on the trip.
        self.assertEqual(trip.algorithm, 'fcfs')
        self.assertEqual(trip.lottery_log, expected)

    # The wall time when invoking the lottery determines the random seed
    # See lottery.rank for more details
    @freeze_time("2019-10-22 10:30:00 EST")
    def test_run(self):
        """Test a full run of a single trip's lottery, demonstrating deterministic seeding.

        See lottery.rank for more detail on how the random seeding works.
        """
        trip = factories.TripFactory.create(
            pk=838249,  # Will factor into seed + ordering
            name="Single Trip Example",
            algorithm='lottery',
            maximum_participants=2,
            program=enums.Program.CLIMBING.value,
        )
        alice = factories.SignUpFactory.create(
            participant__pk=1021,  # Will factor into seed + ordering
            participant__name="Alice Aaronson",
            participant__affiliation=affiliations.MIT_UNDERGRAD.CODE,
            trip=trip,
            on_trip=False,
        )
        bob = factories.SignUpFactory.create(
            participant__pk=1022,  # Will factor into seed + ordering
            participant__name="Bob Bobberson",
            participant__affiliation=affiliations.MIT_AFFILIATE.CODE,
            trip=trip,
            on_trip=False,
        )
        charles = factories.SignUpFactory.create(
            participant__pk=1023,  # Will factor into seed + ordering
            participant__name="Charles Charleson",
            participant__affiliation=affiliations.NON_AFFILIATE.CODE,
            trip=trip,
            on_trip=False,
        )
        runner = run.SingleTripLotteryRunner(trip)
        runner()  # Actually runs the lottery -- this trip uses the lottery algorithm
        # We can expect the exact same ordering & "random" seed because:
        # - we mock wall time to be consistent with every test run
        # - we know participant PKs and the trip PK.
        # - we know the test environment's PRNG_SEED_SECRET
        self.assertEqual(settings.PRNG_SEED_SECRET, 'some-key-unknown-to-participants')
        expected = dedent(
            """\
            Randomly ordering (preference to MIT affiliates)...
            Participants will be handled in the following order:
            1. Alice Aaronson (MIT undergrad, 0.04993458051632388)
            2. Charles Charleson (Non-affiliate, 0.1895304657881689)
            3. Bob Bobberson (MIT affiliate (staff or faculty), 0.5391638258147878)
            --------------------------------------------------
            Single Trip Example has 2 slots, adding Alice Aaronson
            Single Trip Example has 1 slot, adding Charles Charleson
            Adding Bob Bobberson to the waitlist
            """
        )
        # The lottery log explains what happened & is written directly to the trip.
        trip.refresh_from_db()
        self.assertEqual(trip.algorithm, 'fcfs')
        self.assertEqual(trip.lottery_log, expected)
        # Alice & Charles were placed on the trip.
        alice.refresh_from_db()
        self.assertTrue(alice.on_trip)
        charles.refresh_from_db()
        self.assertTrue(charles.on_trip)
        # Bob was waitlisted.
        bob.refresh_from_db()
        self.assertFalse(bob.on_trip)
        self.assertTrue(bob.waitlistsignup)
@freeze_time("2020-01-15 09:00:00 EST")
class WinterSchoolLotteryTests(TestCase):
@staticmethod
def _ws_trip(**kwargs):
return factories.TripFactory.create(
algorithm="lottery", program=enums.Program.WINTER_SCHOOL.value, **kwargs
)
def setUp(self):
self.ice = self._ws_trip(
name="Frankenstein",
trip_type=enums.TripType.ICE_CLIMBING.value,
maximum_participants=3,
trip_date=date(2020, 1, 18), # Sat
)
self.hike = self._ws_trip(
name="Welch-Dickey",
trip_type=enums.TripType.HIKING.value,
maximum_participants=2,
trip_date=date(2020, 1, 19), # Sun
)
self.hiker = factories.ParticipantFactory.create(name="Prefers Hiking")
self.climber = factories.ParticipantFactory.create(name="Seeks Ice")
# Won't be included in the lottery run.
self.non_ws = factories.TripFactory.create(
name="Local trail work",
algorithm="lottery",<|fim▁hole|>
def _assert_fcfs_at_noon(self, trip):
self.assertEqual(trip.algorithm, 'fcfs')
self.assertEqual(
trip.signups_open_at, date_utils.localize(datetime(2020, 1, 15, 12))
)
def test_no_signups(self):
"""Trips are made FCFS, even if nobody signed up."""
runner = run.WinterSchoolLotteryRunner()
runner()
for trip in [self.hike, self.ice]:
trip.refresh_from_db()
self._assert_fcfs_at_noon(trip)
    def test_non_ws_trips_ignored(self):
        """Participants with signups for non-WS trips are handled.

        Namely,
        - a participant's signup for a non-WS trip does not affect the lottery
        - The cleanup phase of the lottery does not modify any non-WS trips
        """
        # A lottery trip outside the Winter School program, on the same weekend.
        outside_iap_trip = factories.TripFactory.create(
            name='non-WS trip',
            algorithm='lottery',
            program=enums.Program.WINTER_NON_IAP.value,
            trip_date=date(2020, 1, 18),
        )
        # A plain FCFS trip with no program at all.
        office_day = factories.TripFactory.create(
            name='Office Day',
            algorithm='fcfs',
            program=enums.Program.NONE.value,
            trip_date=date(2020, 1, 19),
        )
        # Sign up the hiker for all three trips!
        for trip in [self.hike, outside_iap_trip, office_day]:
            factories.SignUpFactory.create(participant=self.hiker, trip=trip)
        runner = run.WinterSchoolLotteryRunner()
        runner()
        # The participant was placed on their desired WS trip!
        ws_signup = models.SignUp.objects.get(trip=self.hike, participant=self.hiker)
        self.assertTrue(ws_signup.on_trip)
        # Neither of the other two trips had their algorithm or start time adjusted
        outside_iap_trip.refresh_from_db()
        self.assertEqual(outside_iap_trip.algorithm, 'lottery')
        office_day.refresh_from_db()
        self.assertEqual(office_day.signups_open_at, date_utils.local_now())
|
program=enums.Program.SERVICE.value,
trip_type=enums.TripType.HIKING.value,
trip_date=date(2020, 1, 18), # Sat
)
|
<|file_name|>spec_tests.rs<|end_file_name|><|fim▁begin|>use super::StatusRegister;
use std::cell::Cell;
#[test]
fn sprite_overflow() {
    // Bit 5 of the status byte reports sprite overflow.
    assert_eq!(false, new_status_register(0b00000000).sprite_overflow());
    assert_eq!(true, new_status_register(0b00100000).sprite_overflow());
}
#[test]
fn clear_sprite_overflow() {
    // Clearing must drop only bit 5 and leave every other bit intact.
    let status = new_status_register(0b11111111);
    status.clear_sprite_overflow();
    assert_eq!(0b11011111, status.reg.get());
}
#[test]
fn sprite_zero_hit() {
    // Bit 6 of the status byte reports a sprite-zero hit.
    assert_eq!(false, new_status_register(0b00000000).sprite_zero_hit());
    assert_eq!(true, new_status_register(0b01000000).sprite_zero_hit());
}
#[test]
fn set_sprite_zero_hit() {
    // Setting from an all-zero register raises exactly bit 6.
    let status = new_status_register(0b00000000);
    status.set_sprite_zero_hit();
    assert_eq!(0b01000000, status.reg.get());
}
#[test]
fn clear_sprite_zero_hit() {
    // Clearing must drop only bit 6 and leave every other bit intact.
    let status = new_status_register(0b11111111);
    status.clear_sprite_zero_hit();
    assert_eq!(0b10111111, status.reg.get());
}
#[test]
fn in_vblank() {
    // Bit 7 of the status byte reports vertical blanking.
    assert_eq!(false, new_status_register(0b00000000).in_vblank());
    assert_eq!(true, new_status_register(0b10000000).in_vblank());
}
#[test]<|fim▁hole|>}
#[test]
fn clear_in_vblank() {
    // Clearing must drop only bit 7 and leave every other bit intact.
    let status = new_status_register(0b11111111);
    status.clear_in_vblank();
    assert_eq!(0b01111111, status.reg.get());
}
fn new_status_register(val: u8) -> StatusRegister {
StatusRegister {
reg: Cell::new(val),
}
}<|fim▁end|>
|
fn set_in_vblank() {
let reg = new_status_register(0b00000000);
reg.set_in_vblank();
assert_eq!(0b10000000, reg.reg.get());
|
<|file_name|>rotations.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Implements rotations, including spherical rotations as defined in WCS Paper II
[1]_
`RotateNative2Celestial` and `RotateCelestial2Native` follow the convention in
WCS Paper II to rotate to/from a native sphere and the celestial sphere.
The implementation uses `EulerAngleRotation`. The model parameters are
three angles: the longitude (``lon``) and latitude (``lat``) of the fiducial point
in the celestial system (``CRVAL`` keywords in FITS), and the longitude of the celestial
pole in the native system (``lon_pole``). The Euler angles are ``lon+90``, ``90-lat``
and ``-(lon_pole-90)``.
References
----------
.. [1] Calabretta, M.R., Greisen, E.W., 2002, A&A, 395, 1077 (Paper II)
"""
import math
import numpy as np
from .core import Model
from .parameters import Parameter
from astropy.coordinates.matrix_utilities import rotation_matrix, matrix_product
from astropy import units as u
from .utils import _to_radian, _to_orig_unit
__all__ = ['RotateCelestial2Native', 'RotateNative2Celestial', 'Rotation2D',
'EulerAngleRotation', 'RotationSequence3D', 'SphericalRotationSequence']
def _create_matrix(angles, axes_order):
    """Compose a single 3x3 rotation matrix from per-axis angles (in rad)."""
    per_axis = []
    for theta, axis in zip(angles, axes_order):
        if isinstance(theta, u.Quantity):
            theta = theta.value
        theta = theta.item()
        per_axis.append(rotation_matrix(theta, axis, unit=u.rad))
    # Rotations are applied right-to-left, so reverse before chaining.
    return matrix_product(*per_axis[::-1])
def spherical2cartesian(alpha, delta):
    """Convert spherical angles (deg) to a unit Cartesian vector [x, y, z]."""
    alpha_rad = np.deg2rad(alpha)
    delta_rad = np.deg2rad(delta)
    cos_delta = np.cos(delta_rad)
    return np.array(
        [cos_delta * np.cos(alpha_rad), cos_delta * np.sin(alpha_rad), np.sin(delta_rad)]
    )
def cartesian2spherical(x, y, z):
    """Convert Cartesian coordinates to spherical angles (alpha, delta) in deg."""
    alpha = np.rad2deg(np.arctan2(y, x))
    delta = np.rad2deg(np.arctan2(z, np.hypot(x, y)))
    return alpha, delta
class RotationSequence3D(Model):
    """
    Perform a series of rotations about different axis in 3D space.

    Positive angles represent a counter-clockwise rotation.

    Parameters
    ----------
    angles : array-like
        Angles of rotation in deg in the order of axes_order.
    axes_order : str
        A sequence of 'x', 'y', 'z' corresponding to axis of rotation.

    Examples
    --------
    >>> model = RotationSequence3D([1.1, 2.1, 3.1, 4.1], axes_order='xyzx')
    """
    standard_broadcasting = False
    _separable = False
    n_inputs = 3
    n_outputs = 3
    # Stored internally in radians; the getter/setter convert to/from degrees.
    angles = Parameter(default=[], getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, angles, axes_order, name=None):
        self.axes = ['x', 'y', 'z']
        unrecognized = set(axes_order).difference(self.axes)
        if unrecognized:
            raise ValueError("Unrecognized axis label {0}; "
                             "should be one of {1} ".format(unrecognized,
                                                            self.axes))
        self.axes_order = axes_order
        if len(angles) != len(axes_order):
            # Fixed: the message previously used a backslash line continuation
            # inside the literal, embedding raw indentation whitespace in it.
            raise ValueError(
                "The number of angles {0} should match the number "
                "of axes {1}.".format(len(angles), len(axes_order)))
        super().__init__(angles, name=name)
        self._inputs = ('x', 'y', 'z')
        self._outputs = ('x', 'y', 'z')

    @property
    def inverse(self):
        """Inverse rotation."""
        angles = self.angles.value[::-1] * -1
        return self.__class__(angles, axes_order=self.axes_order[::-1])

    def evaluate(self, x, y, z, angles):
        """
        Apply the rotation to a set of 3D Cartesian coordinates.
        """
        # Fixed: the original chained comparison ``x.shape != y.shape != z.shape``
        # means ``(x != y) and (y != z)`` and silently accepted inputs where
        # x.shape == y.shape but z.shape differed; compare all pairs instead.
        if x.shape != y.shape or y.shape != z.shape:
            raise ValueError("Expected input arrays to have the same shape")
        # Note: If the original shape was () (an array scalar) convert to a
        # 1-element 1-D array on output for consistency with most other models
        orig_shape = x.shape or (1,)
        inarr = np.array([x.flatten(), y.flatten(), z.flatten()])
        result = np.dot(_create_matrix(angles[0], self.axes_order), inarr)
        x, y, z = result[0], result[1], result[2]
        x.shape = y.shape = z.shape = orig_shape
        return x, y, z
class SphericalRotationSequence(RotationSequence3D):
    """
    Perform a sequence of rotations about arbitrary number of axes
    in spherical coordinates.

    Parameters
    ----------
    angles : list
        A sequence of angles (in deg).
    axes_order : str
        A sequence of characters ('x', 'y', or 'z') corresponding to the
        axis of rotation and matching the order in ``angles``.
    """

    def __init__(self, angles, axes_order, name=None, **kwargs):
        # Set before the base __init__ runs — presumably the base-class
        # machinery consults n_inputs/n_outputs during construction (the
        # properties below shadow the parent's value of 3) — TODO confirm.
        self._n_inputs = 2
        self._n_outputs = 2
        super().__init__(angles, axes_order=axes_order, name=name, **kwargs)
        self._inputs = ("lon", "lat")
        self._outputs = ("lon", "lat")

    @property
    def n_inputs(self):
        # Overrides the class attribute (3) inherited from RotationSequence3D.
        return self._n_inputs

    @property
    def n_outputs(self):
        return self._n_outputs

    def evaluate(self, lon, lat, angles):
        # Route through Cartesian space: lift (lon, lat) onto the unit
        # sphere, apply the 3D rotation sequence, project back to degrees.
        x, y, z = spherical2cartesian(lon, lat)
        x1, y1, z1 = super().evaluate(x, y, z, angles)
        lon, lat = cartesian2spherical(x1, y1, z1)
        return lon, lat
class _EulerRotation:
    """
    Base class which does the actual computation.
    """
    _separable = False

    def evaluate(self, alpha, delta, phi, theta, psi, axes_order):
        """Rotate (alpha, delta) [deg] by Euler angles phi/theta/psi [rad].

        Returns the rotated longitude/latitude pair in degrees.
        """
        shape = None
        if isinstance(alpha, np.ndarray) and alpha.ndim == 2:
            # BUG FIX: record the 2-D shape *before* flattening.  The
            # original captured ``alpha.shape`` after ``flatten()``, so the
            # reshape at the end restored the already-flat shape — a no-op —
            # and 2-D inputs came back 1-D.
            shape = alpha.shape
            alpha = alpha.flatten()
            delta = delta.flatten()
        inp = spherical2cartesian(alpha, delta)
        matrix = _create_matrix([phi, theta, psi], axes_order)
        result = np.dot(matrix, inp)
        a, b = cartesian2spherical(*result)
        if shape is not None:
            a.shape = shape
            b.shape = shape
        return a, b

    _input_units_strict = True
    _input_units_allow_dimensionless = True

    @property
    def input_units(self):
        """ Input units. """
        return {'alpha': u.deg, 'delta': u.deg}

    @property
    def return_units(self):
        """ Output units. """
        return {'alpha': u.deg, 'delta': u.deg}
class EulerAngleRotation(_EulerRotation, Model):
    """
    Implements Euler angle intrinsic rotations.

    Rotates one coordinate system into another (fixed) coordinate system.
    All coordinate systems are right-handed. The sign of the angles is
    determined by the right-hand rule..

    Parameters
    ----------
    phi, theta, psi : float or `~astropy.units.Quantity`
        "proper" Euler angles in deg.
        If floats, they should be in deg.
    axes_order : str
        A 3 character string, a combination of 'x', 'y' and 'z',
        where each character denotes an axis in 3D space.
    """
    n_inputs = 2
    n_outputs = 2

    phi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    theta = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    psi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, phi, theta, psi, axes_order, **kwargs):
        self.axes = ['x', 'y', 'z']
        if len(axes_order) != 3:
            raise TypeError(
                "Expected axes_order to be a character sequence of length 3,"
                "got {}".format(axes_order))
        unrecognized = set(axes_order).difference(self.axes)
        if unrecognized:
            raise ValueError("Unrecognized axis label {}; "
                             "should be one of {} ".format(unrecognized, self.axes))
        self.axes_order = axes_order
        # Mixing plain floats and Quantities would make the unit handling
        # ambiguous, so require a homogeneous parameter set.
        qs = [isinstance(par, u.Quantity) for par in [phi, theta, psi]]
        if any(qs) and not all(qs):
            raise TypeError("All parameters should be of the same type - float or Quantity.")
        super().__init__(phi=phi, theta=theta, psi=psi, **kwargs)
        self._inputs = ('alpha', 'delta')
        self._outputs = ('alpha', 'delta')

    @property
    def inverse(self):
        # CONSISTENCY FIX: every sibling rotation model in this module
        # exposes ``inverse`` as a property; without the decorator this
        # attribute evaluated to a bound method rather than a model.
        return self.__class__(phi=-self.psi,
                              theta=-self.theta,
                              psi=-self.phi,
                              axes_order=self.axes_order[::-1])

    def evaluate(self, alpha, delta, phi, theta, psi):
        a, b = super().evaluate(alpha, delta, phi, theta, psi, self.axes_order)
        return a, b
class _SkyRotation(_EulerRotation, Model):
    """
    Base class for RotateNative2Celestial and RotateCelestial2Native.
    """
    lon = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    lat = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    lon_pole = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, lon, lat, lon_pole, **kwargs):
        qs = [isinstance(par, u.Quantity) for par in [lon, lat, lon_pole]]
        if any(qs) and not all(qs):
            raise TypeError("All parameters should be of the same type - float or Quantity.")
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Sky rotations are always z-x-z Euler rotations (FITS WCS Paper II).
        self.axes_order = 'zxz'

    def _evaluate(self, phi, theta, lon, lat, lon_pole):
        alpha, delta = super().evaluate(phi, theta, lon, lat, lon_pole,
                                        self.axes_order)
        # Normalize longitude into [0, 360).
        mask = alpha < 0
        if isinstance(mask, np.ndarray):
            alpha[mask] += 360
        elif mask:
            # BUG FIX: the scalar branch previously added 360 unconditionally,
            # pushing already non-negative longitudes out of range.
            alpha += 360
        return alpha, delta
class RotateNative2Celestial(_SkyRotation):
    """
    Transform from Native to Celestial Spherical Coordinates.

    Parameters
    ----------
    lon : float or or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.

    Notes
    -----
    If ``lon``, ``lat`` and ``lon_pole`` are numerical values they
    should be in units of deg. Inputs are angles on the native sphere.
    Outputs are angles on the celestial sphere.
    """
    n_inputs = 2
    n_outputs = 2

    @property
    def input_units(self):
        """ Input units. """
        return {'phi_N': u.deg, 'theta_N': u.deg}

    @property
    def return_units(self):
        """ Output units. """
        return {'alpha_C': u.deg, 'delta_C': u.deg}

    def __init__(self, lon, lat, lon_pole, **kwargs):
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Inputs are angles on the native sphere; outputs on the celestial.
        self.inputs = ('phi_N', 'theta_N')
        self.outputs = ('alpha_C', 'delta_C')

    def evaluate(self, phi_N, theta_N, lon, lat, lon_pole):
        """
        Parameters
        ----------
        phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
            Angles in the Native coordinate system.
        lon, lat, lon_pole : float (deg) or `~astropy.units.Quantity`
            Parameter values when the model was initialized.

        Returns
        -------
        alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
            Angles on the Celestial sphere.
        """
        # The values are in radians since they have already been through the setter.
        if isinstance(lon, u.Quantity):
            lon = lon.value
            lat = lat.value
            lon_pole = lon_pole.value
        # Convert the (lon, lat, lon_pole) parameters to z-x-z Euler angles.
        phi = lon_pole - np.pi / 2
        theta = - (np.pi / 2 - lat)
        psi = -(np.pi / 2 + lon)
        alpha_C, delta_C = super()._evaluate(phi_N, theta_N, phi, theta, psi)
        return alpha_C, delta_C

    @property
    def inverse(self):
        # convert to angles on the celestial sphere
        return RotateCelestial2Native(self.lon, self.lat, self.lon_pole)
class RotateCelestial2Native(_SkyRotation):
    """
    Transform from Celestial to Native Spherical Coordinates.

    Parameters
    ----------
    lon : float or or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.

    Notes
    -----
    If ``lon``, ``lat`` and ``lon_pole`` are numerical values they should be
    in units of deg. Inputs are angles on the celestial sphere.
    Outputs are angles on the native sphere.
    """
    n_inputs = 2
    n_outputs = 2

    @property
    def input_units(self):
        """ Input units. """
        return {'alpha_C': u.deg, 'delta_C': u.deg}

    @property
    def return_units(self):
        """ Output units. """
        return {'phi_N': u.deg, 'theta_N': u.deg}

    def __init__(self, lon, lat, lon_pole, **kwargs):
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Inputs are angles on the celestial sphere
        self.inputs = ('alpha_C', 'delta_C')
        # Outputs are angles on the native sphere
        self.outputs = ('phi_N', 'theta_N')

    def evaluate(self, alpha_C, delta_C, lon, lat, lon_pole):
        """
        Parameters
        ----------
        alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
            Angles in the Celestial coordinate frame.
        lon, lat, lon_pole : float (deg) or `~astropy.units.Quantity`
            Parameter values when the model was initialized.

        Returns
        -------
        phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
            Angles on the Native sphere.
        """
        # Parameter values arrive in radians (already through the setter).
        if isinstance(lon, u.Quantity):
            lon = lon.value
            lat = lat.value
            lon_pole = lon_pole.value
        # Convert to Euler angles — the inverse angle set of
        # RotateNative2Celestial (same z-x-z axes order).
        phi = (np.pi / 2 + lon)
        theta = (np.pi / 2 - lat)
        psi = -(lon_pole - np.pi / 2)
        phi_N, theta_N = super()._evaluate(alpha_C, delta_C, phi, theta, psi)
        return phi_N, theta_N

    @property
    def inverse(self):
        return RotateNative2Celestial(self.lon, self.lat, self.lon_pole)
class Rotation2D(Model):
    """
    Perform a 2D rotation given an angle.

    Positive angles represent a counter-clockwise rotation and vice-versa.

    Parameters
    ----------
    angle : float or `~astropy.units.Quantity`
        Angle of rotation (if float it should be in deg).
    """
    n_inputs = 2
    n_outputs = 2
    _separable = False

    # Stored internally in radians; exposed to the user in deg.
    angle = Parameter(default=0.0, getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, angle=angle, **kwargs):
        # The default is the ``angle`` Parameter descriptor defined above
        # (standard astropy.modeling idiom for "use the parameter default").
        super().__init__(angle=angle, **kwargs)
        self._inputs = ("x", "y")
        self._outputs = ("x", "y")

    @property
    def inverse(self):
        """Inverse rotation."""
        return self.__class__(angle=-self.angle)

    @classmethod
    def evaluate(cls, x, y, angle):
        """
        Rotate (x, y) about ``angle``.

        Parameters
        ----------
        x, y : ndarray-like
            Input quantities
        angle : float (deg) or `~astropy.units.Quantity`
            Angle of rotations.

        Raises
        ------
        ValueError
            If ``x`` and ``y`` have different shapes.
        ~astropy.units.UnitsError
            If only one input carries units, or the units are incompatible.
        """
        if x.shape != y.shape:
            raise ValueError("Expected input arrays to have the same shape")
        # If one argument has units, enforce they both have units and they are compatible.
        x_unit = getattr(x, 'unit', None)
        y_unit = getattr(y, 'unit', None)
        has_units = x_unit is not None and y_unit is not None
        if x_unit != y_unit:
            if has_units and y_unit.is_equivalent(x_unit):
                y = y.to(x_unit)
                y_unit = x_unit
            else:
                raise u.UnitsError("x and y must have compatible units")
        # Note: If the original shape was () (an array scalar) convert to a
        # 1-element 1-D array on output for consistency with most other models
        orig_shape = x.shape or (1,)
        inarr = np.array([x.flatten(), y.flatten()])
        if isinstance(angle, u.Quantity):
            angle = angle.to_value(u.rad)
        result = np.dot(cls._compute_matrix(angle), inarr)
        x, y = result[0], result[1]
        x.shape = y.shape = orig_shape
        if has_units:
            return u.Quantity(x, unit=x_unit), u.Quantity(y, unit=y_unit)
        else:
            return x, y

    @staticmethod
    def _compute_matrix(angle):
        # Standard 2D counter-clockwise rotation matrix; ``angle`` in rad.
        return np.array([[math.cos(angle), -math.sin(angle)],
                         [math.sin(angle), math.cos(angle)]],
                        dtype=np.float64)
|
lon, lat, lon_pole : float (in deg) or `~astropy.units.Quantity`
|
<|file_name|>CsvGenerator.java<|end_file_name|><|fim▁begin|>package gr.softaware.lib_1_3.data.convert.csv;
<|fim▁hole|>/**
*
* @author siggouroglou
* @param <T> A class that implements the CsvGenerationModel interface.
*/
final public class CsvGenerator<T extends CsvGenerationModel> {

    // Rows to serialize; stored through an unchecked upcast of the list's
    // element type (safe here because elements are only read).
    private final List<CsvGenerationModel> modelList;

    public CsvGenerator(List<T> modelList) {
        this.modelList = (List<CsvGenerationModel>)modelList;
    }

    /**
     * Serializes all rows using a tab separator.
     *
     * @return the assembled CSV content
     */
    public StringBuilder getContent() {
        return getContent("\t", "\\t");
    }

    /**
     * Serializes all rows to CSV: a leading "sep=..." hint line, then the
     * header row from the first model, then one row per model.
     *
     * @param separator actual separator placed between fields
     * @param separatorAsString printable form written on the "sep=" line
     *        (e.g. "\\t" for a tab)
     * @return the assembled CSV content
     */
    public StringBuilder getContent(String separator, String separatorAsString) {
        StringBuilder builder = new StringBuilder();
        // First line contains the separator.
        builder.append("sep=").append(separatorAsString).append("\n");
        // Get the header.
        if(!modelList.isEmpty()) {
            builder.append(modelList.get(0).toCsvFormattedHeader(separator)).append("\n");
        }
        // Get the data.
        modelList.stream().forEach((model) -> {
            builder.append(model.toCsvFormattedRow(separator)).append("\n");
        });
        return builder;
    }
}<|fim▁end|>
|
import java.util.List;
|
<|file_name|>gulp.config.js<|end_file_name|><|fim▁begin|>var sourceFolder = 'src',
destFolder = 'public',
configFolder = 'config';
module.exports = {
folders: {
source: sourceFolder,
dest: destFolder
},
files: {
scripts: [
`${sourceFolder}/js/utils.js`,
`${sourceFolder}/js/sprites/weapon.js`,
`${sourceFolder}/js/sprites/hook.js`,
`${sourceFolder}/js/sprites/enemy.js`,
`${sourceFolder}/js/sprites/**/*.js`,
`${sourceFolder}/js/map.js`,
`${sourceFolder}/js/ui/**/*.js`,
`${sourceFolder}/js/states/**/*.js`,
`${sourceFolder}/js/**/*.js`
],
templates: `${sourceFolder}/templates/**/*.html`,
libs: [
'node_modules/phaser/dist/phaser.js',
'node_modules/stats.js/build/stats.min.js'
],
styles: `${sourceFolder}/styles/**/*.css`,
images: `${sourceFolder}/images/**/*.*`,
sounds: `${sourceFolder}/sounds/**/*.*`,
json: `${sourceFolder}/json/**/*.*`,
fonts: `${sourceFolder}/fonts/**/*.*`,
cname: `${configFolder}/CNAME`
},
scripts: {
destFolder: `${destFolder}/js`,
outFile: 'index.js'
},
libs: {
destFolder: `${destFolder}/js`,
outFile: 'libs.js'
},
styles: {
destFolder: `${destFolder}/css`,
outFile: 'index.css'
},
images: {
destFolder: `${destFolder}/images`
},
sounds: {
destFolder: `${destFolder}/sounds`
},<|fim▁hole|> },
fonts: {
destFolder: `${destFolder}/fonts`
},
server: {
root: destFolder,
livereload: true
}
};<|fim▁end|>
|
json: {
destFolder: `${destFolder}/json`
|
<|file_name|>wrapper.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright © 2014 René Samselnig
#
# This file is part of Database Navigator.
#
# Database Navigator is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Database Navigator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|># along with Database Navigator. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import logging
import pdb
import urllib2
import json
import ijson
from dbmanagr.writer import Writer
from dbmanagr import logger as log
from dbmanagr.jsonable import from_json
# Maps each dbmanagr executable name to the daemon endpoint path used when
# an invocation is forwarded to the HTTP server (see Wrapper.executer).
COMMANDS = {
    'dbdiff': 'differ',
    'dbexec': 'executer',
    'dbexport': 'exporter',
    'dbgraph': 'grapher',
    'dbnav': 'navigator'
}
class Wrapper(object):
    """Base class for dbmanagr command wrappers.

    Subclasses override ``execute``; ``write`` and ``run`` supply the
    shared entry points, daemon forwarding and error handling.
    """

    def __init__(self, options=None):
        # Parsed command-line options object; defaults to None.
        self.options = options
    def write(self):
        """Run the command and write the formatted result to stdout.

        Returns a shell-style status: 0 on success, -1 when the run
        raised (the exception is logged, not re-raised).
        """
        try:
            sys.stdout.write(Writer.write(self.run()))
        except BaseException as e:
            log.logger.exception(e)
            return -1
        return 0
    def execute(self):  # pragma: no cover
        """To be overridden by sub classes.

        Presumably returns the command's result items that ``run`` hands
        to ``Writer.write`` — confirm against concrete subclasses.
        """
        pass
    def run(self):
        """Execute the command, either locally or through the daemon.

        With ``options.daemon`` set the call is forwarded over HTTP via
        ``executer``; otherwise ``execute`` runs in-process.  Exceptions
        are logged; under DEBUG logging a post-mortem debugger may start
        (or the exception is re-raised inside unit tests).
        """
        try:
            if (
                    self.options is not None
                    and self.options.daemon):  # pragma: no cover
                log.logger.debug('Executing remotely')
                return self.executer(*sys.argv)
            log.logger.debug('Executing locally')
            return self.execute()
        except BaseException as e:
            log.logger.exception(e)
            if log.logger.getEffectiveLevel() <= logging.DEBUG:
                # Start post mortem debugging only when debugging is enabled
                if os.getenv('UNITTEST', 'False') == 'True':
                    # Let the test framework see the original exception.
                    raise
                if self.options.trace:  # pragma: no cover
                    pdb.post_mortem(sys.exc_info()[2])
            else:
                # Show the error message if log level is INFO or higher
                log.log_error(e)  # pragma: no cover
def executer(self, *args): # pragma: no cover
"""Execute remotely"""
options = self.options
try:
# from dbmanagr import daemon
# if not daemon.is_running(options):
# daemon.start_server(options)
url = 'http://{host}:{port}/{path}'.format(
host=options.host,
port=options.port,
path=COMMANDS[options.prog])
request = json.dumps(args[1:])
log.logger.debug('Request to %s:\n%s', url, request)
response = urllib2.urlopen(url, request)
for i in ijson.items(response, 'item'):
yield from_json(i)
except urllib2.HTTPError as e:
raise from_json(json.load(e))
except urllib2.URLError as e:
log.logger.error('Daemon not available: %s', e)
except BaseException as e:
log.logger.exception(e)<|fim▁end|>
|
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
|
<|file_name|>pydevd_thread_wrappers.py<|end_file_name|><|fim▁begin|>from _pydev_imps._pydev_saved_modules import threading
def wrapper(fun):
    """Return ``fun`` wrapped so ``pydev_after_run_call`` runs after it.

    NOTE(review): the empty inner hook looks like a marker the debugger
    targets by name — do not rename.  The wrapper discards ``fun``'s
    return value (``inner`` returns None).
    """
    def pydev_after_run_call():
        pass

    def inner(*args, **kwargs):
        fun(*args, **kwargs)
        pydev_after_run_call()
    return inner
def wrap_attr(obj, attr):
    """Replace ``obj.attr`` with its wrapped version and mark ``obj`` patched."""
    t_save_start = getattr(obj, attr)
    setattr(obj, attr, wrapper(t_save_start))
    # Flag consumed elsewhere to avoid double-patching — TODO confirm reader.
    obj._pydev_run_patched = True
class ObjectWrapper(object):
    """Proxy that forwards attribute access to a wrapped object, invoking
    the ``call_begin``/``call_end`` hooks around every callable attribute
    and around context-manager enter/exit.  Subclasses override the hooks.
    """

    def __init__(self, obj):
        self.wrapped_object = obj
        try:
            import functools
            # Best effort: copy metadata so the proxy resembles the object.
            functools.update_wrapper(self, obj)
        except:
            pass

    def __getattr__(self, attr):
        # Only reached for names not found on the proxy itself.
        orig_attr = getattr(self.wrapped_object, attr)
        if callable(orig_attr):
            def patched_attr(*args, **kwargs):
                self.call_begin(attr)
                result = orig_attr(*args, **kwargs)
                self.call_end(attr)
                # Keep fluent interfaces proxied: if the call returned the
                # wrapped object itself, hand back the proxy instead.
                if result == self.wrapped_object:
                    return self
                return result
            # Restored: this return statement was corrupted in the source.
            return patched_attr
        else:
            return orig_attr

    def call_begin(self, attr):
        pass

    def call_end(self, attr):
        pass

    def __enter__(self):
        self.call_begin("__enter__")
        self.wrapped_object.__enter__()
        self.call_end("__enter__")

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.call_begin("__exit__")
        self.wrapped_object.__exit__(exc_type, exc_val, exc_tb)
def factory_wrapper(fun):
    """Wrap a factory so every object it creates is returned inside an
    ObjectWrapper proxy."""
    def inner(*args, **kwargs):
        obj = fun(*args, **kwargs)
        return ObjectWrapper(obj)
    return inner
def wrap_threads():
    """Patch threading and queue factories so that created Lock/RLock/Queue
    objects are wrapped in ObjectWrapper proxies.

    Mutates the ``threading`` and ``queue`` (or Python 2 ``Queue``) modules
    globally; the fallback import keeps Python 2 compatibility.
    """
    # TODO: add wrappers for thread and _thread
    # import _thread as mod
    # print("Thread imported")
    # mod.start_new_thread = wrapper(mod.start_new_thread)
    import threading
    threading.Lock = factory_wrapper(threading.Lock)
    threading.RLock = factory_wrapper(threading.RLock)
    # queue patching
    try:
        import queue  # @UnresolvedImport
        queue.Queue = factory_wrapper(queue.Queue)
    except:
        import Queue
        Queue.Queue = factory_wrapper(Queue.Queue)
| |
<|file_name|>kindeditor.js<|end_file_name|><|fim▁begin|>/*******************************************************************************
* KindEditor - WYSIWYG HTML Editor for Internet
* Copyright (C) 2006-2012 kindsoft.net
*
* @author Roddy <[email protected]>
* @website http://www.kindsoft.net/
* @licence http://www.kindsoft.net/license.php
* @version 4.1.4 (2012-11-11)
*
* 调试前必读:务必将DEBUG设成true,否则,死的很惨! added by zentao team.
*******************************************************************************/
(function (window, undefined) {
if (window.KindEditor) {
return;
}
if (!window.console) {
window.console = {};
}
if (!console.log) {
console.log = function () {};
}
var _VERSION = '4.1.4',
_ua = navigator.userAgent.toLowerCase(),
_IE = _ua.indexOf('msie') > -1 && _ua.indexOf('opera') == -1,
_GECKO = _ua.indexOf('gecko') > -1 && _ua.indexOf('khtml') == -1,
_WEBKIT = _ua.indexOf('applewebkit') > -1,
_OPERA = _ua.indexOf('opera') > -1,
_MOBILE = _ua.indexOf('mobile') > -1,
_IOS = /ipad|iphone|ipod/.test(_ua),
_QUIRKS = document.compatMode != 'CSS1Compat',
_matches = /(?:msie|firefox|webkit|opera)[\/:\s](\d+)/.exec(_ua),
_V = _matches ? _matches[1] : '0',
_TIME = new Date().getTime();
function _isArray(val) {
	// Falsy values short-circuit to false; otherwise use the canonical
	// Object.prototype.toString tag check (works across frames).
	return !!val && Object.prototype.toString.call(val) === '[object Array]';
}
function _isFunction(val) {
	// Same pattern as _isArray: reject falsy input, then tag-check.
	return !!val && Object.prototype.toString.call(val) === '[object Function]';
}
function _inArray(val, arr) {
	// Strict-equality linear scan; first matching index, or -1.
	var len = arr.length;
	for (var i = 0; i < len; i++) {
		if (arr[i] === val) {
			return i;
		}
	}
	return -1;
}
// Iterate an array (by index) or a plain object's own keys, invoking
// fn(key, value) with `this` bound to the value.  Returning false from
// the callback stops the iteration early.
function _each(obj, fn) {
	if (_isArray(obj)) {
		for (var i = 0, len = obj.length; i < len; i++) {
			if (fn.call(obj[i], i, obj[i]) === false) {
				break;
			}
		}
	} else {
		for (var key in obj) {
			// Skip inherited/prototype properties.
			if (obj.hasOwnProperty(key)) {
				if (fn.call(obj[key], key, obj[key]) === false) {
					break;
				}
			}
		}
	}
}
function _trim(str) {
	// Strip leading then trailing runs of space/tab/newline/CR — the same
	// character set as the original single combined regex.
	return str.replace(/^[ \t\n\r]+/, '').replace(/[ \t\n\r]+$/, '');
}
function _inString(val, str, delimiter) {
	// Whole-item membership test on a delimiter-separated list; padding
	// both sides with the delimiter avoids partial matches.
	var sep = delimiter === undefined ? ',' : delimiter;
	var padded = sep + str + sep;
	return padded.indexOf(sep + val + sep) >= 0;
}
function _addUnit(val, unit) {
	// Append the unit (default 'px') only to purely numeric values; falsy
	// input and values that already carry a unit pass through untouched.
	var suffix = unit || 'px';
	if (val && /^\d+$/.test(val)) {
		return val + suffix;
	}
	return val;
}
function _removeUnit(val) {
	// Extract the first run of digits from values like '12px'; 0 otherwise.
	var m = val ? /(\d+)/.exec(val) : null;
	return m ? parseInt(m[1], 10) : 0;
}
// Escape HTML-special characters.  FIX: the entity replacement strings had
// been decoded back to literal characters (making this a no-op chain);
// restored the standard entities.  '&' must be encoded first so the
// entities produced by the later replacements are not double-encoded.
function _escape(val) {
	return val.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;');
}
// Inverse of _escape.  FIX: restored the entity source strings that had
// been decoded to literal characters.  '&amp;' is decoded last so entity
// text produced by the earlier replacements is not decoded twice.
function _unescape(val) {
	return val.replace(/&lt;/g, '<').replace(/&gt;/g, '>').replace(/&quot;/g, '"').replace(/&amp;/g, '&');
}
function _toCamel(str) {
	// 'font-size' -> 'fontSize': keep the first segment, capitalize the
	// first character of every following segment.
	var parts = str.split('-');
	var out = parts[0];
	for (var i = 1; i < parts.length; i++) {
		out += parts[i].charAt(0).toUpperCase() + parts[i].substr(1);
	}
	return out;
}
// Rewrite every 'rgb(r, g, b)' occurrence in `val` as an uppercase
// '#RRGGBB' triplet; text without rgb() notation is returned untouched.
function _toHex(val) {
	// One decimal channel (0-255) as two zero-padded uppercase hex digits.
	function hex(d) {
		var s = parseInt(d, 10).toString(16).toUpperCase();
		return s.length > 1 ? s : '0' + s;
	}
	return val.replace(/rgb\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)/ig,
		function($0, $1, $2, $3) {
			return '#' + hex($1) + hex($2) + hex($3);
		}
	);
}
// Build a truthy lookup map from a delimiter-separated string (or array).
// Items of the form 'a..b' expand to one key per integer in [a, b] —
// e.g. '33..40' used for key-code sets below.
function _toMap(val, delimiter) {
	delimiter = delimiter === undefined ? ',' : delimiter;
	var map = {}, arr = _isArray(val) ? val : val.split(delimiter), match;
	_each(arr, function(key, val) {
		if ((match = /^(\d+)\.\.(\d+)$/.exec(val))) {
			for (var i = parseInt(match[1], 10); i <= parseInt(match[2], 10); i++) {
				map[i.toString()] = true;
			}
		} else {
			map[val] = true;
		}
	});
	return map;
}
// Convert an array-like object (arguments, NodeList) into a real Array,
// optionally skipping the first `offset` items.
function _toArray(obj, offset) {
	return Array.prototype.slice.call(obj, offset || 0);
}
// Return `val`, or `defaultVal` only when `val` is undefined (null passes).
function _undef(val, defaultVal) {
	return val === undefined ? defaultVal : val;
}
// A URL is rejected when empty or containing <, > or " (markup-injection guard).
function _invalidUrl(url) {
	return !url || /[<>"]/.test(url);
}
// Append a query parameter, choosing '&' vs '?' based on an existing query.
function _addParam(url, param) {
	return url.indexOf('?') >= 0 ? url + '&' + param : url + '?' + param;
}
// Classical-inheritance helper.  Two call forms:
//   _extend(Child, proto)          — `proto` becomes Child's prototype;
//   _extend(Child, Parent, proto)  — chain Child's prototype to Parent's
//   via a throwaway constructor (so Parent's ctor never runs) and copy
//   `proto` members onto it.
// Also records `Child.parent` (the parent prototype) for super-style calls.
function _extend(child, parent, proto) {
	if (!proto) {
		// Two-argument form: shift arguments.
		proto = parent;
		parent = null;
	}
	var childProto;
	if (parent) {
		var fn = function () {};
		fn.prototype = parent.prototype;
		childProto = new fn();
		_each(proto, function(key, val) {
			childProto[key] = val;
		});
	} else {
		childProto = proto;
	}
	childProto.constructor = child;
	child.prototype = childProto;
	child.parent = parent ? parent.prototype : null;
}
// Extract and parse the first JSON object/array embedded in `text`,
// following the classic json2.js approach: escape problematic unicode,
// validate the text against whitelist regexes, then evaluate it.
// NOTE(review): eval()-based — the regex whitelist is the only safeguard;
// do not feed input from untrusted sources without review.
function _json(text) {
	var match;
	// Grab the outermost {...} or [...] span, ignoring surrounding noise.
	if ((match = /\{[\s\S]*\}|\[[\s\S]*\]/.exec(text))) {
		text = match[0];
	}
	// Characters that can break eval or alter parsing; \u-escape them.
	var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
	cx.lastIndex = 0;
	if (cx.test(text)) {
		text = text.replace(cx, function (a) {
			return '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
		});
	}
	// json2.js validation: neutralize escapes and literals, then ensure
	// only structural characters remain before eval'ing.
	if (/^[\],:{}\s]*$/.
		test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
		replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
		replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
		return eval('(' + text + ')');
	}
	throw 'JSON parse error';
}
var _round = Math.round;
var K = {
DEBUG : false,
VERSION : _VERSION,
IE : _IE,
GECKO : _GECKO,
WEBKIT : _WEBKIT,
OPERA : _OPERA,
V : _V,
TIME : _TIME,
each : _each,
isArray : _isArray,
isFunction : _isFunction,
inArray : _inArray,
inString : _inString,
trim : _trim,
addUnit : _addUnit,
removeUnit : _removeUnit,
escape : _escape,
unescape : _unescape,
toCamel : _toCamel,
toHex : _toHex,
toMap : _toMap,
toArray : _toArray,
undef : _undef,
invalidUrl : _invalidUrl,
addParam : _addParam,
extend : _extend,
json : _json
};
var _INLINE_TAG_MAP = _toMap('a,abbr,acronym,b,basefont,bdo,big,br,button,cite,code,del,dfn,em,font,i,img,input,ins,kbd,label,map,q,s,samp,select,small,span,strike,strong,sub,sup,textarea,tt,u,var'),
_BLOCK_TAG_MAP = _toMap('address,applet,blockquote,body,center,dd,dir,div,dl,dt,fieldset,form,frameset,h1,h2,h3,h4,h5,h6,head,hr,html,iframe,ins,isindex,li,map,menu,meta,noframes,noscript,object,ol,p,pre,script,style,table,tbody,td,tfoot,th,thead,title,tr,ul'),
_SINGLE_TAG_MAP = _toMap('area,base,basefont,br,col,frame,hr,img,input,isindex,link,meta,param,embed'),
_STYLE_TAG_MAP = _toMap('b,basefont,big,del,em,font,i,s,small,span,strike,strong,sub,sup,u'),
_CONTROL_TAG_MAP = _toMap('img,table,input,textarea,button'),
_PRE_TAG_MAP = _toMap('pre,style,script'),
_NOSPLIT_TAG_MAP = _toMap('html,head,body,td,tr,table,ol,ul,li'),
_AUTOCLOSE_TAG_MAP = _toMap('colgroup,dd,dt,li,options,p,td,tfoot,th,thead,tr'),
_FILL_ATTR_MAP = _toMap('checked,compact,declare,defer,disabled,ismap,multiple,nohref,noresize,noshade,nowrap,readonly,selected'),
_VALUE_TAG_MAP = _toMap('input,button,textarea,select');
// Locate the <script> tag that loaded kindeditor*.js and return its
// directory (including the trailing slash); '' when no such tag is found.
function _getBasePath() {
	var els = document.getElementsByTagName('script'), src;
	for (var i = 0, len = els.length; i < len; i++) {
		src = els[i].src || '';
		if (/kindeditor[\w\-\.]*\.js/.test(src)) {
			return src.substring(0, src.lastIndexOf('/') + 1);
		}
	}
	return '';
}
K.basePath = _getBasePath();
K.options = {
designMode : true,
fullscreenMode : false,
filterMode : true,
wellFormatMode : true,
shadowMode : true,
loadStyleMode : true,
basePath : K.basePath,
themesPath : K.basePath + 'themes/',
langPath : K.basePath + 'lang/',
pluginsPath : K.basePath + 'plugins/',
themeType : 'default',
langType : 'zh_CN',
urlType : '',
newlineTag : 'p',
resizeType : 2,
syncType : 'form',
pasteType : 2,
dialogAlignType : 'page',
useContextmenu : true,
fullscreenShortcut : false,
bodyClass : 'ke-content',
indentChar : '',
cssPath : '',
cssData : '',
minWidth : 650,
minHeight : 100,
minChangeSize : 50,
items : [
'source', '|', 'undo', 'redo', '|', 'preview', 'print', 'template', 'code', 'cut', 'copy', 'paste',
'plainpaste', 'wordpaste', '|', 'justifyleft', 'justifycenter', 'justifyright',
'justifyfull', 'insertorderedlist', 'insertunorderedlist', 'indent', 'outdent', 'subscript',
'superscript', 'clearhtml', 'quickformat', 'selectall', '|', 'fullscreen', '/',
'formatblock', 'fontname', 'fontsize', '|', 'forecolor', 'hilitecolor', 'bold',
'italic', 'underline', 'strikethrough', 'lineheight', 'removeformat', '|', 'image', 'multiimage',
'flash', 'media', 'insertfile', 'table', 'hr', 'emoticons', 'baidumap', 'pagebreak',
'anchor', 'link', 'unlink', '|', 'about'
],
noDisableItems : ['source', 'fullscreen'],
colorTable : [
['#E53333', '#E56600', '#FF9900', '#64451D', '#DFC5A4', '#FFE500'],
['#009900', '#006600', '#99BB00', '#B8D100', '#60D978', '#00D5FF'],
['#337FE5', '#003399', '#4C33E5', '#9933E5', '#CC33E5', '#EE33EE'],
['#FFFFFF', '#CCCCCC', '#999999', '#666666', '#333333', '#000000']
],
fontSizeTable : ['9px', '10px', '12px', '14px', '16px', '18px', '24px', '32px'],
htmlTags : {
font : ['id', 'class', 'color', 'size', 'face', '.background-color'],
span : [
'id', 'class', '.color', '.background-color', '.font-size', '.font-family', '.background',
'.font-weight', '.font-style', '.text-decoration', '.vertical-align', '.line-height'
],
div : [
'id', 'class', 'align', '.border', '.margin', '.padding', '.text-align', '.color',
'.background-color', '.font-size', '.font-family', '.font-weight', '.background',
'.font-style', '.text-decoration', '.vertical-align', '.margin-left'
],
table: [
'id', 'class', 'border', 'cellspacing', 'cellpadding', 'width', 'height', 'align', 'bordercolor',
'.padding', '.margin', '.border', 'bgcolor', '.text-align', '.color', '.background-color',
'.font-size', '.font-family', '.font-weight', '.font-style', '.text-decoration', '.background',
'.width', '.height', '.border-collapse'
],
'td,th': [
'id', 'class', 'align', 'valign', 'width', 'height', 'colspan', 'rowspan', 'bgcolor',
'.text-align', '.color', '.background-color', '.font-size', '.font-family', '.font-weight',
'.font-style', '.text-decoration', '.vertical-align', '.background', '.border'
],
a : ['id', 'class', 'href', 'target', 'name'],
embed : ['id', 'class', 'src', 'width', 'height', 'type', 'loop', 'autostart', 'quality', '.width', '.height', 'align', 'allowscriptaccess'],
img : ['id', 'class', 'src', 'width', 'height', 'border', 'alt', 'title', 'align', '.width', '.height', '.border'],
'p,ol,ul,li,blockquote,h1,h2,h3,h4,h5,h6' : [
'id', 'class', 'align', '.text-align', '.color', '.background-color', '.font-size', '.font-family', '.background',
'.font-weight', '.font-style', '.text-decoration', '.vertical-align', '.text-indent', '.margin-left'
],
pre : ['id', 'class'],
hr : ['id', 'class', '.page-break-after'],
'br,tbody,tr,strong,b,sub,sup,em,i,u,strike,s,del' : ['id', 'class'],
iframe : ['id', 'class', 'src', 'frameborder', 'width', 'height', '.width', '.height']
},
layout : '<div class="container"><div class="toolbar"></div><div class="edit"></div><div class="statusbar"></div></div>'
};
var _useCapture = false;
var _INPUT_KEY_MAP = _toMap('8,9,13,32,46,48..57,59,61,65..90,106,109..111,188,190..192,219..222');
var _CURSORMOVE_KEY_MAP = _toMap('33..40');
var _CHANGE_KEY_MAP = {};
_each(_INPUT_KEY_MAP, function(key, val) {
_CHANGE_KEY_MAP[key] = val;
});
_each(_CURSORMOVE_KEY_MAP, function(key, val) {
_CHANGE_KEY_MAP[key] = val;
});
// Attach a DOM listener with the W3C API, falling back to IE's
// attachEvent (which requires the 'on' prefix).
function _bindEvent(el, type, fn) {
	if (el.addEventListener){
		el.addEventListener(type, fn, _useCapture);
	} else if (el.attachEvent){
		el.attachEvent('on' + type, fn);
	}
}
// Detach a DOM listener; mirrors _bindEvent's API fallback.
function _unbindEvent(el, type, fn) {
	if (el.removeEventListener){
		el.removeEventListener(type, fn, _useCapture);
	} else if (el.detachEvent){
		el.detachEvent('on' + type, fn);
	}
}
var _EVENT_PROPS = ('altKey,attrChange,attrName,bubbles,button,cancelable,charCode,clientX,clientY,ctrlKey,currentTarget,' +
'data,detail,eventPhase,fromElement,handler,keyCode,metaKey,newValue,offsetX,offsetY,originalTarget,pageX,' +
'pageY,prevValue,relatedNode,relatedTarget,screenX,screenY,shiftKey,srcElement,target,toElement,view,wheelDelta,which').split(',');
function KEvent(el, event) {
this.init(el, event);
}
_extend(KEvent, {
init : function(el, event) {
var self = this, doc = el.ownerDocument || el.document || el;
self.event = event;
_each(_EVENT_PROPS, function(key, val) {
self[val] = event[val];
});
if (!self.target) {
self.target = self.srcElement || doc;
}
if (self.target.nodeType === 3) {
self.target = self.target.parentNode;
}
if (!self.relatedTarget && self.fromElement) {
self.relatedTarget = self.fromElement === self.target ? self.toElement : self.fromElement;
}
if (self.pageX == null && self.clientX != null) {
var d = doc.documentElement, body = doc.body;
self.pageX = self.clientX + (d && d.scrollLeft || body && body.scrollLeft || 0) - (d && d.clientLeft || body && body.clientLeft || 0);
self.pageY = self.clientY + (d && d.scrollTop || body && body.scrollTop || 0) - (d && d.clientTop || body && body.clientTop || 0);
}
if (!self.which && ((self.charCode || self.charCode === 0) ? self.charCode : self.keyCode)) {
self.which = self.charCode || self.keyCode;
}
if (!self.metaKey && self.ctrlKey) {
self.metaKey = self.ctrlKey;
}
if (!self.which && self.button !== undefined) {
self.which = (self.button & 1 ? 1 : (self.button & 2 ? 3 : (self.button & 4 ? 2 : 0)));
}
switch (self.which) {
case 186 :
self.which = 59;
break;
case 187 :
case 107 :
case 43 :
self.which = 61;
break;
case 189 :
case 45 :
self.which = 109;
break;
case 42 :
self.which = 106;
break;
case 47 :
self.which = 111;
break;
case 78 :
self.which = 110;
break;
}
if (self.which >= 96 && self.which <= 105) {
self.which -= 48;
}
},
preventDefault : function() {
var ev = this.event;
if (ev.preventDefault) {
ev.preventDefault();
}
ev.returnValue = false;
},
stopPropagation : function() {
var ev = this.event;
if (ev.stopPropagation) {
ev.stopPropagation();
}
ev.cancelBubble = true;
},
stop : function() {
this.preventDefault();
this.stopPropagation();
}
});
var _eventExpendo = 'kindeditor_' + _TIME, _eventId = 0, _eventData = {};
// Read the per-element event id stored under the expando property.
function _getId(el) {
	return el[_eventExpendo] || null;
}
// Assign a fresh, monotonically increasing id to the element.
function _setId(el) {
	el[_eventExpendo] = ++_eventId;
	return _eventId;
}
// Remove the expando; some hosts (old IE DOM nodes) throw on `delete`,
// so fall back to removeAttribute.
function _removeId(el) {
	try {
		delete el[_eventExpendo];
	} catch(e) {
		if (el.removeAttribute) {
			el.removeAttribute(_eventExpendo);
		}
	}
}
// Attach fn as a handler for the given event type(s) on el.
// type may be a comma-separated list. Handlers for one (el, type) pair
// are stored in _eventData keyed by the id stamped on the element;
// slot 0 of each list is a single native dispatcher that invokes the
// user handlers, so only one native listener exists per type.
function _bind(el, type, fn) {
	if (type.indexOf(',') >= 0) {
		_each(type.split(','), function() {
			_bind(el, this, fn);
		});
		return;
	}
	var id = _getId(el);
	if (!id) {
		id = _setId(el);
	}
	if (_eventData[id] === undefined) {
		_eventData[id] = {};
	}
	var events = _eventData[id][type];
	if (events && events.length > 0) {
		// Re-registering: detach the old native dispatcher first.
		_unbindEvent(el, type, events[0]);
	} else {
		_eventData[id][type] = [];
		_eventData[id].el = el;
	}
	events = _eventData[id][type];
	if (events.length === 0) {
		// Slot 0: the shared native dispatcher. Wraps the raw event in
		// a KEvent and calls every user handler (slots 1..n).
		events[0] = function(e) {
			var kevent = e ? new KEvent(el, e) : undefined;
			_each(events, function(i, event) {
				if (i > 0 && event) {
					event.call(el, kevent);
				}
			});
		};
	}
	if (_inArray(fn, events) < 0) {
		events.push(fn);
	}
	_bindEvent(el, type, events[0]);
}
// Detach handlers. _unbind(el) removes everything bound on el;
// _unbind(el, type) removes all handlers for that type (type may be a
// comma-separated list); _unbind(el, type, fn) removes one handler.
// The _eventData record and the element's id stamp are released once
// nothing remains bound.
function _unbind(el, type, fn) {
	if (type && type.indexOf(',') >= 0) {
		_each(type.split(','), function() {
			_unbind(el, this, fn);
		});
		return;
	}
	var id = _getId(el);
	if (!id) {
		return;
	}
	if (type === undefined) {
		// Remove every native dispatcher and drop all data for el.
		if (id in _eventData) {
			_each(_eventData[id], function(key, events) {
				if (key != 'el' && events.length > 0) {
					_unbindEvent(el, key, events[0]);
				}
			});
			delete _eventData[id];
			_removeId(el);
		}
		return;
	}
	if (!_eventData[id]) {
		return;
	}
	var events = _eventData[id][type];
	if (events && events.length > 0) {
		if (fn === undefined) {
			_unbindEvent(el, type, events[0]);
			delete _eventData[id][type];
		} else {
			_each(events, function(i, event) {
				if (i > 0 && event === fn) {
					events.splice(i, 1);
				}
			});
			if (events.length == 1) {
				// Only the dispatcher (slot 0) is left; detach it too.
				_unbindEvent(el, type, events[0]);
				delete _eventData[id][type];
			}
		}
		// Count remaining keys; 'el' itself is one, so fewer than two
		// means no event types remain and the record can be dropped.
		var count = 0;
		_each(_eventData[id], function() {
			count++;
		});
		if (count < 2) {
			delete _eventData[id];
			_removeId(el);
		}
	}
}
// Trigger all handlers bound to el for the given event type(s).
// type may be a comma-separated list.
function _fire(el, type) {
	if (type.indexOf(',') >= 0) {
		_each(type.split(','), function() {
			_fire(el, this);
		});
		return;
	}
	var id = _getId(el);
	if (!id) {
		return;
	}
	// FIX: guard before dereferencing. The original read
	// _eventData[id][type] first and checked _eventData[id] afterwards,
	// throwing a TypeError when no data exists for this element.
	var data = _eventData[id];
	if (!data) {
		return;
	}
	var events = data[type];
	if (events && events.length > 0) {
		// events[0] is the native dispatcher that runs user handlers.
		events[0]();
	}
}
// Bind fn to the Ctrl+<key> shortcut on el. key may be a character or
// a numeric keycode (two or more digits); Shift/Alt must not be held.
// FIX: removed the unused local 'var self = this;'.
function _ctrl(el, key, fn) {
	key = /^\d{2,}$/.test(key) ? key : key.toUpperCase().charCodeAt(0);
	_bind(el, 'keydown', function(e) {
		if (e.ctrlKey && e.which == key && !e.shiftKey && !e.altKey) {
			fn.call(el);
			e.stop();
		}
	});
}
// Run fn once the DOM is ready. Uses DOMContentLoaded where available;
// on old IE falls back to the doScroll trick (top-level windows only)
// plus readystatechange, with window load as the last resort. fn fires
// at most once and receives the KindEditor namespace.
function _ready(fn) {
	var loaded = false;
	function readyFunc() {
		if (!loaded) {
			loaded = true;
			fn(KindEditor);
		}
	}
	function ieReadyFunc() {
		if (!loaded) {
			try {
				// Throws until the document is ready on old IE.
				document.documentElement.doScroll('left');
			} catch(e) {
				setTimeout(ieReadyFunc, 100);
				return;
			}
			readyFunc();
		}
	}
	function ieReadyStateFunc() {
		if (document.readyState === 'complete') {
			readyFunc();
		}
	}
	if (document.addEventListener) {
		_bind(document, 'DOMContentLoaded', readyFunc);
	} else if (document.attachEvent) {
		_bind(document, 'readystatechange', ieReadyStateFunc);
		var toplevel = false;
		try {
			// Accessing frameElement can throw cross-origin.
			toplevel = window.frameElement == null;
		} catch(e) {}
		if (document.documentElement.doScroll && toplevel) {
			ieReadyFunc();
		}
	}
	_bind(window, 'load', readyFunc);
}
// On old IE, unbind everything at unload to break the DOM/JS circular
// references that would otherwise leak memory.
if (_IE) {
	window.attachEvent('onunload', function() {
		_each(_eventData, function(key, events) {
			if (events.el) {
				_unbind(events.el);
			}
		});
	});
}
// Public aliases for the keyboard-shortcut and DOM-ready helpers.
K.ctrl = _ctrl;
K.ready = _ready;
// Parse an inline-style string into a {property: value} map. Property
// names are trimmed and lower-cased; values are trimmed and colors are
// normalized to hex via _toHex.
function _getCssList(css) {
	var cssRe = /\s*([\w\-]+)\s*:([^;]*)(;|$)/g;
	var result = {};
	var m;
	while ((m = cssRe.exec(css))) {
		result[_trim(m[1].toLowerCase())] = _trim(_toHex(m[2]));
	}
	return result;
}
// Parse the attributes of an opening-tag string into a {name: value}
// map. Handles bare attributes, unquoted, double-quoted and
// single-quoted values; attribute names are lower-cased.
function _getAttrList(tag) {
	var attrRe = /\s+(?:([\w\-:]+)|(?:([\w\-:]+)=([^\s"'<>]+))|(?:([\w\-:"]+)="([^"]*)")|(?:([\w\-:"]+)='([^']*)'))(?=(?:\s|\/|>)+)/g;
	var result = {};
	var m;
	while ((m = attrRe.exec(tag))) {
		var name = (m[1] || m[2] || m[4] || m[6]).toLowerCase();
		var value = '';
		if (m[2]) {
			value = m[3];
		} else if (m[4]) {
			value = m[5];
		} else if (m[7]) {
			value = m[7];
		}
		result[name] = value;
	}
	return result;
}
// Ensure an opening-tag string carries the given CSS class. Appends to
// an existing class attribute (skipping duplicates) or injects a new
// class attribute just before the closing '>'.
function _addClassToTag(tag, className) {
	var hasClassAttr = /\s+class\s*=/.test(tag);
	if (!hasClassAttr) {
		return tag.substr(0, tag.length - 1) + ' class="' + className + '">';
	}
	return tag.replace(/(\s+class=["']?)([^"']*)(["']?[\s>])/, function(whole, head, classes, tail) {
		if ((' ' + classes + ' ').indexOf(' ' + className + ' ') >= 0) {
			return whole;
		}
		if (classes === '') {
			return head + className + tail;
		}
		return head + classes + ' ' + className + tail;
	});
}
// Normalize an inline-style string: parsed with _getCssList and
// re-serialized as 'key:value;' pairs.
function _formatCss(css) {
	var parts = [];
	_each(_getCssList(css), function(key, val) {
		parts.push(key + ':' + val + ';');
	});
	return parts.join('');
}
// Normalize a URL. mode: 'absolute' strips the host when the URL is on
// the current host, 'relative' rewrites it relative to pathname,
// 'domain' keeps the full URL including host; any other mode returns
// the input (after collapsing duplicate slashes). host/pathname default
// to the current location.
function _formatUrl(url, mode, host, pathname) {
	mode = _undef(mode, '').toLowerCase();
	// Collapse accidental double slashes, but leave data: URIs alone.
	if (url.substr(0, 5) != 'data:') {
		url = url.replace(/([^:])\/\//g, '$1/');
	}
	if (_inArray(mode, ['absolute', 'relative', 'domain']) < 0) {
		return url;
	}
	host = host || location.protocol + '//' + location.host;
	if (pathname === undefined) {
		var m = location.pathname.match(/^(\/.*)\//);
		pathname = m ? m[1] : '';
	}
	var match;
	// URLs on another host (or with non-URL schemes) are left untouched.
	if ((match = /^(\w+:\/\/[^\/]*)/.exec(url))) {
		if (match[1] !== host) {
			return url;
		}
	} else if (/^\w+:/.test(url)) {
		return url;
	}
	// Resolve '.' and '..' segments into a clean absolute path.
	function getRealPath(path) {
		var parts = path.split('/'), paths = [];
		for (var i = 0, len = parts.length; i < len; i++) {
			var part = parts[i];
			if (part == '..') {
				if (paths.length > 0) {
					paths.pop();
				}
			} else if (part !== '' && part != '.') {
				paths.push(part);
			}
		}
		return '/' + paths.join('/');
	}
	// Make the URL absolute first.
	if (/^\//.test(url)) {
		url = host + getRealPath(url.substr(1));
	} else if (!/^\w+:\/\//.test(url)) {
		url = host + getRealPath(pathname + '/' + url);
	}
	// Walk up from path until it prefixes url, emitting one '..' per
	// level climbed.
	function getRelativePath(path, depth) {
		if (url.substr(0, path.length) === path) {
			var arr = [];
			for (var i = 0; i < depth; i++) {
				arr.push('..');
			}
			var prefix = '.';
			if (arr.length > 0) {
				prefix += '/' + arr.join('/');
			}
			if (pathname == '/') {
				prefix += '/';
			}
			return prefix + url.substr(path.length);
		} else {
			if ((match = /^(.*)\//.exec(path))) {
				return getRelativePath(match[1], ++depth);
			}
		}
	}
	if (mode === 'relative') {
		// substr(2) drops the leading './' getRelativePath produces.
		url = getRelativePath(host + pathname, 0).substr(2);
	} else if (mode === 'absolute') {
		if (url.substr(0, host.length) === host) {
			url = url.substr(host.length);
		}
	}
	return url;
}
// Sanitize and pretty-print an HTML fragment.
// htmlTags: optional whitelist map of 'tag,tag' -> allowed attributes
//   ('.prop' entries whitelist individual style properties, '*' allows
//   all attributes); tags not in the map are stripped entirely.
// urlType: '', 'absolute', 'relative' or 'domain' — forwarded to
//   _formatUrl for src/href attribute values.
// wellFormatted: when true, block-level tags are indented with
//   indentChar (default: tab).
function _formatHtml(html, htmlTags, urlType, wellFormatted, indentChar) {
	urlType = urlType || '';
	wellFormatted = _undef(wellFormatted, false);
	indentChar = _undef(indentChar, '\t');
	var fontSizeList = 'xx-small,x-small,small,medium,large,x-large,xx-large'.split(',');
	// Inside <pre>, turn <br> back into real newlines.
	html = html.replace(/(<(?:pre|pre\s[^>]*)>)([\s\S]*?)(<\/pre>)/ig, function($0, $1, $2, $3) {
		return $1 + $2.replace(/<(?:br|br\s[^>]*)>/ig, '\n') + $3;
	});
	html = html.replace(/<(?:br|br\s[^>]*)\s*\/?>\s*<\/p>/ig, '</p>');
	html = html.replace(/(<(?:p|p\s[^>]*)>)\s*(<\/p>)/ig, '$1<br />$2');
	html = html.replace(/\u200B/g, '');
	// FIX: the copyright sign was being replaced with itself (a no-op,
	// presumably entity-decoding damage); encode it as an entity.
	html = html.replace(/\u00A9/g, '&copy;');
	var htmlTagMap = {};
	if (htmlTags) {
		_each(htmlTags, function(key, val) {
			var arr = key.split(',');
			for (var i = 0, len = arr.length; i < len; i++) {
				htmlTagMap[arr[i]] = _toMap(val);
			}
		});
		if (!htmlTagMap.script) {
			html = html.replace(/(<(?:script|script\s[^>]*)>)([\s\S]*?)(<\/script>)/ig, '');
		}
		if (!htmlTagMap.style) {
			html = html.replace(/(<(?:style|style\s[^>]*)>)([\s\S]*?)(<\/style>)/ig, '');
		}
	}
	// Matches each tag with surrounding whitespace:
	// $1 leading ws, $2 '/', $3 tag name, $4 attributes, $5 self-close
	// slash, $6 trailing ws.
	var re = /([ \t\n\r]*)<(\/)?([\w\-:]+)((?:\s+|(?:\s+[\w\-:]+)|(?:\s+[\w\-:]+=[^\s"'<>]+)|(?:\s+[\w\-:"]+="[^"]*")|(?:\s+[\w\-:"]+='[^']*'))*)(\/)?>([ \t\n\r]*)/g;
	var tagStack = [];
	html = html.replace(re, function($0, $1, $2, $3, $4, $5, $6) {
		var full = $0,
			startNewline = $1 || '',
			startSlash = $2 || '',
			tagName = $3.toLowerCase(),
			attr = $4 || '',
			endSlash = $5 ? ' ' + $5 : '',
			endNewline = $6 || '';
		if (htmlTags && !htmlTagMap[tagName]) {
			return '';
		}
		if (endSlash === '' && _SINGLE_TAG_MAP[tagName]) {
			endSlash = ' /';
		}
		if (_INLINE_TAG_MAP[tagName]) {
			if (startNewline) {
				startNewline = ' ';
			}
			if (endNewline) {
				endNewline = ' ';
			}
		}
		if (_PRE_TAG_MAP[tagName]) {
			if (startSlash) {
				endNewline = '\n';
			} else {
				startNewline = '\n';
			}
		}
		if (wellFormatted && tagName == 'br') {
			endNewline = '\n';
		}
		if (_BLOCK_TAG_MAP[tagName] && !_PRE_TAG_MAP[tagName]) {
			if (wellFormatted) {
				// Maintain a tag stack to compute indent depth.
				if (startSlash && tagStack.length > 0 && tagStack[tagStack.length - 1] === tagName) {
					tagStack.pop();
				} else {
					tagStack.push(tagName);
				}
				startNewline = '';
				endNewline = '';
				for (var i = 0, len = startSlash ? tagStack.length : tagStack.length - 1; i < len; i++) {
					startNewline += indentChar;
					if (!startSlash) {
						endNewline += indentChar;
					}
				}
				if (endSlash) {
					tagStack.pop();
				} else if (!startSlash) {
					endNewline += indentChar;
				}
			} else {
				startNewline = endNewline = '';
			}
		}
		if (attr !== '') {
			var attrMap = _getAttrList(full);
			if (tagName === 'font') {
				// Convert legacy <font> attributes into CSS.
				var fontStyleMap = {}, fontStyle = '';
				_each(attrMap, function(key, val) {
					if (key === 'color') {
						fontStyleMap.color = val;
						delete attrMap[key];
					}
					if (key === 'size') {
						fontStyleMap['font-size'] = fontSizeList[parseInt(val, 10) - 1] || '';
						delete attrMap[key];
					}
					if (key === 'face') {
						fontStyleMap['font-family'] = val;
						delete attrMap[key];
					}
					if (key === 'style') {
						fontStyle = val;
					}
				});
				if (fontStyle && !/;$/.test(fontStyle)) {
					fontStyle += ';';
				}
				_each(fontStyleMap, function(key, val) {
					if (val === '') {
						return;
					}
					if (/\s/.test(val)) {
						val = "'" + val + "'";
					}
					fontStyle += key + ':' + val + ';';
				});
				attrMap.style = fontStyle;
			}
			_each(attrMap, function(key, val) {
				if (_FILL_ATTR_MAP[key]) {
					attrMap[key] = key;
				}
				if (_inArray(key, ['src', 'href']) >= 0) {
					attrMap[key] = _formatUrl(val, urlType);
				}
				// Drop non-whitelisted attributes, contenteditable on
				// body, and internal kindeditor_* markers.
				if (htmlTags && key !== 'style' && !htmlTagMap[tagName]['*'] && !htmlTagMap[tagName][key] ||
					tagName === 'body' && key === 'contenteditable' ||
					/^kindeditor_\d+$/.test(key)) {
					delete attrMap[key];
				}
				if (key === 'style' && val !== '') {
					var styleMap = _getCssList(val);
					_each(styleMap, function(k, v) {
						if (htmlTags && !htmlTagMap[tagName].style && !htmlTagMap[tagName]['.' + k]) {
							delete styleMap[k];
						}
					});
					var style = '';
					_each(styleMap, function(k, v) {
						style += k + ':' + v + ';';
					});
					attrMap.style = style;
				}
			});
			attr = '';
			_each(attrMap, function(key, val) {
				if (key === 'style' && val === '') {
					return;
				}
				// FIX: escape double quotes in attribute values; the
				// original replaced '"' with itself (entity-decoding
				// damage), corrupting quoted values.
				val = val.replace(/"/g, '&quot;');
				attr += ' ' + key + '="' + val + '"';
			});
		}
		// FIX: restore the dropped conditional — 'tagName = span' was
		// left dangling without its 'if (tagName === font)' wrapper,
		// which also broke the surrounding brace structure.
		if (tagName === 'font') {
			tagName = 'span';
		}
		return startNewline + '<' + startSlash + tagName + attr + endSlash + '>' + endNewline;
	});
	// Protect newlines inside <pre> from the blank-line collapse below.
	html = html.replace(/(<(?:pre|pre\s[^>]*)>)([\s\S]*?)(<\/pre>)/ig, function($0, $1, $2, $3) {
		return $1 + $2.replace(/\n/g, '<span id="__kindeditor_pre_newline__">\n') + $3;
	});
	html = html.replace(/\n\s*\n/g, '\n');
	html = html.replace(/<span id="__kindeditor_pre_newline__">\n/g, '\n');
	html = html.replace(/></g, '>\n<');
	return _trim(html);
}
// Strip Microsoft Word cruft from pasted HTML (meta tags, conditional
// comments/doctype, style/script blocks, w:/o: namespaced markup, xml
// islands), then run the result through _formatHtml with the given
// whitelist.
function _clearMsWord(html, htmlTags) {
	html = html.replace(/<meta[\s\S]*?>/ig, '')
		.replace(/<![\s\S]*?>/ig, '')
		.replace(/<style[^>]*>[\s\S]*?<\/style>/ig, '')
		.replace(/<script[^>]*>[\s\S]*?<\/script>/ig, '')
		.replace(/<w:[^>]+>[\s\S]*?<\/w:[^>]+>/ig, '')
		.replace(/<o:[^>]+>[\s\S]*?<\/o:[^>]+>/ig, '')
		.replace(/<xml>[\s\S]*?<\/xml>/ig, '')
		.replace(/<(?:table|td)[^>]*>/ig, function(full) {
			// Word emits border-bottom where a full border is meant.
			return full.replace(/border-bottom:([#\w\s]+)/ig, 'border:$1');
		});
	return _formatHtml(html, htmlTags);
}
// Map a media file URL to the plugin MIME type used to embed it.
// RealMedia and Flash get their dedicated plugins; everything else
// falls back to Windows Media.
function _mediaType(src) {
	if (/\.(rm|rmvb)(\?|$)/i.test(src)) {
		return 'audio/x-pn-realaudio-plugin';
	}
	return /\.(swf|flv)(\?|$)/i.test(src) ?
		'application/x-shockwave-flash' : 'video/x-ms-asf-plugin';
}
// Map a media MIME type to the placeholder CSS class shown in the
// editable area.
function _mediaClass(type) {
	if (/realaudio/i.test(type)) {
		return 'ke-rm';
	}
	return /flash/i.test(type) ? 'ke-flash' : 'ke-media';
}
// Parse the escaped <embed> markup stored on a placeholder image back
// into an attribute map. NOTE(review): relies on the deprecated global
// unescape(); the input is assumed to be escape()-encoded (see
// _mediaImg).
function _mediaAttrs(srcTag) {
	return _getAttrList(unescape(srcTag));
}
// Serialize an attribute map into an '<embed ... />' tag string.
function _mediaEmbed(attrs) {
	var html = '<embed ';
	_each(attrs, function(name, value) {
		html += name + '="' + value + '" ';
	});
	return html + '/>';
}
// Build the placeholder <img> that stands in for a media element
// inside the editable area. The real embed markup is escape()d into
// the data-ke-tag attribute so it can be restored on save (see
// _mediaAttrs).
function _mediaImg(blankPath, attrs) {
	var type = attrs.type || _mediaType(attrs.src);
	var embedTag = _mediaEmbed(attrs);
	var style = '';
	if (attrs.width > 0) {
		style += 'width:' + attrs.width + 'px;';
	}
	if (attrs.height > 0) {
		style += 'height:' + attrs.height + 'px;';
	}
	var html = '<img class="' + _mediaClass(type) + '" src="' + blankPath + '" ';
	if (style !== '') {
		html += 'style="' + style + '" ';
	}
	return html + 'data-ke-tag="' + escape(embedTag) + '" alt="" />';
}
// Tiny template engine (John Resig's micro-templating style).
// '<% code %>' blocks run as JavaScript, '<%= expr %>' interpolates,
// everything else is literal text; print(...) appends inside code
// blocks. Returns the rendered string when data is given, otherwise
// the compiled render function.
function _tmpl(str, data) {
	var fn = new Function("obj",
		"var p=[],print=function(){p.push.apply(p,arguments);};" +
		"with(obj){p.push('" +
		str.replace(/[\r\t\n]/g, " ")
			.split("<%").join("\t")
			.replace(/((^|%>)[^\t]*)'/g, "$1\r")
			.replace(/\t=(.*?)%>/g, "',$1,'")
			.split("\t").join("');")
			.split("%>").join("p.push('")
			.split("\r").join("\\'") + "');}return p.join('');");
	return data ? fn(data) : fn;
}
// Export the HTML/URL/media utilities on the K namespace.
K.formatUrl = _formatUrl;
K.formatHtml = _formatHtml;
K.getCssList = _getCssList;
K.getAttrList = _getAttrList;
K.mediaType = _mediaType;
K.mediaAttrs = _mediaAttrs;
K.mediaEmbed = _mediaEmbed;
K.mediaImg = _mediaImg;
K.clearMsWord = _clearMsWord;
K.tmpl = _tmpl;
// True when nodeA contains nodeB. A document node (nodeType 9)
// contains every non-document node; otherwise nodeB's parent chain is
// walked looking for nodeA.
function _contains(nodeA, nodeB) {
	if (nodeA.nodeType == 9 && nodeB.nodeType != 9) {
		return true;
	}
	var ancestor = nodeB;
	while ((ancestor = ancestor.parentNode)) {
		if (ancestor == nodeA) {
			return true;
		}
	}
	return false;
}
// Feature test: IE < 8 maps setAttribute('className') onto the class
// attribute. _GET_SET_ATTRIBUTE is true when get/setAttribute behave
// per the standard (i.e. the bogus 'className' attribute does NOT
// change el.className).
var _getSetAttrDiv = document.createElement('div');
_getSetAttrDiv.setAttribute('className', 't');
var _GET_SET_ATTRIBUTE = _getSetAttrDiv.className !== 't';
// Read an attribute value from an element, or null when absent.
// On IE < 8 (broken get/setAttribute) the element is serialized via a
// scratch <div> and the attribute parsed back out of its HTML;
// elsewhere getAttribute is used. 'style' values are normalized with
// _formatCss.
function _getAttr(el, key) {
	key = key.toLowerCase();
	var val = null;
	if (!_GET_SET_ATTRIBUTE && el.nodeName.toLowerCase() != 'script') {
		var div = el.ownerDocument.createElement('div');
		div.appendChild(el.cloneNode(false));
		var list = _getAttrList(_unescape(div.innerHTML));
		if (key in list) {
			val = list[key];
		}
	} else {
		try {
			// 2 = IE flag: return the value exactly as set.
			val = el.getAttribute(key, 2);
		} catch(e) {
			val = el.getAttribute(key, 1);
		}
	}
	if (key === 'style' && val !== null) {
		val = _formatCss(val);
	}
	return val;
}
// Minimal CSS selector engine. Supports comma-separated selectors,
// descendant and '>' child combinators, and per-step tag, #id, .class,
// [attr] and [attr=value] forms. Returns an array of matching elements
// (de-duplicated across comma branches).
function _queryAll(expr, root) {
	var exprList = expr.split(',');
	if (exprList.length > 1) {
		// Union of each comma branch, de-duplicated.
		var mergedResults = [];
		_each(exprList, function() {
			_each(_queryAll(this, root), function() {
				if (_inArray(this, mergedResults) < 0) {
					mergedResults.push(this);
				}
			});
		});
		return mergedResults;
	}
	root = root || document;
	// NOTE: shadows the global escape(); backslash-escapes every
	// non-word character for comparison purposes.
	function escape(str) {
		if (typeof str != 'string') {
			return str;
		}
		return str.replace(/([^\w\-])/g, '\\$1');
	}
	function stripslashes(str) {
		return str.replace(/\\/g, '');
	}
	function cmpTag(tagA, tagB) {
		return tagA === '*' || tagA.toLowerCase() === escape(tagB.toLowerCase());
	}
	// #id lookup, constrained by tag and containment in root.
	function byId(id, tag, root) {
		var arr = [],
			doc = root.ownerDocument || root,
			el = doc.getElementById(stripslashes(id));
		if (el) {
			if (cmpTag(tag, el.nodeName) && _contains(root, el)) {
				arr.push(el);
			}
		}
		return arr;
	}
	// .class lookup with getElementsByClassName / querySelectorAll /
	// manual-scan fallbacks for old browsers.
	function byClass(className, tag, root) {
		var doc = root.ownerDocument || root, arr = [], els, i, len, el;
		if (root.getElementsByClassName) {
			els = root.getElementsByClassName(stripslashes(className));
			for (i = 0, len = els.length; i < len; i++) {
				el = els[i];
				if (cmpTag(tag, el.nodeName)) {
					arr.push(el);
				}
			}
		} else if (doc.querySelectorAll) {
			els = doc.querySelectorAll((root.nodeName !== '#document' ? root.nodeName + ' ' : '') + tag + '.' + className);
			for (i = 0, len = els.length; i < len; i++) {
				el = els[i];
				if (_contains(root, el)) {
					arr.push(el);
				}
			}
		} else {
			els = root.getElementsByTagName(tag);
			className = ' ' + className + ' ';
			for (i = 0, len = els.length; i < len; i++) {
				el = els[i];
				if (el.nodeType == 1) {
					var cls = el.className;
					if (cls && (' ' + cls + ' ').indexOf(className) > -1) {
						arr.push(el);
					}
				}
			}
		}
		return arr;
	}
	// [name=...] lookup via getElementsByName, restricted to elements
	// with a real name attribute node.
	function byName(name, tag, root) {
		var arr = [], doc = root.ownerDocument || root,
			els = doc.getElementsByName(stripslashes(name)), el;
		for (var i = 0, len = els.length; i < len; i++) {
			el = els[i];
			if (cmpTag(tag, el.nodeName) && _contains(root, el)) {
				if (el.getAttributeNode('name')) {
					arr.push(el);
				}
			}
		}
		return arr;
	}
	// [attr] (val === null) and [attr=val] lookup by scanning tags.
	function byAttr(key, val, tag, root) {
		var arr = [], els = root.getElementsByTagName(tag), el;
		for (var i = 0, len = els.length; i < len; i++) {
			el = els[i];
			if (el.nodeType == 1) {
				if (val === null) {
					if (_getAttr(el, key) !== null) {
						arr.push(el);
					}
				} else {
					if (val === escape(_getAttr(el, key))) {
						arr.push(el);
					}
				}
			}
		}
		return arr;
	}
	// Evaluate one simple-selector step under root.
	function select(expr, root) {
		var arr = [], matches;
		matches = /^((?:\\.|[^.#\s\[<>])+)/.exec(expr);
		var tag = matches ? matches[1] : '*';
		if ((matches = /#((?:[\w\-]|\\.)+)$/.exec(expr))) {
			arr = byId(matches[1], tag, root);
		} else if ((matches = /\.((?:[\w\-]|\\.)+)$/.exec(expr))) {
			arr = byClass(matches[1], tag, root);
		} else if ((matches = /\[((?:[\w\-]|\\.)+)\]/.exec(expr))) {
			arr = byAttr(matches[1].toLowerCase(), null, tag, root);
		} else if ((matches = /\[((?:[\w\-]|\\.)+)\s*=\s*['"]?((?:\\.|[^'"]+)+)['"]?\]/.exec(expr))) {
			var key = matches[1].toLowerCase(), val = matches[2];
			if (key === 'id') {
				arr = byId(val, tag, root);
			} else if (key === 'class') {
				arr = byClass(val, tag, root);
			} else if (key === 'name') {
				arr = byName(val, tag, root);
			} else {
				arr = byAttr(key, val, tag, root);
			}
		} else {
			var els = root.getElementsByTagName(tag), el;
			for (var i = 0, len = els.length; i < len; i++) {
				el = els[i];
				if (el.nodeType == 1) {
					arr.push(el);
				}
			}
		}
		return arr;
	}
	// Tokenize into steps and combinators ('>').
	var parts = [], arr, re = /((?:\\.|[^\s>])+|[\s>])/g;
	while ((arr = re.exec(expr))) {
		if (arr[1] !== ' ') {
			parts.push(arr[1]);
		}
	}
	var results = [];
	if (parts.length == 1) {
		return select(parts[0], root);
	}
	var isChild = false, part, els, subResults, val, v, i, j, k, length, len, l;
	// FIX: the loop previously used 'lenth', a typo that leaked an
	// implicit global instead of using the declared 'length'.
	for (i = 0, length = parts.length; i < length; i++) {
		part = parts[i];
		if (part === '>') {
			isChild = true;
			continue;
		}
		if (i > 0) {
			// Narrow the previous step's results by the current step.
			els = [];
			for (j = 0, len = results.length; j < len; j++) {
				val = results[j];
				subResults = select(part, val);
				for (k = 0, l = subResults.length; k < l; k++) {
					v = subResults[k];
					if (isChild) {
						if (val === v.parentNode) {
							els.push(v);
						}
					} else {
						els.push(v);
					}
				}
			}
			results = els;
		} else {
			results = select(part, root);
		}
		if (results.length === 0) {
			return [];
		}
	}
	return results;
}
// First match of a selector under root, or null when nothing matches.
function _query(expr, root) {
	var matches = _queryAll(expr, root);
	return matches.length > 0 ? matches[0] : null;
}
// Public selector API.
K.query = _query;
K.queryAll = _queryAll;
// Resolve any K()-accepted expression to its first raw DOM node.
function _get(val) {
	return K(val)[0];
}
// Owning document of a node (the node itself for document objects, via
// .document for window objects); defaults to the current document.
function _getDoc(node) {
	if (!node) {
		return document;
	}
	return node.ownerDocument || node.document || node;
}
// Window that owns a node; parentWindow is the legacy IE spelling of
// defaultView. Defaults to the current window.
function _getWin(node) {
	if (!node) {
		return window;
	}
	var doc = _getDoc(node);
	return doc.parentWindow || doc.defaultView;
}
// Replace an element's contents with the given HTML. A temporary
// leading <img> works around IE quirks with innerHTML; if innerHTML
// throws entirely (e.g. table fragments on IE), fall back to parsing
// the HTML via K() and appending the resulting nodes.
function _setHtml(el, html) {
	if (el.nodeType != 1) {
		return;
	}
	var doc = _getDoc(el);
	try {
		el.innerHTML = '<img id="__kindeditor_temp_tag__" width="0" height="0" style="display:none;" />' + html;
		var temp = doc.getElementById('__kindeditor_temp_tag__');
		temp.parentNode.removeChild(temp);
	} catch(e) {
		K(el).empty();
		K('@' + html, doc).each(function() {
			el.appendChild(this);
		});
	}
}
// True when el's className contains cls as a space-separated token.
function _hasClass(el, cls) {
	return _inString(cls, el.className, ' ');
}
// setAttribute wrapper; IE < 8 needs 'className' instead of 'class'.
// The value is always stringified.
function _setAttr(el, key, val) {
	if (_IE && _V < 8 && key.toLowerCase() == 'class') {
		key = 'className';
	}
	el.setAttribute(key, '' + val);
}
// removeAttribute wrapper; clears the value first so IE < 8 drops the
// 'className' alias correctly before the attribute itself is removed.
function _removeAttr(el, key) {
	if (_IE && _V < 8 && key.toLowerCase() == 'class') {
		key = 'className';
	}
	_setAttr(el, key, '');
	el.removeAttribute(key);
}
// Lower-cased nodeName of a node; '' for null or nameless input.
function _getNodeName(node) {
	var hasName = node && node.nodeName;
	return hasName ? node.nodeName.toLowerCase() : '';
}
// Resolved (computed) CSS value of key on el, preferring the standard
// getComputedStyle API with IE's currentStyle as fallback; '' when
// neither is available.
// FIX: removed the unused local 'var self = this'.
function _computedCss(el, key) {
	var win = _getWin(el), camelKey = _toCamel(key), val = '';
	if (win.getComputedStyle) {
		var style = win.getComputedStyle(el, null);
		val = style[camelKey] || style.getPropertyValue(key) || el.style[camelKey];
	} else if (el.currentStyle) {
		val = el.currentStyle[camelKey] || el.style[camelKey];
	}
	return val;
}
// True for form elements whose state lives in .value (per
// _VALUE_TAG_MAP, e.g. input/textarea/select/button).
function _hasVal(node) {
	return !!_VALUE_TAG_MAP[_getNodeName(node)];
}
// Root element used for document metrics: body in quirks mode,
// documentElement in standards mode.
function _docElement(doc) {
	doc = doc || document;
	return _QUIRKS ? doc.body : doc.documentElement;
}
// Full scrollable height of a document in pixels.
function _docHeight(doc) {
	var el = _docElement(doc);
	return Math.max(el.scrollHeight, el.clientHeight);
}
// Full scrollable width of a document in pixels.
function _docWidth(doc) {
	var el = _docElement(doc);
	return Math.max(el.scrollWidth, el.clientWidth);
}
// Current scroll offsets of a document's window as {x, y}. IE/Opera
// expose them on the root element; other browsers on
// window.scrollX/scrollY.
function _getScrollPos(doc) {
	doc = doc || document;
	var x, y;
	if (_IE || _OPERA) {
		x = _docElement(doc).scrollLeft;
		y = _docElement(doc).scrollTop;
	} else {
		x = _getWin(doc).scrollX;
		y = _getWin(doc).scrollY;
	}
	return {x : x, y : y};
}
// Wrapper around one or more DOM nodes — the node-list class returned
// by K(...).
function KNode(node) {
	this.init(node);
}
// KNode prototype: a jQuery-like API over the wrapped node list.
// Getter forms return values from the first node; setter forms apply
// to every node and return the KNode for chaining.
_extend(KNode, {
	// Populate indexed slots [0..n-1] plus the length/doc/name/type/win
	// shortcuts (all derived from the first node).
	init : function(node) {
		var self = this;
		node = _isArray(node) ? node : [node];
		var length = 0;
		for (var i = 0, len = node.length; i < len; i++) {
			if (node[i]) {
				self[i] = node[i].constructor === KNode ? node[i][0] : node[i];
				length++;
			}
		}
		self.length = length;
		self.doc = _getDoc(self[0]);
		self.name = _getNodeName(self[0]);
		self.type = self.length > 0 ? self[0].nodeType : null;
		self.win = _getWin(self[0]);
	},
	// Iterate the list; returning false from fn stops early.
	each : function(fn) {
		var self = this;
		for (var i = 0; i < self.length; i++) {
			if (fn.call(self[i], i, self[i]) === false) {
				return self;
			}
		}
		return self;
	},
	bind : function(type, fn) {
		this.each(function() {
			_bind(this, type, fn);
		});
		return this;
	},
	unbind : function(type, fn) {
		this.each(function() {
			_unbind(this, type, fn);
		});
		return this;
	},
	// Fire only on the first node.
	fire : function(type) {
		if (this.length < 1) {
			return this;
		}
		_fire(this[0], type);
		return this;
	},
	hasAttr : function(key) {
		if (this.length < 1) {
			return false;
		}
		return !!_getAttr(this[0], key);
	},
	// attr() -> full map, attr(key) -> value (''), attr(key, val) or
	// attr({..}) -> setter.
	attr : function(key, val) {
		var self = this;
		if (key === undefined) {
			return _getAttrList(self.outer());
		}
		if (typeof key === 'object') {
			_each(key, function(k, v) {
				self.attr(k, v);
			});
			return self;
		}
		if (val === undefined) {
			val = self.length < 1 ? null : _getAttr(self[0], key);
			return val === null ? '' : val;
		}
		self.each(function() {
			_setAttr(this, key, val);
		});
		return self;
	},
	removeAttr : function(key) {
		this.each(function() {
			_removeAttr(this, key);
		});
		return this;
	},
	// Raw node access: get(i) -> node, eq(i) -> wrapped KNode.
	get : function(i) {
		if (this.length < 1) {
			return null;
		}
		return this[i || 0];
	},
	eq : function(i) {
		if (this.length < 1) {
			return null;
		}
		return this[i] ? new KNode(this[i]) : null;
	},
	hasClass : function(cls) {
		if (this.length < 1) {
			return false;
		}
		return _hasClass(this[0], cls);
	},
	addClass : function(cls) {
		this.each(function() {
			if (!_hasClass(this, cls)) {
				this.className = _trim(this.className + ' ' + cls);
			}
		});
		return this;
	},
	removeClass : function(cls) {
		this.each(function() {
			if (_hasClass(this, cls)) {
				this.className = _trim(this.className.replace(new RegExp('(^|\\s)' + cls + '(\\s|$)'), ' '));
			}
		});
		return this;
	},
	// html() returns normalized innerHTML; html(val) sets it.
	html : function(val) {
		var self = this;
		if (val === undefined) {
			if (self.length < 1 || self.type != 1) {
				return '';
			}
			return _formatHtml(self[0].innerHTML);
		}
		self.each(function() {
			_setHtml(this, val);
		});
		return self;
	},
	text : function() {
		var self = this;
		if (self.length < 1) {
			return '';
		}
		return _IE ? self[0].innerText : self[0].textContent;
	},
	hasVal : function() {
		if (this.length < 1) {
			return false;
		}
		return _hasVal(this[0]);
	},
	// Form value getter/setter; falls back to the value attribute for
	// elements without a .value property.
	val : function(val) {
		var self = this;
		if (val === undefined) {
			if (self.length < 1) {
				return '';
			}
			return self.hasVal() ? self[0].value : self.attr('value');
		} else {
			self.each(function() {
				if (_hasVal(this)) {
					this.value = val;
				} else {
					_setAttr(this, 'value' , val);
				}
			});
			return self;
		}
	},
	// css() -> inline style map, css(key) -> inline-or-computed value,
	// css(key, val) / css({..}) -> setter.
	css : function(key, val) {
		var self = this;
		if (key === undefined) {
			return _getCssList(self.attr('style'));
		}
		if (typeof key === 'object') {
			_each(key, function(k, v) {
				self.css(k, v);
			});
			return self;
		}
		if (val === undefined) {
			if (self.length < 1) {
				return '';
			}
			return self[0].style[_toCamel(key)] || _computedCss(self[0], key) || '';
		}
		self.each(function() {
			this.style[_toCamel(key)] = val;
		});
		return self;
	},
	width : function(val) {
		var self = this;
		if (val === undefined) {
			if (self.length < 1) {
				return 0;
			}
			return self[0].offsetWidth;
		}
		return self.css('width', _addUnit(val));
	},
	height : function(val) {
		var self = this;
		if (val === undefined) {
			if (self.length < 1) {
				return 0;
			}
			return self[0].offsetHeight;
		}
		return self.css('height', _addUnit(val));
	},
	// Cross-browser opacity (IE alpha filter fallback).
	opacity : function(val) {
		this.each(function() {
			if (this.style.opacity === undefined) {
				this.style.filter = val == 1 ? '' : 'alpha(opacity=' + (val * 100) + ')';
			} else {
				this.style.opacity = val == 1 ? '' : val;
			}
		});
		return this;
	},
	// Per-node data storage under a namespaced expando property.
	data : function(key, val) {
		var self = this;
		key = 'kindeditor_data_' + key;
		if (val === undefined) {
			if (self.length < 1) {
				return null;
			}
			return self[0][key];
		}
		this.each(function() {
			this[key] = val;
		});
		return self;
	},
	// Document-relative position of the first node as {x, y}.
	pos : function() {
		var self = this, node = self[0], x = 0, y = 0;
		if (node) {
			if (node.getBoundingClientRect) {
				var box = node.getBoundingClientRect(),
					pos = _getScrollPos(self.doc);
				x = box.left + pos.x;
				y = box.top + pos.y;
			} else {
				// Fallback: accumulate offsets up the offsetParent chain.
				while (node) {
					x += node.offsetLeft;
					y += node.offsetTop;
					node = node.offsetParent;
				}
			}
		}
		return {x : _round(x), y : _round(y)};
	},
	clone : function(bool) {
		if (this.length < 1) {
			return new KNode([]);
		}
		return new KNode(this[0].cloneNode(bool));
	},
	append : function(expr) {
		this.each(function() {
			if (this.appendChild) {
				this.appendChild(_get(expr));
			}
		});
		return this;
	},
	appendTo : function(expr) {
		this.each(function() {
			_get(expr).appendChild(this);
		});
		return this;
	},
	before : function(expr) {
		this.each(function() {
			this.parentNode.insertBefore(_get(expr), this);
		});
		return this;
	},
	after : function(expr) {
		this.each(function() {
			if (this.nextSibling) {
				this.parentNode.insertBefore(_get(expr), this.nextSibling);
			} else {
				this.parentNode.appendChild(_get(expr));
			}
		});
		return this;
	},
	// Replace each node (unbinding its events) and return the new set.
	replaceWith : function(expr) {
		var nodes = [];
		this.each(function(i, node) {
			_unbind(node);
			var newNode = _get(expr);
			node.parentNode.replaceChild(newNode, node);
			nodes.push(newNode);
		});
		return K(nodes);
	},
	empty : function() {
		var self = this;
		self.each(function(i, node) {
			var child = node.firstChild;
			while (child) {
				if (!node.parentNode) {
					return;
				}
				var next = child.nextSibling;
				child.parentNode.removeChild(child);
				child = next;
			}
		});
		return self;
	},
	// Remove nodes from the DOM; keepChilds hoists children in place of
	// the removed node first.
	remove : function(keepChilds) {
		var self = this;
		self.each(function(i, node) {
			if (!node.parentNode) {
				return;
			}
			_unbind(node);
			if (keepChilds) {
				var child = node.firstChild;
				while (child) {
					var next = child.nextSibling;
					node.parentNode.insertBefore(child, node);
					child = next;
				}
			}
			node.parentNode.removeChild(node);
			delete self[i];
		});
		self.length = 0;
		return self;
	},
	// show/hide toggle display, remembering the original value.
	show : function(val) {
		var self = this;
		if (val === undefined) {
			val = self._originDisplay || '';
		}
		if (self.css('display') != 'none') {
			return self;
		}
		return self.css('display', val);
	},
	hide : function() {
		var self = this;
		if (self.length < 1) {
			return self;
		}
		self._originDisplay = self[0].style.display;
		return self.css('display', 'none');
	},
	// Normalized outerHTML of the first node.
	outer : function() {
		var self = this;
		if (self.length < 1) {
			return '';
		}
		var div = self.doc.createElement('div'), html;
		div.appendChild(self[0].cloneNode(true));
		html = _formatHtml(div.innerHTML);
		div = null;
		return html;
	},
	isSingle : function() {
		return !!_SINGLE_TAG_MAP[this.name];
	},
	isInline : function() {
		return !!_INLINE_TAG_MAP[this.name];
	},
	isBlock : function() {
		return !!_BLOCK_TAG_MAP[this.name];
	},
	isStyle : function() {
		return !!_STYLE_TAG_MAP[this.name];
	},
	isControl : function() {
		return !!_CONTROL_TAG_MAP[this.name];
	},
	contains : function(otherNode) {
		if (this.length < 1) {
			return false;
		}
		return _contains(this[0], _get(otherNode));
	},
	parent : function() {
		if (this.length < 1) {
			return null;
		}
		var node = this[0].parentNode;
		return node ? new KNode(node) : null;
	},
	// Children of the first node, skipping whitespace-only text nodes.
	children : function() {
		if (this.length < 1) {
			return new KNode([]);
		}
		var list = [], child = this[0].firstChild;
		while (child) {
			if (child.nodeType != 3 || _trim(child.nodeValue) !== '') {
				list.push(child);
			}
			child = child.nextSibling;
		}
		return new KNode(list);
	},
	first : function() {
		var list = this.children();
		return list.length > 0 ? list.eq(0) : null;
	},
	last : function() {
		var list = this.children();
		return list.length > 0 ? list.eq(list.length - 1) : null;
	},
	// Index of the first node among its siblings (all node types).
	index : function() {
		if (this.length < 1) {
			return -1;
		}
		var i = -1, sibling = this[0];
		while (sibling) {
			i++;
			sibling = sibling.previousSibling;
		}
		return i;
	},
	prev : function() {
		if (this.length < 1) {
			return null;
		}
		var node = this[0].previousSibling;
		return node ? new KNode(node) : null;
	},
	next : function() {
		if (this.length < 1) {
			return null;
		}
		var node = this[0].nextSibling;
		return node ? new KNode(node) : null;
	},
	// Depth-first traversal of the first node's subtree; order=false
	// walks in reverse. fn returning false aborts the walk.
	scan : function(fn, order) {
		if (this.length < 1) {
			return;
		}
		order = (order === undefined) ? true : order;
		function walk(node) {
			var n = order ? node.firstChild : node.lastChild;
			while (n) {
				var next = order ? n.nextSibling : n.previousSibling;
				if (fn(n) === false) {
					return false;
				}
				if (walk(n) === false) {
					return false;
				}
				n = next;
			}
		}
		walk(this[0]);
		return this;
	}
});
// Generate jQuery-style event shortcuts on KNode: el.click(fn) binds a
// handler, el.click() fires the event.
_each(('blur,focus,focusin,focusout,load,resize,scroll,unload,click,dblclick,' +
	'mousedown,mouseup,mousemove,mouseover,mouseout,mouseenter,mouseleave,' +
	'change,select,submit,keydown,keypress,keyup,error,contextmenu').split(','), function(i, type) {
	KNode.prototype[type] = function(fn) {
		return fn ? this.bind(type, fn) : this.fire(type);
	};
});
var _K = K;
// Rebind K as the node factory: accepts a selector string, an HTML
// string (a leading '@' forces HTML parsing), a DOM node, a KNode, or
// an array of nodes, and returns a KNode list.
K = function(expr, root) {
	if (expr === undefined || expr === null) {
		return;
	}
	function newNode(node) {
		if (!node[0]) {
			node = [];
		}
		return new KNode(node);
	}
	if (typeof expr === 'string') {
		if (root) {
			root = _get(root);
		}
		var length = expr.length;
		if (expr.charAt(0) === '@') {
			expr = expr.substr(1);
		}
		// Treat as HTML when the '@' prefix was stripped or the string
		// contains a tag.
		if (expr.length !== length || /<.+>/.test(expr)) {
			// Parse in a scratch <div>; the temp <img> works around
			// IE's leading-node innerHTML quirks.
			var doc = root ? root.ownerDocument || root : document,
				div = doc.createElement('div'), list = [];
			div.innerHTML = '<img id="__kindeditor_temp_tag__" width="0" height="0" style="display:none;" />' + expr;
			for (var i = 0, len = div.childNodes.length; i < len; i++) {
				var child = div.childNodes[i];
				if (child.id == '__kindeditor_temp_tag__') {
					continue;
				}
				list.push(child);
			}
			return newNode(list);
		}
		return newNode(_queryAll(expr, root));
	}
	if (expr && expr.constructor === KNode) {
		return expr;
	}
	if (expr.toArray) {
		expr = expr.toArray();
	}
	if (_isArray(expr)) {
		return newNode(expr);
	}
	return newNode(_toArray(arguments));
};
// Copy the utility members from the previous K namespace onto the new
// factory function and expose it globally.
_each(_K, function(key, val) {
	K[key] = val;
});
K.NodeClass = KNode;
window.KindEditor = K;
// Range boundary-comparison codes (mirroring the W3C Range constants)
// and the counter used to generate unique bookmark ids.
var _START_TO_START = 0,
	_START_TO_END = 1,
	_END_TO_END = 2,
	_END_TO_START = 3,
	_BOOKMARK_ID = 0;
// Recompute and store a range's collapsed flag: collapsed when both
// boundary points are identical. Returns the range for chaining.
function _updateCollapsed(range) {
	var sameContainer = range.startContainer === range.endContainer;
	range.collapsed = sameContainer && range.startOffset === range.endOffset;
	return range;
}
// Core of Range cloneContents/deleteContents/extractContents.
// isCopy: build and return a DocumentFragment of the selected content.
// isDelete: remove the selected content from the document and return
// the (collapsed) range. Both true = extract.
function _copyAndDelete(range, isCopy, isDelete) {
	var doc = range.doc, nodeList = [];
	// Split a text node around [startOffset, endOffset): returns the
	// cloned middle piece when copying, and queues the live middle
	// piece for removal (adjusting the range) when deleting.
	function splitTextNode(node, startOffset, endOffset) {
		var length = node.nodeValue.length, centerNode;
		if (isCopy) {
			var cloneNode = node.cloneNode(true);
			if (startOffset > 0) {
				centerNode = cloneNode.splitText(startOffset);
			} else {
				centerNode = cloneNode;
			}
			if (endOffset < length) {
				centerNode.splitText(endOffset - startOffset);
			}
		}
		if (isDelete) {
			var center = node;
			if (startOffset > 0) {
				center = node.splitText(startOffset);
				range.setStart(node, startOffset);
			}
			if (endOffset < length) {
				var right = center.splitText(endOffset - startOffset);
				range.setEnd(right, 0);
			}
			nodeList.push(center);
		}
		return centerNode;
	}
	// Collapse the range (when deleting) and drop all queued nodes.
	function removeNodes() {
		if (isDelete) {
			range.up().collapse(true);
		}
		for (var i = 0, len = nodeList.length; i < len; i++) {
			var node = nodeList[i];
			if (node.parentNode) {
				node.parentNode.removeChild(node);
			}
		}
	}
	var copyRange = range.cloneRange().down();
	var start = -1, incStart = -1, incEnd = -1, end = -1,
		ancestor = range.commonAncestor(), frag = doc.createDocumentFragment();
	// Fast path: the whole range lives inside one text node.
	if (ancestor.nodeType == 3) {
		var textNode = splitTextNode(ancestor, range.startOffset, range.endOffset);
		if (isCopy) {
			frag.appendChild(textNode);
		}
		removeNodes();
		return isCopy ? frag : range;
	}
	// Walk the ancestor's subtree, classifying each node against the
	// range boundaries (via compareBoundaryPoints) and copying/queueing
	// the covered parts. Returns false once the range end is passed.
	function extractNodes(parent, frag) {
		var node = parent.firstChild, nextNode;
		while (node) {
			var testRange = new KRange(doc).selectNode(node);
			start = testRange.compareBoundaryPoints(_START_TO_END, range);
			if (start >= 0 && incStart <= 0) {
				incStart = testRange.compareBoundaryPoints(_START_TO_START, range);
			}
			if (incStart >= 0 && incEnd <= 0) {
				incEnd = testRange.compareBoundaryPoints(_END_TO_END, range);
			}
			if (incEnd >= 0 && end <= 0) {
				end = testRange.compareBoundaryPoints(_END_TO_START, range);
			}
			if (end >= 0) {
				return false;
			}
			nextNode = node.nextSibling;
			if (start > 0) {
				if (node.nodeType == 1) {
					if (incStart >= 0 && incEnd <= 0) {
						// Element fully inside the range.
						if (isCopy) {
							frag.appendChild(node.cloneNode(true));
						}
						if (isDelete) {
							nodeList.push(node);
						}
					} else {
						// Partially covered: recurse into a shallow clone.
						var childFlag;
						if (isCopy) {
							childFlag = node.cloneNode(false);
							frag.appendChild(childFlag);
						}
						if (extractNodes(node, childFlag) === false) {
							return false;
						}
					}
				} else if (node.nodeType == 3) {
					var textNode;
					if (node == copyRange.startContainer) {
						textNode = splitTextNode(node, copyRange.startOffset, node.nodeValue.length);
					} else if (node == copyRange.endContainer) {
						textNode = splitTextNode(node, 0, copyRange.endOffset);
					} else {
						textNode = splitTextNode(node, 0, node.nodeValue.length);
					}
					if (isCopy) {
						try {
							frag.appendChild(textNode);
						} catch(e) {}
					}
				}
			}
			node = nextNode;
		}
	}
	extractNodes(ancestor, frag);
	// NOTE(review): this tail duplicates removeNodes() inline rather
	// than calling it — behavior is identical.
	if (isDelete) {
		range.up().collapse(true);
	}
	for (var i = 0, len = nodeList.length; i < len; i++) {
		var node = nodeList[i];
		if (node.parentNode) {
			node.parentNode.removeChild(node);
		}
	}
	return isCopy ? frag : range;
}
// IE-only helper: point a TextRange at `el` via moveToElementText.
// Elements inside a <marquee> or <select> are skipped entirely (IE
// throws there), and any remaining moveToElementText exception is
// swallowed.
function _moveToElementText(range, el) {
	for (var ancestor = el; ancestor; ancestor = ancestor.parentNode) {
		var name = K(ancestor).name;
		if (name == 'marquee' || name == 'select') {
			return;
		}
	}
	try {
		range.moveToElementText(el);
	} catch(e) {}
}
// IE TextRange -> (node, offset) conversion. Collapses `rng` to the
// requested end, then scans the children of that point's parent
// element, accumulating character counts until the collapsed point is
// bracketed. Control elements (images etc.) are measured through a
// temporary <span> because moveToElementText cannot target them
// directly. All \r\n normalization mirrors how IE reports TextRange
// text lengths.
function _getStartEnd(rng, isStart) {
var doc = rng.parentElement().ownerDocument,
pointRange = rng.duplicate();
pointRange.collapse(isStart);
var parent = pointRange.parentElement(),
nodes = parent.childNodes;
if (nodes.length === 0) {
// Empty element: express the position in the grandparent instead.
return {node: parent.parentNode, offset: K(parent).index()};
}
var startNode = doc, startPos = 0, cmp = -1;
var testRange = rng.duplicate();
_moveToElementText(testRange, parent);
for (var i = 0, len = nodes.length; i < len; i++) {
var node = nodes[i];
// cmp < 0: point is past this child; 0: exactly at its start.
cmp = testRange.compareEndPoints('StartToStart', pointRange);
if (cmp === 0) {
return {node: node.parentNode, offset: i};
}
if (node.nodeType == 1) {
var nodeRange = rng.duplicate(), dummy, knode = K(node), newNode = node;
if (knode.isControl()) {
// Stand-in span after the control element so it can be measured.
dummy = doc.createElement('span');
knode.after(dummy);
newNode = dummy;
startPos += knode.text().replace(/\r\n|\n|\r/g, '').length;
}
_moveToElementText(nodeRange, newNode);
testRange.setEndPoint('StartToEnd', nodeRange);
if (cmp > 0) {
startPos += nodeRange.text.replace(/\r\n|\n|\r/g, '').length;
} else {
startPos = 0;
}
if (dummy) {
K(dummy).remove();
}
} else if (node.nodeType == 3) {
testRange.moveStart('character', node.nodeValue.length);
startPos += node.nodeValue.length;
}
if (cmp < 0) {
startNode = node;
}
}
if (cmp < 0 && startNode.nodeType == 1) {
// Point lies after the last child element.
return {node: parent, offset: K(parent.lastChild).index() + 1};
}
if (cmp > 0) {
while (startNode.nextSibling && startNode.nodeType == 1) {
startNode = startNode.nextSibling;
}
}
// Re-measure from the parent up to the point and subtract the
// overshoot to get the in-node character offset.
testRange = rng.duplicate();
_moveToElementText(testRange, parent);
testRange.setEndPoint('StartToEnd', pointRange);
startPos -= testRange.text.replace(/\r\n|\n|\r/g, '').length;
if (cmp > 0 && startNode.nodeType == 3) {
// Adjacent text nodes are reported as one run by TextRange; walk
// back over the preceding siblings' lengths.
var prevNode = startNode.previousSibling;
while (prevNode && prevNode.nodeType == 3) {
startPos -= prevNode.nodeValue.length;
prevNode = prevNode.previousSibling;
}
}
return {node: startNode, offset: startPos};
}
// Inverse of _getStartEnd: build a collapsed IE TextRange positioned at
// (node, offset). Element positions collapse before/after the relevant
// child; control elements again go through a temporary <span>; text
// positions are reached with a character move from a marker span.
function _getEndRange(node, offset) {
var doc = node.ownerDocument || node,
range = doc.body.createTextRange();
if (doc == node) {
range.collapse(true);
return range;
}
if (node.nodeType == 1 && node.childNodes.length > 0) {
var children = node.childNodes, isStart, child;
if (offset === 0) {
child = children[0];
isStart = true;
} else {
child = children[offset - 1];
isStart = false;
}
if (!child) {
return range;
}
if (K(child).name === 'head') {
// Special-case offsets around <head>: 1 => start of body area,
// 2 => end of it.
if (offset === 1) {
isStart = true;
}
if (offset === 2) {
isStart = false;
}
range.collapse(isStart);
return range;
}
if (child.nodeType == 1) {
var kchild = K(child), span;
if (kchild.isControl()) {
span = doc.createElement('span');
if (isStart) {
kchild.before(span);
} else {
kchild.after(span);
}
child = span;
}
_moveToElementText(range, child);
range.collapse(isStart);
if (span) {
K(span).remove();
}
return range;
}
// Fall through into the text-node case below.
node = child;
offset = isStart ? 0 : child.nodeValue.length;
}
var dummy = doc.createElement('span');
K(node).before(dummy);
_moveToElementText(range, dummy);
range.moveStart('character', offset);
K(dummy).remove();
return range;
}
// Convert a native browser range — W3C Range, IE TextRange, or IE
// ControlRange — into a KRange.
function _toRange(rng) {
	// IE reports table selections as positions inside a <tr>; remap to
	// the first affected cell so the KRange points at real content.
	function tr2td(pos) {
		if (K(pos.node).name == 'tr') {
			pos.node = pos.node.cells[pos.offset];
			pos.offset = 0;
		}
	}
	if (_IE) {
		if (rng.item) {
			// ControlRange: wrap the single selected control element.
			var controlDoc = _getDoc(rng.item(0));
			var controlRange = new KRange(controlDoc);
			controlRange.selectNode(rng.item(0));
			return controlRange;
		}
		var textDoc = rng.parentElement().ownerDocument;
		var startPos = _getStartEnd(rng, true);
		var endPos = _getStartEnd(rng, false);
		tr2td(startPos);
		tr2td(endPos);
		return new KRange(textDoc)
			.setStart(startPos.node, startPos.offset)
			.setEnd(endPos.node, endPos.offset);
	}
	var sc = rng.startContainer;
	var doc = sc.ownerDocument || sc;
	return new KRange(doc)
		.setStart(sc, rng.startOffset)
		.setEnd(rng.endContainer, rng.endOffset);
}
// W3C-Range-like object that works on IE as well; state lives in the
// startContainer/startOffset/endContainer/endOffset quadruple.
function KRange(doc) {
this.init(doc);
}
_extend(KRange, {
// Initialize a collapsed range at the document itself. setStart/setEnd
// later replace the document placeholder with real containers.
init : function(doc) {
var self = this;
self.startContainer = doc;
self.startOffset = 0;
self.endContainer = doc;
self.endOffset = 0;
self.collapsed = true;
self.doc = doc;
},
commonAncestor : function() {
function getParents(node) {
var parents = [];
while (node) {
parents.push(node);
node = node.parentNode;
}
return parents;
}
var parentsA = getParents(this.startContainer),
parentsB = getParents(this.endContainer),
i = 0, lenA = parentsA.length, lenB = parentsB.length, parentA, parentB;
while (++i) {
parentA = parentsA[lenA - i];
parentB = parentsB[lenB - i];
if (!parentA || !parentB || parentA !== parentB) {
break;
}
}
return parentsA[lenA - i + 1];
},
// Set the start boundary. While the other boundary still points at the
// document placeholder (fresh range), it is dragged along so the range
// stays well-formed.
setStart : function(node, offset) {
var self = this, doc = self.doc;
self.startContainer = node;
self.startOffset = offset;
if (self.endContainer === doc) {
self.endContainer = node;
self.endOffset = offset;
}
return _updateCollapsed(this);
},
// Mirror of setStart for the end boundary.
setEnd : function(node, offset) {
var self = this, doc = self.doc;
self.endContainer = node;
self.endOffset = offset;
if (self.startContainer === doc) {
self.startContainer = node;
self.startOffset = offset;
}
return _updateCollapsed(this);
},
// Boundary helpers expressed via the node's index in its parent.
setStartBefore : function(node) {
return this.setStart(node.parentNode || this.doc, K(node).index());
},
setStartAfter : function(node) {
return this.setStart(node.parentNode || this.doc, K(node).index() + 1);
},
setEndBefore : function(node) {
return this.setEnd(node.parentNode || this.doc, K(node).index());
},
setEndAfter : function(node) {
return this.setEnd(node.parentNode || this.doc, K(node).index() + 1);
},
// Make the range wrap `node` itself.
selectNode : function(node) {
return this.setStartBefore(node).setEndAfter(node);
},
// Make the range wrap `node`'s contents; text and single (void) nodes
// are selected whole instead.
selectNodeContents : function(node) {
var knode = K(node);
if (knode.type == 3 || knode.isSingle()) {
return this.selectNode(node);
}
var children = knode.children();
if (children.length > 0) {
return this.setStartBefore(children[0]).setEndAfter(children[children.length - 1]);
}
return this.setStart(node, 0).setEnd(node, 0);
},
// Collapse to the start (toStart truthy) or end boundary.
collapse : function(toStart) {
if (toStart) {
return this.setEnd(this.startContainer, this.startOffset);
}
return this.setStart(this.endContainer, this.endOffset);
},
// W3C compareBoundaryPoints with an IE emulation. On IE the native
// compareEndPoints verdict is refined with a manual DOM walk because
// TextRange positions cannot distinguish adjacent boundaries.
compareBoundaryPoints : function(how, range) {
var rangeA = this.get(), rangeB = range.get();
if (_IE) {
// NOTE(review): the mapping looks inverted vs. the W3C constants —
// presumably deliberate, since IE's compareEndPoints takes its
// arguments in the opposite order; confirm against MSDN docs.
var arr = {};
arr[_START_TO_START] = 'StartToStart';
arr[_START_TO_END] = 'EndToStart';
arr[_END_TO_END] = 'EndToEnd';
arr[_END_TO_START] = 'StartToEnd';
var cmp = rangeA.compareEndPoints(arr[how], rangeB);
if (cmp !== 0) {
return cmp;
}
// TextRanges compare equal: disambiguate via the DOM positions.
var nodeA, nodeB, nodeC, posA, posB;
if (how === _START_TO_START || how === _END_TO_START) {
nodeA = this.startContainer;
posA = this.startOffset;
}
if (how === _START_TO_END || how === _END_TO_END) {
nodeA = this.endContainer;
posA = this.endOffset;
}
if (how === _START_TO_START || how === _START_TO_END) {
nodeB = range.startContainer;
posB = range.startOffset;
}
if (how === _END_TO_END || how === _END_TO_START) {
nodeB = range.endContainer;
posB = range.endOffset;
}
if (nodeA === nodeB) {
var diff = posA - posB;
return diff > 0 ? 1 : (diff < 0 ? -1 : 0);
}
// Is one container an ancestor of the other?
nodeC = nodeB;
while (nodeC && nodeC.parentNode !== nodeA) {
nodeC = nodeC.parentNode;
}
if (nodeC) {
return K(nodeC).index() >= posA ? -1 : 1;
}
nodeC = nodeA;
while (nodeC && nodeC.parentNode !== nodeB) {
nodeC = nodeC.parentNode;
}
if (nodeC) {
return K(nodeC).index() >= posB ? 1 : -1;
}
// Otherwise check immediate next-sibling containment.
// NOTE(review): if none of these cases match (e.g. boundaries in
// sibling subtrees further apart), this branch falls through and the
// method returns undefined — confirm whether that can occur in
// practice before relying on the return value.
nodeC = K(nodeB).next();
if (nodeC && nodeC.contains(nodeA)) {
return 1;
}
nodeC = K(nodeA).next();
if (nodeC && nodeC.contains(nodeB)) {
return -1;
}
} else {
return rangeA.compareBoundaryPoints(how, rangeB);
}
},
// Independent copy with the same boundaries.
cloneRange : function() {
return new KRange(this.doc).setStart(this.startContainer, this.startOffset).setEnd(this.endContainer, this.endOffset);
},
// Plain-text content, with IE's \r\n line endings stripped to match
// other browsers.
toString : function() {
var rng = this.get(), str = _IE ? rng.text : rng.toString();
return str.replace(/\r\n|\n|\r/g, '');
},
// W3C-style content operations, all built on _copyAndDelete.
cloneContents : function() {
return _copyAndDelete(this, true, false);
},
deleteContents : function() {
return _copyAndDelete(this, false, true);
},
extractContents : function() {
return _copyAndDelete(this, true, true);
},
// Insert `node` (or a document fragment) at the start boundary and
// update the range to wrap what was inserted. Offsets of an end
// boundary sharing the start container are shifted by the number of
// inserted nodes.
insertNode : function(node) {
var self = this,
sc = self.startContainer, so = self.startOffset,
ec = self.endContainer, eo = self.endOffset,
firstChild, lastChild, c, nodeCount = 1;
if (node.nodeName.toLowerCase() === '#document-fragment') {
firstChild = node.firstChild;
lastChild = node.lastChild;
nodeCount = node.childNodes.length;
}
if (sc.nodeType == 1) {
c = sc.childNodes[so];
if (c) {
sc.insertBefore(node, c);
if (sc === ec) {
eo += nodeCount;
}
} else {
sc.appendChild(node);
}
} else if (sc.nodeType == 3) {
if (so === 0) {
sc.parentNode.insertBefore(node, sc);
if (sc.parentNode === ec) {
eo += nodeCount;
}
} else if (so >= sc.nodeValue.length) {
if (sc.nextSibling) {
sc.parentNode.insertBefore(node, sc.nextSibling);
} else {
sc.parentNode.appendChild(node);
}
} else {
// Mid-text insert: split and insert between the halves.
if (so > 0) {
c = sc.splitText(so);
} else {
c = sc;
}
sc.parentNode.insertBefore(node, c);
if (sc === ec) {
ec = c;
eo -= so;
}
}
}
if (firstChild) {
self.setStartAfter(firstChild).setEndAfter(lastChild);
} else {
self.selectNode(node);
}
// Keep the wider of (inserted span, adjusted original end).
if (self.compareBoundaryPoints(_END_TO_END, self.cloneRange().setEnd(ec, eo)) >= 1) {
return self;
}
return self.setEnd(ec, eo);
},
// Pull the contents out, wrap them in `node`, and re-insert.
surroundContents : function(node) {
node.appendChild(this.extractContents());
return this.insertNode(node).selectNode(node);
},
isControl : function() {
var self = this,
sc = self.startContainer, so = self.startOffset,
ec = self.endContainer, eo = self.endOffset, rng;
return sc.nodeType == 1 && sc === ec && so + 1 === eo && K(sc.childNodes[so]).isControl();
},
// Materialize a native browser range. Non-IE: a W3C Range (setStart/
// setEnd failures are swallowed). IE: a ControlRange when requested
// and applicable, otherwise a TextRange assembled from _getEndRange
// boundary ranges on a down()-normalized copy.
get : function(hasControlRange) {
var self = this, doc = self.doc, node, rng;
if (!_IE) {
rng = doc.createRange();
try {
rng.setStart(self.startContainer, self.startOffset);
rng.setEnd(self.endContainer, self.endOffset);
} catch (e) {}
return rng;
}
if (hasControlRange && self.isControl()) {
rng = doc.body.createControlRange();
rng.addElement(self.startContainer.childNodes[self.startOffset]);
return rng;
}
var range = self.cloneRange().down();
rng = doc.body.createTextRange();
rng.setEndPoint('StartToStart', _getEndRange(range.startContainer, range.startOffset));
rng.setEndPoint('EndToStart', _getEndRange(range.endContainer, range.endOffset));
return rng;
},
// HTML serialization of the range contents.
html : function() {
return K(this.cloneContents()).outer();
},
// Push element-level boundaries down into adjacent text nodes where
// possible (element/offset -> text/char-offset). Inverse of up().
down : function() {
var self = this;
function downPos(node, pos, isStart) {
if (node.nodeType != 1) {
return;
}
var children = K(node).children();
if (children.length === 0) {
return;
}
var left, right, child, offset;
if (pos > 0) {
left = children.eq(pos - 1);
}
if (pos < children.length) {
right = children.eq(pos);
}
// Prefer the start of the following text node over the end of the
// preceding one (right assignment wins).
if (left && left.type == 3) {
child = left[0];
offset = child.nodeValue.length;
}
if (right && right.type == 3) {
child = right[0];
offset = 0;
}
if (!child) {
return;
}
if (isStart) {
self.setStart(child, offset);
} else {
self.setEnd(child, offset);
}
}
downPos(self.startContainer, self.startOffset, true);
downPos(self.endContainer, self.endOffset, false);
return self;
},
// Lift text-node boundaries that sit at the very start/end of their
// text node up to element level. Inverse of down().
up : function() {
var self = this;
function upPos(node, pos, isStart) {
if (node.nodeType != 3) {
return;
}
if (pos === 0) {
if (isStart) {
self.setStartBefore(node);
} else {
self.setEndBefore(node);
}
} else if (pos == node.nodeValue.length) {
if (isStart) {
self.setStartAfter(node);
} else {
self.setEndAfter(node);
}
}
}
upPos(self.startContainer, self.startOffset, true);
upPos(self.endContainer, self.endOffset, false);
return self;
},
// Widen the range outwards through ancestors while the boundary sits
// at the very edge of its parent. Stops at non-splittable tags and —
// unless toBlock is set — at block elements.
enlarge : function(toBlock) {
var self = this;
self.up();
function enlargePos(node, pos, isStart) {
var knode = K(node), parent;
if (knode.type == 3 || _NOSPLIT_TAG_MAP[knode.name] || !toBlock && knode.isBlock()) {
return;
}
if (pos === 0) {
// Boundary at the start: climb while there is no previous sibling.
while (!knode.prev()) {
parent = knode.parent();
if (!parent || _NOSPLIT_TAG_MAP[parent.name] || !toBlock && parent.isBlock()) {
break;
}
knode = parent;
}
if (isStart) {
self.setStartBefore(knode[0]);
} else {
self.setEndBefore(knode[0]);
}
} else if (pos == knode.children().length) {
// Boundary at the end: climb while there is no next sibling.
while (!knode.next()) {
parent = knode.parent();
if (!parent || _NOSPLIT_TAG_MAP[parent.name] || !toBlock && parent.isBlock()) {
break;
}
knode = parent;
}
if (isStart) {
self.setStartAfter(knode[0]);
} else {
self.setEndAfter(knode[0]);
}
}
}
enlargePos(self.startContainer, self.startOffset, true);
enlargePos(self.endContainer, self.endOffset, false);
return self;
},
// Narrow element-level boundaries into the deepest non-single child
// elements; collapsed ranges stay collapsed.
shrink : function() {
var self = this, child, collapsed = self.collapsed;
while (self.startContainer.nodeType == 1 && (child = self.startContainer.childNodes[self.startOffset]) && child.nodeType == 1 && !K(child).isSingle()) {
self.setStart(child, 0);
}
if (collapsed) {
return self.collapse(collapsed);
}
while (self.endContainer.nodeType == 1 && self.endOffset > 0 && (child = self.endContainer.childNodes[self.endOffset - 1]) && child.nodeType == 1 && !K(child).isSingle()) {
self.setEnd(child, child.childNodes.length);
}
return self;
},
// Drop hidden marker <span>s at the boundaries so the range can be
// restored after DOM surgery. With `serialize`, returns '#id' strings
// instead of the nodes themselves.
createBookmark : function(serialize) {
var self = this, doc = self.doc, endNode,
startNode = K('<span style="display:none;"></span>', doc)[0];
startNode.id = '__kindeditor_bookmark_start_' + (_BOOKMARK_ID++) + '__';
if (!self.collapsed) {
endNode = startNode.cloneNode(true);
endNode.id = '__kindeditor_bookmark_end_' + (_BOOKMARK_ID++) + '__';
}
// End marker first so inserting it does not shift the start position.
if (endNode) {
self.cloneRange().collapse(false).insertNode(endNode).setEndBefore(endNode);
}
self.insertNode(startNode).setStartAfter(startNode);
return {
start : serialize ? '#' + startNode.id : startNode,
end : endNode ? (serialize ? '#' + endNode.id : endNode) : null
};
},
// Restore boundaries from a createBookmark() result and remove the
// marker spans. Missing end marker => collapse to the start.
moveToBookmark : function(bookmark) {
var self = this, doc = self.doc,
start = K(bookmark.start, doc), end = bookmark.end ? K(bookmark.end, doc) : null;
if (!start || start.length < 1) {
return self;
}
self.setStartBefore(start[0]);
start.remove();
if (end && end.length > 0) {
self.setEndBefore(end[0]);
end.remove();
}  else {
self.collapse(true);
}
return self;
},
// Debug helper: log both boundaries to the console.
dump : function() {
console.log('--------------------');
console.log(this.startContainer.nodeType == 3 ? this.startContainer.nodeValue : this.startContainer, this.startOffset);
console.log(this.endContainer.nodeType == 3 ? this.endContainer.nodeValue : this.endContainer, this.endOffset);
}
});
// Normalize `mixed` into a KRange: a DOM node/document yields a fresh
// KRange over it, a KRange passes straight through, and a native
// browser range is converted via _toRange.
function _range(mixed) {
	if (mixed.nodeName) {
		return new KRange(mixed);
	}
	return mixed.constructor === KRange ? mixed : _toRange(mixed);
}
// Public exports for the range module.
K.RangeClass = KRange;
K.range = _range;
K.START_TO_START = _START_TO_START;
K.START_TO_END = _START_TO_END;
K.END_TO_END = _END_TO_END;
K.END_TO_START = _END_TO_START;
// Fire document.execCommand, ignoring browser-specific failures.
function _nativeCommand(doc, key, val) {
try {
doc.execCommand(key, false, val);
} catch(e) {}
}
// queryCommandValue wrapper: always returns a string, '' on failure or
// non-string results.
function _nativeCommandValue(doc, key) {
var val = '';
try {
val = doc.queryCommandValue(key);
} catch (e) {}
if (typeof val !== 'string') {
val = '';
}
return val;
}
// Current selection object: IE's document.selection or the W3C
// window.getSelection().
function _getSel(doc) {
var win = _getWin(doc);
return doc.selection || win.getSelection();
}
// Current native range of the selection, or null when IE reports a
// range belonging to another document (stale selection).
function _getRng(doc) {
var sel = _getSel(doc), rng;
try {
if (sel.rangeCount > 0) {
rng = sel.getRangeAt(0);
} else {
rng = sel.createRange();
}
} catch(e) {}
if (_IE && (!rng || (!rng.item && rng.parentElement().ownerDocument !== doc))) {
return null;
}
return rng;
}
// Expand a map whose keys are comma-separated lists ({'a,b': v}) into
// one entry per individual key ({a: v, b: v}).
function _singleKeyMap(map) {
	var newMap = {};
	_each(map, function(key, val) {
		var keys = key.split(',');
		for (var j = 0, count = keys.length; j < count; j++) {
			newMap[keys[j]] = val;
		}
	});
	return newMap;
}
// True when `knode` matches `map` either under the wildcard key '*' or
// under its own tag name.
function _hasAttrOrCss(knode, map) {
return _hasAttrOrCssByKey(knode, map, '*') || _hasAttrOrCssByKey(knode, map);
}
// Match `knode` against map[mapKey], a comma-separated list of rules:
// '*' (any), 'attr', 'attr=value', '.css-prop', '.css-prop=value'.
// A rule without '=value' matches when the attribute/property is
// non-empty.
function _hasAttrOrCssByKey(knode, map, mapKey) {
mapKey = mapKey || knode.name;
if (knode.type !== 1) {
return false;
}
var newMap = _singleKeyMap(map);
if (!newMap[mapKey]) {
return false;
}
var arr = newMap[mapKey].split(',');
for (var i = 0, len = arr.length; i < len; i++) {
var key = arr[i];
if (key === '*') {
return true;
}
// match[1]: leading '.' => CSS rule; match[3]: optional value.
var match = /^(\.?)([^=]+)(?:=([^=]*))?$/.exec(key);
var method = match[1] ? 'css' : 'attr';
key = match[2];
var val = match[3] || '';
if (val === '' && knode[method](key) !== '') {
return true;
}
if (val !== '' && knode[method](key) === val) {
return true;
}
}
return false;
}
// Strip from `knode` everything `map` describes, for both the wildcard
// key and the tag-specific key.
function _removeAttrOrCss(knode, map) {
if (knode.type != 1) {
return;
}
_removeAttrOrCssByKey(knode, map, '*');
_removeAttrOrCssByKey(knode, map);
}
// Remove the attributes/CSS properties listed in map[mapKey]; the rule
// '*' removes the element itself (keeping its children).
function _removeAttrOrCssByKey(knode, map, mapKey) {
mapKey = mapKey || knode.name;
if (knode.type !== 1) {
return;
}
var newMap = _singleKeyMap(map);
if (!newMap[mapKey]) {
return;
}
var arr = newMap[mapKey].split(','), allFlag = false;
for (var i = 0, len = arr.length; i < len; i++) {
var key = arr[i];
if (key === '*') {
allFlag = true;
break;
}
var match = /^(\.?)([^=]+)(?:=([^=]*))?$/.exec(key);
key = match[2];
if (match[1]) {
key = _toCamel(key);
if (knode[0].style[key]) {
knode[0].style[key] = '';
}
} else {
knode.removeAttr(key);
}
}
if (allFlag) {
// remove(true) unwraps: children are kept in place.
knode.remove(true);
}
}
// Deepest first-child descendant of `knode`.
function _getInnerNode(knode) {
var inner = knode;
while (inner.first()) {
inner = inner.first();
}
return inner;
}
// Element with no text content once tags are stripped.
function _isEmptyNode(knode) {
return knode.type == 1 && knode.html().replace(/<[^>]+>/g, '') === '';
}
// Merge wrapper chain `b` into a clone of wrapper chain `a`: levels of
// `b` whose tag already appears in `a` contribute their attributes/CSS
// to that level; unmatched levels are appended at the innermost point.
function _mergeWrapper(a, b) {
a = a.clone(true);
var lastA = _getInnerNode(a), childA = a, merged = false;
while (b) {
while (childA) {
if (childA.name === b.name) {
_mergeAttrs(childA, b.attr(), b.css());
merged = true;
}
childA = childA.first();
}
if (!merged) {
lastA.append(b.clone(false));
}
merged = false;
b = b.first();
}
return a;
}
// Wrap `knode` (text node) or its contents (element) in `wrapper`,
// merging with any existing single-child style chain, and return the
// node that replaced it.
function _wrapNode(knode, wrapper) {
wrapper = wrapper.clone(true);
if (knode.type == 3) {
_getInnerNode(wrapper).append(knode.clone(false));
knode.replaceWith(wrapper);
return wrapper;
}
// Descend through single-child levels so the merge sees the whole
// existing wrapper chain.
var nodeWrapper = knode, child;
while ((child = knode.first()) && child.children().length == 1) {
knode = child;
}
child = knode.first();
var frag = knode.doc.createDocumentFragment();
while (child) {
frag.appendChild(child[0]);
child = child.next();
}
wrapper = _mergeWrapper(nodeWrapper, wrapper);
if (frag.firstChild) {
_getInnerNode(wrapper).append(frag);
}
nodeWrapper.replaceWith(wrapper);
return wrapper;
}
// Copy attributes (except style) and CSS properties onto `knode`.
function _mergeAttrs(knode, attrs, styles) {
_each(attrs, function(key, val) {
if (key !== 'style') {
knode.attr(key, val);
}
});
_each(styles, function(key, val) {
knode.css(key, val);
});
}
// True when `knode` sits inside a <pre>-like element or a div.ke-script
// wrapper, where styling must not be applied.
function _inPreElement(knode) {
while (knode && knode.name != 'body') {
if (_PRE_TAG_MAP[knode.name] || knode.name == 'div' && knode.hasClass('ke-script')) {
return true;
}
knode = knode.parent();
}
return false;
}
// Editing-command facade over a KRange: applies/queries formatting via
// native execCommand where possible, manual DOM surgery otherwise.
function KCmd(range) {
this.init(range);
}
_extend(KCmd, {
// Cache the document, window and selection for the range's document.
init : function(range) {
var self = this, doc = range.doc;
self.doc = doc;
self.win = _getWin(doc);
self.sel = _getSel(doc);
self.range = range;
},
// Re-read the live selection into self.range. If the browser reports
// a whole-document selection (html), or no range and forceReset is
// set, fall back to a caret at the end of <body>.
selection : function(forceReset) {
var self = this, doc = self.doc, rng = _getRng(doc);
self.sel = _getSel(doc);
if (rng) {
self.range = _range(rng);
if (K(self.range.startContainer).name == 'html') {
self.range.selectNodeContents(doc.body).collapse(false);
}
return self;
}
if (forceReset) {
self.range.selectNodeContents(doc.body).collapse(false);
}
return self;
},
// Apply self.range to the browser selection and focus the window.
// hasDummy (default true) works around caret placement inside empty
// elements: IE gets a temporary &nbsp; span, WebKit a zero-width
// space next to inline content.
select : function(hasDummy) {
hasDummy = _undef(hasDummy, true);
var self = this, sel = self.sel, range = self.range.cloneRange().shrink(),
sc = range.startContainer, so = range.startOffset,
ec = range.endContainer, eo = range.endOffset,
doc = _getDoc(sc), win = self.win, rng, hasU200b = false;
if (hasDummy && sc.nodeType == 1 && range.collapsed) {
if (_IE) {
var dummy = K('<span>&nbsp;</span>', doc);
range.insertNode(dummy[0]);
rng = doc.body.createTextRange();
try {
rng.moveToElementText(dummy[0]);
} catch(ex) {}
rng.collapse(false);
rng.select();
dummy.remove();
win.focus();
return self;
}
if (_WEBKIT) {
var children = sc.childNodes;
if (K(sc).isInline() || so > 0 && K(children[so - 1]).isInline() || children[so] && K(children[so]).isInline()) {
range.insertNode(doc.createTextNode('\u200B'));
hasU200b = true;
}
}
}
if (_IE) {
try {
rng = range.get(true);
rng.select();
} catch(e) {}
} else {
if (hasU200b) {
range.collapse(false);
}
rng = range.get(true);
// NOTE(review): the standards-path selection update is commented
// out, so on non-IE this only focuses the window without actually
// applying the range — confirm whether this is intentional (e.g.
// handled by a caller) before re-enabling.
//sel.removeAllRanges();
//sel.addRange(rng);
}
win.focus();
return self;
},
// Wrap the selection with the inline element described by `val`.
// Collapsed range: insert the wrapper and move inside it. Block
// wrapper: extract contents into a copy and reinsert. Inline wrapper:
// walk the nodes between two bookmarks and wrap each non-empty text
// run (skipping <pre>-like regions).
wrap : function(val) {
var self = this, doc = self.doc, range = self.range, wrapper;
wrapper = K(val, doc);
if (range.collapsed) {
range.shrink();
range.insertNode(wrapper[0]).selectNodeContents(wrapper[0]);
return self;
}
if (wrapper.isBlock()) {
var copyWrapper = wrapper.clone(true), child = copyWrapper;
while (child.first()) {
child = child.first();
}
child.append(range.extractContents());
range.insertNode(copyWrapper[0]).selectNode(copyWrapper[0]);
return self;
}
range.enlarge();
var bookmark = range.createBookmark(), ancestor = range.commonAncestor(), isStart = false;
K(ancestor).scan(function(node) {
if (!isStart && node == bookmark.start) {
isStart = true;
return;
}
if (isStart) {
if (node == bookmark.end) {
return false;
}
var knode = K(node);
if (_inPreElement(knode)) {
return;
}
if (knode.type == 3 && _trim(node.nodeValue).length > 0) {
var parent;
// Climb single-child style wrappers so the whole chain is
// merged with the new wrapper, not nested inside it.
while ((parent = knode.parent()) && parent.isStyle() && parent.children().length == 1) {
knode = parent;
}
_wrapNode(knode, wrapper);
}
}
});
range.moveToBookmark(bookmark);
return self;
},
// Split the inline-style ancestor chain at the range's start (isStart
// true) or end boundary, so formatting can later be removed from one
// side only. With `map`, only ancestors matching the map are split;
// without it, everything up to a non-splittable tag. Uses a temporary
// <span> dummy to re-anchor the range after extraction.
split : function(isStart, map) {
var range = this.range, doc = range.doc;
var tempRange = range.cloneRange().collapse(isStart);
var node = tempRange.startContainer, pos = tempRange.startOffset,
parent = node.nodeType == 3 ? node.parentNode : node,
needSplit = false, knode;
// Walk up while every ancestor is a splittable style match; `parent`
// ends at the outermost node to split.
while (parent && parent.parentNode) {
knode = K(parent);
if (map) {
if (!knode.isStyle()) {
break;
}
if (!_hasAttrOrCss(knode, map)) {
break;
}
} else {
if (_NOSPLIT_TAG_MAP[knode.name]) {
break;
}
}
needSplit = true;
parent = parent.parentNode;
}
if (needSplit) {
// Dummy marks the opposite boundary so the range can be restored.
var dummy = doc.createElement('span');
range.cloneRange().collapse(!isStart).insertNode(dummy);
if (isStart) {
tempRange.setStartBefore(parent.firstChild).setEnd(node, pos);
} else {
tempRange.setStart(node, pos).setEndAfter(parent.lastChild);
}
var frag = tempRange.extractContents(),
first = frag.firstChild, last = frag.lastChild;
if (isStart) {
tempRange.insertNode(frag);
range.setStartAfter(last).setEndBefore(dummy);
} else {
parent.appendChild(frag);
range.setStartBefore(dummy).setEndBefore(first);
}
// If the dummy's neighbors are both text nodes, anchor the end in
// the preceding text instead of at the dummy's element offset.
var dummyParent = dummy.parentNode;
if (dummyParent == range.endContainer) {
var prev = K(dummy).prev(), next = K(dummy).next();
if (prev && next && prev.type == 3 && next.type == 3) {
range.setEnd(prev[0], prev[0].nodeValue.length);
} else if (!isStart) {
range.setEnd(range.endContainer, range.endOffset - 1);
}
}
dummyParent.removeChild(dummy);
}
return this;
},
remove : function(map) {
var self = this, doc = self.doc, range = self.range;
range.enlarge();
if (range.startOffset === 0) {
var ksc = K(range.startContainer), parent;
while ((parent = ksc.parent()) && parent.isStyle() && parent.children().length == 1) {
ksc = parent;
}
range.setStart(ksc[0], 0);
ksc = K(range.startContainer);
if (ksc.isBlock()) {
_removeAttrOrCss(ksc, map);
}
var kscp = ksc.parent();
if (kscp && kscp.isBlock()) {
_removeAttrOrCss(kscp, map);
}
}
var sc, so;
if (range.collapsed) {
self.split(true, map);
sc = range.startContainer;
so = range.startOffset;
if (so > 0) {
var sb = K(sc.childNodes[so - 1]);
if (sb && _isEmptyNode(sb)) {
sb.remove();
range.setStart(sc, so - 1);
}
}
var sa = K(sc.childNodes[so]);
if (sa && _isEmptyNode(sa)) {
sa.remove();
}
if (_isEmptyNode(sc)) {
range.startBefore(sc);
sc.remove();
}
range.collapse(true);
return self;
}
self.split(true, map);
self.split(false, map);
var startDummy = doc.createElement('span'), endDummy = doc.createElement('span');
range.cloneRange().collapse(false).insertNode(endDummy);
range.cloneRange().collapse(true).insertNode(startDummy);
var nodeList = [], cmpStart = false;
K(range.commonAncestor()).scan(function(node) {
if (!cmpStart && node == startDummy) {
cmpStart = true;
return;
}
if (node == endDummy) {
return false;
}
if (cmpStart) {
nodeList.push(node);
}
});
K(startDummy).remove();
K(endDummy).remove();
sc = range.startContainer;
so = range.startOffset;
var ec = range.endContainer, eo = range.endOffset;
if (so > 0) {
var startBefore = K(sc.childNodes[so - 1]);
if (startBefore && _isEmptyNode(startBefore)) {
startBefore.remove();
range.setStart(sc, so - 1);
if (sc == ec) {
range.setEnd(ec, eo - 1);
}
}
var startAfter = K(sc.childNodes[so]);
if (startAfter && _isEmptyNode(startAfter)) {
startAfter.remove();
if (sc == ec) {
range.setEnd(ec, eo - 1);
}
}
}
var endAfter = K(ec.childNodes[range.endOffset]);
if (endAfter && _isEmptyNode(endAfter)) {
endAfter.remove();
}
var bookmark = range.createBookmark(true);
_each(nodeList, function(i, node) {
_removeAttrOrCss(K(node), map);
});
range.moveToBookmark(bookmark);
return self;
},
// Find a node carrying the formatting in `map` at the range's end
// boundary: first search the ancestors, then the last-child chain
// below, then retry from the previous sibling. Returns a K-node or
// null.
commonNode : function(map) {
var range = this.range;
var ec = range.endContainer, eo = range.endOffset,
node = (ec.nodeType == 3 || eo === 0) ? ec : ec.childNodes[eo - 1];
function find(node) {
var child = node, parent = node;
while (parent) {
if (_hasAttrOrCss(K(parent), map)) {
return K(parent);
}
parent = parent.parentNode;
}
while (child && (child = child.lastChild)) {
if (_hasAttrOrCss(K(child), map)) {
return K(child);
}
}
return null;
}
var cNode = find(node);
if (cNode) {
return cNode;
}
if (node.nodeType == 1 || (ec.nodeType == 3 && eo === 0)) {
var prev = K(node).prev();
if (prev) {
return find(prev);
}
}
return null;
},
// The single `tagName` ancestor shared by both boundaries, or null if
// the boundaries resolve to different (or no) such ancestors.
commonAncestor : function(tagName) {
var range = this.range,
sc = range.startContainer, so = range.startOffset,
ec = range.endContainer, eo = range.endOffset,
startNode = (sc.nodeType == 3 || so === 0) ? sc : sc.childNodes[so - 1],
endNode = (ec.nodeType == 3 || eo === 0) ? ec : ec.childNodes[eo - 1];
function find(node) {
while (node) {
if (node.nodeType == 1) {
if (node.tagName.toLowerCase() === tagName) {
return node;
}
}
node = node.parentNode;
}
return null;
}
var start = find(startNode), end = find(endNode);
if (start && end && start === end) {
return K(start);
}
return null;
},
// queryCommandState wrapper; false on any browser error.
state : function(key) {
var self = this, doc = self.doc, bool = false;
try {
bool = doc.queryCommandState(key);
} catch (e) {}
return bool;
},
// Current value of a formatting command, normalized to lowercase.
// Font name/size/colors are read from the nearest styled ancestor
// when the native query yields nothing; colors come back as hex or
// 'default'.
val : function(key) {
var self = this, doc = self.doc, range = self.range;
function lc(val) {
return val.toLowerCase();
}
key = lc(key);
var val = '', knode;
if (key === 'fontfamily' || key === 'fontname') {
val = _nativeCommandValue(doc, 'fontname');
val = val.replace(/['"]/g, '');
return lc(val);
}
if (key === 'formatblock') {
val = _nativeCommandValue(doc, key);
if (val === '') {
knode = self.commonNode({'h1,h2,h3,h4,h5,h6,p,div,pre,address' : '*'});
if (knode) {
val = knode.name;
}
}
if (val === 'Normal') {
val = 'p';
}
return lc(val);
}
if (key === 'fontsize') {
knode = self.commonNode({'*' : '.font-size'});
if (knode) {
val = knode.css('font-size');
}
return lc(val);
}
if (key === 'forecolor') {
knode = self.commonNode({'*' : '.color'});
if (knode) {
val = knode.css('color');
}
val = _toHex(val);
if (val === '') {
val = 'default';
}
return lc(val);
}
if (key === 'hilitecolor') {
knode = self.commonNode({'*' : '.background-color'});
if (knode) {
val = knode.css('background-color');
}
val = _toHex(val);
if (val === '') {
val = 'default';
}
return lc(val);
}
return val;
},
toggle : function(wrapper, map) {
var self = this;
if (self.commonNode(map)) {
self.remove(map);
} else {
self.wrap(wrapper);
}
return self.select();
},
// Formatting shortcuts, all expressed as toggle(wrapper, match-map):
// the map lists which existing markup counts as "already formatted".
bold : function() {
return this.toggle('<strong></strong>', {
span : '.font-weight=bold',
strong : '*',
b : '*'
});
},
italic : function() {
return this.toggle('<em></em>', {
span : '.font-style=italic',
em : '*',
i : '*'
});
},
underline : function() {
return this.toggle('<u></u>', {
span : '.text-decoration=underline',
u : '*'
});
},
strikethrough : function() {
return this.toggle('<s></s>', {
span : '.text-decoration=line-through',
s : '*'
});
},
forecolor : function(val) {
return this.toggle('<span style="color:' + val + ';"></span>', {
span : '.color=' + val,
font : 'color'
});
},
hilitecolor : function(val) {
return this.toggle('<span style="background-color:' + val + ';"></span>', {
span : '.background-color=' + val
});
},
fontsize : function(val) {
return this.toggle('<span style="font-size:' + val + ';"></span>', {
span : '.font-size=' + val,
font : 'size'
});
},
// fontname is an alias kept for execCommand naming parity.
fontname : function(val) {
return this.fontfamily(val);
},
fontfamily : function(val) {
return this.toggle('<span style="font-family:' + val + ';"></span>', {
span : '.font-family=' + val,
font : 'face'
});
},
// Strip all inline styling plus every style tag (_STYLE_TAG_MAP) from
// the selection.
removeformat : function() {
var map = {
'*' : '.font-weight,.font-style,.text-decoration,.color,.background-color,.font-size,.font-family,.text-indent'
},
tags = _STYLE_TAG_MAP;
_each(tags, function(key, val) {
map[key] = '*';
});
this.remove(map);
return this.select();
},
// Insert raw HTML at the range. On IE with quickMode, uses the fast
// native pasteHTML path (with a temporary marker <img> to locate the
// insertion point), falling back to DOM-based insertion on failure.
inserthtml : function(val, quickMode) {
var self = this, range = self.range;
if (val === '') {
return self;
}
function pasteHtml(range, val) {
// Invisible marker lets us find and normalize the caret afterwards.
val = '<img id="__kindeditor_temp_tag__" width="0" height="0" style="display:none;" />' + val;
var rng = range.get();
if (rng.item) {
rng.item(0).outerHTML = val;
} else {
rng.pasteHTML(val);
}
var temp = range.doc.getElementById('__kindeditor_temp_tag__');
temp.parentNode.removeChild(temp);
var newRange = _toRange(rng);
range.setEnd(newRange.endContainer, newRange.endOffset);
range.collapse(false);
self.select(false);
}
function insertHtml(range, val) {
var doc = range.doc,
frag = doc.createDocumentFragment();
// '@' prefix forces K() to parse val as an HTML string.
K('@' + val, doc).each(function() {
frag.appendChild(this);
});
range.deleteContents();
range.insertNode(frag);
range.collapse(false);
self.select(false);
}
if (_IE && quickMode) {
try {
pasteHtml(range, val);
} catch(e) {
insertHtml(range, val);
}
return self;
}
insertHtml(range, val);
return self;
},
// Insert a horizontal rule at the caret.
hr : function() {
return this.inserthtml('<hr />');
},
// Open the browser print dialog for the editor window.
print : function() {
this.win.print();
return this;
},
insertimage : function(url, title, width, height, border, align) {
title = _undef(title, '');
border = _undef(border, 0);
var html = '<img src="' + _escape(url) + '" data-ke-src="' + _escape(url) + '" ';
if (width) {
html += 'width="' + _escape(width) + '" ';
}
if (height) {
html += 'height="' + _escape(height) + '" ';
}
if (title) {
html += 'title="' + _escape(title) + '" ';
}
if (align) {
html += 'align="' + _escape(align) + '" ';
}
html += 'alt="' + _escape(title) + '" ';
html += '/>';
return this.inserthtml(html);
},
// Turn the selection into a link to `url` (target = `type`). An
// existing enclosing <a> is re-selected and rewritten; a collapsed
// caret gets a literal-text link; a control selection (image) is
// wrapped; otherwise the native createlink command runs with a
// placeholder URL that is swapped for the real one afterwards
// (working around browsers rewriting relative URLs).
createlink : function(url, type) {
var self = this, doc = self.doc, range = self.range;
self.select();
var a = self.commonNode({ a : '*' });
if (a && !range.isControl()) {
range.selectNode(a.get());
self.select();
}
var html = '<a href="' + _escape(url) + '" data-ke-src="' + _escape(url) + '" ';
if (type) {
html += ' target="' + _escape(type) + '"';
}
if (range.collapsed) {
html += '>' + _escape(url) + '</a>';
return self.inserthtml(html);
}
if (range.isControl()) {
var node = K(range.startContainer.childNodes[range.startOffset]);
html += '></a>';
node.after(K(html, doc));
node.next().append(node);
range.selectNode(node[0]);
return self.select();
}
_nativeCommand(doc, 'createlink', '__kindeditor_temp_url__');
K('a[href="__kindeditor_temp_url__"]', doc).each(function() {
K(this).attr('href', url).attr('data-ke-src', url);
if (type) {
K(this).attr('target', type);
} else {
K(this).removeAttr('target');
}
});
return self;
},
// Remove the link at the selection. For a collapsed caret the
// enclosing <a> is selected first; a WebKit quirk leaving an <a>
// around a lone image is unwrapped manually.
unlink : function() {
var self = this, doc = self.doc, range = self.range;
self.select();
if (range.collapsed) {
var a = self.commonNode({ a : '*' });
if (a) {
range.selectNode(a.get());
self.select();
}
_nativeCommand(doc, 'unlink', null);
if (_WEBKIT && K(range.startContainer).name === 'img') {
var parent = K(range.startContainer).parent();
if (parent.name === 'a') {
parent.remove(true);
}
}
} else {
_nativeCommand(doc, 'unlink', null);
}
return self;
}
});
// Generate thin wrappers on KCmd.prototype for simple native commands: each
// selects the current range, runs the browser command, then refreshes the
// cached selection (skipped on IE except for the commands listed below).
_each(('formatblock,selectall,justifyleft,justifycenter,justifyright,justifyfull,insertorderedlist,' +
	'insertunorderedlist,indent,outdent,subscript,superscript').split(','), function(i, name) {
	var refreshOnIE = 'formatblock,selectall,insertorderedlist,insertunorderedlist'.split(',');
	KCmd.prototype[name] = function(val) {
		this.select();
		_nativeCommand(this.doc, name, val);
		if (!_IE || _inArray(name, refreshOnIE) >= 0) {
			this.selection();
		}
		return this;
	};
});
// Clipboard commands: throw when the browser refuses programmatic access.
_each('cut,copy,paste'.split(','), function(i, name) {
	KCmd.prototype[name] = function() {
		if (!this.doc.queryCommandSupported(name)) {
			throw 'not supported';
		}
		this.select();
		_nativeCommand(this.doc, name, null);
		return this;
	};
});
// Build a KCmd from either a range-like object or a DOM node. Given a node,
// a range is created over its document's body, collapsed to the end.
function _cmd(mixed) {
	if (!mixed.nodeName) {
		return new KCmd(mixed);
	}
	var ownerDoc = _getDoc(mixed);
	var endRange = _range(ownerDoc).selectNodeContents(ownerDoc.body).collapse(false);
	return new KCmd(endRange);
}
K.CmdClass = KCmd;
K.cmd = _cmd;
// Generic drag helper: pressing options.clickEl (default: moveEl) and moving
// the mouse calls options.moveFn with moveEl's starting geometry plus the
// mouse delta. optional options.beforeDrag runs on mousedown.
function _drag(options) {
	var moveEl = options.moveEl,
		moveFn = options.moveFn,
		clickEl = options.clickEl || moveEl,
		beforeDrag = options.beforeDrag,
		iframeFix = options.iframeFix === undefined ? true : options.iframeFix;
	// Mouse events do not cross iframe boundaries, so also listen on every
	// same-origin iframe document and remember each frame's page offset so
	// deltas can be translated back into top-document coordinates.
	var docs = [document];
	if (iframeFix) {
		K('iframe').each(function() {
			var src = _formatUrl(this.src || '', 'absolute');
			// Skip cross-origin frames (their documents are inaccessible).
			if (/^https?:\/\//.test(src)) {
				return;
			}
			var doc;
			try {
				doc = _iframeDoc(this);
			} catch(e) {}
			if (doc) {
				var pos = K(this).pos();
				K(doc).data('pos-x', pos.x);
				K(doc).data('pos-y', pos.y);
				docs.push(doc);
			}
		});
	}
	clickEl.mousedown(function(e) {
		e.stopPropagation();
		// Capture the starting geometry and mouse position for delta math.
		var self = clickEl.get(),
			x = _removeUnit(moveEl.css('left')),
			y = _removeUnit(moveEl.css('top')),
			width = moveEl.width(),
			height = moveEl.height(),
			pageX = e.pageX,
			pageY = e.pageY;
		if (beforeDrag) {
			beforeDrag();
		}
		function moveListener(e) {
			e.preventDefault();
			// Add the source document's stored page offset so moves reported
			// from inside an iframe line up with the outer document.
			var kdoc = K(_getDoc(e.target));
			var diffX = _round((kdoc.data('pos-x') || 0) + e.pageX - pageX);
			var diffY = _round((kdoc.data('pos-y') || 0) + e.pageY - pageY);
			moveFn.call(clickEl, x, y, width, height, diffX, diffY);
		}
		function selectListener(e) {
			// Suppress text selection while dragging (IE 'selectstart').
			e.preventDefault();
		}
		function upListener(e) {
			e.preventDefault();
			// Tear down all drag listeners on every tracked document.
			K(docs).unbind('mousemove', moveListener)
				.unbind('mouseup', upListener)
				.unbind('selectstart', selectListener);
			if (self.releaseCapture) {
				self.releaseCapture();
			}
		}
		K(docs).mousemove(moveListener)
			.mouseup(upListener)
			.bind('selectstart', selectListener);
		// IE mouse capture keeps events flowing even outside the element.
		if (self.setCapture) {
			self.setCapture();
		}
	});
}
// Base widget: a positioned <div> that the other UI components (edit area,
// toolbar, menu, dialog, ...) build upon. All setup happens in init().
function KWidget(options) {
	this.init(options);
}
_extend(KWidget, {
	// Create the widget's <div>, apply geometry / z-index / class options,
	// and attach it to the target document (replacing options.src when given,
	// otherwise appended to <body>).
	init : function(options) {
		var self = this;
		self.name = options.name || '';
		self.doc = options.doc || document;
		self.win = _getWin(self.doc);
		self.x = _addUnit(options.x);
		self.y = _addUnit(options.y);
		self.z = options.z;
		self.width = _addUnit(options.width);
		self.height = _addUnit(options.height);
		self.div = K('<div style="display:block;"></div>');
		self.options = options;
		self._alignEl = options.alignEl;
		if (self.width) {
			self.div.css('width', self.width);
		}
		if (self.height) {
			self.div.css('height', self.height);
		}
		if (self.z) {
			self.div.css({
				position : 'absolute',
				left : self.x,
				top : self.y,
				'z-index' : self.z
			});
		}
		// Floating widget without explicit coordinates: center it.
		if (self.z && (self.x === undefined || self.y === undefined)) {
			self.autoPos(self.width, self.height);
		}
		if (options.cls) {
			self.div.addClass(options.cls);
		}
		if (options.shadowMode) {
			self.div.addClass('ke-shadow');
		}
		if (options.css) {
			self.div.css(options.css);
		}
		if (options.src) {
			K(options.src).replaceWith(self.div);
		} else {
			K(self.doc.body).append(self.div);
		}
		if (options.html) {
			self.div.html(options.html);
		}
		if (options.autoScroll) {
			// Old IE / quirks mode lacks position:fixed; emulate it by
			// following the scroll offset manually.
			if (_IE && _V < 7 || _QUIRKS) {
				var scrollPos = _getScrollPos();
				K(self.win).bind('scroll', function(e) {
					var pos = _getScrollPos(),
						diffX = pos.x - scrollPos.x,
						diffY = pos.y - scrollPos.y;
					self.pos(_removeUnit(self.x) + diffX, _removeUnit(self.y) + diffY, false);
				});
			} else {
				self.div.css('position', 'fixed');
			}
		}
	},
	// Move the widget. Pass null for x or y to leave that axis unchanged;
	// updateProp=false moves the div without recording the new coordinates
	// (used by the autoScroll emulation above).
	pos : function(x, y, updateProp) {
		var self = this;
		updateProp = _undef(updateProp, true);
		if (x !== null) {
			x = x < 0 ? 0 : _addUnit(x);
			self.div.css('left', x);
			if (updateProp) {
				self.x = x;
			}
		}
		if (y !== null) {
			y = y < 0 ? 0 : _addUnit(y);
			self.div.css('top', y);
			if (updateProp) {
				self.y = y;
			}
		}
		return self;
	},
	// Center the widget over options.alignEl when set, otherwise over the
	// visible viewport.
	autoPos : function(width, height) {
		// BUGFIX: x and y were previously assigned without `var`, leaking
		// implicit globals (and throwing a ReferenceError in strict mode).
		var self = this,
			x, y,
			w = _removeUnit(width) || 0,
			h = _removeUnit(height) || 0,
			scrollPos = _getScrollPos();
		if (self._alignEl) {
			var knode = K(self._alignEl),
				pos = knode.pos(),
				diffX = _round(knode[0].clientWidth / 2 - w / 2),
				diffY = _round(knode[0].clientHeight / 2 - h / 2);
			x = diffX < 0 ? pos.x : pos.x + diffX;
			y = diffY < 0 ? pos.y : pos.y + diffY;
		} else {
			var docEl = _docElement(self.doc);
			x = _round(scrollPos.x + (docEl.clientWidth - w) / 2);
			y = _round(scrollPos.y + (docEl.clientHeight - h) / 2);
		}
		// position:fixed coordinates are viewport-relative, so subtract the
		// scroll offset except in the emulated (old IE / quirks) case.
		if (!(_IE && _V < 7 || _QUIRKS)) {
			x -= scrollPos.x;
			y -= scrollPos.y;
		}
		return self.pos(x, y);
	},
	// Detach the widget's DOM and null out every own property (presumably to
	// break references for old-IE garbage collection — confirm).
	remove : function() {
		var self = this;
		if (_IE && _V < 7 || _QUIRKS) {
			K(self.win).unbind('scroll');
		}
		self.div.remove();
		_each(self, function(i) {
			self[i] = null;
		});
		return this;
	},
	show : function() {
		this.div.show();
		return this;
	},
	hide : function() {
		this.div.hide();
		return this;
	},
	// Make the widget draggable via _drag, clamping to the top-left bounds.
	draggable : function(options) {
		var self = this;
		options = options || {};
		options.moveEl = self.div;
		options.moveFn = function(x, y, width, height, diffX, diffY) {
			if ((x = x + diffX) < 0) {
				x = 0;
			}
			if ((y = y + diffY) < 0) {
				y = 0;
			}
			self.pos(x, y);
		};
		_drag(options);
		return self;
	}
});
// Factory and public aliases for the base widget class.
function _widget(options) {
	var widget = new KWidget(options);
	return widget;
}
K.WidgetClass = KWidget;
K.widget = _widget;
// Resolve an iframe's document across browsers (W3C contentDocument with a
// fallback to the old IE contentWindow.document).
function _iframeDoc(iframe) {
	var el = _get(iframe);
	if (el.contentDocument) {
		return el.contentDocument;
	}
	return el.contentWindow.document;
}
// Capture the page's text direction (the <html> element's dir attribute,
// e.g. 'rtl') so the editor iframe document can mirror it in _getInitHtml.
var html, _direction = '';
if ((html = document.getElementsByTagName('html'))) {
	_direction = html[0].dir;
}
// Build the initial HTML document written into the editor iframe: base
// styles for editor internals (placeholder icons for flash/media/rm embeds,
// anchors, page breaks) plus optional user stylesheets (cssPath, a URL or an
// array of URLs) and inline CSS (cssData). Returns the full markup string.
function _getInitHtml(themesPath, bodyClass, cssPath, cssData) {
	var arr = [
		(_direction === '' ? '<html>' : '<html dir="' + _direction + '">'),
		'<head><meta charset="utf-8" /><title></title>',
		'<style>',
		'html {margin:0;padding:0;}',
		'body {margin:0;padding:5px;}',
		'body, td {font:12px/1.5 "sans serif",tahoma,verdana,helvetica;}',
		'body, p, div {word-wrap: break-word;}',
		'p {margin:0px;}',
		'table {border-collapse:collapse;}',
		'img {border:0;}',
		'noscript {display:none;}',
		'table.ke-zeroborder td {border:1px dotted #AAA;}',
		'img.ke-flash {',
		'	border:1px solid #AAA;',
		'	background-image:url(' + themesPath + 'common/flash.gif);',
		'	background-position:center center;',
		'	background-repeat:no-repeat;',
		'	width:100px;',
		'	height:100px;',
		'}',
		'img.ke-rm {',
		'	border:1px solid #AAA;',
		'	background-image:url(' + themesPath + 'common/rm.gif);',
		'	background-position:center center;',
		'	background-repeat:no-repeat;',
		'	width:100px;',
		'	height:100px;',
		'}',
		'img.ke-media {',
		'	border:1px solid #AAA;',
		'	background-image:url(' + themesPath + 'common/media.gif);',
		'	background-position:center center;',
		'	background-repeat:no-repeat;',
		'	width:100px;',
		'	height:100px;',
		'}',
		'img.ke-anchor {',
		'	border:1px dashed #666;',
		'	width:16px;',
		'	height:16px;',
		'}',
		'.ke-script, .ke-noscript {',
		'	display:none;',
		'	font-size:0;',
		'	width:0;',
		'	height:0;',
		'}',
		'.ke-pagebreak {',
		'	border:1px dotted #AAA;',
		'	font-size:0;',
		'	height:2px;',
		'}',
		'</style>'
	];
	// cssPath may be a single URL or an array of URLs.
	if (!_isArray(cssPath)) {
		cssPath = [cssPath];
	}
	_each(cssPath, function(i, path) {
		if (path) {
			arr.push('<link href="' + path + '" rel="stylesheet" />');
		}
	});
	if (cssData) {
		arr.push('<style>' + cssData + '</style>');
	}
	arr.push('</head><body ' + (bodyClass ? 'class="' + bodyClass + '"' : '') + '></body></html>');
	return arr.join('\n');
}
// Get or set an element's content: form elements go through val(), anything
// else through html(). On read from a form element, paragraphs containing
// only spaces are stripped out.
function _elementVal(knode, val) {
	if (!knode.hasVal()) {
		return knode.html(val);
	}
	if (val !== undefined) {
		return knode.val(val);
	}
	return knode.val().replace(/(<(?:p|p\s[^>]*)>) *(<\/p>)/ig, '');
}
// Editing area widget (extends KWidget below): an iframe in design mode for
// WYSIWYG editing plus a textarea for source view.
function KEdit(options) {
	this.init(options);
}
_extend(KEdit, KWidget, {
	// Build the editing area: create the iframe + textarea, write the initial
	// document into the iframe, and wire up command/selection handling.
	init : function(options) {
		var self = this;
		KEdit.parent.init.call(self, options);
		self.srcElement = K(options.srcElement);
		self.div.addClass('ke-edit');
		self.designMode = _undef(options.designMode, true);
		self.beforeGetHtml = options.beforeGetHtml;
		self.beforeSetHtml = options.beforeSetHtml;
		self.afterSetHtml = options.afterSetHtml;
		// isDocumentDomain: the page has lowered document.domain, so the
		// iframe document must adopt the same domain before it is scriptable.
		var themesPath = _undef(options.themesPath, ''),
			bodyClass = options.bodyClass,
			cssPath = options.cssPath,
			cssData = options.cssData,
			isDocumentDomain = location.host.replace(/:\d+/, '') !== document.domain,
			srcScript = ('document.open();' +
				(isDocumentDomain ? 'document.domain="' + document.domain + '";' : '') +
				'document.close();'),
			iframeSrc = _IE ? ' src="javascript:void(function(){' + encodeURIComponent(srcScript) + '}())"' : '';
		self.iframe = K('<iframe class="ke-edit-iframe" hidefocus="true" frameborder="0"' + iframeSrc + '></iframe>').css('width', '100%');
		self.textarea = K('<textarea class="ke-edit-textarea" hidefocus="true"></textarea>').css('width', '100%');
		if (self.width) {
			self.setWidth(self.width);
		}
		if (self.height) {
			self.setHeight(self.height);
		}
		if (self.designMode) {
			self.textarea.hide();
		} else {
			self.iframe.hide();
		}
		// Write the initial document into the iframe, create the command
		// object and install browser-specific fixups.
		function ready() {
			var doc = _iframeDoc(self.iframe);
			doc.open();
			if (isDocumentDomain) {
				doc.domain = document.domain;
			}
			doc.write(_getInitHtml(themesPath, bodyClass, cssPath, cssData));
			doc.close();
			self.win = self.iframe[0].contentWindow;
			self.doc = doc;
			var cmd = _cmd(doc);
			// Keep the cached selection in sync after content changes.
			self.afterChange(function(e) {
				cmd.selection();
			});
			// WebKit: clicking an image does not select it; do so manually.
			if (_WEBKIT) {
				K(doc).click(function(e) {
					if (K(e.target).name === 'img') {
						cmd.selection(true);
						cmd.range.selectNode(e.target);
						cmd.select();
					}
				});
			}
			// IE: make Backspace (keyCode 8) delete a selected control
			// (e.g. an image) instead of its default behavior.
			if (_IE) {
				K(doc).keydown(function(e) {
					if (e.which == 8) {
						cmd.selection();
						var rng = cmd.range;
						if (rng.isControl()) {
							rng.collapse(true);
							K(rng.startContainer.childNodes[rng.startOffset]).remove();
							e.preventDefault();
						}
					}
				});
			}
			self.cmd = cmd;
			// Seed the editor with the source element's current value.
			self.html(_elementVal(self.srcElement));
			if (_IE) {
				// IE: toggling disabled around contentEditable — presumably
				// works around an IE editing-focus quirk; confirm before
				// changing.
				doc.body.disabled = true;
				doc.body.contentEditable = true;
				doc.body.removeAttribute('disabled');
			} else {
				doc.designMode = 'on';
			}
			if (options.afterCreate) {
				options.afterCreate.call(self);
			}
		}
		// With a lowered document.domain the iframe document cannot be
		// written until its load event fires; otherwise initialize now.
		if (isDocumentDomain) {
			self.iframe.bind('load', function(e) {
				self.iframe.unbind('load');
				if (_IE) {
					ready();
				} else {
					setTimeout(ready, 0);
				}
			});
		}
		self.div.append(self.iframe);
		self.div.append(self.textarea);
		self.srcElement.hide();
		!isDocumentDomain && ready();
	},
	setWidth : function(val) {
		this.div.css('width', _addUnit(val));
		return this;
	},
	setHeight : function(val) {
		var self = this;
		val = _addUnit(val);
		self.div.css('height', val);
		self.iframe.css('height', val);
		// Old IE / quirks mode: shrink the textarea by 2px so it lines up
		// with the iframe (presumably border compensation — confirm).
		if ((_IE && _V < 8) || _QUIRKS) {
			val = _addUnit(_removeUnit(val) - 2);
		}
		self.textarea.css('height', val);
		return self;
	},
	// Tear down the editor, pushing the current HTML back into the original
	// (re-shown) source element.
	remove : function() {
		var self = this, doc = self.doc;
		K(doc.body).unbind();
		K(doc).unbind();
		K(self.win).unbind();
		_elementVal(self.srcElement, self.html());
		self.srcElement.show();
		doc.write('');
		self.iframe.unbind();
		self.textarea.unbind();
		KEdit.parent.remove.call(self);
	},
	// Get or set the editor content. In design mode this reads/writes the
	// iframe body, applying the beforeGetHtml / beforeSetHtml / afterSetHtml
	// filters; otherwise it proxies to the textarea. isFull=true returns the
	// whole <html> document on read.
	html : function(val, isFull) {
		var self = this, doc = self.doc;
		if (self.designMode) {
			var body = doc.body;
			if (val === undefined) {
				if (isFull) {
					val = '<!doctype html><html>' + body.parentNode.innerHTML + '</html>';
				} else {
					val = body.innerHTML;
				}
				if (self.beforeGetHtml) {
					val = self.beforeGetHtml(val);
				}
				// Gecko keeps a lone <br /> in an empty body; report empty.
				if (_GECKO && val == '<br />') {
					val = '';
				}
				return val;
			}
			if (self.beforeSetHtml) {
				val = self.beforeSetHtml(val);
			}
			K(body).html(val);
			if (self.afterSetHtml) {
				self.afterSetHtml();
			}
			return self;
		}
		if (val === undefined) {
			return self.textarea.val();
		}
		self.textarea.val(val);
		return self;
	},
	// Switch between design (WYSIWYG) and source mode, carrying the content
	// across; bool===undefined toggles the current mode.
	design : function(bool) {
		var self = this, val;
		if (bool === undefined ? !self.designMode : bool) {
			if (!self.designMode) {
				val = self.html();
				self.designMode = true;
				self.html(val);
				self.textarea.hide();
				self.iframe.show();
			}
		} else {
			if (self.designMode) {
				val = self.html();
				self.designMode = false;
				self.html(val);
				self.iframe.hide();
				self.textarea.show();
			}
		}
		return self.focus();
	},
	// Focus whichever editing surface is active.
	focus : function() {
		var self = this;
		self.designMode ? self.win.focus() : self.textarea[0].focus();
		return self;
	},
	blur : function() {
		var self = this;
		if (_IE) {
			// IE: blur by focusing a throwaway zero-size input, then
			// removing it.
			var input = K('<input type="text" style="float:left;width:0;height:0;padding:0;margin:0;border:0;" value="" />', self.div);
			self.div.append(input);
			input[0].focus();
			input.remove();
		} else {
			self.designMode ? self.win.blur() : self.textarea[0].blur();
		}
		return self;
	},
	// Invoke fn after anything that may change content or selection: listed
	// keys, mouse-up, context menu, window blur, and paste/cut (deferred via
	// setTimeout so the clipboard content has landed in the DOM first).
	afterChange : function(fn) {
		var self = this, doc = self.doc, body = doc.body;
		K(doc).keyup(function(e) {
			if (!e.ctrlKey && !e.altKey && _CHANGE_KEY_MAP[e.which]) {
				fn(e);
			}
		});
		K(doc).mouseup(fn).contextmenu(fn);
		K(self.win).blur(fn);
		function timeoutHandler(e) {
			setTimeout(function() {
				fn(e);
			}, 1);
		}
		K(body).bind('paste', timeoutHandler);
		K(body).bind('cut', timeoutHandler);
		return self;
	}
});
// Factory and public aliases for the editing-area widget.
function _edit(options) {
	var instance = new KEdit(options);
	return instance;
}
K.EditClass = KEdit;
K.edit = _edit;
K.iframeDoc = _iframeDoc;
// Shared helper for KToolbar.select/unselect (invoked with the toolbar as
// `this`): resolve the item node by name and apply fn to it, unless the item
// is missing or disabled.
function _selectToolbar(name, fn) {
	var itemNode = this.get(name);
	if (!itemNode || itemNode.hasClass('ke-disabled')) {
		return;
	}
	fn(itemNode);
}
// Toolbar widget (extends KWidget below): renders button items and routes
// clicks to options.click.
function KToolbar(options) {
	this.init(options);
}
_extend(KToolbar, KWidget, {
	// Toolbar widget: handles hover highlighting, per-item select/disable
	// state, and dispatches clicks to options.click(e, itemName).
	init : function(options) {
		var self = this;
		KToolbar.parent.init.call(self, options);
		self.disableMode = _undef(options.disableMode, false);
		self.noDisableItemMap = _toMap(_undef(options.noDisableItems, []));
		self._itemMap = {};
		// Kill native context menu / drag-select behavior over the toolbar.
		self.div.addClass('ke-toolbar').bind('contextmenu,mousedown,mousemove', function(e) {
			e.preventDefault();
		}).attr('unselectable', 'on');
		// Resolve an event target to its toolbar item node (the .ke-outline
		// wrapper), whether the wrapper itself or its icon was hit.
		function find(target) {
			var knode = K(target);
			if (knode.hasClass('ke-outline')) {
				return knode;
			}
			if (knode.hasClass('ke-toolbar-icon')) {
				return knode.parent();
			}
		}
		// Toggle the hover class, skipping disabled and selected items.
		function hover(e, method) {
			var knode = find(e.target);
			if (knode) {
				if (knode.hasClass('ke-disabled')) {
					return;
				}
				if (knode.hasClass('ke-selected')) {
					return;
				}
				knode[method]('ke-on');
			}
		}
		self.div.mouseover(function(e) {
			hover(e, 'addClass');
		})
		.mouseout(function(e) {
			hover(e, 'removeClass');
		})
		.click(function(e) {
			var knode = find(e.target);
			if (knode) {
				if (knode.hasClass('ke-disabled')) {
					return;
				}
				self.options.click.call(this, e, knode.attr('data-name'));
			}
		});
	},
	// Look up (and cache) the item node for a toolbar button name.
	get : function(name) {
		if (this._itemMap[name]) {
			return this._itemMap[name];
		}
		return (this._itemMap[name] = K('span.ke-icon-' + name, this.div).parent());
	},
	// Mark the named item as selected (no-op for missing/disabled items).
	select : function(name) {
		_selectToolbar.call(this, name, function(knode) {
			knode.addClass('ke-selected');
		});
		// BUGFIX: was `return self` — no `self` exists in this scope, so the
		// global object (window.self) was returned instead of the toolbar.
		return this;
	},
	// Clear the selected/hover state of the named item.
	unselect : function(name) {
		_selectToolbar.call(this, name, function(knode) {
			knode.removeClass('ke-selected').removeClass('ke-on');
		});
		// BUGFIX: was `return self` (see select above).
		return this;
	},
	// `name` may be an item name or an already-resolved item node.
	enable : function(name) {
		var self = this,
			knode = name.get ? name : self.get(name);
		if (knode) {
			knode.removeClass('ke-disabled');
			knode.opacity(1);
		}
		return self;
	},
	disable : function(name) {
		var self = this,
			knode = name.get ? name : self.get(name);
		if (knode) {
			knode.removeClass('ke-selected').addClass('ke-disabled');
			knode.opacity(0.5);
		}
		return self;
	},
	// Toggle the disabled state of every item except those in the no-disable
	// map; bool===undefined flips the current mode.
	disableAll : function(bool, noDisableItems) {
		var self = this, map = self.noDisableItemMap;
		if (noDisableItems) {
			map = _toMap(noDisableItems);
		}
		if (bool === undefined ? !self.disableMode : bool) {
			K('span.ke-outline', self.div).each(function() {
				var knode = K(this),
					name = knode[0].getAttribute('data-name', 2);
				if (!map[name]) {
					self.disable(knode);
				}
			});
			self.disableMode = true;
		} else {
			K('span.ke-outline', self.div).each(function() {
				var knode = K(this),
					name = knode[0].getAttribute('data-name', 2);
				if (!map[name]) {
					self.enable(knode);
				}
			});
			self.disableMode = false;
		}
		return self;
	}
});
// Factory and public aliases for the toolbar widget.
function _toolbar(options) {
	var instance = new KToolbar(options);
	return instance;
}
K.ToolbarClass = KToolbar;
K.toolbar = _toolbar;
// Popup menu widget (extends KWidget below): context menus and drop-downs.
function KMenu(options) {
	this.init(options);
}
_extend(KMenu, KWidget, {
	// Popup menu widget. centerLineMode inserts a vertical divider column
	// between the icon area and the label area of each item.
	init : function(options) {
		var self = this;
		options.z = options.z || 811213;
		KMenu.parent.init.call(self, options);
		self.centerLineMode = _undef(options.centerLineMode, true);
		// Stop clicks inside the menu from bubbling to document-level
		// handlers that would close it.
		self.div.addClass('ke-menu').bind('click,mousedown', function(e){
			e.stopPropagation();
		}).attr('unselectable', 'on');
	},
	// Append one menu entry. A title of '-' renders a separator line;
	// otherwise the item gets icon + label, hover highlighting, and a click
	// handler invoked with the item div as `this`.
	addItem : function(item) {
		var self = this;
		if (item.title === '-') {
			self.div.append(K('<div class="ke-menu-separator"></div>'));
			// CONSISTENCY FIX: return self here too (was a bare `return`,
			// which broke chained addItem calls across separators).
			return self;
		}
		var itemDiv = K('<div class="ke-menu-item" unselectable="on"></div>'),
			leftDiv = K('<div class="ke-inline-block ke-menu-item-left"></div>'),
			rightDiv = K('<div class="ke-inline-block ke-menu-item-right"></div>'),
			height = _addUnit(item.height),
			iconClass = _undef(item.iconClass, '');
		self.div.append(itemDiv);
		if (height) {
			itemDiv.css('height', height);
			rightDiv.css('line-height', height);
		}
		var centerDiv;
		if (self.centerLineMode) {
			centerDiv = K('<div class="ke-inline-block ke-menu-item-center"></div>');
			if (height) {
				centerDiv.css('height', height);
			}
		}
		itemDiv.mouseover(function(e) {
			K(this).addClass('ke-menu-item-on');
			if (centerDiv) {
				centerDiv.addClass('ke-menu-item-center-on');
			}
		})
		.mouseout(function(e) {
			K(this).removeClass('ke-menu-item-on');
			if (centerDiv) {
				centerDiv.removeClass('ke-menu-item-center-on');
			}
		})
		.click(function(e) {
			item.click.call(K(this));
			e.stopPropagation();
		})
		.append(leftDiv);
		if (centerDiv) {
			itemDiv.append(centerDiv);
		}
		itemDiv.append(rightDiv);
		// item.checked overrides any icon with the checkmark icon.
		if (item.checked) {
			iconClass = 'ke-icon-checked';
		}
		if (iconClass !== '') {
			leftDiv.html('<span class="ke-inline-block ke-toolbar-icon ke-toolbar-icon-url ' + iconClass + '"></span>');
		}
		rightDiv.html(item.title);
		return self;
	},
	remove : function() {
		var self = this;
		if (self.options.beforeRemove) {
			self.options.beforeRemove.call(self);
		}
		K('.ke-menu-item', self.div[0]).unbind();
		KMenu.parent.remove.call(self);
		return self;
	}
});
// Factory and public aliases for the menu widget.
function _menu(options) {
	var instance = new KMenu(options);
	return instance;
}
K.MenuClass = KMenu;
K.menu = _menu;
// Color picker widget (extends KWidget below): a table of color swatches.
function KColorPicker(options) {
	this.init(options);
}
_extend(KColorPicker, KWidget, {
	// Color picker widget: renders a table of swatches plus a full-width
	// "no color" cell on top; clicking a cell calls options.click(color).
	init : function(options) {
		var self = this;
		options.z = options.z || 811213;
		KColorPicker.parent.init.call(self, options);
		var colors = options.colors || [
			['#E53333', '#E56600', '#FF9900', '#64451D', '#DFC5A4', '#FFE500'],
			['#009900', '#006600', '#99BB00', '#B8D100', '#60D978', '#00D5FF'],
			['#337FE5', '#003399', '#4C33E5', '#9933E5', '#CC33E5', '#EE33EE'],
			['#FFFFFF', '#CCCCCC', '#999999', '#666666', '#333333', '#000000']
		];
		self.selectedColor = (options.selectedColor || '').toLowerCase();
		self._cells = [];
		// Keep clicks inside the picker from bubbling to close handlers.
		self.div.addClass('ke-colorpicker').bind('click,mousedown', function(e){
			e.stopPropagation();
		}).attr('unselectable', 'on');
		var table = self.doc.createElement('table');
		self.div.append(table);
		table.className = 'ke-colorpicker-table';
		table.cellPadding = 0;
		table.cellSpacing = 0;
		table.border = 0;
		// First row: one cell spanning all columns — the "no color" option.
		var row = table.insertRow(0), cell = row.insertCell(0);
		cell.colSpan = colors[0].length;
		self._addAttr(cell, '', 'ke-colorpicker-cell-top');
		for (var i = 0; i < colors.length; i++) {
			row = table.insertRow(i + 1);
			for (var j = 0; j < colors[i].length; j++) {
				cell = row.insertCell(j);
				self._addAttr(cell, colors[i][j], 'ke-colorpicker-cell');
			}
		}
	},
	// Decorate one swatch cell: selected highlight, hover classes, click
	// handler. An empty `color` marks the "no color" cell, which shows the
	// options.noColor label instead of a color block.
	_addAttr : function(cell, color, cls) {
		var self = this;
		cell = K(cell).addClass(cls);
		if (self.selectedColor === color.toLowerCase()) {
			cell.addClass('ke-colorpicker-cell-selected');
		}
		cell.attr('title', color || self.options.noColor);
		cell.mouseover(function(e) {
			K(this).addClass('ke-colorpicker-cell-on');
		});
		cell.mouseout(function(e) {
			K(this).removeClass('ke-colorpicker-cell-on');
		});
		cell.click(function(e) {
			e.stop();
			self.options.click.call(K(this), color);
		});
		if (color) {
			cell.append(K('<div class="ke-colorpicker-cell-color" unselectable="on"></div>').css('background-color', color));
		} else {
			cell.html(self.options.noColor);
		}
		K(cell).attr('unselectable', 'on');
		// Track the cell so remove() can unbind its handlers.
		self._cells.push(cell);
	},
	remove : function() {
		var self = this;
		_each(self._cells, function() {
			this.unbind();
		});
		KColorPicker.parent.remove.call(self);
		return self;
	}
});
// Factory and public aliases for the color picker widget.
function _colorpicker(options) {
	var instance = new KColorPicker(options);
	return instance;
}
K.ColorPickerClass = KColorPicker;
K.colorpicker = _colorpicker;
// Hidden-iframe file upload button (implementation in the _extend below).
function KUploadButton(options) {
	this.init(options);
}
_extend(KUploadButton, {
	// Hidden-iframe uploader: overlays an <input type="file"> on a styled
	// button and posts the enclosing form into a named iframe, so the upload
	// happens without a page navigation.
	init : function(options) {
		var self = this,
			button = K(options.button),
			fieldName = options.fieldName || 'file',
			url = options.url || '',
			title = button.val(),
			extraParams = options.extraParams || {},
			cls = button[0].className || '',
			target = options.target || 'kindeditor_upload_iframe_' + new Date().getTime();
		options.afterError = options.afterError || function(str) {
			alert(str);
		};
		// NOTE(review): extraParams keys/values are concatenated into HTML
		// without escaping — confirm callers only pass trusted values.
		var hiddenElements = [];
		for(var k in extraParams){
			hiddenElements.push('<input type="hidden" name="' + k + '" value="' + extraParams[k] + '" />');
		}
		// When options.target/options.form are supplied, reuse the caller's
		// iframe/form instead of generating our own.
		var html = [
			'<div class="ke-inline-block ' + cls + '">',
			(options.target ? '' : '<iframe name="' + target + '" style="display:none;"></iframe>'),
			(options.form ? '<div class="ke-upload-area">' : '<form class="ke-upload-area ke-form" method="post" enctype="multipart/form-data" target="' + target + '" action="' + url + '">'),
			'<span class="ke-button-common">',
			hiddenElements.join(''),
			'<input type="button" class="ke-button-common ke-button" value="' + title + '" />',
			'</span>',
			'<input type="file" class="ke-upload-file" name="' + fieldName + '" tabindex="-1" />',
			(options.form ? '</div>' : '</form>'),
			'</div>'].join('');
		var div = K(html, button.doc);
		button.hide();
		button.before(div);
		self.div = div;
		self.button = button;
		self.iframe = options.target ? K('iframe[name="' + target + '"]') : K('iframe', div);
		self.form = options.form ? K(options.form) : K('form', div);
		// Stretch the file input to cover the visible button so clicks on
		// the button actually hit the file input.
		var width = options.width || K('.ke-button-common', div).width();
		self.fileBox = K('.ke-upload-file', div).width(width);
		self.options = options;
	},
	// Submit the form into the hidden iframe; on load, parse the iframe body
	// as JSON and call afterUpload(data), or afterError(rawHtml) on failure.
	submit : function() {
		var self = this,
			iframe = self.iframe;
		iframe.bind('load', function() {
			iframe.unbind();
			// Reset the file input by round-tripping it through a temporary
			// form — form.reset() is the cross-browser way to clear it.
			var tempForm = document.createElement('form');
			self.fileBox.before(tempForm);
			K(tempForm).append(self.fileBox);
			tempForm.reset();
			K(tempForm).remove(true);
			var doc = K.iframeDoc(iframe),
				pre = doc.getElementsByTagName('pre')[0],
				str = '', data;
			// Some responses arrive wrapped in a <pre> element.
			if (pre) {
				str = pre.innerHTML;
			} else {
				str = doc.body.innerHTML;
			}
			// Stop the iframe from loading anything further.
			iframe[0].src = 'javascript:false';
			try {
				data = K.json(str);
			} catch (e) {
				// Not JSON: hand the full response document to the error
				// handler for diagnosis.
				self.options.afterError.call(self, '<!doctype html><html>' + doc.body.parentNode.innerHTML + '</html>');
			}
			if (data) {
				self.options.afterUpload.call(self, data);
			}
		});
		self.form[0].submit();
		return self;
	},
	// Tear down the generated markup and restore the original button.
	remove : function() {
		var self = this;
		if (self.fileBox) {
			self.fileBox.unbind();
		}
		self.iframe.remove();
		self.div.remove();
		self.button.show();
		return self;
	}
});
// Factory and public aliases for the upload button.
function _uploadbutton(options) {
	var instance = new KUploadButton(options);
	return instance;
}
K.UploadButtonClass = KUploadButton;
K.uploadbutton = _uploadbutton;
// Build a dialog footer button: an outer <span> wrapping an
// <input type="button">, with arg.name as label and arg.click as handler.
function _createButton(arg) {
	arg = arg || {};
	var label = arg.name || '';
	var outer = K('<span class="ke-button-common ke-button-outer" title="' + label + '"></span>');
	var input = K('<input class="ke-button-common ke-button" type="button" value="' + label + '" />');
	if (arg.click) {
		input.click(arg.click);
	}
	outer.append(input);
	return outer;
}
// Modal dialog widget (extends KWidget below).
function KDialog(options) {
	this.init(options);
}
_extend(KDialog, KWidget, {
	// Modal dialog: draggable header with close icon, body area, optional
	// preview/yes/no footer buttons, and an optional full-page mask.
	init : function(options) {
		var self = this;
		var shadowMode = _undef(options.shadowMode, true);
		options.z = options.z || 811213;
		// The dialog renders its own shadow element below, so suppress the
		// KWidget-level 'ke-shadow' class.
		options.shadowMode = false;
		options.autoScroll = _undef(options.autoScroll, true);
		KDialog.parent.init.call(self, options);
		var title = options.title,
			body = K(options.body, self.doc),
			previewBtn = options.previewBtn,
			yesBtn = options.yesBtn,
			noBtn = options.noBtn,
			closeBtn = options.closeBtn,
			showMask = _undef(options.showMask, true);
		self.div.addClass('ke-dialog').bind('click,mousedown', function(e){
			e.stopPropagation();
		});
		var contentDiv = K('<div class="ke-dialog-content"></div>').appendTo(self.div);
		// IE6: use an iframe shim as the shadow so the dialog covers
		// windowed elements like <select>.
		if (_IE && _V < 7) {
			self.iframeMask = K('<iframe src="about:blank" class="ke-dialog-shadow"></iframe>').appendTo(self.div);
		} else if (shadowMode) {
			K('<div class="ke-dialog-shadow"></div>').appendTo(self.div);
		}
		var headerDiv = K('<div class="ke-dialog-header"></div>');
		contentDiv.append(headerDiv);
		headerDiv.html(title);
		self.closeIcon = K('<span class="ke-dialog-icon-close" title="' + closeBtn.name + '"></span>').click(closeBtn.click);
		headerDiv.append(self.closeIcon);
		// Dragging the header moves the whole dialog.
		self.draggable({
			clickEl : headerDiv,
			beforeDrag : options.beforeDrag
		});
		var bodyDiv = K('<div class="ke-dialog-body"></div>');
		contentDiv.append(bodyDiv);
		bodyDiv.append(body);
		var footerDiv = K('<div class="ke-dialog-footer"></div>');
		if (previewBtn || yesBtn || noBtn) {
			contentDiv.append(footerDiv);
		}
		_each([
			{ btn : previewBtn, name : 'preview' },
			{ btn : yesBtn, name : 'yes' },
			{ btn : noBtn, name : 'no' }
		], function() {
			if (this.btn) {
				var button = _createButton(this.btn);
				button.addClass('ke-dialog-' + this.name);
				footerDiv.append(button);
			}
		});
		// Explicit dialog height: the body gets whatever remains after the
		// header and footer.
		if (self.height) {
			bodyDiv.height(_removeUnit(self.height) - headerDiv.height() - footerDiv.height());
		}
		// Freeze the computed size so later content changes don't reflow it.
		self.div.width(self.div.width());
		self.div.height(self.div.height());
		self.mask = null;
		if (showMask) {
			// Mask covers the whole document, one z-index below the dialog.
			var docEl = _docElement(self.doc),
				docWidth = Math.max(docEl.scrollWidth, docEl.clientWidth),
				docHeight = Math.max(docEl.scrollHeight, docEl.clientHeight);
			self.mask = _widget({
				x : 0,
				y : 0,
				z : self.z - 1,
				cls : 'ke-dialog-mask',
				width : docWidth,
				height : docHeight
			});
		}
		self.autoPos(self.div.width(), self.div.height());
		self.footerDiv = footerDiv;
		self.bodyDiv = bodyDiv;
		self.headerDiv = headerDiv;
		self.isLoading = false;
	},
	setMaskIndex : function(z) {
		var self = this;
		self.mask.div.css('z-index', z);
	},
	// Cover the dialog body with a loading overlay showing `msg`.
	showLoading : function(msg) {
		msg = _undef(msg, '');
		var self = this, body = self.bodyDiv;
		self.loading = K('<div class="ke-dialog-loading"><div class="ke-inline-block ke-dialog-loading-content" style="margin-top:' + Math.round(body.height() / 3) + 'px;">' + msg + '</div></div>')
			.width(body.width()).height(body.height())
			.css('top', self.headerDiv.height() + 'px');
		body.css('visibility', 'hidden').after(self.loading);
		self.isLoading = true;
		return self;
	},
	hideLoading : function() {
		this.loading && this.loading.remove();
		this.bodyDiv.css('visibility', 'visible');
		this.isLoading = false;
		return this;
	},
	// Unbind everything the dialog created, then defer to KWidget.remove.
	remove : function() {
		var self = this;
		if (self.options.beforeRemove) {
			self.options.beforeRemove.call(self);
		}
		self.mask && self.mask.remove();
		self.iframeMask && self.iframeMask.remove();
		self.closeIcon.unbind();
		K('input', self.div).unbind();
		K('button', self.div).unbind();
		self.footerDiv.unbind();
		self.bodyDiv.unbind();
		self.headerDiv.unbind();
		K('iframe', self.div).each(function() {
			K(this).remove();
		});
		KDialog.parent.remove.call(self);
		return self;
	}
});
// Factory and public aliases for the dialog widget.
function _dialog(options) {
	var instance = new KDialog(options);
	return instance;
}
K.DialogClass = KDialog;
K.dialog = _dialog;
// Tab-strip widget built on top of _widget. Each added tab owns a panel
// element; select(i) shows that panel, hides the rest, and rebinds
// hover/click handlers on the inactive tabs.
function _tabs(options) {
	var self = _widget(options),
		remove = self.remove,
		afterSelect = options.afterSelect,
		div = self.div,
		liList = [];
	div.addClass('ke-tabs')
		.bind('contextmenu,mousedown,mousemove', function(e) {
			e.preventDefault();
		});
	var ul = K('<ul class="ke-tabs-ul ke-clearfix"></ul>');
	div.append(ul);
	// Register a tab: { title, panel }.
	self.add = function(tab) {
		var li = K('<li class="ke-tabs-li">' + tab.title + '</li>');
		li.data('tab', tab);
		liList.push(li);
		ul.append(li);
	};
	self.selectedIndex = 0;
	self.select = function(index) {
		self.selectedIndex = index;
		_each(liList, function(i, li) {
			// Drop stale handlers before re-binding for the new state.
			li.unbind();
			if (i === index) {
				li.addClass('ke-tabs-li-selected');
				K(li.data('tab').panel).show('');
			} else {
				li.removeClass('ke-tabs-li-selected').removeClass('ke-tabs-li-on')
					.mouseover(function() {
						K(this).addClass('ke-tabs-li-on');
					})
					.mouseout(function() {
						K(this).removeClass('ke-tabs-li-on');
					})
					.click(function() {
						self.select(i);
					});
				K(li.data('tab').panel).hide();
			}
		});
		if (afterSelect) {
			afterSelect.call(self, index);
		}
	};
	// Override the widget's remove to also tear down the tab list.
	self.remove = function() {
		_each(liList, function() {
			this.remove();
		});
		ul.remove();
		remove.call(self);
	};
	return self;
}
K.tabs = _tabs;
// Dynamically load a script; call fn once it has executed, then detach the
// temporary <script> element.
function _loadScript(url, fn) {
	var head = document.getElementsByTagName('head')[0] || (_QUIRKS ? document.body : document.documentElement),
		script = document.createElement('script');
	head.appendChild(script);
	script.src = url;
	script.charset = 'utf-8';
	script.onload = script.onreadystatechange = function() {
		// Old IE signals completion via onreadystatechange with 'loaded' OR
		// 'complete' (the latter for cached scripts); other browsers have no
		// readyState and fire onload.
		// BUGFIX: 'complete' was previously not accepted, so cached-script
		// loads in IE never invoked fn and leaked the <script> element.
		if (!this.readyState || this.readyState === 'loaded' || this.readyState === 'complete') {
			if (fn) {
				fn();
			}
			// Clear handlers (avoids old-IE leaks) and remove the element.
			script.onload = script.onreadystatechange = null;
			head.removeChild(script);
		}
	};
}
// Strip the query string from a URL. A '?' at index 0 is deliberately kept
// (the `> 0` check), leaving such URLs untouched.
function _chopQuery(url) {
	var qpos = url.indexOf('?');
	if (qpos > 0) {
		return url.substr(0, qpos);
	}
	return url;
}
// Append a stylesheet <link> exactly once: if a stylesheet with the same URL
// (query string ignored, compared as absolute URLs) already exists in the
// head, do nothing.
function _loadStyle(url) {
	var head = document.getElementsByTagName('head')[0] || (_QUIRKS ? document.body : document.documentElement),
		link = document.createElement('link'),
		absoluteUrl = _chopQuery(_formatUrl(url, 'absolute'));
	var links = K('link[rel="stylesheet"]', head);
	for (var i = 0, len = links.length; i < len; i++) {
		if (_chopQuery(_formatUrl(links[i].href, 'absolute')) === absoluteUrl) {
			return;
		}
	}
	head.appendChild(link);
	link.href = url;
	link.rel = 'stylesheet';
}
// Minimal XMLHttpRequest wrapper. method defaults to 'GET', dataType to
// 'json' (response text is trimmed and parsed via _json before reaching fn).
// NOTE(review): only readyState 4 with status 200 triggers the callback;
// other statuses and network errors are silently ignored.
function _ajax(url, fn, method, param, dataType) {
	method = method || 'GET';
	dataType = dataType || 'json';
	var xhr = window.XMLHttpRequest ? new window.XMLHttpRequest() : new ActiveXObject('Microsoft.XMLHTTP');
	xhr.open(method, url, true);
	xhr.onreadystatechange = function () {
		if (xhr.readyState == 4 && xhr.status == 200) {
			if (fn) {
				var data = _trim(xhr.responseText);
				if (dataType == 'json') {
					data = _json(data);
				}
				fn(data);
			}
		}
	};
	if (method == 'POST') {
		// URL-encode `param` into a form-encoded request body.
		var params = [];
		_each(param, function(key, val) {
			params.push(encodeURIComponent(key) + '=' + encodeURIComponent(val));
		});
		// Some old browsers throw on setRequestHeader; ignore and send anyway.
		try {
			xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
		} catch (e) {}
		xhr.send(params.join('&'));
	} else {
		xhr.send(null);
	}
}
// Public aliases on the K namespace.
K.loadScript = _loadScript;
K.loadStyle = _loadStyle;
K.ajax = _ajax;
var _plugins = {};
// Plugin registry accessor:
//   _plugin()         -> the whole registry map
//   _plugin(name)     -> look up one plugin
//   _plugin(name, fn) -> register fn under name
function _plugin(name, fn) {
	if (name === undefined) {
		return _plugins;
	}
	if (fn) {
		_plugins[name] = fn;
		return;
	}
	return _plugins[name];
}
var _language = {};
// Split a language key of the form 'ns.key' into its parts; bare keys fall
// into the 'core' namespace.
function _parseLangKey(key) {
	var parts = /^(\w+)\.(\w+)$/.exec(key);
	if (parts) {
		return { ns : parts[1], key : parts[2] };
	}
	return { ns : 'core', key : key };
}
// Get or set localized strings for langType (default: K.options.langType).
// - _lang('key') / _lang('ns.key'): look up one string (ns defaults 'core').
// - _lang('prefix.'): a trailing dot returns the whole 'prefix' namespace.
// - _lang({...}): bulk-register key/value pairs.
function _lang(mixed, langType) {
	langType = langType === undefined ? K.options.langType : langType;
	if (typeof mixed === 'string') {
		if (!_language[langType]) {
			return 'no language';
		}
		var pos = mixed.length - 1;
		if (mixed.substr(pos) === '.') {
			return _language[langType][mixed.substr(0, pos)];
		}
		var obj = _parseLangKey(mixed);
		return _language[langType][obj.ns][obj.key];
	}
	// Object form: register each entry, creating language/namespace maps on
	// first use.
	_each(mixed, function(key, val) {
		var obj = _parseLangKey(key);
		if (!_language[langType]) {
			_language[langType] = {};
		}
		if (!_language[langType][obj.ns]) {
			_language[langType][obj.ns] = {};
		}
		_language[langType][obj.ns][obj.key] = val;
	});
}
// If the (non-collapsed) range selects exactly one <img>, pass it to fn and
// return the image knode when fn approves (returns truthy); otherwise
// return undefined.
function _getImageFromRange(range, fn) {
	if (range.collapsed) {
		return;
	}
	// Work on a normalized copy so the caller's range is untouched.
	range = range.cloneRange().up();
	var sc = range.startContainer, so = range.startOffset;
	// Outside WebKit, only a control range can represent a selected image.
	if (!_WEBKIT && !range.isControl()) {
		return;
	}
	var img = K(sc.childNodes[so]);
	if (!img || img.name != 'img') {
		return;
	}
	if (fn(img)) {
		return img;
	}
}
// Wire the editor document's contextmenu event (called with the editor as
// `this`): collect the registered items whose cond() passes, prune stray
// separators, and pop up a KMenu at the mouse position.
function _bindContextmenuEvent() {
	var self = this, doc = self.edit.doc;
	K(doc).contextmenu(function(e) {
		if (self.menu) {
			self.hideMenu();
		}
		// Context menu disabled: just suppress the native one.
		if (!self.useContextmenu) {
			e.preventDefault();
			return;
		}
		if (self._contextmenus.length === 0) {
			return;
		}
		// Keep separators plus every item whose condition holds, tracking
		// the widest requested menu width.
		var maxWidth = 0, items = [];
		_each(self._contextmenus, function() {
			if (this.title == '-') {
				items.push(this);
				return;
			}
			if (this.cond && this.cond()) {
				items.push(this);
				if (this.width && this.width > maxWidth) {
					maxWidth = this.width;
				}
			}
		});
		// Strip separators at either end...
		while (items.length > 0 && items[0].title == '-') {
			items.shift();
		}
		while (items.length > 0 && items[items.length - 1].title == '-') {
			items.pop();
		}
		// ...and drop runs of consecutive separators in the middle.
		var prevItem = null;
		_each(items, function(i) {
			if (this.title == '-' && prevItem.title == '-') {
				delete items[i];
			}
			prevItem = this;
		});
		if (items.length > 0) {
			e.preventDefault();
			// Build the menu hidden first so its height can be measured.
			var pos = K(self.edit.iframe).pos(),
				menu = _menu({
					x : pos.x + e.clientX,
					y : pos.y + e.clientY,
					width : maxWidth,
					css : { visibility: 'hidden' },
					shadowMode : self.shadowMode
				});
			_each(items, function() {
				if (this.title) {
					menu.addItem(this);
				}
			});
			// Flip the menu upward when it would run off the bottom edge.
			var docEl = _docElement(menu.doc),
				menuHeight = menu.div.height();
			if (e.clientY + menuHeight >= docEl.clientHeight - 100) {
				menu.pos(menu.x, _removeUnit(menu.y) - menuHeight);
			}
			menu.div.css('visibility', 'visible');
			self.menu = menu;
		}
	});
}
// Normalizes Enter-key behavior so new lines use self.newlineTag ('p' or
// 'br') consistently across browsers. Bails out early on browsers whose
// native behavior already matches the requested tag. Called with the
// editor instance as `this`.
function _bindNewlineEvent() {
	var self = this, doc = self.edit.doc, newlineTag = self.newlineTag;
	// IE natively inserts <p>; only intervene when <br> is requested.
	if (_IE && newlineTag !== 'br') {
		return;
	}
	// Old Gecko natively inserts <br>; only intervene when <p> is requested.
	if (_GECKO && _V < 3 && newlineTag !== 'p') {
		return;
	}
	if (_OPERA && _V < 9) {
		return;
	}
	// Tags inside which the browser's default Enter behavior is kept.
	var brSkipTagMap = _toMap('h1,h2,h3,h4,h5,h6,pre,li'),
		pSkipTagMap = _toMap('p,h1,h2,h3,h4,h5,h6,pre,li,blockquote');
	// Nearest non-inline-style element ancestor of the current selection.
	function getAncestorTagName(range) {
		var ancestor = K(range.commonAncestor());
		while (ancestor) {
			if (ancestor.type == 1 && !ancestor.isStyle()) {
				break;
			}
			ancestor = ancestor.parent();
		}
		return ancestor.name;
	}
	K(doc).keydown(function(e) {
		// Plain Enter only — no modifier keys.
		if (e.which != 13 || e.shiftKey || e.ctrlKey || e.altKey) {
			return;
		}
		self.cmd.selection();
		var tagName = getAncestorTagName(self.cmd.range);
		if (tagName == 'marquee' || tagName == 'select') {
			return;
		}
		if (newlineTag === 'br' && !brSkipTagMap[tagName]) {
			e.preventDefault();
			// The zero-width space keeps the caret after the <br> on
			// non-IE browsers.
			self.insertHtml('<br />' + (_IE && _V < 9 ? '' : '\u200B'));
			return;
		}
		// Wrap bare content in <p> so Enter produces paragraph breaks.
		if (!pSkipTagMap[tagName]) {
			_nativeCommand(doc, 'formatblock', '<p>');
		}
	});
	K(doc).keyup(function(e) {
		if (e.which != 13 || e.shiftKey || e.ctrlKey || e.altKey) {
			return;
		}
		if (newlineTag == 'br') {
			return;
		}
		if (_GECKO) {
			// NOTE(review): presumably cleans up the empty <a> Gecko leaves
			// behind when splitting a paragraph inside a link — confirm.
			var root = self.cmd.commonAncestor('p');
			var a = self.cmd.commonAncestor('a');
			if (a.text() == '') {
				a.remove(true);
				self.cmd.range.selectNodeContents(root[0]).collapse(true);
				self.cmd.select();
			}
			return;
		}
		self.cmd.selection();
		var tagName = getAncestorTagName(self.cmd.range);
		if (tagName == 'marquee' || tagName == 'select') {
			return;
		}
		if (!pSkipTagMap[tagName]) {
			_nativeCommand(doc, 'formatblock', '<p>');
		}
		// WebKit wraps the new line in a <div>; convert it to a <p> by
		// moving all children into a fresh paragraph.
		var div = self.cmd.commonAncestor('div');
		if (div) {
			var p = K('<p></p>'),
				child = div[0].firstChild;
			while (child) {
				var next = child.nextSibling;
				p.append(child);
				child = next;
			}
			div.before(p);
			div.remove();
			self.cmd.range.selectNodeContents(p[0]);
			self.cmd.select();
		}
	});
}
// Makes Tab insert whitespace (four non-breaking spaces) instead of moving
// focus out of the editor. A configured afterTab handler fully overrides
// this behavior. Called with the editor instance as `this`.
// Fix: the "&nbsp;" entities had been HTML-decoded to raw characters in a
// previous edit, which breaks serialization; they are restored as entities.
function _bindTabEvent() {
	var self = this, doc = self.edit.doc;
	K(doc).keydown(function(e) {
		if (e.which == 9) {
			e.preventDefault();
			if (self.afterTab) {
				self.afterTab.call(self, e);
				return;
			}
			var cmd = self.cmd, range = cmd.range;
			range.shrink();
			// Collapsed caret on an element node: anchor the insertion
			// point with a placeholder node first.
			if (range.collapsed && range.startContainer.nodeType == 1) {
				range.insertNode(K('@&nbsp;', doc)[0]);
				cmd.select();
			}
			self.insertHtml('&nbsp;&nbsp;&nbsp;&nbsp;');
		}
	});
}
// Forwards focus/blur events on the source textarea (and editor window) to
// the optional afterFocus/afterBlur callbacks, with the editor as `this`.
function _bindFocusEvent() {
	var editor = this;
	var edit = editor.edit;
	K(edit.textarea[0], edit.win)
		.focus(function(e) {
			if (editor.afterFocus) {
				editor.afterFocus.call(editor, e);
			}
		})
		.blur(function(e) {
			if (editor.afterBlur) {
				editor.afterBlur.call(editor, e);
			}
		});
}
// Strips the invisible selection-bookmark <span> markers that the range
// code injects, then trims the result. Used to compare undo snapshots
// without their selection state.
function _removeBookmarkTag(html) {
	var bookmarkRe = /<span [^>]*id="?__kindeditor_bookmark_\w+_\d+__"?[^>]*><\/span>/ig;
	return _trim(html.replace(bookmarkRe, ''));
}
// Strips the temporary paste-buffer DIVs (class "__kindeditor_paste__")
// that the paste handler injects while capturing clipboard content.
function _removeTempTag(html) {
	var tempDivRe = /<div[^>]+class="?__kindeditor_paste__"?[^>]*>[\s\S]*?<\/div>/ig;
	return html.replace(tempDivRe, '');
}
// Pushes a snapshot onto an undo/redo stack, skipping it when its HTML
// (ignoring bookmark markers) is identical to the current top entry, so
// duplicate snapshots never pile up.
function _addBookmarkToStack(stack, bookmark) {
	var top = stack[stack.length - 1];
	if (top && _removeBookmarkTag(bookmark.html) === _removeBookmarkTag(top.html)) {
		return;
	}
	stack.push(bookmark);
}
// Moves one snapshot between the undo and redo stacks and restores it.
// Called with the editor as `this` by both undo() (undo -> redo) and
// redo() (redo -> undo). Returns the editor for chaining.
function _undoToRedo(fromStack, toStack) {
	var self = this, edit = self.edit,
		body = edit.doc.body,
		range, bookmark;
	if (fromStack.length === 0) {
		return self;
	}
	// Snapshot the current content onto the destination stack first; in
	// design mode the selection is preserved via a bookmark.
	if (edit.designMode) {
		range = self.cmd.range;
		bookmark = range.createBookmark(true);
		bookmark.html = body.innerHTML;
	} else {
		bookmark = {
			html : body.innerHTML
		};
	}
	_addBookmarkToStack(toStack, bookmark);
	var prev = fromStack.pop();
	// If the popped snapshot equals the current content, step one further
	// back so the operation actually changes something.
	if (_removeBookmarkTag(bookmark.html) === _removeBookmarkTag(prev.html) && fromStack.length > 0) {
		prev = fromStack.pop();
	}
	if (edit.designMode) {
		edit.html(prev.html);
		// Restore the remembered selection if the snapshot carried one.
		if (prev.start) {
			range.moveToBookmark(prev);
			self.select();
		}
	} else {
		K(body).html(_removeBookmarkTag(prev.html));
	}
	return self;
}
// KEditor: an editor instance. Merges user options over K.options defaults
// (each option is exposed both as self.<key> and self.options.<key>, unless
// the key would shadow a prototype member), derives width/height from the
// source element when not given, and initializes per-instance state.
function KEditor(options) {
	var self = this;
	self.options = {};
	function setOption(key, val) {
		// Never shadow prototype methods with option values.
		if (KEditor.prototype[key] === undefined) {
			self[key] = val;
		}
		self.options[key] = val;
	}
	_each(options, function(key, val) {
		setOption(key, options[key]);
	});
	// Fill in defaults for anything the caller did not supply.
	_each(K.options, function(key, val) {
		if (self[key] === undefined) {
			setOption(key, val);
		}
	});
	var se = K(self.srcElement || '<textarea/>');
	if (!self.width) {
		self.width = se[0].style.width || se.width();
	}
	if (!self.height) {
		self.height = se[0].style.height || se.height();
	}
	setOption('width', _undef(self.width, self.minWidth));
	setOption('height', _undef(self.height, self.minHeight));
	setOption('width', _addUnit(self.width));
	setOption('height', _addUnit(self.height));
	// Old mobile WebKit lacks contentEditable support; fall back to the
	// plain textarea mode.
	if (_MOBILE && (!_IOS || _V < 534)) {
		self.designMode = false;
	}
	self.srcElement = se;
	self.initContent = '';
	self.plugin = {};
	self.isCreated = false;
	self.isLoading = false;
	self._handlers = {};
	self._contextmenus = [];
	self._undoStack = [];
	self._redoStack = [];
	self._calledPlugins = {};
	self._firstAddBookmark = true;
	self.menu = self.contextmenu = null;
	self.dialogs = [];
}
KEditor.prototype = {
lang : function(mixed) {
return _lang(mixed, this.langType);
},
loadPlugin : function(name, fn) {
var self = this;
if (_plugins[name]) {
if (self._calledPlugins[name]) {
if (fn) {
fn.call(self);
}
return self;
}
_plugins[name].call(self, KindEditor);
if (fn) {
fn.call(self);
}
self._calledPlugins[name] = true;
return self;
}
if (self.isLoading) {
return self;
}
self.isLoading = true;
_loadScript(self.pluginsPath + name + '/' + name + '.js?ver=' + encodeURIComponent(K.DEBUG ? _TIME : _VERSION), function() {
self.isLoading = false;
if (_plugins[name]) {
self.loadPlugin(name, fn);
}
});
return self;
},
handler : function(key, fn) {
var self = this;
if (!self._handlers[key]) {
self._handlers[key] = [];
}
if (_isFunction(fn)) {
self._handlers[key].push(fn);
return self;
}
_each(self._handlers[key], function() {
fn = this.call(self, fn);
});
return fn;
},
clickToolbar : function(name, fn) {
var self = this, key = 'clickToolbar' + name;
if (fn === undefined) {
if (self._handlers[key]) {
return self.handler(key);
}
self.loadPlugin(name, function() {
self.handler(key);
});
return self;
}
return self.handler(key, fn);
},
updateState : function() {
var self = this;
_each(('justifyleft,justifycenter,justifyright,justifyfull,insertorderedlist,insertunorderedlist,' +
'subscript,superscript,bold,italic,underline,strikethrough').split(','), function(i, name) {
self.cmd.state(name) ? self.toolbar.select(name) : self.toolbar.unselect(name);
});
return self;
},
addContextmenu : function(item) {
this._contextmenus.push(item);
return this;
},
afterCreate : function(fn) {
return this.handler('afterCreate', fn);
},
beforeRemove : function(fn) {
return this.handler('beforeRemove', fn);
},
beforeGetHtml : function(fn) {
return this.handler('beforeGetHtml', fn);
},
beforeSetHtml : function(fn) {
return this.handler('beforeSetHtml', fn);
},
afterSetHtml : function(fn) {
return this.handler('afterSetHtml', fn);
},
create : function() {
var self = this, fullscreenMode = self.fullscreenMode;
if (self.isCreated) {
return self;
}
if (self.srcElement.data('kindeditor')) {
return self;
}
self.srcElement.data('kindeditor', 'true');
if (fullscreenMode) {
_docElement().style.overflow = 'hidden';
} else {
_docElement().style.overflow = '';
}
var width = fullscreenMode ? _docElement().clientWidth + 'px' : self.width,
height = fullscreenMode ? _docElement().clientHeight + 'px' : self.height;
if ((_IE && _V < 8) || _QUIRKS) {
height = _addUnit(_removeUnit(height) + 2);
}
var container = self.container = K(self.layout);
if (fullscreenMode) {
K(document.body).append(container);
} else {
self.srcElement.before(container);
}
var toolbarDiv = K('.toolbar', container),
editDiv = K('.edit', container),
statusbar = self.statusbar = K('.statusbar', container);
container.removeClass('container')
.addClass('ke-container ke-container-' + self.themeType).css('width', width);
if (fullscreenMode) {
container.css({
position : 'absolute',
left : 0,
top : 0,
'z-index' : 811211
});
if (!_GECKO) {
self._scrollPos = _getScrollPos();
}
window.scrollTo(0, 0);
K(document.body).css({
'height' : '1px',
'overflow' : 'hidden'
});
K(document.body.parentNode).css('overflow', 'hidden');
self._fullscreenExecuted = true;
} else {
if (self._fullscreenExecuted) {
K(document.body).css({
'height' : '',
'overflow' : ''
});
K(document.body.parentNode).css('overflow', '');
}
if (self._scrollPos) {
window.scrollTo(self._scrollPos.x, self._scrollPos.y);
}
}
var htmlList = [];
K.each(self.items, function(i, name) {
if (name == '|') {
htmlList.push('<span class="ke-inline-block ke-separator"></span>');
} else if (name == '/') {
htmlList.push('<div class="ke-hr"></div>');
} else {
htmlList.push('<span class="ke-outline" data-name="' + name + '" title="' + self.lang(name) + '" unselectable="on">');
htmlList.push('<span class="ke-toolbar-icon ke-toolbar-icon-url ke-icon-' + name + '" unselectable="on"></span></span>');
}
});
var toolbar = self.toolbar = _toolbar({
src : toolbarDiv,
html : htmlList.join(''),
noDisableItems : self.noDisableItems,
click : function(e, name) {
e.stop();
if (self.menu) {
var menuName = self.menu.name;
self.hideMenu();
if (menuName === name) {
return;
}
}
self.clickToolbar(name);
}
});
var editHeight = _removeUnit(height) - toolbar.div.height();
var edit = self.edit = _edit({
height : editHeight > 0 && _removeUnit(height) > self.minHeight ? editHeight : self.minHeight,
src : editDiv,
srcElement : self.srcElement,
designMode : self.designMode,
themesPath : self.themesPath,
bodyClass : self.bodyClass,
cssPath : self.cssPath,
cssData : self.cssData,
beforeGetHtml : function(html) {
html = self.beforeGetHtml(html);
return _formatHtml(html, self.filterMode ? self.htmlTags : null, self.urlType, self.wellFormatMode, self.indentChar);
},
beforeSetHtml : function(html) {
html = _formatHtml(html, self.filterMode ? self.htmlTags : null, '', false);
return self.beforeSetHtml(html);
},
afterSetHtml : function() {
self.edit = edit = this;
self.afterSetHtml();
},
afterCreate : function() {
self.edit = edit = this;
self.cmd = edit.cmd;
self._docMousedownFn = function(e) {
if (self.menu) {
self.hideMenu();
}
};
K(edit.doc, document).mousedown(self._docMousedownFn);
_bindContextmenuEvent.call(self);
_bindNewlineEvent.call(self);
_bindTabEvent.call(self);
_bindFocusEvent.call(self);
edit.afterChange(function(e) {
if (!edit.designMode) {
return;
}
self.updateState();
self.addBookmark();
if (self.options.afterChange) {
self.options.afterChange.call(self);
}
});
edit.textarea.keyup(function(e) {
if (!e.ctrlKey && !e.altKey && _INPUT_KEY_MAP[e.which]) {
if (self.options.afterChange) {
self.options.afterChange.call(self);
}
}
});
if (self.readonlyMode) {
self.readonly();
}
self.isCreated = true;
if (self.initContent === '') {
self.initContent = self.html();
}
self.afterCreate();
if (self.options.afterCreate) {
self.options.afterCreate.call(self);
}
}
});
statusbar.removeClass('statusbar').addClass('ke-statusbar')
.append('<span class="ke-inline-block ke-statusbar-center-icon"></span>')
.append('<span class="ke-inline-block ke-statusbar-right-icon"></span>');
K(window).unbind('resize');
function initResize() {
if (statusbar.height() === 0) {
setTimeout(initResize, 100);
return;
}
self.resize(width, height);
}
initResize();
function newResize(width, height, updateProp) {
updateProp = _undef(updateProp, true);
if (width && width >= self.minWidth) {
self.resize(width, null);
if (updateProp) {
self.width = _addUnit(width);
}
}
if (height && height >= self.minHeight) {
self.resize(null, height);
if (updateProp) {
self.height = _addUnit(height);
}
}
}
if (fullscreenMode) {
K(window).bind('resize', function(e) {
if (self.isCreated) {
newResize(_docElement().clientWidth, _docElement().clientHeight, false);
}
});
toolbar.select('fullscreen');
statusbar.first().css('visibility', 'hidden');
statusbar.last().css('visibility', 'hidden');
} else {
if (_GECKO) {
K(window).bind('scroll', function(e) {
self._scrollPos = _getScrollPos();
});
}
if (self.resizeType > 0) {
_drag({
moveEl : container,
clickEl : statusbar,
moveFn : function(x, y, width, height, diffX, diffY) {
height += diffY;
newResize(null, height);
}
});
} else {
statusbar.first().css('visibility', 'hidden');
}
if (self.resizeType === 2) {
_drag({
moveEl : container,
clickEl : statusbar.last(),
moveFn : function(x, y, width, height, diffX, diffY) {
width += diffX;
height += diffY;
newResize(width, height);
}
});
} else {
statusbar.last().css('visibility', 'hidden');
}
}
return self;
},
remove : function() {
var self = this;
if (!self.isCreated) {
return self;
}
self.beforeRemove();
self.srcElement.data('kindeditor', '');
if (self.menu) {
self.hideMenu();
}
_each(self.dialogs, function() {
self.hideDialog();
});
K(document).unbind('mousedown', self._docMousedownFn);
self.toolbar.remove();
self.edit.remove();
self.statusbar.last().unbind();
self.statusbar.unbind();
self.container.remove();
self.container = self.toolbar = self.edit = self.menu = null;
self.dialogs = [];
self.isCreated = false;
return self;
},
resize : function(width, height) {
var self = this;
if (width !== null) {
if (_removeUnit(width) > self.minWidth) {
self.container.css('width', _addUnit(width));
}
}
if (height !== null && self.toolbar.div && self.statusbar) {
height = _removeUnit(height) - self.toolbar.div.height() - self.statusbar.height();
if (height > 0 && _removeUnit(height) > self.minHeight) {
self.edit.setHeight(height);
}
}
return self;
},
select : function() {
this.isCreated && this.cmd.select();
return this;
},
html : function(val) {
var self = this;
if (val === undefined) {
return self.isCreated ? self.edit.html() : _elementVal(self.srcElement);
}
self.isCreated ? self.edit.html(val) : _elementVal(self.srcElement, val);
return self;
},
fullHtml : function() {
return this.isCreated ? this.edit.html(undefined, true) : '';
},
text : function(val) {
var self = this;
if (val === undefined) {
return _trim(self.html().replace(/<(?!img|embed).*?>/ig, '').replace(/ /ig, ' '));
} else {
return self.html(_escape(val));
}
},
isEmpty : function() {
return _trim(this.text().replace(/\r\n|\n|\r/, '')) === '';
},
isDirty : function() {
return _trim(this.initContent.replace(/\r\n|\n|\r|t/g, '')) !== _trim(this.html().replace(/\r\n|\n|\r|t/g, ''));
},
selectedHtml : function() {
return this.isCreated ? this.cmd.range.html() : '';
},
count : function(mode) {
var self = this;
mode = (mode || 'html').toLowerCase();
if (mode === 'html') {
return _removeBookmarkTag(_removeTempTag(self.html())).length;
}
if (mode === 'text') {
return self.text().replace(/<(?:img|embed).*?>/ig, 'K').replace(/\r\n|\n|\r/g, '').length;
}
return 0;
},
exec : function(key) {
key = key.toLowerCase();
var self = this, cmd = self.cmd,
changeFlag = _inArray(key, 'selectall,copy,paste,print'.split(',')) < 0;
if (changeFlag) {
self.addBookmark(false);
}
cmd[key].apply(cmd, _toArray(arguments, 1));
if (changeFlag) {
self.updateState();
self.addBookmark(false);
if (self.options.afterChange) {
self.options.afterChange.call(self);
}
}
return self;
},
insertHtml : function(val, quickMode) {
if (!this.isCreated) {
return this;
}
val = this.beforeSetHtml(val);
this.exec('inserthtml', val, quickMode);
return this;
},
appendHtml : function(val) {
this.html(this.html() + val);
if (this.isCreated) {
var cmd = this.cmd;
cmd.range.selectNodeContents(cmd.doc.body).collapse(false);
cmd.select();
}
return this;
},
sync : function() {
_elementVal(this.srcElement, this.html());
return this;
},
focus : function() {
this.isCreated ? this.edit.focus() : this.srcElement[0].focus();
return this;
},
blur : function() {
this.isCreated ? this.edit.blur() : this.srcElement[0].blur();
return this;
},
addBookmark : function(checkSize) {
checkSize = _undef(checkSize, true);
var self = this, edit = self.edit,
body = edit.doc.body,
html = _removeTempTag(body.innerHTML), bookmark;
if (checkSize && self._undoStack.length > 0) {
var prev = self._undoStack[self._undoStack.length - 1];
if (Math.abs(html.length - _removeBookmarkTag(prev.html).length) < self.minChangeSize) {
return self;
}
}
if (edit.designMode && !self._firstAddBookmark) {
var range = self.cmd.range;
bookmark = range.createBookmark(true);
bookmark.html = _removeTempTag(body.innerHTML);
range.moveToBookmark(bookmark);
} else {
bookmark = {
html : html
};
}
self._firstAddBookmark = false;
_addBookmarkToStack(self._undoStack, bookmark);
return self;
},
undo : function() {
return _undoToRedo.call(this, this._undoStack, this._redoStack);
},
redo : function() {
return _undoToRedo.call(this, this._redoStack, this._undoStack);
},
fullscreen : function(bool) {
this.fullscreenMode = (bool === undefined ? !this.fullscreenMode : bool);
return this.remove().create();
},
readonly : function(isReadonly) {
isReadonly = _undef(isReadonly, true);
var self = this, edit = self.edit, doc = edit.doc;
if (self.designMode) {
self.toolbar.disableAll(isReadonly, []);
} else {
_each(self.noDisableItems, function() {
self.toolbar[isReadonly ? 'disable' : 'enable'](this);
});
}
if (_IE) {
doc.body.contentEditable = !isReadonly;
} else {
doc.designMode = isReadonly ? 'off' : 'on';
}
edit.textarea[0].disabled = isReadonly;
},
createMenu : function(options) {
var self = this,
name = options.name,
knode = self.toolbar.get(name),
pos = knode.pos();
options.x = pos.x;
options.y = pos.y + knode.height();
options.shadowMode = _undef(options.shadowMode, self.shadowMode);
if (options.selectedColor !== undefined) {
options.cls = 'ke-colorpicker-' + self.themeType;
options.noColor = self.lang('noColor');
self.menu = _colorpicker(options);
} else {
options.cls = 'ke-menu-' + self.themeType;
options.centerLineMode = false;
self.menu = _menu(options);
}
return self.menu;
},
hideMenu : function() {
this.menu.remove();
this.menu = null;
return this;
},
hideContextmenu : function() {
this.contextmenu.remove();
this.contextmenu = null;
return this;
},
createDialog : function(options) {
var self = this, name = options.name;
options.shadowMode = _undef(options.shadowMode, self.shadowMode);
options.closeBtn = _undef(options.closeBtn, {
name : self.lang('close'),
click : function(e) {
self.hideDialog();
if (_IE && self.cmd) {
self.cmd.select();
}
}
});
options.noBtn = _undef(options.noBtn, {
name : self.lang(options.yesBtn ? 'no' : 'close'),
click : function(e) {
self.hideDialog();
if (_IE && self.cmd) {
self.cmd.select();
}
}
});
if (self.dialogAlignType != 'page') {
options.alignEl = self.container;
}
options.cls = 'ke-dialog-' + self.themeType;
if (self.dialogs.length > 0) {
var firstDialog = self.dialogs[0],
parentDialog = self.dialogs[self.dialogs.length - 1];
firstDialog.setMaskIndex(parentDialog.z + 2);
options.z = parentDialog.z + 3;
options.showMask = false;
}
var dialog = _dialog(options);
self.dialogs.push(dialog);
return dialog;
},
hideDialog : function() {
var self = this;
if (self.dialogs.length > 0) {
self.dialogs.pop().remove();
}
if (self.dialogs.length > 0) {
var firstDialog = self.dialogs[0],
parentDialog = self.dialogs[self.dialogs.length - 1];
firstDialog.setMaskIndex(parentDialog.z - 1);
}
return self;
},
errorDialog : function(html) {
var self = this;
var dialog = self.createDialog({
width : 750,
title : self.lang('uploadError'),
body : '<div style="padding:10px 20px;"><iframe frameborder="0" style="width:708px;height:400px;"></iframe></div>'
});
var iframe = K('iframe', dialog.div), doc = K.iframeDoc(iframe);
doc.open();
doc.write(html);
doc.close();
K(doc.body).css('background-color', '#FFF');
iframe[0].contentWindow.focus();
return self;
}
};
// Factory wrapper: builds a KEditor without creating its UI (see _create
// for the full create-and-attach path).
function _editor(options) {
	return new KEditor(options);
}
// Registry of all live editor instances, in creation order.
_instances = [];
// Creates editor(s) for the element(s) matched by expr. Resolves asset
// paths, optionally loads theme CSS, and defers UI creation until the
// language pack for editor.langType is loaded.
function _create(expr, options) {
	options = options || {};
	options.basePath = _undef(options.basePath, K.basePath);
	options.themesPath = _undef(options.themesPath, options.basePath + 'themes/');
	options.langPath = _undef(options.langPath, options.basePath + 'lang/');
	options.pluginsPath = _undef(options.pluginsPath, options.basePath + 'plugins/');
	if (_undef(options.loadStyleMode, K.options.loadStyleMode)) {
		var themeType = _undef(options.themeType, K.options.themeType);
		_loadStyle(options.themesPath + 'default/default.css');
		_loadStyle(options.themesPath + themeType + '/' + themeType + '.css');
	}
	// Runs every registered plugin against the editor, then builds its UI.
	function create(editor) {
		_each(_plugins, function(name, fn) {
			fn.call(editor, KindEditor);
		});
		return editor.create();
	}
	var knode = K(expr);
	if (!knode || knode.length === 0) {
		return;
	}
	// Multiple matches: create one editor per element, return the first.
	if (knode.length > 1) {
		knode.each(function() {
			_create(this, options);
		});
		return _instances[0];
	}
	options.srcElement = knode[0];
	var editor = new KEditor(options);
	_instances.push(editor);
	if (_language[editor.langType]) {
		return create(editor);
	}
	// Language pack not loaded yet: fetch it, then finish asynchronously.
	_loadScript(editor.langPath + editor.langType + '.js?ver=' + encodeURIComponent(K.DEBUG ? _TIME : _VERSION), function() {
		create(editor);
	});
	return editor;
}
// Invokes fn once per existing editor whose source element matches expr.
// fn receives (indexInInstances, editor) with the editor bound as `this`.
function _eachEditor(expr, fn) {
	K(expr).each(function(i, el) {
		K.each(_instances, function(j, editor) {
			if (editor && editor.srcElement[0] == el) {
				fn.call(editor, j, editor);
				// Stop scanning instances once the match is found.
				return false;
			}
		});
	});
}
// Removes the editor(s) bound to expr and drops them from the registry.
K.remove = function(expr) {
	_eachEditor(expr, function(i) {
		this.remove();
		_instances.splice(i, 1);
	});
};
// Copies each matched editor's content back into its source element.
K.sync = function(expr) {
	_eachEditor(expr, function() {
		this.sync();
	});
};
// IE6: enable background-image caching so toolbar icons are not re-fetched
// on every repaint.
if (_IE && _V < 7) {
	_nativeCommand(document, 'BackgroundImageCache', true);
}
// Public API surface.
K.EditorClass = KEditor;
K.editor = _editor;
K.create = _create;
K.instances = _instances;
K.plugin = _plugin;
K.lang = _lang;
_plugin('core', function(K) {
var self = this,
shortcutKeys = {
undo : 'Z', redo : 'Y', bold : 'B', italic : 'I', underline : 'U', print : 'P', selectall : 'A'
};
self.afterSetHtml(function() {
if (self.options.afterChange) {
self.options.afterChange.call(self);
}
});
self.afterCreate(function() {
if (self.syncType != 'form') {
return;
}
var el = K(self.srcElement), hasForm = false;
while ((el = el.parent())) {
if (el.name == 'form') {
hasForm = true;
break;
}
}
if (hasForm) {
el.bind('submit', function(e) {
self.sync();
K(window).bind('unload', function() {
self.edit.textarea.remove();
});
});
var resetBtn = K('[type="reset"]', el);
resetBtn.click(function() {
self.html(self.initContent);
self.cmd.selection();
});
self.beforeRemove(function() {
el.unbind();
resetBtn.unbind();
});
}
});
self.clickToolbar('source', function() {
if (self.edit.designMode) {
self.toolbar.disableAll(true);
self.edit.design(false);
self.toolbar.select('source');
} else {
self.toolbar.disableAll(false);
self.edit.design(true);
self.toolbar.unselect('source');
}
self.designMode = self.edit.designMode;
});
self.afterCreate(function() {
if (!self.designMode) {
self.toolbar.disableAll(true).select('source');
}
});
self.clickToolbar('fullscreen', function() {
self.fullscreen();
});
if (self.fullscreenShortcut) {
var loaded = false;
self.afterCreate(function() {
K(self.edit.doc, self.edit.textarea).keyup(function(e) {
if (e.which == 27) {
setTimeout(function() {
self.fullscreen();
}, 0);
}
});
if (loaded) {
if (_IE && !self.designMode) {
return;
}
self.focus();
}
if (!loaded) {
loaded = true;
}
});
}
_each('undo,redo'.split(','), function(i, name) {
if (shortcutKeys[name]) {
self.afterCreate(function() {
_ctrl(this.edit.doc, shortcutKeys[name], function() {
self.clickToolbar(name);
});
});
}
self.clickToolbar(name, function() {
self[name]();
});
});
self.clickToolbar('formatblock', function() {
var blocks = self.lang('formatblock.formatBlock'),
heights = {
h1 : 28,
h2 : 24,
h3 : 18,
H4 : 14,
p : 12
},
curVal = self.cmd.val('formatblock'),
menu = self.createMenu({
name : 'formatblock',
width : self.langType == 'en' ? 200 : 150
});
_each(blocks, function(key, val) {
var style = 'font-size:' + heights[key] + 'px;';
if (key.charAt(0) === 'h') {
style += 'font-weight:bold;';
}
menu.addItem({
title : '<span style="' + style + '" unselectable="on">' + val + '</span>',
height : heights[key] + 12,
checked : (curVal === key || curVal === val),
click : function() {
self.select().exec('formatblock', '<' + key + '>').hideMenu();
}
});
});
});
self.clickToolbar('fontname', function() {
var curVal = self.cmd.val('fontname'),
menu = self.createMenu({
name : 'fontname',
width : 150
});
_each(self.lang('fontname.fontName'), function(key, val) {
menu.addItem({
title : '<span style="font-family: ' + key + ';" unselectable="on">' + val + '</span>',
checked : (curVal === key.toLowerCase() || curVal === val.toLowerCase()),
click : function() {
self.exec('fontname', key).hideMenu();
}
});
});
});
self.clickToolbar('fontsize', function() {
var curVal = self.cmd.val('fontsize'),
menu = self.createMenu({
name : 'fontsize',
width : 150
});
_each(self.fontSizeTable, function(i, val) {
menu.addItem({
title : '<span style="font-size:' + val + ';" unselectable="on">' + val + '</span>',
height : _removeUnit(val) + 12,
checked : curVal === val,
click : function() {
self.exec('fontsize', val).hideMenu();
}
});
});
});
_each('forecolor,hilitecolor'.split(','), function(i, name) {
self.clickToolbar(name, function() {
self.createMenu({
name : name,
selectedColor : self.cmd.val(name) || 'default',
colors : self.colorTable,
click : function(color) {
self.exec(name, color).hideMenu();
}
});
});
});
_each(('cut,copy,paste').split(','), function(i, name) {
self.clickToolbar(name, function() {
self.focus();
try {
self.exec(name, null);
} catch(e) {
alert(self.lang(name + 'Error'));
}
});
});
self.clickToolbar('about', function() {
var html = '<div style="margin:20px;">' +
'<div>KindEditor ' + _VERSION + '</div>' +
'<div>Copyright © <a href="http://www.kindsoft.net/" target="_blank">kindsoft.net</a> All rights reserved.</div>' +
'</div>';
self.createDialog({
name : 'about',
width : 300,
title : self.lang('about'),
body : html
});
});
self.plugin.getSelectedLink = function() {
return self.cmd.commonAncestor('a');
};
self.plugin.getSelectedImage = function() {
return _getImageFromRange(self.edit.cmd.range, function(img) {
return !/^ke-\w+$/i.test(img[0].className);
});
};
self.plugin.getSelectedFlash = function() {
return _getImageFromRange(self.edit.cmd.range, function(img) {
return img[0].className == 'ke-flash';
});
};
self.plugin.getSelectedMedia = function() {
return _getImageFromRange(self.edit.cmd.range, function(img) {
return img[0].className == 'ke-media' || img[0].className == 'ke-rm';
});
};
self.plugin.getSelectedAnchor = function() {
return _getImageFromRange(self.edit.cmd.range, function(img) {
return img[0].className == 'ke-anchor';
});
};
_each('link,image,flash,media,anchor'.split(','), function(i, name) {
var uName = name.charAt(0).toUpperCase() + name.substr(1);
_each('edit,delete'.split(','), function(j, val) {
self.addContextmenu({
title : self.lang(val + uName),
click : function() {
self.loadPlugin(name, function() {
self.plugin[name][val]();
self.hideMenu();
});
},
cond : self.plugin['getSelected' + uName],
width : 150,
iconClass : val == 'edit' ? 'ke-icon-' + name : undefined
});
});
self.addContextmenu({ title : '-' });
});
self.plugin.getSelectedTable = function() {
return self.cmd.commonAncestor('table');
};
self.plugin.getSelectedRow = function() {
return self.cmd.commonAncestor('tr');
};
self.plugin.getSelectedCell = function() {
return self.cmd.commonAncestor('td');
};
_each(('prop,cellprop,colinsertleft,colinsertright,rowinsertabove,rowinsertbelow,rowmerge,colmerge,' +
'rowsplit,colsplit,coldelete,rowdelete,insert,delete').split(','), function(i, val) {
var cond = _inArray(val, ['prop', 'delete']) < 0 ? self.plugin.getSelectedCell : self.plugin.getSelectedTable;
self.addContextmenu({
title : self.lang('table' + val),
click : function() {
self.loadPlugin('table', function() {
self.plugin.table[val]();
self.hideMenu();
});
},
cond : cond,
width : 170,
iconClass : 'ke-icon-table' + val
});
});
self.addContextmenu({ title : '-' });
_each(('selectall,justifyleft,justifycenter,justifyright,justifyfull,insertorderedlist,' +
'insertunorderedlist,indent,outdent,subscript,superscript,hr,print,' +
'bold,italic,underline,strikethrough,removeformat,unlink').split(','), function(i, name) {
if (shortcutKeys[name]) {
self.afterCreate(function() {
_ctrl(this.edit.doc, shortcutKeys[name], function() {
self.cmd.selection();
self.clickToolbar(name);
});
});
}
self.clickToolbar(name, function() {
self.focus().exec(name, null);
});
});
self.afterCreate(function() {
var doc = self.edit.doc, cmd, bookmark, div,
cls = '__kindeditor_paste__', pasting = false;
function movePastedData() {
cmd.range.moveToBookmark(bookmark);
cmd.select();
if (_WEBKIT) {
K('div.' + cls, div).each(function() {
K(this).after('<br />').remove(true);
});
K('span.Apple-style-span', div).remove(true);
K('span.Apple-tab-span', div).remove(true);
K('span[style]', div).each(function() {
if (K(this).css('white-space') == 'nowrap') {
K(this).remove(true);
}
});
K('meta', div).remove();
}
var html = div[0].innerHTML;
div.remove();
if (html === '') {
return;
}
if (self.pasteType === 2) {
if (/schemas-microsoft-com|worddocument|mso-\w+/i.test(html)) {
html = _clearMsWord(html, self.filterMode ? self.htmlTags : K.options.htmlTags);
} else {
html = _formatHtml(html, self.filterMode ? self.htmlTags : null);
html = self.beforeSetHtml(html);
}
}
if (self.pasteType === 1) {
html = html.replace(/<br[^>]*>/ig, '\n');
html = html.replace(/<\/p><p[^>]*>/ig, '\n');
html = html.replace(/<[^>]+>/g, '');
html = html.replace(/ /ig, ' ');
html = html.replace(/\n\s*\n/g, '\n');
html = html.replace(/ {2}/g, ' ');
if (self.newlineTag == 'p') {
if (/\n/.test(html)) {
html = html.replace(/^/, '<p>').replace(/$/, '</p>').replace(/\n/g, '</p><p>');
}
} else {
html = html.replace(/\n/g, '<br />$&');
}
}
self.insertHtml(html, true);
}
K(doc.body).bind('paste', function(e){
if (self.pasteType === 0) {
e.stop();
return;
}
if (pasting) {
return;
}
pasting = true;
K('div.' + cls, doc).remove();
cmd = self.cmd.selection();
bookmark = cmd.range.createBookmark();
div = K('<div class="' + cls + '"></div>', doc).css({
position : 'absolute',
width : '1px',
height : '1px',
overflow : 'hidden',
left : '-1981px',
top : K(bookmark.start).pos().y + 'px',
'white-space' : 'nowrap'
});
K(doc.body).append(div);
if (_IE) {
var rng = cmd.range.get(true);
rng.moveToElementText(div[0]);
rng.select();
rng.execCommand('paste');
e.preventDefault();
} else {
cmd.range.selectNodeContents(div[0]);
cmd.select();
}
setTimeout(function() {
movePastedData();
pasting = false;
}, 0);
});
});
self.beforeGetHtml(function(html) {
return html.replace(/(<(?:noscript|noscript\s[^>]*)>)([\s\S]*?)(<\/noscript>)/ig, function($0, $1, $2, $3) {
return $1 + _unescape($2).replace(/\s+/g, ' ') + $3;
})
.replace(/<img[^>]*class="?ke-(flash|rm|media)"?[^>]*>/ig, function(full) {
var imgAttrs = _getAttrList(full),
styles = _getCssList(imgAttrs.style || ''),
attrs = _mediaAttrs(imgAttrs['data-ke-tag']);
attrs.width = _undef(imgAttrs.width, _removeUnit(_undef(styles.width, '')));
attrs.height = _undef(imgAttrs.height, _removeUnit(_undef(styles.height, '')));
return _mediaEmbed(attrs);
})
.replace(/<img[^>]*class="?ke-anchor"?[^>]*>/ig, function(full) {
var imgAttrs = _getAttrList(full);
return '<a name="' + unescape(imgAttrs['data-ke-name']) + '"></a>';
})
.replace(/<div\s+[^>]*data-ke-script-attr="([^"]*)"[^>]*>([\s\S]*?)<\/div>/ig, function(full, attr, code) {
return '<script' + unescape(attr) + '>' + unescape(code) + '</script>';
})
.replace(/<div\s+[^>]*data-ke-noscript-attr="([^"]*)"[^>]*>([\s\S]*?)<\/div>/ig, function(full, attr, code) {
return '<noscript' + unescape(attr) + '>' + unescape(code) + '</noscript>';
})
.replace(/(<[^>]*)data-ke-src="([^"]*)"([^>]*>)/ig, function(full, start, src, end) {
full = full.replace(/(\s+(?:href|src)=")[^"]*(")/i, function($0, $1, $2) {
return $1 + _unescape(src) + $2;
});
full = full.replace(/\s+data-ke-src="[^"]*"/i, '');
return full;
})
.replace(/(<[^>]+\s)data-ke-(on\w+="[^"]*"[^>]*>)/ig, function(full, start, end) {
return start + end;
});
});
self.beforeSetHtml(function(html) {
return html.replace(/<embed[^>]*type="([^"]+)"[^>]*>(?:<\/embed>)?/ig, function(full) {
var attrs = _getAttrList(full);
attrs.src = _undef(attrs.src, '');
attrs.width = _undef(attrs.width, 0);
attrs.height = _undef(attrs.height, 0);
return _mediaImg(self.themesPath + 'common/blank.gif', attrs);
})
.replace(/<a[^>]*name="([^"]+)"[^>]*>(?:<\/a>)?/ig, function(full) {
var attrs = _getAttrList(full);
if (attrs.href !== undefined) {
return full;
}
return '<img class="ke-anchor" src="' + self.themesPath + 'common/anchor.gif" data-ke-name="' + escape(attrs.name) + '" />';
})
.replace(/<script([^>]*)>([\s\S]*?)<\/script>/ig, function(full, attr, code) {
return '<div class="ke-script" data-ke-script-attr="' + escape(attr) + '">' + escape(code) + '</div>';
})
.replace(/<noscript([^>]*)>([\s\S]*?)<\/noscript>/ig, function(full, attr, code) {
return '<div class="ke-noscript" data-ke-noscript-attr="' + escape(attr) + '">' + escape(code) + '</div>';
})
.replace(/(<[^>]*)(href|src)="([^"]*)"([^>]*>)/ig, function(full, start, key, src, end) {
if (full.match(/\sdata-ke-src="[^"]*"/i)) {
return full;
}
full = start + key + '="' + src + '"' + ' data-ke-src="' + _escape(src) + '"' + end;
return full;
})
.replace(/(<[^>]+\s)(on\w+="[^"]*"[^>]*>)/ig, function(full, start, end) {
return start + 'data-ke-' + end;
})
.replace(/<table[^>]*\s+border="0"[^>]*>/ig, function(full) {
if (full.indexOf('ke-zeroborder') >= 0) {
return full;
}
return _addClassToTag(full, 'ke-zeroborder');
});
});
});
})(window);<|fim▁end|>
|
if (tagName === 'font') {
|
<|file_name|>boneIKController.ts<|end_file_name|><|fim▁begin|>import { Bone } from "./bone";
import { Vector3, Quaternion, Matrix } from "../Maths/math.vector";
import { TransformNode } from "../Meshes/transformNode";
import { Nullable } from "../types";
import { Space } from '../Maths/math.axis';
/**
* Class used to apply inverse kinematics to bones
* @see https://doc.babylonjs.com/how_to/how_to_use_bones_and_skeletons#boneikcontroller
*/
export class BoneIKController {
private static _tmpVecs: Vector3[] = [Vector3.Zero(), Vector3.Zero(), Vector3.Zero(), Vector3.Zero(), Vector3.Zero(), Vector3.Zero()];
private static _tmpQuat = Quaternion.Identity();
private static _tmpMats: Matrix[] = [Matrix.Identity(), Matrix.Identity()];
/**
* Gets or sets the target TransformNode
* Name kept as mesh for back compability
*/
public targetMesh: TransformNode;
/** Gets or sets the mesh used as pole */
public poleTargetMesh: TransformNode;
/**
* Gets or sets the bone used as pole
*/
public poleTargetBone: Nullable<Bone>;
/**
* Gets or sets the target position
*/
public targetPosition = Vector3.Zero();
/**
* Gets or sets the pole target position
*/
public poleTargetPosition = Vector3.Zero();
/**
* Gets or sets the pole target local offset
*/
public poleTargetLocalOffset = Vector3.Zero();
/**
* Gets or sets the pole angle
*/
public poleAngle = 0;
/**
* Gets or sets the TransformNode associated with the controller
* Name kept as mesh for back compability
*/
public mesh: TransformNode;
/**
* The amount to slerp (spherical linear interpolation) to the target. Set this to a value between 0 and 1 (a value of 1 disables slerp)
*/
public slerpAmount = 1;
private _bone1Quat = Quaternion.Identity();
private _bone1Mat = Matrix.Identity();
private _bone2Ang = Math.PI;
private _bone1: Nullable<Bone>;
private _bone2: Bone;
private _bone1Length: number;
private _bone2Length: number;
private _maxAngle = Math.PI;
private _maxReach: number;
private _rightHandedSystem = false;
private _bendAxis = Vector3.Right();
private _slerping = false;
private _adjustRoll = 0;
/**
* Gets or sets maximum allowed angle
*/
public get maxAngle(): number {
return this._maxAngle;
}
public set maxAngle(value: number) {
this._setMaxAngle(value);
}
/**
* Creates a new BoneIKController
* @param mesh defines the TransformNode to control
* @param bone defines the bone to control
* @param options defines options to set up the controller
*/
constructor(mesh: TransformNode,
bone: Bone,
options?: {
targetMesh?: TransformNode,
poleTargetMesh?: TransformNode,
poleTargetBone?: Bone,
poleTargetLocalOffset?: Vector3,
poleAngle?: number,
bendAxis?: Vector3,
maxAngle?: number,
slerpAmount?: number
}) {
this._bone2 = bone;
this._bone1 = bone.getParent();
if (!this._bone1) {
return;
}
this.mesh = mesh;
var bonePos = bone.getPosition();
if (bone.getAbsoluteTransform().determinant() > 0) {
this._rightHandedSystem = true;
this._bendAxis.x = 0;
this._bendAxis.y = 0;
this._bendAxis.z = -1;
if (bonePos.x > bonePos.y && bonePos.x > bonePos.z) {
this._adjustRoll = Math.PI * .5;
this._bendAxis.z = 1;
}
}
if (this._bone1.length) {
var boneScale1 = this._bone1.getScale();
var boneScale2 = this._bone2.getScale();
this._bone1Length = this._bone1.length * boneScale1.y * this.mesh.scaling.y;
this._bone2Length = this._bone2.length * boneScale2.y * this.mesh.scaling.y;
} else if (this._bone1.children[0]) {
mesh.computeWorldMatrix(true);
var pos1 = this._bone2.children[0].getAbsolutePosition(mesh);
var pos2 = this._bone2.getAbsolutePosition(mesh);
var pos3 = this._bone1.getAbsolutePosition(mesh);
this._bone1Length = Vector3.Distance(pos1, pos2);
this._bone2Length = Vector3.Distance(pos2, pos3);
}
this._bone1.getRotationMatrixToRef(Space.WORLD, mesh, this._bone1Mat);
this.maxAngle = Math.PI;
if (options) {
if (options.targetMesh) {
this.targetMesh = options.targetMesh;
this.targetMesh.computeWorldMatrix(true);
}
if (options.poleTargetMesh) {
this.poleTargetMesh = options.poleTargetMesh;
this.poleTargetMesh.computeWorldMatrix(true);
<|fim▁hole|> } else if (options.poleTargetBone) {
this.poleTargetBone = options.poleTargetBone;
} else if (this._bone1.getParent()) {
this.poleTargetBone = this._bone1.getParent();
}
if (options.poleTargetLocalOffset) {
this.poleTargetLocalOffset.copyFrom(options.poleTargetLocalOffset);
}
if (options.poleAngle) {
this.poleAngle = options.poleAngle;
}
if (options.bendAxis) {
this._bendAxis.copyFrom(options.bendAxis);
}
if (options.maxAngle) {
this.maxAngle = options.maxAngle;
}
if (options.slerpAmount) {
this.slerpAmount = options.slerpAmount;
}
}
}
private _setMaxAngle(ang: number): void {
if (ang < 0) {
ang = 0;
}
if (ang > Math.PI || ang == undefined) {
ang = Math.PI;
}
this._maxAngle = ang;
var a = this._bone1Length;
var b = this._bone2Length;
this._maxReach = Math.sqrt(a * a + b * b - 2 * a * b * Math.cos(ang));
}
/**
* Force the controller to update the bones
*/
public update(): void {
var bone1 = this._bone1;
if (!bone1) {
return;
}
var target = this.targetPosition;
var poleTarget = this.poleTargetPosition;
var mat1 = BoneIKController._tmpMats[0];
var mat2 = BoneIKController._tmpMats[1];
if (this.targetMesh) {
target.copyFrom(this.targetMesh.getAbsolutePosition());
}
if (this.poleTargetBone) {
this.poleTargetBone.getAbsolutePositionFromLocalToRef(this.poleTargetLocalOffset, this.mesh, poleTarget);
} else if (this.poleTargetMesh) {
Vector3.TransformCoordinatesToRef(this.poleTargetLocalOffset, this.poleTargetMesh.getWorldMatrix(), poleTarget);
}
var bonePos = BoneIKController._tmpVecs[0];
var zaxis = BoneIKController._tmpVecs[1];
var xaxis = BoneIKController._tmpVecs[2];
var yaxis = BoneIKController._tmpVecs[3];
var upAxis = BoneIKController._tmpVecs[4];
var _tmpQuat = BoneIKController._tmpQuat;
bone1.getAbsolutePositionToRef(this.mesh, bonePos);
poleTarget.subtractToRef(bonePos, upAxis);
if (upAxis.x == 0 && upAxis.y == 0 && upAxis.z == 0) {
upAxis.y = 1;
} else {
upAxis.normalize();
}
target.subtractToRef(bonePos, yaxis);
yaxis.normalize();
Vector3.CrossToRef(yaxis, upAxis, zaxis);
zaxis.normalize();
Vector3.CrossToRef(yaxis, zaxis, xaxis);
xaxis.normalize();
Matrix.FromXYZAxesToRef(xaxis, yaxis, zaxis, mat1);
var a = this._bone1Length;
var b = this._bone2Length;
var c = Vector3.Distance(bonePos, target);
if (this._maxReach > 0) {
c = Math.min(this._maxReach, c);
}
var acosa = (b * b + c * c - a * a) / (2 * b * c);
var acosb = (c * c + a * a - b * b) / (2 * c * a);
if (acosa > 1) {
acosa = 1;
}
if (acosb > 1) {
acosb = 1;
}
if (acosa < -1) {
acosa = -1;
}
if (acosb < -1) {
acosb = -1;
}
var angA = Math.acos(acosa);
var angB = Math.acos(acosb);
var angC = -angA - angB;
if (this._rightHandedSystem) {
Matrix.RotationYawPitchRollToRef(0, 0, this._adjustRoll, mat2);
mat2.multiplyToRef(mat1, mat1);
Matrix.RotationAxisToRef(this._bendAxis, angB, mat2);
mat2.multiplyToRef(mat1, mat1);
} else {
var _tmpVec = BoneIKController._tmpVecs[5];
_tmpVec.copyFrom(this._bendAxis);
_tmpVec.x *= -1;
Matrix.RotationAxisToRef(_tmpVec, -angB, mat2);
mat2.multiplyToRef(mat1, mat1);
}
if (this.poleAngle) {
Matrix.RotationAxisToRef(yaxis, this.poleAngle, mat2);
mat1.multiplyToRef(mat2, mat1);
}
if (this._bone1) {
if (this.slerpAmount < 1) {
if (!this._slerping) {
Quaternion.FromRotationMatrixToRef(this._bone1Mat, this._bone1Quat);
}
Quaternion.FromRotationMatrixToRef(mat1, _tmpQuat);
Quaternion.SlerpToRef(this._bone1Quat, _tmpQuat, this.slerpAmount, this._bone1Quat);
angC = this._bone2Ang * (1.0 - this.slerpAmount) + angC * this.slerpAmount;
this._bone1.setRotationQuaternion(this._bone1Quat, Space.WORLD, this.mesh);
this._slerping = true;
} else {
this._bone1.setRotationMatrix(mat1, Space.WORLD, this.mesh);
this._bone1Mat.copyFrom(mat1);
this._slerping = false;
}
this._updateLinkedTransformRotation(this._bone1);
}
this._bone2.setAxisAngle(this._bendAxis, angC, Space.LOCAL);
this._updateLinkedTransformRotation(this._bone2);
this._bone2Ang = angC;
}
private _updateLinkedTransformRotation(bone: Bone): void {
if (bone._linkedTransformNode) {
if (!bone._linkedTransformNode.rotationQuaternion) {
bone._linkedTransformNode.rotationQuaternion = new Quaternion();
}
bone.getRotationQuaternionToRef(Space.LOCAL, null, bone._linkedTransformNode.rotationQuaternion);
}
}
}<|fim▁end|>
| |
<|file_name|>closure-bounds-subtype.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn take_any(_: ||) {
}
fn take_const_owned(_: ||:Sync+Send) {
}
<|fim▁hole|>fn give_owned(f: ||:Send) {
take_any(f);
take_const_owned(f); //~ ERROR expected bounds `Send+Sync`, found bounds `Send`
}
fn main() {}<|fim▁end|>
|
fn give_any(f: ||) {
take_any(f);
}
|
<|file_name|>AuthUtils.java<|end_file_name|><|fim▁begin|>package se.leiflandia.lroi.utils;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.content.Context;
import android.content.SharedPreferences;
import android.text.TextUtils;
import se.leiflandia.lroi.auth.model.AccessToken;
import se.leiflandia.lroi.auth.model.UserCredentials;
public class AuthUtils {
private static final String PREF_ACTIVE_ACCOUNT = "active_account";
private static final String PREFS_NAME = "se.leiflandia.lroi.prefs";
public static void removeActiveAccount(Context context, String accountType) {
Account account = getActiveAccount(context, accountType);
if (account != null) {
AccountManager.get(context).removeAccount(account, null, null);
}
setActiveAccountName(context, null);
}
public static Account getActiveAccount(final Context context, final String accountType) {
Account[] accounts = AccountManager.get(context).getAccountsByType(accountType);
return getActiveAccount(accounts, getActiveAccountName(context));
}
public static boolean hasActiveAccount(final Context context, final String accountType) {
return getActiveAccount(context, accountType) != null;
}
<|fim▁hole|> }
public static void setActiveAccountName(final Context context, final String name) {
getSharedPreferences(context).edit()
.putString(PREF_ACTIVE_ACCOUNT, name)
.commit();
}
private static Account getActiveAccount(final Account[] accounts, final String activeAccountName) {
for (Account account : accounts) {
if (TextUtils.equals(account.name, activeAccountName)) {
return account;
}
}
return null;
}
private static SharedPreferences getSharedPreferences(final Context context) {
return context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE);
}
/**
* Saves an authorized account in account manager and set as active account.
*/
public static void setAuthorizedAccount(Context context, UserCredentials credentials, AccessToken token, String authtokenType, String accountType) {
final AccountManager accountManager = AccountManager.get(context);
Account account = findOrCreateAccount(accountManager, credentials.getUsername(), token.getRefreshToken(), accountType);
accountManager.setAuthToken(account, authtokenType, token.getAccessToken());
setActiveAccountName(context, account.name);
}
/**
* Sets password of account, creates a new account if necessary.
*/
private static Account findOrCreateAccount(AccountManager accountManager, String username, String refreshToken, String accountType) {
for (Account account : accountManager.getAccountsByType(accountType)) {
if (account.name.equals(username)) {
accountManager.setPassword(account, refreshToken);
return account;
}
}
Account account = new Account(username, accountType);
accountManager.addAccountExplicitly(account, refreshToken, null);
return account;
}
}<|fim▁end|>
|
private static String getActiveAccountName(final Context context) {
return getSharedPreferences(context)
.getString(PREF_ACTIVE_ACCOUNT, null);
|
<|file_name|>matrix_solve_ls_op_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_solve."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
def BatchMatMul(a, b):
# A numpy implementation of tf.batch_matmul().
if a.ndim < 3:
return np.dot(a, b)
# Get the number of matrices.
n = np.prod(a.shape[:-2])
assert n == np.prod(b.shape[:-2])
a_flat = np.reshape(a, tuple([n]) + a.shape[-2:])
b_flat = np.reshape(b, tuple([n]) + b.shape[-2:])
c_flat_shape = [n, a.shape[-2], b.shape[-1]]
c_flat = np.empty(c_flat_shape)
for i in range(n):
c_flat[i, :, :] = np.dot(a_flat[i, :, :], b_flat[i, :, :])
return np.reshape(c_flat, a.shape[:-1] + b_flat.shape[-1:])
def BatchRegularizedLeastSquares(matrices, rhss, l2_regularization=0.0):
# A numpy implementation of regularized least squares solver using
# the normal equations.
matrix_dims = matrices.shape
matrices_transposed = np.swapaxes(matrices, -2, -1)
rows = matrix_dims[-2]
cols = matrix_dims[-1]
if rows >= cols:
preconditioner = l2_regularization * np.identity(cols)
gramian = BatchMatMul(matrices_transposed, matrices) + preconditioner
inverse = np.linalg.inv(gramian)
left_pseudo_inverse = BatchMatMul(inverse, matrices_transposed)
return BatchMatMul(left_pseudo_inverse, rhss)
else:
preconditioner = l2_regularization * np.identity(rows)
gramian = BatchMatMul(matrices, matrices_transposed) + preconditioner
inverse = np.linalg.inv(gramian)
right_pseudo_inverse = BatchMatMul(matrices_transposed, inverse)
return BatchMatMul(right_pseudo_inverse, rhss)
class MatrixSolveLsOpTest(tf.test.TestCase):
def _verifySolve(self, x, y):
for np_type in [np.float32, np.float64]:
a = x.astype(np_type)
b = y.astype(np_type)
np_ans, _, _, _ = np.linalg.lstsq(a, b)
for fast in [True, False]:
with self.test_session():
tf_ans = tf.matrix_solve_ls(a, b, fast=fast).eval()
self.assertEqual(np_ans.shape, tf_ans.shape)
# Check residual norm.
tf_r = b - BatchMatMul(a, tf_ans)
tf_r_norm = np.sum(tf_r * tf_r)
np_r = b - BatchMatMul(a, np_ans)
np_r_norm = np.sum(np_r * np_r)
self.assertAllClose(np_r_norm, tf_r_norm)
# Check solution.
if fast or a.shape[0] >= a.shape[1]:
# We skip this test for the underdetermined case when using the
# slow path, because Eigen does not return a minimum norm solution.
# TODO(rmlarsen): Enable this check for all paths if/when we fix
# Eigen's solver.
self.assertAllClose(np_ans, tf_ans, atol=1e-5, rtol=1e-5)
def _verifySolveBatch(self, x, y):
# Since numpy.linalg.lsqr does not support batch solves, as opposed
# to numpy.linalg.solve, we just perform this test for a fixed batch size
# of 2x3.
for np_type in [np.float32, np.float64]:
a = np.tile(x.astype(np_type), [2, 3, 1, 1])
b = np.tile(y.astype(np_type), [2, 3, 1, 1])
np_ans = np.empty([2, 3, a.shape[-1], b.shape[-1]])
for dim1 in range(2):
for dim2 in range(3):
np_ans[dim1, dim2, :, :], _, _, _ = np.linalg.lstsq(
a[dim1, dim2, :, :], b[dim1, dim2, :, :])
for fast in [True, False]:
with self.test_session():
tf_ans = tf.batch_matrix_solve_ls(a, b, fast=fast).eval()
self.assertEqual(np_ans.shape, tf_ans.shape)
# Check residual norm.
tf_r = b - BatchMatMul(a, tf_ans)
tf_r_norm = np.sum(tf_r * tf_r)
np_r = b - BatchMatMul(a, np_ans)
np_r_norm = np.sum(np_r * np_r)
self.assertAllClose(np_r_norm, tf_r_norm)
# Check solution.
if fast or a.shape[-2] >= a.shape[-1]:
# We skip this test for the underdetermined case when using the
# slow path, because Eigen does not return a minimum norm solution.
# TODO(rmlarsen): Enable this check for all paths if/when we fix
# Eigen's solver.
self.assertAllClose(np_ans, tf_ans, atol=1e-5, rtol=1e-5)
def _verifyRegularized(self, x, y, l2_regularizer):
for np_type in [np.float32, np.float64]:
# Test with a single matrix.
a = x.astype(np_type)
b = y.astype(np_type)
np_ans = BatchRegularizedLeastSquares(a, b, l2_regularizer)
with self.test_session():
tf_ans = tf.matrix_solve_ls(a,
b,
l2_regularizer=l2_regularizer,
fast=True).eval()
self.assertAllClose(np_ans, tf_ans, atol=1e-5, rtol=1e-5)
# Test with a 2x3 batch of matrices.
a = np.tile(x.astype(np_type), [2, 3, 1, 1])
b = np.tile(y.astype(np_type), [2, 3, 1, 1])
np_ans = BatchRegularizedLeastSquares(a, b, l2_regularizer)
with self.test_session():
tf_ans = tf.batch_matrix_solve_ls(a,
b,
l2_regularizer=l2_regularizer,
fast=True).eval()
self.assertAllClose(np_ans, tf_ans, atol=1e-5, rtol=1e-5)
def testSquare(self):
# 2x2 matrices, 2x3 right-hand sides.
matrix = np.array([[1., 2.], [3., 4.]])
rhs = np.array([[1., 0., 1.], [0., 1., 1.]])
self._verifySolve(matrix, rhs)
self._verifySolveBatch(matrix, rhs)
self._verifyRegularized(matrix, rhs, l2_regularizer=0.1)
def testOverdetermined(self):
# 2x2 matrices, 2x3 right-hand sides.
matrix = np.array([[1., 2.], [3., 4.], [5., 6.]])
rhs = np.array([[1., 0., 1.], [0., 1., 1.], [1., 1., 0.]])
self._verifySolve(matrix, rhs)
self._verifySolveBatch(matrix, rhs)
self._verifyRegularized(matrix, rhs, l2_regularizer=0.1)
def testUnderdetermined(self):
# 2x2 matrices, 2x3 right-hand sides.
matrix = np.array([[1., 2., 3], [4., 5., 6.]])
rhs = np.array([[1., 0., 1.], [0., 1., 1.]])
self._verifySolve(matrix, rhs)
self._verifySolveBatch(matrix, rhs)
self._verifyRegularized(matrix, rhs, l2_regularizer=0.1)
def testWrongDimensions(self):
# The matrix and right-hand sides should have the same number of rows.
with self.test_session():
matrix = tf.constant([[1., 0.], [0., 1.]])
rhs = tf.constant([[1., 0.]])
with self.assertRaises(ValueError):
tf.matrix_solve_ls(matrix, rhs)
with self.assertRaises(ValueError):
tf.batch_matrix_solve_ls(matrix, rhs)
def testEmpty(self):
full = np.array([[1., 2.], [3., 4.], [5., 6.]])
empty0 = np.empty([3, 0])
empty1 = np.empty([0, 2])
for fast in [True, False]:
with self.test_session():
tf_ans = tf.matrix_solve_ls(empty0, empty0, fast=fast).eval()
self.assertEqual(tf_ans.shape, (0, 0))
tf_ans = tf.matrix_solve_ls(empty0, full, fast=fast).eval()
self.assertEqual(tf_ans.shape, (0, 2))
tf_ans = tf.matrix_solve_ls(full, empty0, fast=fast).eval()
self.assertEqual(tf_ans.shape, (2, 0))
tf_ans = tf.matrix_solve_ls(empty1, empty1, fast=fast).eval()
self.assertEqual(tf_ans.shape, (2, 2))
def testBatchResultSize(self):
# 3x3x3 matrices, 3x3x1 right-hand sides.
matrix = np.array([1., 2., 3., 4., 5., 6., 7., 8., 9.] * 3).reshape(3, 3, 3)
rhs = np.array([1., 2., 3.] * 3).reshape(3, 3, 1)
answer = tf.batch_matrix_solve(matrix, rhs)<|fim▁hole|> self.assertEqual(ls_answer.get_shape(), [3, 3, 1])
self.assertEqual(answer.get_shape(), [3, 3, 1])
if __name__ == "__main__":
tf.test.main()<|fim▁end|>
|
ls_answer = tf.batch_matrix_solve_ls(matrix, rhs)
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>mod the_if;
mod highest_and_lowest;<|fim▁hole|><|fim▁end|>
|
mod moves_in_squared_strings_1;
|
<|file_name|>robotics.py<|end_file_name|><|fim▁begin|>"""
Classes for using robotic or other hardware using Topographica.
This module contains several classes for constructing robotics
interfaces to Topographica simulations. It includes modules that read
input from or send output to robot devices, and a (quasi) real-time
simulation object that attempts to maintain a correspondence between
simulation time and real time.
This module requires the PlayerStage robot interface system (from
playerstage.sourceforge.net), and the playerrobot module for
high-level communications with Player robots.
"""
import Image
import ImageOps
from math import pi,cos,sin
import param
from topo.base.simulation import EventProcessor
from imagen.image import GenericImage
from playerrobot import CameraDevice, PTZDevice
class CameraImage(GenericImage):
"""
An image pattern generator that gets its image from a Player
camera device.
"""
camera = param.ClassSelector(CameraDevice,default=None,doc="""
An instance of playerrobot.CameraDevice to be used
to generate images.""")
def __init__(self,**params):
super(CameraImage,self).__init__(**params)
self._image = None
def _get_image(self,params):
self._decode_image(*self.camera.image)
return True
def _decode_image(self,fmt,w,h,bpp,fdiv,data):
if fmt==1:
self._image = Image.new('L',(w,h))
self._image.fromstring(data,'raw')
else:
# JPALERT: if not grayscale, then assume color. This
# should be expanded for other modes.
rgb_im = Image.new('RGB',(w,h))
rgb_im.fromstring(data,'raw')
self._image = ImageOps.grayscale(rgb_im)
class CameraImageQueued(CameraImage):
"""
A version of CameraImage that gets the image from the camera's image queue,
rather than directly from the camera object. Using queues is
necessary when running the playerrobot in a separate process
without shared memory. When getting an image, this pattern
generator will fetch every image in the image queue and use the
most recent as the current pattern.
"""
def _get_image(self,params):<|fim▁hole|> im_spec = None
if self._image is None:
# if we don't have an image then block until we get one
im_spec = self.camera.image_queue.get()
self.camera.image_queue.task_done()
# Make sure we clear the image queue and get the most recent image.
while not self.camera.image_queue.empty():
im_spec = self.camera.image_queue.get_nowait()
self.camera.image_queue.task_done()
if im_spec:
# If we got a new image from the queue, then
# construct a PIL image from it.
self._decode_image(*im_spec)
return True
else:
return False
class PTZ(EventProcessor):
"""
Pan/Tilt/Zoom control.
This event processor takes input events on its 'Saccade' input
port in the form of (amplitude,direction) saccade commands (as
produced by the topo.sheet.saccade.SaccadeController class) and
appropriately servoes the attached PTZ object. There is not
currently any dynamic zoom control, though the static zoom level
can be set as a parameter.
"""
ptz = param.ClassSelector(PTZDevice,default=None,doc="""
An instance of playerrobot.PTZDevice to be controlled.""")
zoom = param.Number(default=120,bounds=(0,None),doc="""
Desired FOV width in degrees.""")
speed = param.Number(default=200,bounds=(0,None),doc="""
Desired max pan/tilt speed in deg/sec.""")
invert_amplitude = param.Boolean(default=False,doc="""
Invert the sense of the amplitude signal, in order to get the
appropriate ipsi-/contralateral sense of saccades.""")
dest_ports = ["Saccade"]
src_ports = ["State"]
def start(self):
pass
def input_event(self,conn,data):
if conn.dest_port == "Saccade":
# the data should be (amplitude,direction)
amplitude,direction = data
self.shift(amplitude,direction)
def shift(self,amplitude,direction):
self.debug("Executing shift, amplitude=%.2f, direction=%.2f"%(amplitude,direction))
if self.invert_amplitude:
amplitude *= -1
# if the amplitude is negative, invert the direction, so up is still up.
if amplitude < 0:
direction *= -1
angle = direction * pi/180
pan,tilt,zoom = self.ptz.state_deg
pan += amplitude * cos(angle)
tilt += amplitude * sin(angle)
self.ptz.set_ws_deg(pan,tilt,self.zoom,self.speed,self.speed)
## self.ptz.cmd_queue.put_nowait(('set_ws_deg',
## (pan,tilt,self.zoom,self.speed,self.speed)))<|fim▁end|>
| |
<|file_name|>EnumConstant.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2007 the original author or authors.
*<|fim▁hole|> * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jsefa.common.converter;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
* Enum constant annotation.
*
* @author Norman Lahme-Huetig
*
*/
@Retention(RUNTIME)
@Target({FIELD})
public @interface EnumConstant {
/**
* The display name of the enum constant.
*/
String value();
}<|fim▁end|>
| |
<|file_name|>_observabletimertimespan.js<|end_file_name|><|fim▁begin|>/* */
"format cjs";
function observableTimerTimeSpan(dueTime, scheduler) {<|fim▁hole|> return new AnonymousObservable(function (observer) {
return scheduler.scheduleWithRelative(normalizeTime(dueTime), function () {
observer.onNext(0);
observer.onCompleted();
});
});
}<|fim▁end|>
| |
<|file_name|>private.rs<|end_file_name|><|fim▁begin|>// This file was borrowed from https://github.com/nikomatsakis/rayon/blob/master/src/private.rs
// and is subject to the license and copyright from that project.
//! The public parts of this private module are used to create traits
//! that cannot be implemented outside of our own crate. This way we
//! can feel free to extend those traits without worrying about it
//! being a breaking change for other implementations.
/// If this type is pub but not publicly reachable, third parties<|fim▁hole|>macro_rules! private_decl {
() => {
/// This trait is private; this method exists to make it
/// impossible to implement outside the crate.
#[doc(hidden)]
fn __rayon_private__(&self) -> ::private::PrivateMarker;
}
}
macro_rules! private_impl {
() => {
fn __rayon_private__(&self) -> ::private::PrivateMarker {
::private::PrivateMarker
}
}
}<|fim▁end|>
|
/// can't name it and can't implement traits using it.
pub struct PrivateMarker;
|
<|file_name|>services.module.js<|end_file_name|><|fim▁begin|>(function() {<|fim▁hole|><|fim▁end|>
|
'use strict';
angular.module('pteroWorkflowClient.services', []);
})();
|
<|file_name|>truffle.js<|end_file_name|><|fim▁begin|>var HDWalletProvider = require("truffle-hdwallet-provider");
var mnemonic = "candy maple cake sugar pudding cream honey rich smooth crumble sweet treat";
module.exports = {
networks: {
development: {
provider: function () {
return new HDWalletProvider(mnemonic, "http://127.0.0.1:7545/", 0, 50);
},
network_id: "*",
},<|fim▁hole|> version: "^0.5.2",
},
},
};<|fim▁end|>
|
},
compilers: {
solc: {
|
<|file_name|>StoreQueryIntegrationTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreType;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.getStore;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.startApplicationAndWaitUntilRunning;
import static org.apache.kafka.streams.state.QueryableStoreTypes.keyValueStore;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for interactive queries against active and standby
 * state stores via {@code KafkaStreams#store} / {@code StoreQueryParameters}.
 */
@Category({IntegrationTest.class})
public class StoreQueryIntegrationTest {
    private static final Logger LOG = LoggerFactory.getLogger(StoreQueryIntegrationTest.class);
    private static final int NUM_BROKERS = 1;
    // Monotonically increasing port counter; streamsConfiguration() assigns
    // "localhost:" + (++port). Each test creates exactly two configs, so the
    // first instance always lands on an odd port — tests identify the
    // instances by port parity.
    private static int port = 0;
    private static final String INPUT_TOPIC_NAME = "input-topic";
    private static final String TABLE_NAME = "source-table";
    public final EmbeddedKafkaCluster cluster = new EmbeddedKafkaCluster(NUM_BROKERS);
    @Rule
    public TestName testName = new TestName();
    // Every KafkaStreams instance created via createKafkaStreams() is
    // registered here and closed in after().
    private final List<KafkaStreams> streamsToCleanup = new ArrayList<>();
    private final MockTime mockTime = cluster.time;
    @Before
    public void before() throws InterruptedException, IOException {
        // Boot the embedded broker and create the input topic with two
        // partitions (replication factor 1) so active and standby tasks can
        // be spread across the two Streams instances each test starts.
        cluster.start();
        cluster.createTopic(INPUT_TOPIC_NAME, 2, 1);
    }
@After
public void after() {
for (final KafkaStreams kafkaStreams : streamsToCleanup) {
kafkaStreams.close();
}
cluster.stop();
}
    @Test
    public void shouldQueryOnlyActivePartitionStoresByDefault() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);
        final StreamsBuilder builder = new StreamsBuilder();
        // Materialize the input topic as a key-value store; each processed
        // record releases the semaphore so the test can await processing.
        builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
                Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                        .withCachingDisabled())
                .toStream()
                .peek((k, v) -> semaphore.release());
        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
        produceValueRange(key, 0, batch1NumMessages);
        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
        until(() -> {
            // The partitioner lambda pins the key to partition 0.
            final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);
            final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(TABLE_NAME, kafkaStreams1, queryableStoreType);
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(TABLE_NAME, kafkaStreams2, queryableStoreType);
            // streamsConfiguration() assigns sequential ports and each test
            // creates two instances, so an odd active-host port identifies
            // kafkaStreams1 as the instance hosting the active task.
            final boolean kafkaStreams1IsActive = (keyQueryMetadata.activeHost().port() % 2) == 1;
            try {
                // By default only the instance hosting the active task serves
                // the key; the other instance must return null for it.
                if (kafkaStreams1IsActive) {
                    assertThat(store1.get(key), is(notNullValue()));
                    assertThat(store2.get(key), is(nullValue()));
                } else {
                    assertThat(store1.get(key), is(nullValue()));
                    assertThat(store2.get(key), is(notNullValue()));
                }
                return true;
            } catch (final InvalidStateStoreException exception) {
                // A rebalance may still be in flight; retry until RUNNING.
                assertThat(
                        exception.getMessage(),
                        containsString("Cannot get state store source-table because the stream thread is PARTITIONS_ASSIGNED, not RUNNING")
                );
                LOG.info("Streams wasn't running. Will try again.");
                return false;
            }
        });
    }
@Test
public void shouldQuerySpecificActivePartitionStores() throws Exception {
final int batch1NumMessages = 100;
final int key = 1;
final Semaphore semaphore = new Semaphore(0);
final StreamsBuilder builder = new StreamsBuilder();
builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
.withCachingDisabled())
.toStream()
.peek((k, v) -> semaphore.release());
final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
produceValueRange(key, 0, batch1NumMessages);
// Assert that all messages in the first batch were processed in a timely manner
assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
until(() -> {
final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);
//key belongs to this partition
final int keyPartition = keyQueryMetadata.partition();
//key doesn't belongs to this partition
final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
final boolean kafkaStreams1IsActive = (keyQueryMetadata.activeHost().port() % 2) == 1;
final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> storeQueryParam =
StoreQueryParameters.<ReadOnlyKeyValueStore<Integer, Integer>>fromNameAndType(TABLE_NAME, keyValueStore())
.withPartition(keyPartition);
ReadOnlyKeyValueStore<Integer, Integer> store1 = null;
ReadOnlyKeyValueStore<Integer, Integer> store2 = null;
if (kafkaStreams1IsActive) {
store1 = getStore(kafkaStreams1, storeQueryParam);
} else {
store2 = getStore(kafkaStreams2, storeQueryParam);
}
if (kafkaStreams1IsActive) {
assertThat(store1, is(notNullValue()));
assertThat(store2, is(nullValue()));
} else {
assertThat(store2, is(notNullValue()));
assertThat(store1, is(nullValue()));
}
final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> storeQueryParam2 =
StoreQueryParameters.<ReadOnlyKeyValueStore<Integer, Integer>>fromNameAndType(TABLE_NAME, keyValueStore())
.withPartition(keyDontBelongPartition);
try {
// Assert that key is not served when wrong specific partition is requested<|fim▁hole|> assertThat(store1.get(key), is(notNullValue()));
assertThat(getStore(kafkaStreams2, storeQueryParam2).get(key), is(nullValue()));
final InvalidStateStoreException exception =
assertThrows(InvalidStateStoreException.class, () -> getStore(kafkaStreams1, storeQueryParam2).get(key));
assertThat(
exception.getMessage(),
containsString("The specified partition 1 for store source-table does not exist.")
);
} else {
assertThat(store2.get(key), is(notNullValue()));
assertThat(getStore(kafkaStreams1, storeQueryParam2).get(key), is(nullValue()));
final InvalidStateStoreException exception =
assertThrows(InvalidStateStoreException.class, () -> getStore(kafkaStreams2, storeQueryParam2).get(key));
assertThat(
exception.getMessage(),
containsString("The specified partition 1 for store source-table does not exist.")
);
}
return true;
} catch (final InvalidStateStoreException exception) {
assertThat(
exception.getMessage(),
containsString("Cannot get state store source-table because the stream thread is PARTITIONS_ASSIGNED, not RUNNING")
);
LOG.info("Streams wasn't running. Will try again.");
return false;
}
});
}
@Test
public void shouldQueryAllStalePartitionStores() throws Exception {
final int batch1NumMessages = 100;
final int key = 1;
final Semaphore semaphore = new Semaphore(0);
final StreamsBuilder builder = new StreamsBuilder();
builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
.withCachingDisabled())
.toStream()
.peek((k, v) -> semaphore.release());
final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
produceValueRange(key, 0, batch1NumMessages);
// Assert that all messages in the first batch were processed in a timely manner
assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
// Assert that both active and standby are able to query for a key
TestUtils.waitForCondition(() -> {
final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(TABLE_NAME, kafkaStreams1, true, queryableStoreType);
return store1.get(key) != null;
}, "store1 cannot find results for key");
TestUtils.waitForCondition(() -> {
final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(TABLE_NAME, kafkaStreams2, true, queryableStoreType);
return store2.get(key) != null;
}, "store2 cannot find results for key");
}
    @Test
    public void shouldQuerySpecificStalePartitionStores() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);
        final StreamsBuilder builder = new StreamsBuilder();
        // Materialize the input topic as a key-value store; each processed
        // record releases the semaphore so the test can await processing.
        builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
                Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                        .withCachingDisabled())
                .toStream()
                .peek((k, v) -> semaphore.release());
        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
        produceValueRange(key, 0, batch1NumMessages);
        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
        // The partitioner lambda pins the key to partition 0.
        final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);
        // key belongs to this partition
        final int keyPartition = keyQueryMetadata.partition();
        // key does not belong to this partition
        final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
        final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
        // Assert that both active and standby are able to query for a key
        // when the key's own partition is requested with stale stores enabled.
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
                .fromNameAndType(TABLE_NAME, queryableStoreType)
                .enableStaleStores()
                .withPartition(keyPartition);
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
            return store1.get(key) != null;
        }, "store1 cannot find results for key");
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
            return store2.get(key) != null;
        }, "store2 cannot find results for key");
        // Query the partition the key does NOT live in: with stale stores
        // enabled the stores are returned, but they must not contain the key.
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
                .fromNameAndType(TABLE_NAME, queryableStoreType)
                .enableStaleStores()
                .withPartition(keyDontBelongPartition);
        final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
        final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
        // Assert that neither instance serves the key from the wrong partition.
        assertThat(store3.get(key), is(nullValue()));
        assertThat(store4.get(key), is(nullValue()));
    }
    @Test
    public void shouldQuerySpecificStalePartitionStoresMultiStreamThreads() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);
        final int numStreamThreads = 2;
        final StreamsBuilder builder = new StreamsBuilder();
        // Materialize the input topic as a key-value store; each processed
        // record releases the semaphore so the test can await processing.
        builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
                Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                        .withCachingDisabled())
                .toStream()
                .peek((k, v) -> semaphore.release());
        // Same scenario as shouldQuerySpecificStalePartitionStores, but each
        // instance runs multiple stream threads.
        final Properties streamsConfiguration1 = streamsConfiguration();
        streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
        final Properties streamsConfiguration2 = streamsConfiguration();
        streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration1);
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration2);
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
        // Sanity check: each instance really has more than one stream thread.
        assertTrue(kafkaStreams1.localThreadsMetadata().size() > 1);
        assertTrue(kafkaStreams2.localThreadsMetadata().size() > 1);
        produceValueRange(key, 0, batch1NumMessages);
        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
        // Unlike the other tests, this one locates the key via its real
        // serializer (default partitioning) instead of pinning partition 0.
        final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer());
        // key belongs to this partition
        final int keyPartition = keyQueryMetadata.partition();
        // key does not belong to this partition
        final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
        final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
        // Assert that both active and standby are able to query for a key
        // when the key's own partition is requested with stale stores enabled.
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
                .fromNameAndType(TABLE_NAME, queryableStoreType)
                .enableStaleStores()
                .withPartition(keyPartition);
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
            return store1.get(key) != null;
        }, "store1 cannot find results for key");
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
            return store2.get(key) != null;
        }, "store2 cannot find results for key");
        // Query the partition the key does NOT live in: with stale stores
        // enabled the stores are returned, but they must not contain the key.
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
                .fromNameAndType(TABLE_NAME, queryableStoreType)
                .enableStaleStores()
                .withPartition(keyDontBelongPartition);
        final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
        final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
        // Assert that neither instance serves the key from the wrong partition.
        assertThat(store3.get(key), is(nullValue()));
        assertThat(store4.get(key), is(nullValue()));
    }
private static void until(final TestCondition condition) {
boolean success = false;
final long deadline = System.currentTimeMillis() + IntegrationTestUtils.DEFAULT_TIMEOUT;
while (!success && System.currentTimeMillis() < deadline) {
try {
success = condition.conditionMet();
Thread.sleep(500L);
} catch (final RuntimeException e) {
throw e;
} catch (final Exception e) {
throw new RuntimeException(e);
}
}
}
    /**
     * Builds a KafkaStreams instance from the given topology and config and
     * registers it in {@code streamsToCleanup} so {@link #after()} closes it.
     */
    private KafkaStreams createKafkaStreams(final StreamsBuilder builder, final Properties config) {
        final KafkaStreams streams = new KafkaStreams(builder.build(config), config);
        streamsToCleanup.add(streams);
        return streams;
    }
private void produceValueRange(final int key, final int start, final int endExclusive) {
final Properties producerProps = new Properties();
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
IntegrationTestUtils.produceKeyValuesSynchronously(
INPUT_TOPIC_NAME,
IntStream.range(start, endExclusive)
.mapToObj(i -> KeyValue.pair(key, i))
.collect(Collectors.toList()),
producerProps,
mockTime);
}
    /**
     * Builds the per-instance Streams configuration.
     *
     * Note: the application server is "localhost:" + (++port), a static
     * counter, so successive instances get consecutive ports — the tests
     * rely on this to tell the two instances apart by port parity. One
     * standby replica is configured so standby stores exist to query.
     */
    private Properties streamsConfiguration() {
        final String safeTestName = safeUniqueTestName(getClass(), testName);
        final Properties config = new Properties();
        config.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName);
        config.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:" + (++port));
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
        config.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
        config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        config.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
        // Short consumer intervals keep rebalances and failover fast in tests.
        config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100);
        config.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 200);
        config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 1000);
        config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
        return config;
    }
}<|fim▁end|>
|
// If kafkaStreams1 is active for keyPartition, kafkaStreams2 would be active for keyDontBelongPartition
// So, in that case, store3 would be null and the store4 would not return the value for key as wrong partition was requested
if (kafkaStreams1IsActive) {
|
<|file_name|>inboundRules-service.js<|end_file_name|><|fim▁begin|>'use strict';
// REST client for inbound rules (scm.config 1.0 API).
angular.module('myApp').factory('inboundRulesApi', function($resource) {
    var collectionUrl = '/api/scm.config/1.0/inbound_rules';
    var itemUrl = '/api/scm.config/1.0/inbound_rule/:ruleid';

    // The list endpoint wraps the rules in an envelope; unwrap `items`
    // so callers receive a plain array.
    function unwrapItems(data) {
        var wrapped = angular.fromJson(data);
        return wrapped.items;
    }

    return $resource(collectionUrl, {}, {
        'query': {
            method: 'GET',
            isArray: true,
            responseType: 'json',
            transformResponse: unwrapItems
        },
        'delete': {
            method: 'DELETE',
            url: itemUrl,
            params: { ruleid: '@ruleid' }
        },
        'update': {
            method: 'PUT',
            url: itemUrl,
            params: { ruleid: '@ruleid' }
        }
    });
});
// Session-scoped holder used to share the selected inbound rule object
// between controllers.
angular.module('myApp').service('inboundRulesSelectionSvc', function() {
    // Currently selected rule(s); starts empty.
    this.inboundRules = { };
    // Remember the caller-supplied selection object.
    this.setinboundRules = function(obj){
        this.inboundRules = obj;
    }
    // Return the previously stored selection.
    this.getinboundRules = function(){
        return this.inboundRules;
}<|fim▁hole|>});<|fim▁end|>
| |
<|file_name|>chat_notify_es_ES.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> <name>@default</name>
<message>
<source>Chat window notifications</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
<?xml version="1.0" ?><!DOCTYPE TS><TS language="es_ES" version="2.1">
<context>
|
<|file_name|>bitcoin_af_ZA.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="af_ZA" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>Aero</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The Aero developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Dubbel-klik om die adres of etiket te wysig</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Skep 'n nuwe adres</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Maak 'n kopie van die huidige adres na die stelsel klipbord</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your Aero addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Aero address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified Aero address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Verwyder</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(geen etiket)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Tik Wagwoord in</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nuwe wagwoord</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Herhaal nuwe wagwoord</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Tik die nuwe wagwoord vir die beursie in.<br/>Gebruik asseblief 'n wagwoord van <b>ten minste 10 ewekansige karakters</b>, of <b>agt (8) of meer woorde.</b></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Enkripteer beursie</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Hierdie operasie benodig 'n wagwoord om die beursie oop te sluit.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Sluit beursie oop</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Hierdie operasie benodig 'n wagwoord om die beursie oop te sluit.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dekripteer beursie</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Verander wagwoord</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Tik asseblief die ou en nuwe wagwoord vir die beursie in.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Bevestig beursie enkripsie.</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Die beursie is nou bewaak</translation>
</message>
<message>
<location line="-58"/>
<source>Aero will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Die beursie kon nie bewaak word nie</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Beursie bewaaking het misluk as gevolg van 'n interne fout. Die beursie is nie bewaak nie!</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Die wagwoorde stem nie ooreen nie</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Beursie oopsluiting het misluk</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Die wagwoord wat ingetik was om die beursie oop te sluit, was verkeerd.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Beursie dekripsie het misluk</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Sinchroniseer met die netwerk ...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Oorsig</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Wys algemene oorsig van die beursie</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transaksies</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Besoek transaksie geskiedenis</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>S&luit af</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Sluit af</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Wys inligting oor Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opsies</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a Aero address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Beursie</translation>
</message>
<message>
<location line="+180"/>
<source>&About Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Lêer</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Instellings</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Hulp</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Blad nutsbalk</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>Aero client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to Aero network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About Aero card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Aero card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid Aero address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. Aero can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Maak kopie van adres</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Kopieer bedrag</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(geen etiket)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Nuwe ontvangende adres</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nuwe stuurende adres</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Wysig ontvangende adres</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Wysig stuurende adres</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Aero address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Kon nie die beursie oopsluit nie.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>Aero-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opsies</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Aero after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Aero on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Aero client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the Aero network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Aero.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show Aero addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Aero.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Vorm</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Aero network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Beursie</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Onlangse transaksies</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Aero-Qt help message to get a list with possible Aero command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Aero - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Aero Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Aero debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the Aero RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Stuur Muntstukke</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 AERO</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Stuur aan vele ontvangers tegelyk</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Balans:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 AERO</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>S&tuur</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a Aero address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopieer bedrag</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid Aero address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(geen etiket)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Aero address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>&Teken boodskap</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Aero address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Aero address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Aero address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Aero signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Van</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Na</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>eie adres</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiket</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Krediet</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>nie aanvaar nie</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debiet</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Transaksie fooi</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Netto bedrag</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Boodskap</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>Transaksie ID</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>waar</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>onwaar</translation>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>onbekend</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipe</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Ontvang met</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Ontvang van</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Gestuur na</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Betaling aan jouself</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Gemyn</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n.v.t.)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Datum en tyd wat die transaksie ontvang was.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipe transaksie.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Alles</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Vandag</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Hierdie week</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Hierdie maand</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Verlede maand</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Hierdie jaar</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Reeks...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Ontvang met</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Gestuur na</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Aan/na jouself</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Gemyn</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Ander</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Min bedrag</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Maak kopie van adres</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopieer bedrag</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipe</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiket</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Reeks:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>tot</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>Aero version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Gebruik:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or Aerod</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Opsies:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: Aero.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: Aerod.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Onderhou op die meeste <n> konneksies na eweknieë (standaard: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Gebruik die toets netwerk</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Aero will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=Aerorpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Aero Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Hierdie help boodskap</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. Aero is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Laai adresse...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message><|fim▁hole|> <source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of Aero</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart Aero to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Ongeldige bedrag</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Onvoldoende fondse</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Laai blok indeks...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. Aero is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Laai beursie...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Klaar gelaai</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Fout</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
<message>
<location line="+2"/>
|
<|file_name|>server.js<|end_file_name|><|fim▁begin|>let express = require('express');
let config = require('./config/main');
let path = require('path');
// providing route prefixing for express Router
express.application.prefix = express.Router.prefix = function (path, configure) {
let router = express.Router();
this.use(path, router);
configure(router);
return router;
};
let app = express();
let mongodbURL;
// Get env args
let port_string = process.argv[2];
let env_value = process.argv[3];
if (typeof port_string !== "undefined" && port_string.length > 0) {
console.log("port string: " + port_string);
config.port_number = port_string;
}
// Default env value is DEV
if (typeof env_value === "undefined") {
console.log("env_value is not set , Default env is DEV");
env_value = "DEV";
}
switch (env_value) {
case "DEV":
mongodbURL = config.localDB.connection;
console.log("environment = " + mongodbURL);<|fim▁hole|> case "PROD":
mongodbURL = config.db.connection;
console.log("environment = " + mongodbURL);
break;
default:
console.log("env_value is not correct: " + env_value);
return -1;
}
// Set the public folder
app.set('public', path.join(__dirname, 'public'));
app.set("config",config);
app.set("mongoUrl", mongodbURL);
require('./boot/boot')(app);<|fim▁end|>
|
break;
|
<|file_name|>gitlab.py<|end_file_name|><|fim▁begin|>import re
import requests<|fim▁hole|>from jinja2 import Template
from twiggy import log
from bugwarrior.config import asbool, die, get_service_password
from bugwarrior.services import IssueService, Issue
class GitlabIssue(Issue):
TITLE = 'gitlabtitle'
DESCRIPTION = 'gitlabdescription'
CREATED_AT = 'gitlabcreatedon'
UPDATED_AT = 'gitlabupdatedat'
MILESTONE = 'gitlabmilestone'
URL = 'gitlaburl'
REPO = 'gitlabrepo'
TYPE = 'gitlabtype'
NUMBER = 'gitlabnumber'
STATE = 'gitlabstate'
UPVOTES = 'gitlabupvotes'
DOWNVOTES = 'gitlabdownvotes'
UDAS = {
TITLE: {
'type': 'string',
'label': 'Gitlab Title',
},
DESCRIPTION: {
'type': 'string',
'label': 'Gitlab Description',
},
CREATED_AT: {
'type': 'date',
'label': 'Gitlab Created',
},
UPDATED_AT: {
'type': 'date',
'label': 'Gitlab Updated',
},
MILESTONE: {
'type': 'string',
'label': 'Gitlab Milestone',
},
URL: {
'type': 'string',
'label': 'Gitlab URL',
},
REPO: {
'type': 'string',
'label': 'Gitlab Repo Slug',
},
TYPE: {
'type': 'string',
'label': 'Gitlab Type',
},
NUMBER: {
'type': 'numeric',
'label': 'Gitlab Issue/MR #',
},
STATE: {
'type': 'string',
'label': 'Gitlab Issue/MR State',
},
UPVOTES: {
'type': 'numeric',
'label': 'Gitlab Upvotes',
},
DOWNVOTES: {
'type': 'numeric',
'label': 'Gitlab Downvotes',
},
}
UNIQUE_KEY = (REPO, TYPE, NUMBER,)
def _normalize_label_to_tag(self, label):
return re.sub(r'[^a-zA-Z0-9]', '_', label)
def to_taskwarrior(self):
if self.extra['type'] == 'merge_request':
priority = 'H'
milestone = self.record['milestone']
created = self.record['created_at']
updated = self.record['updated_at']
state = self.record['state']
upvotes = self.record['upvotes']
downvotes = self.record['downvotes']
else:
priority = self.origin['default_priority']
milestone = self.record['milestone']
created = self.record['created_at']
updated = self.record['updated_at']
state = self.record['state']
upvotes = 0
downvotes = 0
if milestone:
milestone = milestone['title']
if created:
created = self.parse_date(created)
if updated:
updated = self.parse_date(updated)
return {
'project': self.extra['project'],
'priority': priority,
'annotations': self.extra.get('annotations', []),
'tags': self.get_tags(),
self.URL: self.extra['issue_url'],
self.REPO: self.extra['project'],
self.TYPE: self.extra['type'],
self.TITLE: self.record['title'],
self.DESCRIPTION: self.record['description'],
self.MILESTONE: milestone,
self.NUMBER: self.record['iid'],
self.CREATED_AT: created,
self.UPDATED_AT: updated,
self.STATE: state,
self.UPVOTES: upvotes,
self.DOWNVOTES: downvotes,
}
def get_tags(self):
tags = []
if not self.origin['import_labels_as_tags']:
return tags
context = self.record.copy()
label_template = Template(self.origin['label_template'])
for label in self.record.get('labels', []):
context.update({
'label': self._normalize_label_to_tag(label)
})
tags.append(
label_template.render(context)
)
return tags
def get_default_description(self):
return self.build_default_description(
title=self.record['title'],
url=self.get_processed_url(self.extra['issue_url']),
number=self.record['iid'],
cls=self.extra['type'],
)
class GitlabService(IssueService):
ISSUE_CLASS = GitlabIssue
CONFIG_PREFIX = 'gitlab'
def __init__(self, *args, **kw):
super(GitlabService, self).__init__(*args, **kw)
host = self.config_get_default(
'host', default='gitlab.com', to_type=six.text_type)
self.login = self.config_get('login')
token = self.config_get('token')
if not token or token.startswith('@oracle:'):
token = get_service_password(
self.get_keyring_service(self.config, self.target),
self.login, oracle=password,
interactive=self.config.interactive
)
self.auth = (host, token)
self.exclude_repos = []
if self.config_get_default('exclude_repos', None):
self.exclude_repos = [
item.strip() for item in
self.config_get('exclude_repos').strip().split(',')
]
self.include_repos = []
if self.config_get_default('include_repos', None):
self.include_repos = [
item.strip() for item in
self.config_get('include_repos').strip().split(',')
]
self.import_labels_as_tags = self.config_get_default(
'import_labels_as_tags', default=False, to_type=asbool
)
self.label_template = self.config_get_default(
'label_template', default='{{label}}', to_type=six.text_type
)
self.filter_merge_requests = self.config_get_default(
'filter_merge_requests', default=False, to_type=asbool
)
@classmethod
def get_keyring_service(cls, config, section):
login = config.get(section, cls._get_key('login'))
return "gitlab://%s@%s" % (login, host)
def get_service_metadata(self):
return {
'import_labels_as_tags': self.import_labels_as_tags,
'label_template': self.label_template,
}
def filter_repos(self, repo):
if self.exclude_repos:
if repo['path_with_namespace'] in self.exclude_repos:
return False
if self.include_repos:
if repo['path_with_namespace'] in self.include_repos:
return True
else:
return False
return True
def _get_notes(self, rid, issue_type, issueid):
tmpl = 'https://{host}/api/v3/projects/%d/%s/%d/notes' % (rid, issue_type, issueid)
return self._fetch_paged(tmpl)
def annotations(self, repo, url, issue_type, issue, issue_obj):
notes = self._get_notes(repo['id'], issue_type, issue['id'])
return self.build_annotations(
((
n['author']['username'],
n['body']
) for n in notes),
issue_obj.get_processed_url(url)
)
def _fetch(self, tmpl, **kwargs):
url = tmpl.format(host=self.auth[0])
headers = {'PRIVATE-TOKEN': self.auth[1]}
response = requests.get(url, headers=headers, **kwargs)
if response.status_code != 200:
raise IOError(
"Non-200 status code %r; %r; %r" %(
response.status_code, url, response.json))
if callable(response.json):
return response.json()
else:
return response.json
def _fetch_paged(self, tmpl):
params = {
'page': 1,
'per_page': 100,
}
full = []
while True:
items = self._fetch(tmpl, params=params)
full += items
if len(items) < params['per_page']:
break
params['page'] += 1
return full
def get_repo_issues(self, rid):
tmpl = 'https://{host}/api/v3/projects/%d/issues' % rid
issues = {}
for issue in self._fetch_paged(tmpl):
issues[issue['id']] = (rid, issue)
return issues
def get_repo_merge_requests(self, rid):
tmpl = 'https://{host}/api/v3/projects/%d/merge_requests' % rid
issues = {}
for issue in self._fetch_paged(tmpl):
issues[issue['id']] = (rid, issue)
return issues
def issues(self):
tmpl = 'https://{host}/api/v3/projects'
all_repos = self._fetch_paged(tmpl)
repos = filter(self.filter_repos, all_repos)
repo_map = {}
issues = {}
for repo in repos:
rid = repo['id']
repo_map[rid] = repo
issues.update(
self.get_repo_issues(rid)
)
log.name(self.target).debug(" Found {0} issues.", len(issues))
issues = filter(self.include, issues.values())
log.name(self.target).debug(" Pruned down to {0} issues.", len(issues))
for rid, issue in issues:
repo = repo_map[rid]
issue['repo'] = repo['path']
issue_obj = self.get_issue_for_record(issue)
issue_url = '%s/issues/%d' % (repo['web_url'], issue['iid'])
extra = {
'issue_url': issue_url,
'project': repo['path'],
'type': 'issue',
'annotations': self.annotations(repo, issue_url, 'issues', issue, issue_obj)
}
issue_obj.update_extra(extra)
yield issue_obj
if not self.filter_merge_requests:
merge_requests = {}
for repo in repos:
rid = repo['id']
merge_requests.update(
self.get_repo_merge_requests(rid)
)
log.name(self.target).debug(" Found {0} merge requests.", len(merge_requests))
merge_requests = filter(self.include, merge_requests.values())
log.name(self.target).debug(" Pruned down to {0} merge requests.", len(merge_requests))
for rid, issue in merge_requests:
repo = repo_map[rid]
issue['repo'] = repo['path']
issue_obj = self.get_issue_for_record(issue)
issue_url = '%s/merge_requests/%d' % (repo['web_url'], issue['iid'])
extra = {
'issue_url': issue_url,
'project': repo['path'],
'type': 'merge_request',
'annotations': self.annotations(repo, issue_url, 'merge_requests', issue, issue_obj)
}
issue_obj.update_extra(extra)
yield issue_obj
@classmethod
def validate_config(cls, config, target):
if not config.has_option(target, 'gitlab.host'):
die("[%s] has no 'gitlab.host'" % target)
if not config.has_option(target, 'gitlab.login'):
die("[%s] has no 'gitlab.login'" % target)
if not config.has_option(target, 'gitlab.token'):
die("[%s] has no 'gitlab.token'" % target)
super(GitlabService, cls).validate_config(config, target)<|fim▁end|>
|
import six
|
<|file_name|>unittest_tpl.py<|end_file_name|><|fim▁begin|># This file is part of the Enkel web programming library.
#<|fim▁hole|># as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from unittest import TestCase
from enkel.wansgli.testhelpers import unit_case_suite, run_suite
class Test(TestCase):
def suite():
return unit_case_suite(Test)
if __name__ == '__main__':
run_suite(suite())<|fim▁end|>
|
# Copyright (C) 2007 Espen Angell Kristiansen ([email protected])
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
|
<|file_name|>03. XML Messenger.js<|end_file_name|><|fim▁begin|>function solve(message) {
let tagValidator = /^<message((?:\s+[a-z]+="[A-Za-z0-9 .]+"\s*?)*)>((?:.|\n)+?)<\/message>$/;
let tokens = tagValidator.exec(message);
if (!tokens) {
console.log("Invalid message format");
return;
}
let [match, attributes, body] = tokens;
let attributeValidator = /\s+([a-z]+)="([A-Za-z0-9 .]+)"\s*?/g;
let sender = '';
let recipient = '';
let attributeTokens = attributeValidator.exec(attributes);
while (attributeTokens) {
if (attributeTokens[1] === 'to') {
recipient = attributeTokens[2];
} else if (attributeTokens[1] === 'from') {
sender = attributeTokens[2];
}
attributeTokens = attributeValidator.exec(attributes);
}
if (sender === '' || recipient === '') {
console.log("Missing attributes");
return;
}
body = body.replace(/\n/g, '</p>\n <p>');
let html = `<article>\n <div>From: <span class="sender">${sender}</span></div>\n`;
html += ` <div>To: <span class="recipient">${recipient}</span></div>\n`;
html += ` <div>\n <p>${body}</p>\n </div>\n</article>`;
console.log(html);
}
solve(`<message from="John Doe" to="Alice">Not much, just chillin. How about you?</message>`);
/*
solve( `<message to="Bob" from="Alice" timestamp="1497254092">Hey man, what's up?</message>`,<|fim▁hole|>);
*/<|fim▁end|>
|
`<message from="Ivan Ivanov" to="Grace">Not much, just chillin. How about you?</message>`
|
<|file_name|>Login.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import { connect } from 'react-redux';
import { emailChanged, passwordChanged, signinUser } from '../core/actions';
import { Container, Content, Form, Item, Input, Grid, Col, Spinner, Toast, Icon, Button, Text } from 'native-base';
class Login extends Component {
static navigationOptions = {
title: 'Login',
header: {
backTitle: null,
visible: false,
}
};
onLogin() {
const { email, password, navigation } = this.props;
this.props.signinUser(navigation, email, password);
}
onRegister() {
this.props.navigation.navigate('Register');
}
componentWillMount() {
}
onEmailChange(text) {
this.props.emailChanged(text);
}<|fim▁hole|> onPasswordChange(text) {
this.props.passwordChanged(text);
}
renderSpinner() {
if (this.props.loading) {
return (<Spinner />);
} else {
return (
<Grid>
<Col>
<Button full rounded onPress={this.onLogin.bind(this)}>
<Text>Login</Text>
</Button>
</Col>
<Col>
<Button full rounded onPress={this.onRegister.bind(this)}>
<Text>Register</Text>
</Button>
</Col>
</Grid>
);
}
}
render() {
const { navigate } = this.props.navigation;
return (
<Container>
<Content>
<Form>
<Item rounded>
<Input placeholder="E-Mail" value={this.props.email} keyboardType='email-address'
onChangeText={this.onEmailChange.bind(this)} />
</Item>
<Item rounded>
<Input placeholder="Password" value={this.props.password} secureTextEntry
onChangeText={this.onPasswordChange.bind(this)} />
</Item>
<Item>
<Text style={styles.errorTextStyle}>
{this.props.loginError}
</Text>
</Item>
<Item last>
{this.renderSpinner()}
</Item>
</Form>
</Content>
</Container>
);
}
}
const styles = {
errorTextStyle: {
fontSize: 20,
alignSelf: 'center',
color: 'red'
},
imageStyle: {
width: 50,
height: 50,
}
};
const mapStateToProps = ({ authReducer }) => {
const { email, password, loginError, loading } = authReducer;
return { email, password, loginError, loading };
};
export default connect(mapStateToProps, {
emailChanged, passwordChanged, signinUser
})(Login);<|fim▁end|>
| |
<|file_name|>check.rs<|end_file_name|><|fim▁begin|>//! Check logic for instructions
use utils::bigint::M256;
use utils::gas::Gas;
use vm::{Memory, Instruction, PATCH_TEST};
use vm::errors::{MachineError, EvalError};
use vm::eval::{State, ControlCheck};
use super::utils::{check_range, check_memory_write_range};
const CALLSTACK_LIMIT_DEFAULT: usize = 1024;
const CALLSTACK_LIMIT_TEST: usize = 2;
fn check_callstack_overflow<M: Memory>(state: &State<M>) -> Result<(), MachineError> {
if state.depth >= (if state.patch.contains(PATCH_TEST) { CALLSTACK_LIMIT_TEST }
else { CALLSTACK_LIMIT_DEFAULT }) {
return Err(MachineError::CallstackOverflow);
} else {
return Ok(());
}
}
pub fn extra_check_opcode<M: Memory + Default>(instruction: Instruction, state: &State<M>, stipend_gas: Gas, after_gas: Gas) -> Result<(), EvalError> {
match instruction {
Instruction::CALL => {
if after_gas - stipend_gas < state.stack.peek(0).unwrap().into() {<|fim▁hole|> Err(EvalError::Machine(MachineError::EmptyGas))
} else {
Ok(())
}
},
_ => Ok(())
}
}
#[allow(unused_variables)]
/// Check whether `run_opcode` would fail without mutating any of the
/// machine state.
pub fn check_opcode<M: Memory + Default>(instruction: Instruction, state: &State<M>) -> Result<Option<ControlCheck>, EvalError> {
match instruction {
Instruction::STOP => Ok(None),
Instruction::ADD => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::MUL => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SUB => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::DIV => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SDIV => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::MOD => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SMOD => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::ADDMOD => { state.stack.check_pop_push(3, 1)?; Ok(None) },
Instruction::MULMOD => { state.stack.check_pop_push(3, 1)?; Ok(None) },
Instruction::EXP => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SIGNEXTEND => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::LT => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::GT => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SLT => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SGT => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::EQ => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::ISZERO => { state.stack.check_pop_push(1, 1)?; Ok(None) },
Instruction::AND => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::OR => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::XOR => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::NOT => { state.stack.check_pop_push(1, 1)?; Ok(None) },
Instruction::BYTE => { state.stack.check_pop_push(2, 1)?; Ok(None) },
Instruction::SHA3 => {
state.stack.check_pop_push(2, 1)?;
check_range(state.stack.peek(0).unwrap(), state.stack.peek(1).unwrap())?;
Ok(None)
},
Instruction::ADDRESS => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::BALANCE => {
state.stack.check_pop_push(1, 1)?;
state.account_state.require(state.stack.peek(0).unwrap().into())?;
Ok(None)
},
Instruction::ORIGIN => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::CALLER => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::CALLVALUE => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::CALLDATALOAD => { state.stack.check_pop_push(1, 1)?; Ok(None) },
Instruction::CALLDATASIZE => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::CALLDATACOPY => {
state.stack.check_pop_push(3, 0)?;
check_memory_write_range(&state.memory,
state.stack.peek(0).unwrap(), state.stack.peek(2).unwrap())?;
Ok(None)
},
Instruction::CODESIZE => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::CODECOPY => {
state.stack.check_pop_push(3, 0)?;
check_memory_write_range(&state.memory,
state.stack.peek(0).unwrap(), state.stack.peek(2).unwrap())?;
Ok(None)
},
Instruction::GASPRICE => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::EXTCODESIZE => {
state.stack.check_pop_push(1, 1)?;
state.account_state.require_code(state.stack.peek(0).unwrap().into())?;
Ok(None)
},
Instruction::EXTCODECOPY => {
state.stack.check_pop_push(4, 0)?;
state.account_state.require_code(state.stack.peek(0).unwrap().into())?;
check_memory_write_range(&state.memory,
state.stack.peek(1).unwrap(), state.stack.peek(3).unwrap())?;
Ok(None)
},
Instruction::BLOCKHASH => {
state.stack.check_pop_push(1, 1)?;
let current_number = state.block.number;
let number = state.stack.peek(0).unwrap();
if !(number >= current_number || current_number - number > M256::from(256u64)) {
state.blockhash_state.get(number)?;
}
Ok(None)
},
Instruction::COINBASE => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::TIMESTAMP => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::NUMBER => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::DIFFICULTY => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::GASLIMIT => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::POP => { state.stack.check_pop_push(1, 0)?; Ok(None) },
Instruction::MLOAD => { state.stack.check_pop_push(1, 1)?; Ok(None) },
Instruction::MSTORE => {
state.stack.check_pop_push(2, 0)?;
state.memory.check_write(state.stack.peek(0).unwrap())?;
Ok(None)
},
Instruction::MSTORE8 => {
state.stack.check_pop_push(2, 0)?;
state.memory.check_write(state.stack.peek(0).unwrap())?;
Ok(None)
},
Instruction::SLOAD => {
state.stack.check_pop_push(1, 1)?;
state.account_state.require(state.context.address)?;
state.account_state.require_storage(state.context.address, state.stack.peek(0).unwrap())?;
Ok(None)
},
Instruction::SSTORE => {
state.stack.check_pop_push(2, 0)?;
state.account_state.require(state.context.address)?;
state.account_state.require_storage(state.context.address, state.stack.peek(0).unwrap())?;
Ok(None)
},
Instruction::JUMP => {
state.stack.check_pop_push(1, 0)?;
Ok(Some(ControlCheck::Jump(state.stack.peek(0).unwrap())))
},
Instruction::JUMPI => {
state.stack.check_pop_push(2, 0)?;
if state.stack.peek(1).unwrap() != M256::zero() {
Ok(Some(ControlCheck::Jump(state.stack.peek(0).unwrap())))
} else {
Ok(None)
}
},
Instruction::PC => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::MSIZE => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::GAS => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::JUMPDEST => Ok(None),
Instruction::PUSH(v) => { state.stack.check_pop_push(0, 1)?; Ok(None) },
Instruction::DUP(v) => { state.stack.check_pop_push(v, v+1)?; Ok(None) },
Instruction::SWAP(v) => { state.stack.check_pop_push(v+1, v+1)?; Ok(None) },
Instruction::LOG(v) => {
state.stack.check_pop_push(v+2, 0)?;
check_range(state.stack.peek(0).unwrap(), state.stack.peek(1).unwrap())?;
Ok(None)
},
Instruction::CREATE => {
state.stack.check_pop_push(3, 1)?;
check_range(state.stack.peek(1).unwrap(), state.stack.peek(2).unwrap())?;
state.account_state.require(state.context.address)?;
Ok(None)
},
Instruction::CALL => {
state.stack.check_pop_push(7, 1)?;
check_range(state.stack.peek(3).unwrap(), state.stack.peek(4).unwrap())?;
check_memory_write_range(&state.memory,
state.stack.peek(5).unwrap(), state.stack.peek(6).unwrap())?;
state.account_state.require(state.context.address)?;
state.account_state.require(state.stack.peek(1).unwrap().into())?;
Ok(None)
},
Instruction::CALLCODE => {
state.stack.check_pop_push(7, 1)?;
check_range(state.stack.peek(3).unwrap(), state.stack.peek(4).unwrap())?;
check_memory_write_range(&state.memory,
state.stack.peek(5).unwrap(), state.stack.peek(6).unwrap())?;
state.account_state.require(state.context.address)?;
state.account_state.require(state.stack.peek(1).unwrap().into())?;
Ok(None)
},
Instruction::RETURN => {
state.stack.check_pop_push(2, 0)?;
check_range(state.stack.peek(0).unwrap(), state.stack.peek(1).unwrap())?;
Ok(None)
},
Instruction::DELEGATECALL => unimplemented!(),
Instruction::SUICIDE => {
state.stack.check_pop_push(1, 0)?;
state.account_state.require(state.context.address)?;
Ok(None)
},
}
}<|fim▁end|>
| |
<|file_name|>functions_8.js<|end_file_name|><|fim▁begin|>var searchData=
[
['jsontographfunction',['jsonToGraphFunction',['../namespacechi.html#a6a3fedb48e6702c016f996d8a7f445fc',1,'chi']]],
['jsontographmodule',['jsonToGraphModule',['../namespacechi.html#a4489e333fecc4168278e94f8b3f81e3c',1,'chi']]],
['jsontographstruct',['jsonToGraphStruct',['../namespacechi.html#a629f77832b6e7a6e0eaab123c4be1cda',1,'chi']]],
['jumpbackinst',['jumpBackInst',['../structchi_1_1NodeCompiler.html#a7dc06ad0390f2113fbeb5e7c0cf3dd06',1,'chi::NodeCompiler']]]<|fim▁hole|><|fim▁end|>
|
];
|
<|file_name|>castle-generator.ts<|end_file_name|><|fim▁begin|>/* THis file contains code to generate castles of different types
* and contents.
*/
import RG from '../rg';
import {Room} from '../../../lib/rot-js/map/features';
import * as Element from '../element';
import {LevelGenerator, ILevelGenOpts} from './level-generator';
import {MapGenerator} from './map.generator';
import {Level, LevelExtraType} from '../level';<|fim▁hole|>import {DungeonPopulate} from '../dungeon-populate';
import {Castle} from '../../data/tiles.castle';
import {LevelSurroundings} from '../level-surroundings';
import {FactoryItem} from '../factory.items';
import {Placer} from '../placer';
import {Random} from '../random';
import {Geometry} from '../geometry';
import {Path} from '../path';
import {ELEM} from '../../data/elem-constants';
const RNG = Random.getRNG();
import {TCoord, ICoordXY, ItemConf} from '../interfaces';
type CellMap = import('../map').CellMap;
type Cell = import('../map.cell').Cell;
type ElementLever = Element.ElementLever;
interface CastleOpts extends ILevelGenOpts {
roomCount: number;
centralCorridors: boolean;
templates?: any;
}
type GateFunc = () => void;
type PartialCastleOpts = Partial<CastleOpts>;
/* This class is used to generate different dungeon levels. */
export class CastleGenerator extends LevelGenerator {
/* Return default options for castle generation. Used in editor mainly. */
public static getOptions(): CastleOpts {
const opts = LevelGenerator.getOptions() as CastleOpts;
opts.centralCorridors = false;
opts.roomCount = -1;
return opts;
}
public static carvePathFromEdge(level: Level, elemType: string): ICoordXY[] {
const edgeConns = level.getFreeEdgeCells();
const map = level.getMap();
const foundElem = level.getCellWithElem(elemType);
const randConn = edgeConns[0];
const [x0, y0] = randConn.getXY();
const [x1, y1] = foundElem.getXY();
const passCb = (x: number, y: number): boolean => (
map.hasXY(x, y) &&
map.getCell(x, y).getBaseElem().getType() !== 'wallcastle'
);
const path: ICoordXY[] = Path.getShortestPath(x0, y0, x1, y1, passCb);
path.forEach((xy: ICoordXY) => {
const cell = map.getCell(xy.x, xy.y);
if (!cell.isFree()) {
cell.setBaseElem(ELEM.FLOOR);
}
});
return path;
}
public addDoors: boolean;
public shouldRemoveMarkers: boolean;
public nItemsAdded: number;
constructor() {
super();
this.addDoors = true;
this.shouldRemoveMarkers = true;
this.nItemsAdded = 0;
}
/* Returns a fully populated castle-level. */
public create(cols: number, rows: number, conf: PartialCastleOpts): Level {
const castleLevel = this.createLevel(cols, rows, conf);
this.removeMarkers(castleLevel, conf);
if (conf.addItems) {
this.nItemsAdded = this.addItemsToCastle(castleLevel, conf);
}
this.populateStoreRooms(castleLevel, conf);
// TODO populate level with actors based on conf
if (conf.addActors) {
RG.err('CastleGenerator', 'create',
'addActors == true not supported yet');
}
return castleLevel;
}
/* Returns a castle level without populating it. */
public createLevel(
cols: number, rows: number, conf: PartialCastleOpts
): Level {
const levelConf: any = Object.assign({
dungeonType: 'castle',
wallType: 'wallcastle',
floorType: 'floorcastle',
}, conf
);
levelConf.preserveMarkers = true;
const mapgen = new MapGenerator();
// Determine direction of castle exit
const gateFunc = getGateDirFunction(conf);
if (gateFunc) {
levelConf.startRoomFunc = gateFunc;
}
if (conf.centralCorridors) {
levelConf.constraintFunc = Castle.constraintFuncCross;
}
const mapObj = mapgen.createCastle(cols, rows, levelConf);
let level = new Level(mapObj.map);
level.setMap(mapObj.map, mapObj);
this.addMarkersFromTiles(level, mapObj.tiles);
if (conf.cellsAround) {
level = this.createCastleSurroundings(level, conf);
if (!level) {
RG.err('CastleGenerator', 'createLevel',
'Got null level from surround. Something went wrong');
}
}
// Note that markers must be preserved in MapGenerator for this to work
this.createDoorsAndLevers(level);
return level;
}
public addItemsToCastle(level: Level, conf: PartialCastleOpts): number {
// Storerooms contain better loot
let nAdded = 0;
const extras = level.getExtras();
const storerooms = extras.storeroom as LevelExtraType[];
const {maxValue} = conf;
if (!maxValue) {
RG.err('CastleGenerator', 'addItemsToCastle',
'maxValue was not given in conf ' + JSON.stringify(conf));
return 0;
}
const itemFunc = item => ((
(item.value <= (2 * maxValue)) && (item.value >= maxValue)
));
const itemConf: ItemConf = {
item: itemFunc, maxValue, nItems: 1
};
const factItem = new FactoryItem();
storerooms.forEach(room => {
const itemsPlaced = factItem.generateItems(itemConf);
if (Placer.addPropsToRoom(level, room, itemsPlaced)) {
nAdded += itemsPlaced.length;
}
});
// One of the storerooms can contain gold as well
if (RG.isSuccess(GOLD_VAULT_CHANCE)) {
const goldRoom = RNG.arrayGetRand(storerooms);
const wealth = RNG.getUniformInt(6, 12);
const goldItems = factItem.generateGold({nGold: 5, nLevel: wealth, maxValue});
if (Placer.addPropsToRoom(level, goldRoom, goldItems)) {
nAdded += goldItems.length;
}
}
const normalRooms = extras.room as LevelExtraType[];
itemConf.nItems = normalRooms.length;
const items = factItem.generateItems(itemConf);
items.forEach(item => {
const room = RNG.arrayGetRand(normalRooms);
if (Placer.addPropsToRoom(level, room, [item])) {
nAdded += 1;
}
});
return nAdded;
}
public addMarkersFromTiles(level: Level, tiles): void {
const extras = {
corridor: [],
entrance: [],
room: [],
storeroom: [],
vault: []
};
level.setExtras(extras);
Object.values(tiles).forEach((tile: any) => {
if (re.storeroom.test(tile.name)) {
this.addToExtras(level, tile, 'storeroom');
}
else if (re.vault.test(tile.name)) {
this.addToExtras(level, tile, 'vault');
}
else if (re.entrance.test(tile.name)) {
this.addToExtras(level, tile, 'entrance');
}
else if (re.corridor.test(tile.name)) {
this.addToExtras(level, tile, 'corridor');
}
else if (!re.filler.test(tile.name)) {
this.addToExtras(level, tile, 'room');
}
});
}
public addToExtras(level: Level, tile, name: MarkerKey): void {
const bbox = Geometry.convertBbox(tile);
const cells = level.getMap().getFreeInBbox(bbox);
cells.forEach((cell: Cell) => {
const [x, y] = cell.getXY();
const marker = new Element.ElementMarker(markers[name]);
marker.setTag(name);
level.addElement(marker, x, y);
});
const room = new Room(bbox.ulx, bbox.uly, bbox.lrx, bbox.lry);
const extras = level.getExtras();
(extras[name] as LevelExtraType[]).push(room as any);
}
/* Links (and first creates) levers and lever doors based on markers. */
public createDoorsAndLevers(level: Level): void {
const map: CellMap = level.getMap();
const cells: Cell[] = map.getCells();
const doorPos: {[key: string]: Element.ElementLeverDoor} = {};
const levers: ElementLever[] = [];
// Note that markers must be preserved in MapGenerator for this to work
cells.forEach(cell => {
if (cell.hasElements()) {
const [x, y] = cell.getXY();
if (cell.hasMarker('leverdoor')) {
const door = new Element.ElementLeverDoor();
map.getCell(x, y).removeProps(RG.TYPE_ELEM);
level.addElement(door, x, y);
doorPos[cell.getKeyXY()] = door;
}
else if (cell.hasMarker('lever')) {
const lever = new Element.ElementLever();
map.getCell(x, y).removeProps(RG.TYPE_ELEM);
level.addElement(lever, x, y);
levers.push(lever);
}
else if (cell.hasMarker('door')) {
const door = new Element.ElementDoor(true);
map.getCell(x, y).removeProps(RG.TYPE_ELEM);
level.addElement(door, x, y);
}
}
});
// Finally connect lever to its door
levers.forEach(lever => {
const [x, y] = lever.getXY();
const xyAround = Geometry.getBoxAround(x, y, 1);
xyAround.forEach((xy: TCoord) => {
const keyXY = xy[0] + ',' + xy[1];
if (doorPos[keyXY]) {
let door: any = map.getCell(xy[0], xy[1]).getPropType('leverdoor');
if (door) {door = door[0];}
else {
RG.err('CastleGenerator', 'createDoorsAndLevers',
`No door found for lever@${x},${y}`);
}
lever.addTarget(door);
}
});
});
}
public populateStoreRooms(level: Level, conf): void {
const dungPopul = new DungeonPopulate();
if (conf.actorFunc) {
dungPopul.setActorFunc(conf.actorFunc);
}
const maxDanger = conf.maxDanger;
const extras = level.getExtras();
if (extras.storeroom) {
const storerooms = extras.storeroom as LevelExtraType[];
storerooms.forEach((room: any) => {
const cPoint: TCoord = room.getCenter();
dungPopul.addPointGuardian(level, cPoint, maxDanger);
});
// Add another main loot + guardian
const mainLootRoom: any = RNG.arrayGetRand(storerooms);
if (mainLootRoom) {
const cMain: TCoord = mainLootRoom.getCenter();
if (dungPopul.addMainLoot(level, cMain, conf.maxValue)) {
dungPopul.addPointGuardian(level, cMain, maxDanger + 4);
}
}
}
}
public createCastleSurroundings(level: Level, conf): null | Level {
const levelSurround = new LevelSurroundings();
const extras = level.getExtras();
const newLevel = levelSurround.surround(level, conf);
if (newLevel) {
newLevel.setExtras(extras);
levelSurround.scaleExtras(newLevel);
return newLevel;
}
return null;
}
}
// Probability that one storeroom additionally receives a gold stash.
const GOLD_VAULT_CHANCE = 0.10;
// Patterns used to classify generated tile names into extras categories.
const re = {
    corridor: /(corridor|corner)/,
    entrance: /entrance/,
    storeroom: /storeroom/,
    vault: /vault/,
    filler: /filler/i
};
// Single-character marker tags placed on cells of each extras category.
const markers = {
    corridor: 'C',
    room: 'R',
    entrance: 'E',
    storeroom: 'S',
    vault: 'V'
};
type MarkerKey = keyof (typeof markers);
/* Returns the function used to generate the castle gate, chosen based on the
 * surrounding cells. */
function getGateDirFunction(conf): GateFunc | null {
if (conf.cellsAround) {
const {cellsAround} = conf;
const funcs = [];
const levelSurround = new LevelSurroundings();
const dirBlocked: string[] = levelSurround.getNonBlockedDirs(cellsAround);
dirBlocked.forEach((dir: string) => {
funcs.push(Castle.startFuncs[dir]);
});
if (funcs.length === 0) {
RG.warn('CastleGenerator', 'getGateDirFunction',
'No free cellsAround ' + JSON.stringify(cellsAround));
}
else {
return RNG.arrayGetRand(funcs);
}
}
return null;
}<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_mut)]
pub fn foo() {
// strings are implemented as a collection of bytes plus some methods to provide useful
// functionality when those bytes are interpreted as text.
// the 'str' type represents a string slice, and is usually seen in its borrowed form &str
// str is a type in the langugage itself
// String type is provided by the standard library<|fim▁hole|> let slice: &str = &mut s[0..4];
// Can't do the following??
// slice[2] = 'x';
}
println!("new string: {}", s);
// 2 ways of creating String objects from string literals are equivalent
let s = "foo bar".to_string();
let s = String::from("foo bar");
// Since strings are utf-8, the following are all valid
let hello = "السلام عليكم";
let hello = "Dobrý den";
let hello = "Hello";
let hello = "שָׁלוֹם";
let hello = "नमस्ते";
let hello = "こんにちは";
let hello = "안녕하세요";
let hello = "你好";
let hello = "Olá";
let hello = "Здравствуйте";
let hello = "Hola";
// When taking string slices, we only read the bytes specified by the range. If the range of
// bytes is cannot be decoded to UTF-8 code points, an attemp to do so might result in an
// error.
let hello = "नमस्ते";
let s = &hello[..3];
// This will only print न instead of नमस्.
println!("I say: {}", s);
// If we instead give the range of [0..2], the programs panics with the error:
// 'byte index 2 is not a char boundary; it is inside 'न' (bytes 0..3) of `नमस्ते'
// let s = &hello[..2];
// println!("{}", s);
// String mutations
// Push a string slice
let mut hello = String::from("hello");
hello.push_str("foo");
// Push a character (unicode code point)
let mut hello = "hol".to_string();
hello.push('न');
println!("hol is now: {}", hello);
// Concat strings
let hello = "hello,".to_string();
let world = String::from(" world!");
let hello_world = hello /* moved */ + &world;
// We can't add 2 string values. The + operator takes a string reference as its argument.
// Does this lead to new space allocation to store the new string?
// hello = hello + world; is not allowed
// CAn only add a &str to a String. &String gets coerced into a &str using deref coercion
// (something like converting &s to &s[..].
println!("{}", hello_world);
// Concatenating multiple strings
let s1 = String::from("tic");
let s2 = String::from("tac");
let s3 = String::from("toe");
// format! macro is similar to println. Does not take ownership of its arguments
let s = format!("{}-{}-{}", s1, s2, s3);
// When doing simple addition, the first argument is moved. Does the str reference type not
// implement the + operator?
let s = s1 + "-" + &s2 + "-" + &s3;
// println!("{}", s1); s1 has been moved!
println!("{}", s2);
println!("{}", s3);
// We can't index into a string due to UTF-8 encoding and resulting ambiguity in what is really
// desired fromt he index
// Also, the expectation with indexing is O(1) access, whereas thay might not be possible for
// Strings, due to the UTF-8 encoding of characters.
let hello = "Здравствуйте";
// Not allowed:
// let answer = &hello[0];
//
// Instead, rust supports getting a string slice by specifying the byte range
let s: &str = &hello[..2]; // index out-of-range will not be detected at compile time
println!("{}", s);
// In rust, we can think of strings in 3 ways:
// 1. Sequence of bytes (vec[u8])
// 2. Unicode scalar values (these are what the'char' type in rust would have)
// 3. Grapheme Clusters
// e.g., the string "नमस्ते" can be:
// 1. [224, 164, 168, 224, 164, 174, 224, 164, 184, 224, 165, 141, 224, 164, 164, 224, 165, 135]
// 2. ['न', 'म', 'स', '्', 'त', 'े'] // Note that the 4th and 6th elements in this array are not
// really characters, but instead diatrics which do not make sense in isolation.
// 3. ["न", "म", "स्", "ते"]
//
// Apart from string slicing, rust also provides a way to iterator over the bytes or the
// characters. Iteration over grapheme clusters is not provided through the standard library.
//
let hello = "नमस्ते";
for b in hello.bytes() {
println!("{}", b);
}
for c in hello.chars() {
println!("{}", c);
}
}<|fim▁end|>
|
// Both String and string slices are UTF-8 encoded.
let mut s = String::from("foo bar");
{
|
<|file_name|>main_window.py<|end_file_name|><|fim▁begin|>import re
import os
import sys
import time
import datetime
import traceback
from decimal import Decimal
import threading
import asyncio
from electrum.bitcoin import TYPE_ADDRESS
from electrum.storage import WalletStorage
from electrum.wallet import Wallet, InternalAddressCorruption
from electrum.paymentrequest import InvoiceStore
from electrum.util import profiler, InvalidPassword, send_exception_to_crash_reporter
from electrum.plugin import run_hook
from electrum.util import format_satoshis, format_satoshis_plain, format_fee_satoshis
from electrum.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
from electrum import blockchain
from electrum.network import Network, TxBroadcastError, BestEffortRequestFailed
from .i18n import _
from kivy.app import App
from kivy.core.window import Window
from kivy.logger import Logger
from kivy.utils import platform
from kivy.properties import (OptionProperty, AliasProperty, ObjectProperty,
StringProperty, ListProperty, BooleanProperty, NumericProperty)
from kivy.cache import Cache
from kivy.clock import Clock
from kivy.factory import Factory
from kivy.metrics import inch
from kivy.lang import Builder
## lazy imports for factory so that widgets can be used in kv
#Factory.register('InstallWizard', module='electrum.gui.kivy.uix.dialogs.installwizard')
#Factory.register('InfoBubble', module='electrum.gui.kivy.uix.dialogs')
#Factory.register('OutputList', module='electrum.gui.kivy.uix.dialogs')
#Factory.register('OutputItem', module='electrum.gui.kivy.uix.dialogs')
from .uix.dialogs.installwizard import InstallWizard
from .uix.dialogs import InfoBubble, crash_reporter
from .uix.dialogs import OutputList, OutputItem
from .uix.dialogs import TopLabel, RefLabel
#from kivy.core.window import Window
#Window.softinput_mode = 'below_target'
# delayed imports: for startup speed on android
notification = app = ref = None
util = False
# register widget cache for keeping memory down timeout to forever to cache
# the data
Cache.register('electrum_widgets', timeout=0)
from kivy.uix.screenmanager import Screen
from kivy.uix.tabbedpanel import TabbedPanel
from kivy.uix.label import Label
from kivy.core.clipboard import Clipboard
Factory.register('TabbedCarousel', module='electrum.gui.kivy.uix.screens')
# Register fonts without this you won't be able to use bold/italic...
# inside markup.
from kivy.core.text import Label
Label.register('Roboto',
'electrum/gui/kivy/data/fonts/Roboto.ttf',
'electrum/gui/kivy/data/fonts/Roboto.ttf',
'electrum/gui/kivy/data/fonts/Roboto-Bold.ttf',
'electrum/gui/kivy/data/fonts/Roboto-Bold.ttf')
from electrum.util import (base_units, NoDynamicFeeEstimates, decimal_point_to_base_unit_name,
base_unit_name_to_decimal_point, NotEnoughFunds, UnknownBaseUnit,
DECIMAL_POINT_DEFAULT)
class ElectrumWindow(App):
electrum_config = ObjectProperty(None)
language = StringProperty('en')
# properties might be updated by the network
num_blocks = NumericProperty(0)
num_nodes = NumericProperty(0)
server_host = StringProperty('')
server_port = StringProperty('')
num_chains = NumericProperty(0)
blockchain_name = StringProperty('')
fee_status = StringProperty('Fee')
balance = StringProperty('')
fiat_balance = StringProperty('')
is_fiat = BooleanProperty(False)
blockchain_forkpoint = NumericProperty(0)
auto_connect = BooleanProperty(False)
def on_auto_connect(self, instance, x):
net_params = self.network.get_parameters()
net_params = net_params._replace(auto_connect=self.auto_connect)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def toggle_auto_connect(self, x):
self.auto_connect = not self.auto_connect
oneserver = BooleanProperty(False)
def on_oneserver(self, instance, x):
net_params = self.network.get_parameters()
net_params = net_params._replace(oneserver=self.oneserver)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def toggle_oneserver(self, x):
self.oneserver = not self.oneserver
proxy_str = StringProperty('')
    def update_proxy_str(self, proxy: dict):
        """Refresh the proxy_str UI property from a proxy configuration dict."""
        mode = proxy.get('mode')
        host = proxy.get('host')
        port = proxy.get('port')
        # NOTE(review): assumes host/port are strings whenever mode is set — confirm
        self.proxy_str = (host + ':' + port) if mode else _('None')
    def choose_server_dialog(self, popup):
        """Open a ChoiceDialog of known servers; picking one fills the host/port
        fields of the given network-settings popup."""
        from .uix.dialogs.choice_dialog import ChoiceDialog
        protocol = 's'
        def cb2(host):
            # Fill in the chosen host plus its default port for the protocol.
            from electrum import constants
            pp = servers.get(host, constants.net.DEFAULT_PORTS)
            port = pp.get(protocol, '')
            popup.ids.host.text = host
            popup.ids.port.text = port
        servers = self.network.get_servers()
        ChoiceDialog(_('Choose a server'), sorted(servers), popup.ids.host.text, cb2).open()
    def choose_blockchain_dialog(self, dt):
        """When several chain forks exist, let the user choose which to follow."""
        from .uix.dialogs.choice_dialog import ChoiceDialog
        chains = self.network.get_blockchains()
        def cb(name):
            # Map the displayed chain name back to its id and follow that chain.
            with blockchain.blockchains_lock: blockchain_items = list(blockchain.blockchains.items())
            for chain_id, b in blockchain_items:
                if name == b.get_name():
                    self.network.run_from_another_thread(self.network.follow_chain_given_id(chain_id))
        chain_objects = [blockchain.blockchains.get(chain_id) for chain_id in chains]
        chain_objects = filter(lambda b: b is not None, chain_objects)
        names = [b.get_name() for b in chain_objects]
        # Only show the dialog when there is an actual choice to make
        if len(names) > 1:
            cur_chain = self.network.blockchain().get_name()
            ChoiceDialog(_('Choose your chain'), names, cur_chain, cb).open()
use_rbf = BooleanProperty(False)
def on_use_rbf(self, instance, x):
self.electrum_config.set_key('use_rbf', self.use_rbf, True)
use_change = BooleanProperty(False)
def on_use_change(self, instance, x):
self.electrum_config.set_key('use_change', self.use_change, True)
use_unconfirmed = BooleanProperty(False)
def on_use_unconfirmed(self, instance, x):
self.electrum_config.set_key('confirmed_only', not self.use_unconfirmed, True)
    def set_URI(self, uri):
        """Switch to the send tab and load a payment URI into it."""
        self.switch_to('send')
        self.send_screen.set_URI(uri)
    def on_new_intent(self, intent):
        """Android intent handler: accept 'fujicoin:' URIs and open them for sending."""
        if intent.getScheme() != 'fujicoin':
            return
        uri = intent.getDataString()
        self.set_URI(uri)
    def on_language(self, instance, language):
        """Observer for the `language` property: switch the active translation."""
        Logger.info('language: {}'.format(language))
        _.switch_lang(language)
def update_history(self, *dt):
if self.history_screen:
self.history_screen.update()
    def on_quotes(self, d):
        """Network callback: fiat exchange quotes changed; refresh status/history."""
        Logger.info("on_quotes")
        self._trigger_update_status()
        self._trigger_update_history()
    def on_history(self, d):
        """Network callback: fiat history changed; drop cached coin prices, refresh."""
        Logger.info("on_history")
        if self.wallet:
            self.wallet.clear_coin_price_cache()
        self._trigger_update_history()
    def on_fee_histogram(self, *args):
        """Network callback: mempool fee histogram changed; refresh history."""
        self._trigger_update_history()
def _get_bu(self):
decimal_point = self.electrum_config.get('decimal_point', DECIMAL_POINT_DEFAULT)
try:
return decimal_point_to_base_unit_name(decimal_point)
except UnknownBaseUnit:
return decimal_point_to_base_unit_name(DECIMAL_POINT_DEFAULT)
def _set_bu(self, value):
assert value in base_units.keys()
decimal_point = base_unit_name_to_decimal_point(value)
self.electrum_config.set_key('decimal_point', decimal_point, True)
self._trigger_update_status()
self._trigger_update_history()
wallet_name = StringProperty(_('No Wallet'))
base_unit = AliasProperty(_get_bu, _set_bu)
fiat_unit = StringProperty('')
    def on_fiat_unit(self, a, b):
        """Observer for `fiat_unit`: redraw the history in the new currency."""
        self._trigger_update_history()
    def decimal_point(self):
        """Number of decimal places implied by the current base unit."""
        return base_units[self.base_unit]
def btc_to_fiat(self, amount_str):
if not amount_str:
return ''
if not self.fx.is_enabled():
return ''
rate = self.fx.exchange_rate()
if rate.is_nan():
return ''
fiat_amount = self.get_amount(amount_str + ' ' + self.base_unit) * rate / pow(10, 8)
return "{:.2f}".format(fiat_amount).rstrip('0').rstrip('.')
def fiat_to_btc(self, fiat_amount):
if not fiat_amount:
return ''
rate = self.fx.exchange_rate()
if rate.is_nan():
return ''
satoshis = int(pow(10,8) * Decimal(fiat_amount) / Decimal(rate))
return format_satoshis_plain(satoshis, self.decimal_point())
def get_amount(self, amount_str):
a, u = amount_str.split()
assert u == self.base_unit
try:
x = Decimal(a)
except:
return None
p = pow(10, self.decimal_point())
return int(p * x)
_orientation = OptionProperty('landscape',
options=('landscape', 'portrait'))
    def _get_orientation(self):
        # Getter backing the read-only `orientation` AliasProperty.
        return self._orientation
orientation = AliasProperty(_get_orientation,
None,
bind=('_orientation',))
    '''Tries to ascertain the kind of device the app is running on.
    Can be one of `tablet` or `phone`.
    :data:`orientation` is a read-only `AliasProperty`. Defaults to 'landscape'.
    '''
_ui_mode = OptionProperty('phone', options=('tablet', 'phone'))
    def _get_ui_mode(self):
        # Getter backing the read-only `ui_mode` AliasProperty.
        return self._ui_mode
ui_mode = AliasProperty(_get_ui_mode,
None,
bind=('_ui_mode',))
    '''Tries to ascertain the kind of device the app is running on.
    Can be one of `tablet` or `phone`.
    :data:`ui_mode` is a read-only `AliasProperty`. Defaults to 'phone'.
    '''
    def __init__(self, **kwargs):
        """Create the app object; pulls config/network/plugins/gui_object from
        kwargs and seeds the Kivy properties from the current network state."""
        # initialize variables
        self._clipboard = Clipboard
        self.info_bubble = None
        self.nfcscanner = None
        self.tabs = None
        self.is_exit = False  # set True after first back-press (see on_keyboard)
        self.wallet = None
        self.pause_time = 0
        self.asyncio_loop = asyncio.get_event_loop()
        App.__init__(self)#, **kwargs)
        # NOTE(review): 'title' is a dead local — presumably self.title was intended; confirm
        title = _('Electrum App')
        self.electrum_config = config = kwargs.get('config', None)
        self.language = config.get('language', 'en')
        self.network = network = kwargs.get('network', None)  # type: Network
        if self.network:
            # Mirror current network state into the UI properties
            self.num_blocks = self.network.get_local_height()
            self.num_nodes = len(self.network.get_interfaces())
            net_params = self.network.get_parameters()
            self.server_host = net_params.host
            self.server_port = net_params.port
            self.auto_connect = net_params.auto_connect
            self.oneserver = net_params.oneserver
            self.proxy_config = net_params.proxy if net_params.proxy else {}
            self.update_proxy_str(self.proxy_config)
        self.plugins = kwargs.get('plugins', [])
        self.gui_object = kwargs.get('gui_object', None)
        self.daemon = self.gui_object.daemon
        self.fx = self.daemon.fx
        self.use_rbf = config.get('use_rbf', True)
        self.use_change = config.get('use_change', True)
        self.use_unconfirmed = not config.get('confirmed_only', False)
        # create triggers so as to minimize updating a max of 2 times a sec
        self._trigger_update_wallet = Clock.create_trigger(self.update_wallet, .5)
        self._trigger_update_status = Clock.create_trigger(self.update_status, .5)
        self._trigger_update_history = Clock.create_trigger(self.update_history, .5)
        self._trigger_update_interfaces = Clock.create_trigger(self.update_interfaces, .5)
        self._periodic_update_status_during_sync = Clock.schedule_interval(self.update_wallet_synchronizing_progress, .5)
        # cached dialogs
        self._settings_dialog = None
        self._password_dialog = None
        self.fee_status = self.electrum_config.get_fee_status()
    def on_pr(self, pr):
        """Handle an incoming BIP70-style payment request: verify it, store it as
        an invoice, and open it in the send tab unless already paid/expired."""
        if not self.wallet:
            self.show_error(_('No wallet loaded.'))
            return
        if pr.verify(self.wallet.contacts):
            key = self.wallet.invoices.add(pr)
            if self.invoices_screen:
                self.invoices_screen.update()
            status = self.wallet.invoices.get_status(key)
            if status == PR_PAID:
                self.show_error("invoice already paid")
                self.send_screen.do_clear()
            else:
                if pr.has_expired():
                    self.show_error(_('Payment request has expired'))
                else:
                    self.switch_to('send')
                    self.send_screen.set_request(pr)
        else:
            # signature/verification failure
            self.show_error("invoice error:" + pr.error)
            self.send_screen.do_clear()
def on_qr(self, data):
from electrum.bitcoin import base_decode, is_address
data = data.strip()
if is_address(data):
self.set_URI(data)
return
if data.startswith('fujicoin:'):
self.set_URI(data)
return
# try to decode transaction
from electrum.transaction import Transaction
from electrum.util import bh2u
try:
text = bh2u(base_decode(data, None, base=43))
tx = Transaction(text)
tx.deserialize()
except:
tx = None
if tx:
self.tx_dialog(tx)
return
# show error
self.show_error("Unable to decode QR data")
def update_tab(self, name):
s = getattr(self, name + '_screen', None)
if s:
s.update()
@profiler
def update_tabs(self):
for tab in ['invoices', 'send', 'history', 'receive', 'address']:
self.update_tab(tab)
    def switch_to(self, name):
        """Activate the tab called `name`, lazily loading its screen on first use."""
        s = getattr(self, name + '_screen', None)
        if s is None:
            s = self.tabs.ids[name + '_screen']
            s.load_screen()
        panel = self.tabs.ids.panel
        tab = self.tabs.ids[name + '_tab']
        panel.switch_to(tab)
    def show_request(self, addr):
        """Open the receive tab pre-filled with the given address."""
        self.switch_to('receive')
        self.receive_screen.screen.address = addr
    def show_pr_details(self, req, status, is_invoice):
        """Show details of a payment request/invoice dict in the invoice popup."""
        from electrum.util import format_time
        requestor = req.get('requestor')
        exp = req.get('exp')
        memo = req.get('memo')
        amount = req.get('amount')
        fund = req.get('fund')
        popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/invoice.kv')
        popup.is_invoice = is_invoice
        popup.amount = amount
        popup.requestor = requestor if is_invoice else req.get('address')
        popup.exp = format_time(exp) if exp else ''
        popup.description = memo if memo else ''
        popup.signature = req.get('signature', '')
        popup.status = status
        popup.fund = fund if fund else 0
        txid = req.get('txid')
        popup.tx_hash = txid or ''
        # Outputs can only be rendered once the popup's widget tree exists
        popup.on_open = lambda: popup.ids.output_list.update(req.get('outputs', []))
        popup.export = self.export_private_keys
        popup.open()
    def show_addr_details(self, req, status):
        """Show details of a receiving address request in the invoice popup."""
        from electrum.util import format_time
        fund = req.get('fund')
        isaddr = 'y'
        popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/invoice.kv')
        popup.isaddr = isaddr
        popup.is_invoice = False
        popup.status = status
        popup.requestor = req.get('address')
        popup.fund = fund if fund else 0
        popup.export = self.export_private_keys
        popup.open()
    def qr_dialog(self, title, data, show_text=False, text_for_clipboard=None):
        """Display `data` as a QR code; on render failure, optionally copy the
        text to the clipboard and inform the user."""
        from .uix.dialogs.qr_dialog import QRDialog
        def on_qr_failure():
            popup.dismiss()
            msg = _('Failed to display QR code.')
            if text_for_clipboard:
                msg += '\n' + _('Text copied to clipboard.')
                self._clipboard.copy(text_for_clipboard)
            # show_info must run on the main (UI) thread
            Clock.schedule_once(lambda dt: self.show_info(msg))
        popup = QRDialog(title, data, show_text, failure_cb=on_qr_failure,
                         text_for_clipboard=text_for_clipboard)
        popup.open()
    def scan_qr(self, on_complete):
        """Launch the Android QR scanner activity; `on_complete` receives the
        scanned text. No-op on non-Android platforms."""
        if platform != 'android':
            return
        from jnius import autoclass, cast
        from android import activity
        PythonActivity = autoclass('org.kivy.android.PythonActivity')
        SimpleScannerActivity = autoclass("org.electrum.qr.SimpleScannerActivity")
        Intent = autoclass('android.content.Intent')
        intent = Intent(PythonActivity.mActivity, SimpleScannerActivity)
        def on_qr_result(requestCode, resultCode, intent):
            try:
                if resultCode == -1:  # RESULT_OK:
                    # this doesn't work due to some bug in jnius:
                    # contents = intent.getStringExtra("text")
                    String = autoclass("java.lang.String")
                    contents = intent.getStringExtra(String("text"))
                    on_complete(contents)
            except Exception as e:  # exc would otherwise get lost
                send_exception_to_crash_reporter(e)
            finally:
                # one-shot handler: always unbind after the result arrives
                activity.unbind(on_activity_result=on_qr_result)
        activity.bind(on_activity_result=on_qr_result)
        PythonActivity.mActivity.startActivityForResult(intent, 0)
    def do_share(self, data, title):
        """Open the Android share chooser for plain-text `data`.
        No-op on non-Android platforms."""
        if platform != 'android':
            return
        from jnius import autoclass, cast
        JS = autoclass('java.lang.String')
        Intent = autoclass('android.content.Intent')
        sendIntent = Intent()
        sendIntent.setAction(Intent.ACTION_SEND)
        sendIntent.setType("text/plain")
        sendIntent.putExtra(Intent.EXTRA_TEXT, JS(data))
        PythonActivity = autoclass('org.kivy.android.PythonActivity')
        currentActivity = cast('android.app.Activity', PythonActivity.mActivity)
        it = Intent.createChooser(sendIntent, cast('java.lang.CharSequence', JS(title)))
        currentActivity.startActivity(it)
    def build(self):
        """Kivy entry point: construct the widget tree from the main kv file."""
        return Builder.load_file('electrum/gui/kivy/main.kv')
    def _pause(self):
        """Send the app to the background (Android only)."""
        if platform == 'android':
            # move activity to back
            from jnius import autoclass
            python_act = autoclass('org.kivy.android.PythonActivity')
            mActivity = python_act.mActivity
            mActivity.moveTaskToBack(True)
def on_start(self):
''' This is the start point of the kivy ui
'''
import time
Logger.info('Time to on_start: {} <<<<<<<<'.format(time.clock()))
win = Window
win.bind(size=self.on_size, on_keyboard=self.on_keyboard)
win.bind(on_key_down=self.on_key_down)
#win.softinput_mode = 'below_target'
self.on_size(win, win.size)
self.init_ui()
crash_reporter.ExceptionHook(self)
# init plugins
run_hook('init_kivy', self)
# fiat currency
self.fiat_unit = self.fx.ccy if self.fx.is_enabled() else ''
# default tab
self.switch_to('history')
# bind intent for fujicoin: URI scheme
if platform == 'android':
from android import activity
from jnius import autoclass
PythonActivity = autoclass('org.kivy.android.PythonActivity')
mactivity = PythonActivity.mActivity
self.on_new_intent(mactivity.getIntent())
activity.bind(on_new_intent=self.on_new_intent)
# connect callbacks
if self.network:
interests = ['wallet_updated', 'network_updated', 'blockchain_updated',
'status', 'new_transaction', 'verified']
self.network.register_callback(self.on_network_event, interests)
self.network.register_callback(self.on_fee, ['fee'])
self.network.register_callback(self.on_fee_histogram, ['fee_histogram'])
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
# load wallet
self.load_wallet_by_name(self.electrum_config.get_wallet_path())
# URI passed in config
uri = self.electrum_config.get('url')
if uri:
self.set_URI(uri)
def get_wallet_path(self):
if self.wallet:
return self.wallet.storage.path
else:
return ''
    def on_wizard_complete(self, wizard, storage):
        """Called when the install wizard finishes: start and load the new
        wallet, or fall back to reopening the last saved wallet."""
        if storage:
            wallet = Wallet(storage)
            wallet.start_network(self.daemon.network)
            self.daemon.add_wallet(wallet)
            self.load_wallet(wallet)
        elif not self.wallet:
            # wizard did not return a wallet; and there is no wallet open atm
            # try to open last saved wallet (potentially start wizard again)
            self.load_wallet_by_name(self.electrum_config.get_wallet_path(), ask_if_wizard=True)
    def load_wallet_by_name(self, path, ask_if_wizard=False):
        """Open the wallet at `path`: prompt for the PIN if protected, or run
        the install wizard when the file is missing/needs an upgrade."""
        if not path:
            return
        if self.wallet and self.wallet.storage.path == path:
            # already open; nothing to do
            return
        wallet = self.daemon.load_wallet(path, None)
        if wallet:
            if wallet.has_password():
                self.password_dialog(wallet, _('Enter PIN code'), lambda x: self.load_wallet(wallet), self.stop)
            else:
                self.load_wallet(wallet)
        else:
            def launch_wizard():
                # Create or upgrade the wallet file interactively
                wizard = Factory.InstallWizard(self.electrum_config, self.plugins)
                wizard.path = path
                wizard.bind(on_wizard_complete=self.on_wizard_complete)
                storage = WalletStorage(path, manual_upgrades=True)
                if not storage.file_exists():
                    wizard.run('new')
                elif storage.is_encrypted():
                    raise Exception("Kivy GUI does not support encrypted wallet files.")
                elif storage.requires_upgrade():
                    wizard.upgrade_storage(storage)
                else:
                    raise Exception("unexpected storage file situation")
            if not ask_if_wizard:
                launch_wizard()
            else:
                # Confirm with the user before starting the wizard; declining
                # removes the (broken/partial) wallet file and exits.
                from .uix.dialogs.question import Question
                def handle_answer(b: bool):
                    if b:
                        launch_wizard()
                    else:
                        try: os.unlink(path)
                        except FileNotFoundError: pass
                        self.stop()
                d = Question(_('Do you want to launch the wizard again?'), handle_answer)
                d.open()
def on_stop(self):
Logger.info('on_stop')
if self.wallet:
self.electrum_config.save_last_wallet(self.wallet)
self.stop_wallet()
def stop_wallet(self):
if self.wallet:
self.daemon.stop_wallet(self.wallet.storage.path)
self.wallet = None
def on_key_down(self, instance, key, keycode, codepoint, modifiers):
if 'ctrl' in modifiers:
# q=24 w=25
if keycode in (24, 25):
self.stop()
elif keycode == 27:
# r=27
# force update wallet
self.update_wallet()
elif keycode == 112:
# pageup
#TODO move to next tab
pass
elif keycode == 117:
# pagedown
#TODO move to prev tab
pass
#TODO: alt+tab_number to activate the particular tab
def on_keyboard(self, instance, key, keycode, codepoint, modifiers):
if key == 27 and self.is_exit is False:
self.is_exit = True
self.show_info(_('Press again to exit'))
return True
# override settings button
if key in (319, 282): #f1/settings button on android
#self.gui.main_gui.toggle_settings(self)
return True
def settings_dialog(self):
from .uix.dialogs.settings import SettingsDialog
if self._settings_dialog is None:
self._settings_dialog = SettingsDialog(self)
self._settings_dialog.update()
self._settings_dialog.open()
    def popup_dialog(self, name):
        """Open the popup identified by *name*.

        'settings' and 'wallets' use dedicated dialog classes; 'status' loads
        its kv file and is additionally populated with the wallet's master
        public keys; any other name is loaded directly from the matching
        ui_screens/<name>.kv file.
        """
        if name == 'settings':
            self.settings_dialog()
        elif name == 'wallets':
            from .uix.dialogs.wallets import WalletDialog
            d = WalletDialog()
            d.open()
        elif name == 'status':
            popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/'+name+'.kv')
            master_public_keys_layout = popup.ids.master_public_keys
            # The first mpk is skipped - presumably already shown by the kv
            # layout itself; TODO confirm against status.kv.
            for xpub in self.wallet.get_master_public_keys()[1:]:
                master_public_keys_layout.add_widget(TopLabel(text=_('Master Public Key')))
                # RefLabel gives copy-to-clipboard / QR display behaviour.
                ref = RefLabel()
                ref.name = _('Master Public Key')
                ref.data = xpub
                master_public_keys_layout.add_widget(ref)
            popup.open()
        else:
            popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/'+name+'.kv')
            popup.open()
@profiler
def init_ui(self):
''' Initialize The Ux part of electrum. This function performs the basic
tasks of setting up the ui.
'''
#from weakref import ref
self.funds_error = False
# setup UX
self.screens = {}
#setup lazy imports for mainscreen
Factory.register('AnimatedPopup',
module='electrum.gui.kivy.uix.dialogs')
Factory.register('QRCodeWidget',
module='electrum.gui.kivy.uix.qrcodewidget')
# preload widgets. Remove this if you want to load the widgets on demand
#Cache.append('electrum_widgets', 'AnimatedPopup', Factory.AnimatedPopup())
#Cache.append('electrum_widgets', 'QRCodeWidget', Factory.QRCodeWidget())
# load and focus the ui
self.root.manager = self.root.ids['manager']
self.history_screen = None
self.contacts_screen = None
self.send_screen = None
self.invoices_screen = None
self.receive_screen = None
self.requests_screen = None
self.address_screen = None
self.icon = "electrum/gui/icons/electrum.png"
self.tabs = self.root.ids['tabs']
    def update_interfaces(self, dt):
        """Refresh server/interface related properties from the network.

        Invoked via the _trigger_update_interfaces trigger on
        'network_updated' events (see on_network_event).
        """
        net_params = self.network.get_parameters()
        self.num_nodes = len(self.network.get_interfaces())
        self.num_chains = len(self.network.get_blockchains())
        chain = self.network.blockchain()
        self.blockchain_forkpoint = chain.get_max_forkpoint()
        self.blockchain_name = chain.get_name()
        interface = self.network.interface
        if interface:
            self.server_host = interface.host
        else:
            # No active interface yet: show the configured server as pending.
            self.server_host = str(net_params.host) + ' (connecting...)'
        self.proxy_config = net_params.proxy or {}
        self.update_proxy_str(self.proxy_config)
def on_network_event(self, event, *args):
Logger.info('network event: '+ event)
if event == 'network_updated':
self._trigger_update_interfaces()
self._trigger_update_status()
elif event == 'wallet_updated':
self._trigger_update_wallet()
self._trigger_update_status()
elif event == 'blockchain_updated':
# to update number of confirmations in history
self._trigger_update_wallet()
elif event == 'status':
self._trigger_update_status()
elif event == 'new_transaction':
self._trigger_update_wallet()
elif event == 'verified':
self._trigger_update_wallet()
@profiler
def load_wallet(self, wallet):
if self.wallet:
self.stop_wallet()
self.wallet = wallet
self.wallet_name = wallet.basename()
self.update_wallet()
# Once GUI has been initialized check if we want to announce something
# since the callback has been called before the GUI was initialized
if self.receive_screen:
self.receive_screen.clear()
self.update_tabs()
run_hook('load_wallet', wallet, self)
try:
wallet.try_detecting_internal_addresses_corruption()
except InternalAddressCorruption as e:
self.show_error(str(e))
send_exception_to_crash_reporter(e)
def update_status(self, *dt):
if not self.wallet:
return
if self.network is None or not self.network.is_connected():
status = _("Offline")
elif self.network.is_connected():
self.num_blocks = self.network.get_local_height()
server_height = self.network.get_server_height()
server_lag = self.num_blocks - server_height
if not self.wallet.up_to_date or server_height == 0:
num_sent, num_answered = self.wallet.get_history_sync_state_details()
status = ("{} [size=18dp]({}/{})[/size]"
.format(_("Synchronizing..."), num_answered, num_sent))
elif server_lag > 1:
status = _("Server is lagging ({} blocks)").format(server_lag)
else:
status = ''
else:
status = _("Disconnected")
if status:
self.balance = status
self.fiat_balance = status
else:
c, u, x = self.wallet.get_balance()<|fim▁hole|> self.fiat_balance = self.fx.format_amount(c+u+x) + ' [size=22dp]%s[/size]'% self.fx.ccy
def update_wallet_synchronizing_progress(self, *dt):
if not self.wallet:
return
if not self.wallet.up_to_date:
self._trigger_update_status()
    def get_max_amount(self):
        """Return the maximal spendable amount as a formatted string.

        Builds a send-all ('!') transaction from all spendable coins and
        returns its output value minus any plugin extra fee.  Returns ''
        when sending is aborted by a hook, there are no coins, or fee
        estimation fails.
        """
        from electrum.transaction import TxOutput
        if run_hook('abort_send', self):
            return ''
        inputs = self.wallet.get_spendable_coins(None, self.electrum_config)
        if not inputs:
            return ''
        # NOTE(review): str(None) is the truthy 'None', so the dummy-address
        # fallback only kicks in for an empty-string address - confirm the
        # send screen never yields None here.
        addr = str(self.send_screen.screen.address) or self.wallet.dummy_address()
        # '!' means "spend the maximum" for this output.
        outputs = [TxOutput(TYPE_ADDRESS, addr, '!')]
        try:
            tx = self.wallet.make_unsigned_transaction(inputs, outputs, self.electrum_config)
        except NoDynamicFeeEstimates as e:
            # Report asynchronously on the UI thread; bind e now since the
            # exception variable is cleared when the handler exits.
            Clock.schedule_once(lambda dt, bound_e=e: self.show_error(str(bound_e)))
            return ''
        except NotEnoughFunds:
            return ''
        except InternalAddressCorruption as e:
            self.show_error(str(e))
            send_exception_to_crash_reporter(e)
            return ''
        amount = tx.output_value()
        # Plugins (e.g. trustedcoin) may charge an extra fee on top.
        __, x_fee_amount = run_hook('get_tx_extra_fee', self.wallet, tx) or (None, 0)
        amount_after_all_fees = amount - x_fee_amount
        return format_satoshis_plain(amount_after_all_fees, self.decimal_point())
    def format_amount(self, x, is_diff=False, whitespaces=False):
        # Render a satoshi amount using the configured decimal point;
        # is_diff adds a sign, whitespaces pads for column alignment.
        return format_satoshis(x, 0, self.decimal_point(), is_diff=is_diff, whitespaces=whitespaces)
    def format_amount_and_units(self, x):
        # Plain amount followed by the configured base unit suffix.
        return format_satoshis_plain(x, self.decimal_point()) + ' ' + self.base_unit
    def format_fee_rate(self, fee_rate):
        # fee_rate is in sat/kB; displayed per byte.
        return format_fee_satoshis(fee_rate/1000) + ' sat/byte'
    #@profiler
    def update_wallet(self, *dt):
        """Refresh the status line and, when safe, redraw all wallet tabs."""
        self._trigger_update_status()
        # Tabs are only repainted when the wallet is fully synced or we are
        # offline - presumably to avoid repeated repaints mid-sync.
        if self.wallet and (self.wallet.up_to_date or not self.network or not self.network.is_connected()):
            self.update_tabs()
def notify(self, message):
try:
global notification, os
if not notification:
from plyer import notification
icon = (os.path.dirname(os.path.realpath(__file__))
+ '/../../' + self.icon)
notification.notify('Electrum', message,
app_icon=icon, app_name='Electrum')
except ImportError:
Logger.Error('Notification: needs plyer; `sudo python3 -m pip install plyer`')
def on_pause(self):
self.pause_time = time.time()
# pause nfc
if self.nfcscanner:
self.nfcscanner.nfc_disable()
return True
def on_resume(self):
now = time.time()
if self.wallet and self.wallet.has_password() and now - self.pause_time > 60:
self.password_dialog(self.wallet, _('Enter PIN'), None, self.stop)
if self.nfcscanner:
self.nfcscanner.nfc_enable()
def on_size(self, instance, value):
width, height = value
self._orientation = 'landscape' if width > height else 'portrait'
self._ui_mode = 'tablet' if min(width, height) > inch(3.51) else 'phone'
    def on_ref_label(self, label, touch):
        """Two-stage tap handler for RefLabel widgets.

        First tap copies the label's data to the clipboard; tapping the same
        label again shows the data as a QR code.
        """
        if label.touched:
            # Second tap: reset the flag and show the QR dialog.
            label.touched = False
            self.qr_dialog(label.name, label.data, True)
        else:
            label.touched = True
            self._clipboard.copy(label.data)
            Clock.schedule_once(lambda dt: self.show_info(_('Text copied to clipboard.\nTap again to display it as QR code.')))
def show_error(self, error, width='200dp', pos=None, arrow_pos=None,
exit=False, icon='atlas://electrum/gui/kivy/theming/light/error', duration=0,
modal=False):
''' Show an error Message Bubble.
'''
self.show_info_bubble( text=error, icon=icon, width=width,
pos=pos or Window.center, arrow_pos=arrow_pos, exit=exit,
duration=duration, modal=modal)
def show_info(self, error, width='200dp', pos=None, arrow_pos=None,
exit=False, duration=0, modal=False):
''' Show an Info Message Bubble.
'''
self.show_error(error, icon='atlas://electrum/gui/kivy/theming/light/important',
duration=duration, modal=modal, exit=exit, pos=pos,
arrow_pos=arrow_pos)
def show_info_bubble(self, text=_('Hello World'), pos=None, duration=0,
arrow_pos='bottom_mid', width=None, icon='', modal=False, exit=False):
'''Method to show an Information Bubble
.. parameters::
text: Message to be displayed
pos: position for the bubble
duration: duration the bubble remains on screen. 0 = click to hide
width: width of the Bubble
arrow_pos: arrow position for the bubble
'''
info_bubble = self.info_bubble
if not info_bubble:
info_bubble = self.info_bubble = Factory.InfoBubble()
win = Window
if info_bubble.parent:
win.remove_widget(info_bubble
if not info_bubble.modal else
info_bubble._modal_view)
if not arrow_pos:
info_bubble.show_arrow = False
else:
info_bubble.show_arrow = True
info_bubble.arrow_pos = arrow_pos
img = info_bubble.ids.img
if text == 'texture':
# icon holds a texture not a source image
# display the texture in full screen
text = ''
img.texture = icon
info_bubble.fs = True
info_bubble.show_arrow = False
img.allow_stretch = True
info_bubble.dim_background = True
info_bubble.background_image = 'atlas://electrum/gui/kivy/theming/light/card'
else:
info_bubble.fs = False
info_bubble.icon = icon
#if img.texture and img._coreimage:
# img.reload()
img.allow_stretch = False
info_bubble.dim_background = False
info_bubble.background_image = 'atlas://data/images/defaulttheme/bubble'
info_bubble.message = text
if not pos:
pos = (win.center[0], win.center[1] - (info_bubble.height/2))
info_bubble.show(pos, duration, width, modal=modal, exit=exit)
def tx_dialog(self, tx):
from .uix.dialogs.tx_dialog import TxDialog
d = TxDialog(self, tx)
d.open()
def sign_tx(self, *args):
threading.Thread(target=self._sign_tx, args=args).start()
def _sign_tx(self, tx, password, on_success, on_failure):
try:
self.wallet.sign_transaction(tx, password)
except InvalidPassword:
Clock.schedule_once(lambda dt: on_failure(_("Invalid PIN")))
return
on_success = run_hook('tc_sign_wrapper', self.wallet, tx, on_success, on_failure) or on_success
Clock.schedule_once(lambda dt: on_success(tx))
    def _broadcast_thread(self, tx, on_complete):
        """Broadcast *tx* on a worker thread and report back via *on_complete*.

        on_complete(status, msg) is always scheduled on the UI thread:
        status is True with the txid on success, otherwise False with a
        human-readable error message.
        """
        status = False
        try:
            self.network.run_from_another_thread(self.network.broadcast_transaction(tx))
        except TxBroadcastError as e:
            # Server rejected the tx; use the GUI-friendly message.
            msg = e.get_message_for_gui()
        except BestEffortRequestFailed as e:
            msg = repr(e)
        else:
            status, msg = True, tx.txid()
        # Hand the result back on the Kivy main thread.
        Clock.schedule_once(lambda dt: on_complete(status, msg))
    def broadcast(self, tx, pr=None):
        """Broadcast *tx*; if *pr* (a payment request) is given, mark it paid.

        Requires a connected network.  The actual broadcast runs on a
        background thread (_broadcast_thread) and the UI is updated from its
        completion callback.
        """
        def on_complete(ok, msg):
            if ok:
                self.show_info(_('Payment sent.'))
                if self.send_screen:
                    self.send_screen.do_clear()
                if pr:
                    # Link the request to the broadcast txid and persist.
                    self.wallet.invoices.set_paid(pr, tx.txid())
                    self.wallet.invoices.save()
                    self.update_tab('invoices')
            else:
                msg = msg or ''
                self.show_error(msg)
        if self.network and self.network.is_connected():
            self.show_info(_('Sending'))
            threading.Thread(target=self._broadcast_thread, args=(tx, on_complete)).start()
        else:
            self.show_info(_('Cannot broadcast transaction') + ':\n' + _('Not connected'))
def description_dialog(self, screen):
from .uix.dialogs.label_dialog import LabelDialog
text = screen.message
def callback(text):
screen.message = text
d = LabelDialog(_('Enter description'), text, callback)
d.open()
def amount_dialog(self, screen, show_max):
from .uix.dialogs.amount_dialog import AmountDialog
amount = screen.amount
if amount:
amount, u = str(amount).split()
assert u == self.base_unit
def cb(amount):
screen.amount = amount
popup = AmountDialog(show_max, amount, cb)
popup.open()
def invoices_dialog(self, screen):
from .uix.dialogs.invoices import InvoicesDialog
if len(self.wallet.invoices.sorted_list()) == 0:
self.show_info(' '.join([
_('No saved invoices.'),
_('Signed invoices are saved automatically when you scan them.'),
_('You may also save unsigned requests or contact addresses using the save button.')
]))
return
popup = InvoicesDialog(self, screen, None)
popup.update()
popup.open()
def requests_dialog(self, screen):
from .uix.dialogs.requests import RequestsDialog
if len(self.wallet.get_sorted_requests(self.electrum_config)) == 0:
self.show_info(_('No saved requests.'))
return
popup = RequestsDialog(self, screen, None)
popup.update()
popup.open()
def addresses_dialog(self, screen):
from .uix.dialogs.addresses import AddressesDialog
popup = AddressesDialog(self, screen, None)
popup.update()
popup.open()
def fee_dialog(self, label, dt):
from .uix.dialogs.fee_dialog import FeeDialog
def cb():
self.fee_status = self.electrum_config.get_fee_status()
fee_dialog = FeeDialog(self, self.electrum_config, cb)
fee_dialog.open()
def on_fee(self, event, *arg):
self.fee_status = self.electrum_config.get_fee_status()
def protected(self, msg, f, args):
if self.wallet.has_password():
on_success = lambda pw: f(*(args + (pw,)))
self.password_dialog(self.wallet, msg, on_success, lambda: None)
else:
f(*(args + (None,)))
    def delete_wallet(self):
        """Ask for confirmation, then (PIN permitting) delete the open wallet file."""
        from .uix.dialogs.question import Question
        basename = os.path.basename(self.wallet.storage.path)
        d = Question(_('Delete wallet?') + '\n' + basename, self._delete_wallet)
        d.open()
    def _delete_wallet(self, b):
        # Confirmation callback: b is True when the user accepted the dialog.
        if b:
            basename = self.wallet.basename()
            self.protected(_("Enter your PIN code to confirm deletion of {}").format(basename), self.__delete_wallet, ())
    def __delete_wallet(self, pw):
        """Verify the PIN, remove the wallet file, then reopen the default wallet."""
        wallet_path = self.get_wallet_path()
        dirname = os.path.dirname(wallet_path)
        basename = os.path.basename(wallet_path)
        if self.wallet.has_password():
            try:
                self.wallet.check_password(pw)
            except:
                # NOTE(review): bare except reports any failure (not only a
                # wrong password) as "Invalid PIN".
                self.show_error("Invalid PIN")
                return
        self.stop_wallet()
        os.unlink(wallet_path)
        self.show_error(_("Wallet removed: {}").format(basename))
        # Fall back to whatever the config now resolves as the wallet path.
        new_path = self.electrum_config.get_wallet_path()
        self.load_wallet_by_name(new_path)
    def show_seed(self, label):
        """PIN-gate, then fill *label* with the wallet seed (and passphrase)."""
        self.protected(_("Enter your PIN code in order to decrypt your seed"), self._show_seed, (label,))
    def _show_seed(self, label, password):
        # Runs after PIN entry; password is None for unprotected wallets.
        if self.wallet.has_password() and password is None:
            return
        keystore = self.wallet.keystore
        try:
            seed = keystore.get_seed(password)
            passphrase = keystore.get_passphrase(password)
        except:
            # NOTE(review): bare except - any keystore failure is reported
            # as a wrong PIN.
            self.show_error("Invalid PIN")
            return
        label.text = _('Seed') + ':\n' + seed
        if passphrase:
            label.text += '\n\n' + _('Passphrase') + ': ' + passphrase
def password_dialog(self, wallet, msg, on_success, on_failure):
from .uix.dialogs.password_dialog import PasswordDialog
if self._password_dialog is None:
self._password_dialog = PasswordDialog()
self._password_dialog.init(self, wallet, msg, on_success, on_failure)
self._password_dialog.open()
def change_password(self, cb):
from .uix.dialogs.password_dialog import PasswordDialog
if self._password_dialog is None:
self._password_dialog = PasswordDialog()
message = _("Changing PIN code.") + '\n' + _("Enter your current PIN:")
def on_success(old_password, new_password):
self.wallet.update_password(old_password, new_password)
self.show_info(_("Your PIN code was updated"))
on_failure = lambda: self.show_error(_("PIN codes do not match"))
self._password_dialog.init(self, self.wallet, message, on_success, on_failure, is_change=1)
self._password_dialog.open()
def export_private_keys(self, pk_label, addr):
if self.wallet.is_watching_only():
self.show_info(_('This is a watching-only wallet. It does not contain private keys.'))
return
def show_private_key(addr, pk_label, password):
if self.wallet.has_password() and password is None:
return
if not self.wallet.can_export():
return
try:
key = str(self.wallet.export_private_key(addr, password)[0])
pk_label.data = key
except InvalidPassword:
self.show_error("Invalid PIN")
return
self.protected(_("Enter your PIN code in order to decrypt your private key"), show_private_key, (addr, pk_label))<|fim▁end|>
|
text = self.format_amount(c+x+u)
self.balance = str(text.strip()) + ' [size=22dp]%s[/size]'% self.base_unit
|
<|file_name|>Int16Record.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************\
*
* FILE: Int16Record.cpp
*
* This source file is part of DIME.
* Copyright (C) 1998-1999 by Systems In Motion. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License, version 2, as
* published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License (the accompanying file named COPYING) for more
* details.
*
**************************************************************************
*
* If you need DIME for a non-GPL project, contact Systems In Motion
* to acquire a Professional Edition License:
*<|fim▁hole|> * NORWAY Fax: +47 67172912
*
\**************************************************************************/
/*!
\class dimeInt16Record dime/records/Int16Record.h
\brief The dimeInt16Record class is a container class for 16-bit integer records.
*/
#include <dime/records/Int16Record.h>
#include <dime/Input.h>
#include <dime/Output.h>
#include <dime/util/MemHandler.h>
/*!
  Constructor. Stores \a group_code (via the dimeRecord base) and the
  initial 16-bit \a val.
*/

dimeInt16Record::dimeInt16Record(const int group_code, const int16 val)
  :dimeRecord(group_code)
{
  this->setValue(val);
}

//! Returns a deep copy, allocated through the memory handler \a mh.

dimeRecord *
dimeInt16Record::copy(dimeMemHandler * const mh) const
{
  return new(mh) dimeInt16Record(this->groupCode, this->value);
}
/*!
  Sets the 16-bit integer value stored in this record.
*/

void
dimeInt16Record::setValue(const int16 val)
{
  this->value = val;
}

/*!
  Returns the 16-bit integer value stored in this record.
*/

int16
dimeInt16Record::getValue() const
{
  return this->value;
}

//! Returns the dime type id identifying 16-bit integer records.

int
dimeInt16Record::typeId() const
{
  return dimeBase::dimeInt16RecordType;
}

//! Reads the 16-bit value from \a in; returns false on read failure.

bool
dimeInt16Record::read(dimeInput * const in)
{
  return in->readInt16(this->value);
}
//! Writes the group code (base class) followed by the 16-bit value.

bool
dimeInt16Record::write(dimeOutput * const out)
{
  // Short-circuit: the value is only emitted if the group code was written.
  return dimeRecord::write(out) && out->writeInt16(this->value);
}
//!
void
dimeInt16Record::setValue(const dimeParam ¶m, dimeMemHandler * const )
{
this->value = param.int16_data;
}
//!
void
dimeInt16Record::getValue(dimeParam ¶m) const
{
param.int16_data = this->value;
}<|fim▁end|>
|
* Systems In Motion http://www.sim.no/
* Prof. Brochs gate 6 [email protected]
* N-7030 Trondheim Voice: +47 22114160
|
<|file_name|>mim-video-playlist.js<|end_file_name|><|fim▁begin|>define(['knockout', 'Q', 'model',
'css!mediaelement-css', 'css!dataTables-bootstrap-css', 'css!datatables-scroller-css', 'text!./mim-video-playlist.html',
'datatables', 'knockout.punches', 'mediaelement', 'datatables-bootstrap', 'datatables-scroller'],
function (ko, Q, model, css, dataTablesBootstrapCss,datatablesScrollerCss, templateMarkup) {
function MimVideoPlaylist(params) {<|fim▁hole|> self.videoSuffix = '.mp4';
self.videoSuffix = '.jpg';
self.isVideoDataLoaded = ko.observable(false);
self.url = {
getVideoList: self.urlApi + 'video'
};
self.list = ko.observableArray([]);
self.list0 = ko.observableArray([]);
self.fileName = ko.observable();
self.posterFileName = ko.observable();
self.isVideoVisible = ko.observable(false);
self.mustPlay = ko.observable(false);
self.playOnRender = ko.observable(false);
self.playerAssign = function () {
delete self.player;
self.player =
$('#videoContent').mediaelementplayer({
alwaysShowControls: false,
features: ['playpause', 'volume'],
success: function (mediaElement, domObject) {
if (self.mustPlay()) mediaElement.play();
},
error: function (data) {
}
});
};
self.playlistClick = function (data) {
self.isVideoVisible(false);
self.fileName(data.video);
ko.utils.arrayForEach(self.list(), function (item) {
item.isPlaying(false)
});
data.isPlaying(true);
self.posterFileName(data.poster);
self.mustPlay(true);
self.isVideoVisible(true);
return true;
}
self.player = null;
self.mapping = {
create: function (options) {
var vmCreate = ko.mapping.fromJS(options.data, {
'ignore': ['Video','Title']
});
vmCreate.isPlaying = ko.observable(false);
vmCreate.isNotPlaying = ko.pureComputed(function () { return !vmCreate.isPlaying(); });
vmCreate.poster = self.urlBaseVideo+ options.data.Video + self.posterSuffix
vmCreate.video = self.urlBaseVideo + options.data.Video + self.videoSuffix;
vmCreate.title = options.data.Title;
return vmCreate;
}
};
self.initModel = function () {
Q(model.post(self.url.getVideoList))
.then(function (data) {
ko.mapping.fromJS(data.d, self.mapping, self.list);
self.fileName(self.list()[0].video);
self.posterFileName(self.list()[0].poster);
self.isVideoDataLoaded(true);
});
}
self.initView = function () {
$('#mim-playlist-innner').DataTable(
{
responsive: true,
"deferRender": true,
"jQueryUI": false,
"sDom": 't',
dom: "S",
"sScrollY": "360px",
scrollCollapse: true,
"deferRender": true,
"autoWidth": true,
"autoHigh": false,
searching: false,
ordering: false,
"info": false,
});
$('#mim-playlist-innner').removeClass('display').addClass('table table-striped table-bordered');
$('.dataTables_scrollBody').css('height', 360);
$('.dataTables_scrollBody').css('width', '100%');
};
ko.punches.enableAll();
self.initModel();
self.initView();
self.isVideoVisible(true);
}
MimVideoPlaylist.prototype.dispose = function() { };
return { viewModel: MimVideoPlaylist, template: templateMarkup };
});<|fim▁end|>
|
var self = this;
self.urlBaseVideo = 'videoDirectory/';
self.urlApi = 'Api/';
|
<|file_name|>auto-ref-bounded-ty-param.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>use std::io;
// Exercises auto-ref through a blanket impl: every type implementing Baz
// automatically implements Foo as well.
// NOTE(review): pre-1.0 Rust (`int`, `io::println`, `to_str`) - will not
// build on a modern toolchain.
trait Foo {
    fn f(&self);
}

struct Bar {
    x: int
}

trait Baz {
    fn g(&self);
}

// Blanket impl: Foo::f forwards to Baz::g for any T: Baz.
impl<T:Baz> Foo for T {
    fn f(&self) {
        self.g();
    }
}

impl Baz for Bar {
    fn g(&self) {
        io::println(self.x.to_str());
    }
}

pub fn main() {
    let y = Bar { x: 42 };
    // Resolves through the blanket impl and prints "42".
    y.f();
}
| |
<|file_name|>integer_sequence.rs<|end_file_name|><|fim▁begin|>// Implements http://rosettacode.org/wiki/Integer_sequence
extern crate num;
use num::{BigUint, One};
fn main() {
let one: BigUint = One::one();<|fim▁hole|> let mut i: BigUint = One::one();
loop {
println!("{}", i);
i = &i + &one;
}
}<|fim▁end|>
| |
<|file_name|>status_handler_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Wercker Holding BV
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dockerlocal
import (
"testing"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/stretchr/testify/suite"
"github.com/wercker/wercker/util"
)
type StatusHandlerSuite struct {
*util.TestSuite
}
// TestStatusHandlerSuite wires the testify suite into the standard
// `go test` runner.
func TestStatusHandlerSuite(t *testing.T) {
	tester := &StatusHandlerSuite{TestSuite: &util.TestSuite{}}
	suite.Run(t, tester)
}
func (s *StatusHandlerSuite) TestPullParallelDownloads() {
testSteps := []struct {
in *jsonmessage.JSONMessage
expected string
}{
{
&jsonmessage.JSONMessage{
ID: "ubuntu:latest",
Status: "The image you are pulling has been verified",
},
"The image you are pulling has been verified: ubuntu:latest\n",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Pulling fs layer",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"Pulling fs layer: 511136ea3c5a\n",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Pulling fs layer",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"Pulling fs layer: c7b7c6419568\n",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Downloading",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 100},
},
"Downloading: 511136ea3c5a (0%)",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Downloading",
Progress: &jsonmessage.JSONProgress{Current: 50, Start: 0, Total: 100},
},
"\rDownloading: 511136ea3c5a (50%)",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Downloading",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 100},
},
"\rDownloading: 511136ea3c5a (50%), Downloading: c7b7c6419568 (0%)",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Download complete",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"\rDownload complete: 511136ea3c5a \nDownloading: c7b7c6419568 (0%)",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Downloading",
Progress: &jsonmessage.JSONProgress{Current: 50, Start: 0, Total: 100},
},
"\rDownloading: c7b7c6419568 (50%)",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Download complete",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"\rDownload complete: c7b7c6419568\n",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Extracting",
Progress: &jsonmessage.JSONProgress{Current: 10, Start: 0, Total: 100},
},
"Extracting: 511136ea3c5a (10%)",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Pull complete",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"\rPull complete: 511136ea3c5a \n",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Extracting",
Progress: &jsonmessage.JSONProgress{Current: 55, Start: 0, Total: 100},
},
"Extracting: c7b7c6419568 (55%)",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Pull complete",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"\rPull complete: c7b7c6419568 \n",
},
{
&jsonmessage.JSONMessage{
Status: "Status: Downloaded newer image for ubuntu:latest;",
},
"Status: Downloaded newer image for ubuntu:latest;\n",
},
}
p := NewJSONMessageProcessor()
for _, step := range testSteps {
actual, err := p.ProcessJSONMessage(step.in)
s.Nil(err)
s.Equal(actual, step.expected)
}
}
func (s *StatusHandlerSuite) TestPushParallelUploads() {
testSteps := []struct {
in *jsonmessage.JSONMessage
expected string
}{
{
&jsonmessage.JSONMessage{
Status: "The push refers to a repository [127.0.0.1:3000/bvdberg/pass] (len: 1)",
},
"Pushing to registry\n",
},
{
&jsonmessage.JSONMessage{
Status: "Sending image list",
},
"Sending image list\n",
},
{
&jsonmessage.JSONMessage{
Status: "Pushing repository 127.0.0.1:3000/bvdberg/pass (1 tags)",
},
"Pushing 1 tag(s)\n", // TODO
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Pushing",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"Pushing: 511136ea3c5a",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Buffering to disk",
Progress: &jsonmessage.JSONProgress{Current: 10, Start: 0, Total: 0},
},
"\rBuffering to disk: 511136ea3c5a (10 B)",
},
// buffering done?
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Pushing",
Progress: &jsonmessage.JSONProgress{Current: 10, Start: 0, Total: 100},
},
"\rPushing: 511136ea3c5a (10%) ",
},
{
&jsonmessage.JSONMessage{
ID: "511136ea3c5a",
Status: "Image successfully pushed",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"\rImage successfully pushed: 511136ea3c5a\n",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Pushing",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"Pushing: c7b7c6419568",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Buffering to disk",
Progress: &jsonmessage.JSONProgress{Current: 524287, Start: 0, Total: 0},
},
"\rBuffering to disk: c7b7c6419568 (511.9 KB)",<|fim▁hole|> &jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Pushing",
Progress: &jsonmessage.JSONProgress{Current: 44, Start: 0, Total: 100},
},
"\rPushing: c7b7c6419568 (44%) ",
},
{
&jsonmessage.JSONMessage{
ID: "c7b7c6419568",
Status: "Image successfully pushed",
Progress: &jsonmessage.JSONProgress{Current: 0, Start: 0, Total: 0},
},
"\rImage successfully pushed: c7b7c6419568\n",
},
{
&jsonmessage.JSONMessage{
Status: "Pushing tag for rev [a636b9702b50] on {http://127.0.0.1:3000/v1/repositories/bvdberg/pass/tags/build-549305dd56000d6d0700027e};",
},
"Pushing tag for image: a636b9702b50\n", // TODO
},
}
p := NewJSONMessageProcessor()
for _, step := range testSteps {
actual, err := p.ProcessJSONMessage(step.in)
s.Nil(err)
s.Equal(actual, step.expected)
}
}
func (s *StatusHandlerSuite) TestFormatDiskUnitBytes() {
testSteps := []struct {
in int64
expected string
}{
{1, "1 B"},
{1023, "1023 B"},
{1024, "1 KB"},
{1025, "1 KB"},
{1536, "1.5 KB"},
{1048575, "1023.9 KB"},
{1048576, "1 MB"},
{1048577, "1 MB"},
{1073741823, "1023.9 MB"},
{1073741824, "1 GB"},
{1073741825, "1 GB"},
{2147483647, "1.9 GB"},
{1099511628800, "1024 GB"},
{1099511628801, "1024 GB"},
}
for _, step := range testSteps {
actual := formatDiskUnit(step.in)
s.Equal(actual, step.expected)
}
}<|fim▁end|>
|
},
// Buffering done?
{
|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp');
var browserSync = require('browser-sync');
var jshint = require('gulp-jshint');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');
var rename = require('gulp-rename');
// 静态服务器
gulp.task('browser-sync', function() {
browserSync.init({<|fim▁hole|> files: "src/**",
server: {
baseDir: "src/"
}
});
});
// JavaScript lint check (jshint)
gulp.task('jshint', function() {
    return gulp.src('src/js/*.js')
        // Fix: the original `.on('error')` passed no listener, and
        // EventEmitter#on throws a TypeError when the listener argument is
        // not a function - the task crashed as soon as it ran.
        .on('error', function (err) { console.error(err.message); })
        .pipe(jshint())
        .pipe(jshint.reporter('default'));
});
gulp.task('minifyjs', function() {
return gulp.src('src/js/*.js')
.pipe(concat('all.js'))
.pipe(gulp.dest('dist/js'))
.pipe(uglify())
.pipe(rename({ suffix: '.min' }))
.pipe(gulp.dest('dist/js'));
});
// 事件监听
gulp.task('watch', function() {
gulp.watch('src/js/*.js', ['jshint','minifyjs']);
});
gulp.task('default',['browser-sync','watch']);<|fim▁end|>
| |
<|file_name|>grid-view.ts<|end_file_name|><|fim▁begin|>import {Rect} from "../base/rect";
import {Widget} from "./widget";
import {WidgetFactory} from "./widget-factory";
import {Layouter} from "../layouters/layouter";
import {WidgetRecyclableCreator} from "./widget-recyclable-creator";
import {GridLayouter} from "../layouters/grid-layouter";
import {ScrollView, ScrollerBarVisibility} from "./scroll-view";
/**
* 网格视图。<|fim▁hole|> */
export class GridView extends ScrollView {
/**
* 列数。列数和列宽设置其中之一即可。
*/
public set cols(value:number) {
this._cols = value;
var layouter = <GridLayouter>this._childrenLayouter;
layouter.cols = value;
}
public get cols() : number {
return this._cols;
}
/**
* 列宽。列数和列宽设置其中之一即可。
*/
public set colWidth(value:number) {
this._colWidth = value;
var layouter = <GridLayouter>this._childrenLayouter;
layouter.colWidth = value;
}
public get colWidth() : number {
return this._colWidth;
}
/**
* 行数。行数和行高设置其中之一即可。
*/
public set rows(value:number) {
this._rows = value;
var layouter = <GridLayouter>this._childrenLayouter;
layouter.rows = value;
}
public get rows() : number {
return this._rows;
}
/**
* 行高。行数和行高设置其中之一即可。
*/
public set rowHeight(value:number) {
this._rowHeight = value;
var layouter = <GridLayouter>this._childrenLayouter;
layouter.rowHeight = value;
}
public get rowHeight() : number {
return this._rowHeight;
}
/**
* 每一网格周围的空白。
*/
public setItemMargins(margins:any) : Widget{
var layouter = <GridLayouter>this._childrenLayouter;
layouter.leftMargin = margins.left || margins.all || 0;
layouter.rightMargin = margins.right || margins.all || 0;
layouter.topMargin = margins.top || margins.all || 0;
layouter.bottomMargin = margins.bottom || margins.all || 0;
return this;
}
public get childrenLayouter() : Layouter{
return this._childrenLayouter;
}
protected doDrawChildren(ctx:any) : Widget {
var top = this.offsetY;
var bottom = top + this.h;
this._children.forEach(function(child) {
var visible = child.visible && child.y < bottom && (child.y + child.h) > top;
if(visible) {
child.draw(ctx);
}
});
return this;
}
public relayoutChildren() : Rect {
this.ensureOptions();
var r = super.relayoutChildren();
this.contentW = r.w + this.leftPadding + this.rightPadding;
this.contentH = r.h + this.topPadding + this.bottomPadding;
return r;
}
protected ensureOptions() {
if(this.rows > 0 && this.cols > 0) {
this.scrollerOptions.scrollingX = false;
this.scrollerOptions.scrollingY = false;
this.scrollBarStyle.vBarVisibility = ScrollerBarVisibility.INVISIBLE;
this.scrollBarStyle.hBarVisibility = ScrollerBarVisibility.INVISIBLE;
}else if(this.cols > 0) {
this.scrollerOptions.scrollingX = false;
this.scrollerOptions.scrollingY = true;
this.scrollBarStyle.vBarVisibility = ScrollerBarVisibility.AUTO;
this.scrollBarStyle.hBarVisibility = ScrollerBarVisibility.INVISIBLE;
}else if(this.rows > 0) {
this.scrollerOptions.scrollingX = true;
this.scrollerOptions.scrollingY = false;
this.scrollBarStyle.hBarVisibility = ScrollerBarVisibility.AUTO;
this.scrollBarStyle.vBarVisibility = ScrollerBarVisibility.INVISIBLE;
}else {
this.scrollerOptions.scrollingX = false;
this.scrollerOptions.scrollingY = true;
this.scrollBarStyle.vBarVisibility = ScrollerBarVisibility.AUTO;
this.scrollBarStyle.hBarVisibility = ScrollerBarVisibility.INVISIBLE;
}
}
protected onToJson(json:any) {
delete json.childrenLayouter;
}
protected onInit() {
super.onInit();
this.relayoutChildren();
}
protected _rows : number;
protected _cols : number;
protected _colWidth : number;
protected _rowHeight : number;
constructor() {
super(GridView.TYPE);
}
protected onReset() {
super.onReset();
this._childrenLayouter = GridLayouter.createWithOptions({cols:this.cols, rows:this.rows});
}
protected static defProps = Object.assign({}, ScrollView.defProps,
{_cols:3, _rows:3, _rowHeight:0, _colWidth:0});
protected getDefProps() : any {
return GridView.defProps;
}
public static TYPE = "grid-view";
private static recycleBinGridView = WidgetRecyclableCreator.create(GridView);
public static create(options?:any) : GridView {
return <GridView>GridView.recycleBinGridView.create(options);
}
};
WidgetFactory.register(GridView.TYPE, GridView.create);<|fim▁end|>
| |
<|file_name|>xgen_ltdc_layer.go<|end_file_name|><|fim▁begin|>package ltdc
// DO NOT EDIT THIS FILE. GENERATED BY xgen.
import (
"bits"
"mmio"
"unsafe"
"stm32/o/f40_41xxx/mmap"
)
type LTDC_Layer_Periph struct {
CR RCR
WHPCR RWHPCR
WVPCR RWVPCR
CKCR RCKCR
PFCR RPFCR<|fim▁hole|> BFCR RBFCR
_ [2]uint32
CFBAR RCFBAR
CFBLR RCFBLR
CFBLNR RCFBLNR
_ [3]uint32
CLUTWR RCLUTWR
}
func (p *LTDC_Layer_Periph) BaseAddr() uintptr {
return uintptr(unsafe.Pointer(p))
}
//emgo:const
var LTDC_Layer1 = (*LTDC_Layer_Periph)(unsafe.Pointer(uintptr(mmap.LTDC_Layer1_BASE)))
//emgo:const
var LTDC_Layer2 = (*LTDC_Layer_Periph)(unsafe.Pointer(uintptr(mmap.LTDC_Layer2_BASE)))
type CR uint32
func (b CR) Field(mask CR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CR) J(v int) CR {
return CR(bits.MakeField32(v, uint32(mask)))
}
type RCR struct{ mmio.U32 }
func (r *RCR) Bits(mask CR) CR { return CR(r.U32.Bits(uint32(mask))) }
func (r *RCR) StoreBits(mask, b CR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCR) SetBits(mask CR) { r.U32.SetBits(uint32(mask)) }
func (r *RCR) ClearBits(mask CR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCR) Load() CR { return CR(r.U32.Load()) }
func (r *RCR) Store(b CR) { r.U32.Store(uint32(b)) }
func (r *RCR) AtomicStoreBits(mask, b CR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCR) AtomicSetBits(mask CR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCR) AtomicClearBits(mask CR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCR struct{ mmio.UM32 }
func (rm RMCR) Load() CR { return CR(rm.UM32.Load()) }
func (rm RMCR) Store(b CR) { rm.UM32.Store(uint32(b)) }
type WHPCR uint32
func (b WHPCR) Field(mask WHPCR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask WHPCR) J(v int) WHPCR {
return WHPCR(bits.MakeField32(v, uint32(mask)))
}
type RWHPCR struct{ mmio.U32 }
func (r *RWHPCR) Bits(mask WHPCR) WHPCR { return WHPCR(r.U32.Bits(uint32(mask))) }
func (r *RWHPCR) StoreBits(mask, b WHPCR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RWHPCR) SetBits(mask WHPCR) { r.U32.SetBits(uint32(mask)) }
func (r *RWHPCR) ClearBits(mask WHPCR) { r.U32.ClearBits(uint32(mask)) }
func (r *RWHPCR) Load() WHPCR { return WHPCR(r.U32.Load()) }
func (r *RWHPCR) Store(b WHPCR) { r.U32.Store(uint32(b)) }
func (r *RWHPCR) AtomicStoreBits(mask, b WHPCR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RWHPCR) AtomicSetBits(mask WHPCR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RWHPCR) AtomicClearBits(mask WHPCR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMWHPCR struct{ mmio.UM32 }
func (rm RMWHPCR) Load() WHPCR { return WHPCR(rm.UM32.Load()) }
func (rm RMWHPCR) Store(b WHPCR) { rm.UM32.Store(uint32(b)) }
type WVPCR uint32
func (b WVPCR) Field(mask WVPCR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask WVPCR) J(v int) WVPCR {
return WVPCR(bits.MakeField32(v, uint32(mask)))
}
type RWVPCR struct{ mmio.U32 }
func (r *RWVPCR) Bits(mask WVPCR) WVPCR { return WVPCR(r.U32.Bits(uint32(mask))) }
func (r *RWVPCR) StoreBits(mask, b WVPCR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RWVPCR) SetBits(mask WVPCR) { r.U32.SetBits(uint32(mask)) }
func (r *RWVPCR) ClearBits(mask WVPCR) { r.U32.ClearBits(uint32(mask)) }
func (r *RWVPCR) Load() WVPCR { return WVPCR(r.U32.Load()) }
func (r *RWVPCR) Store(b WVPCR) { r.U32.Store(uint32(b)) }
func (r *RWVPCR) AtomicStoreBits(mask, b WVPCR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RWVPCR) AtomicSetBits(mask WVPCR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RWVPCR) AtomicClearBits(mask WVPCR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMWVPCR struct{ mmio.UM32 }
func (rm RMWVPCR) Load() WVPCR { return WVPCR(rm.UM32.Load()) }
func (rm RMWVPCR) Store(b WVPCR) { rm.UM32.Store(uint32(b)) }
type CKCR uint32
func (b CKCR) Field(mask CKCR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CKCR) J(v int) CKCR {
return CKCR(bits.MakeField32(v, uint32(mask)))
}
type RCKCR struct{ mmio.U32 }
func (r *RCKCR) Bits(mask CKCR) CKCR { return CKCR(r.U32.Bits(uint32(mask))) }
func (r *RCKCR) StoreBits(mask, b CKCR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCKCR) SetBits(mask CKCR) { r.U32.SetBits(uint32(mask)) }
func (r *RCKCR) ClearBits(mask CKCR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCKCR) Load() CKCR { return CKCR(r.U32.Load()) }
func (r *RCKCR) Store(b CKCR) { r.U32.Store(uint32(b)) }
func (r *RCKCR) AtomicStoreBits(mask, b CKCR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCKCR) AtomicSetBits(mask CKCR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCKCR) AtomicClearBits(mask CKCR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCKCR struct{ mmio.UM32 }
func (rm RMCKCR) Load() CKCR { return CKCR(rm.UM32.Load()) }
func (rm RMCKCR) Store(b CKCR) { rm.UM32.Store(uint32(b)) }
type PFCR uint32
func (b PFCR) Field(mask PFCR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask PFCR) J(v int) PFCR {
return PFCR(bits.MakeField32(v, uint32(mask)))
}
type RPFCR struct{ mmio.U32 }
func (r *RPFCR) Bits(mask PFCR) PFCR { return PFCR(r.U32.Bits(uint32(mask))) }
func (r *RPFCR) StoreBits(mask, b PFCR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RPFCR) SetBits(mask PFCR) { r.U32.SetBits(uint32(mask)) }
func (r *RPFCR) ClearBits(mask PFCR) { r.U32.ClearBits(uint32(mask)) }
func (r *RPFCR) Load() PFCR { return PFCR(r.U32.Load()) }
func (r *RPFCR) Store(b PFCR) { r.U32.Store(uint32(b)) }
func (r *RPFCR) AtomicStoreBits(mask, b PFCR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RPFCR) AtomicSetBits(mask PFCR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RPFCR) AtomicClearBits(mask PFCR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMPFCR struct{ mmio.UM32 }
func (rm RMPFCR) Load() PFCR { return PFCR(rm.UM32.Load()) }
func (rm RMPFCR) Store(b PFCR) { rm.UM32.Store(uint32(b)) }
type CACR uint32
func (b CACR) Field(mask CACR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CACR) J(v int) CACR {
return CACR(bits.MakeField32(v, uint32(mask)))
}
type RCACR struct{ mmio.U32 }
func (r *RCACR) Bits(mask CACR) CACR { return CACR(r.U32.Bits(uint32(mask))) }
func (r *RCACR) StoreBits(mask, b CACR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCACR) SetBits(mask CACR) { r.U32.SetBits(uint32(mask)) }
func (r *RCACR) ClearBits(mask CACR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCACR) Load() CACR { return CACR(r.U32.Load()) }
func (r *RCACR) Store(b CACR) { r.U32.Store(uint32(b)) }
func (r *RCACR) AtomicStoreBits(mask, b CACR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCACR) AtomicSetBits(mask CACR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCACR) AtomicClearBits(mask CACR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCACR struct{ mmio.UM32 }
func (rm RMCACR) Load() CACR { return CACR(rm.UM32.Load()) }
func (rm RMCACR) Store(b CACR) { rm.UM32.Store(uint32(b)) }
type DCCR uint32
func (b DCCR) Field(mask DCCR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask DCCR) J(v int) DCCR {
return DCCR(bits.MakeField32(v, uint32(mask)))
}
type RDCCR struct{ mmio.U32 }
func (r *RDCCR) Bits(mask DCCR) DCCR { return DCCR(r.U32.Bits(uint32(mask))) }
func (r *RDCCR) StoreBits(mask, b DCCR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RDCCR) SetBits(mask DCCR) { r.U32.SetBits(uint32(mask)) }
func (r *RDCCR) ClearBits(mask DCCR) { r.U32.ClearBits(uint32(mask)) }
func (r *RDCCR) Load() DCCR { return DCCR(r.U32.Load()) }
func (r *RDCCR) Store(b DCCR) { r.U32.Store(uint32(b)) }
func (r *RDCCR) AtomicStoreBits(mask, b DCCR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RDCCR) AtomicSetBits(mask DCCR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RDCCR) AtomicClearBits(mask DCCR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMDCCR struct{ mmio.UM32 }
func (rm RMDCCR) Load() DCCR { return DCCR(rm.UM32.Load()) }
func (rm RMDCCR) Store(b DCCR) { rm.UM32.Store(uint32(b)) }
type BFCR uint32
func (b BFCR) Field(mask BFCR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask BFCR) J(v int) BFCR {
return BFCR(bits.MakeField32(v, uint32(mask)))
}
type RBFCR struct{ mmio.U32 }
func (r *RBFCR) Bits(mask BFCR) BFCR { return BFCR(r.U32.Bits(uint32(mask))) }
func (r *RBFCR) StoreBits(mask, b BFCR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RBFCR) SetBits(mask BFCR) { r.U32.SetBits(uint32(mask)) }
func (r *RBFCR) ClearBits(mask BFCR) { r.U32.ClearBits(uint32(mask)) }
func (r *RBFCR) Load() BFCR { return BFCR(r.U32.Load()) }
func (r *RBFCR) Store(b BFCR) { r.U32.Store(uint32(b)) }
func (r *RBFCR) AtomicStoreBits(mask, b BFCR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RBFCR) AtomicSetBits(mask BFCR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RBFCR) AtomicClearBits(mask BFCR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMBFCR struct{ mmio.UM32 }
func (rm RMBFCR) Load() BFCR { return BFCR(rm.UM32.Load()) }
func (rm RMBFCR) Store(b BFCR) { rm.UM32.Store(uint32(b)) }
type CFBAR uint32
func (b CFBAR) Field(mask CFBAR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CFBAR) J(v int) CFBAR {
return CFBAR(bits.MakeField32(v, uint32(mask)))
}
type RCFBAR struct{ mmio.U32 }
func (r *RCFBAR) Bits(mask CFBAR) CFBAR { return CFBAR(r.U32.Bits(uint32(mask))) }
func (r *RCFBAR) StoreBits(mask, b CFBAR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCFBAR) SetBits(mask CFBAR) { r.U32.SetBits(uint32(mask)) }
func (r *RCFBAR) ClearBits(mask CFBAR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCFBAR) Load() CFBAR { return CFBAR(r.U32.Load()) }
func (r *RCFBAR) Store(b CFBAR) { r.U32.Store(uint32(b)) }
func (r *RCFBAR) AtomicStoreBits(mask, b CFBAR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCFBAR) AtomicSetBits(mask CFBAR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCFBAR) AtomicClearBits(mask CFBAR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCFBAR struct{ mmio.UM32 }
func (rm RMCFBAR) Load() CFBAR { return CFBAR(rm.UM32.Load()) }
func (rm RMCFBAR) Store(b CFBAR) { rm.UM32.Store(uint32(b)) }
type CFBLR uint32
func (b CFBLR) Field(mask CFBLR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CFBLR) J(v int) CFBLR {
return CFBLR(bits.MakeField32(v, uint32(mask)))
}
type RCFBLR struct{ mmio.U32 }
func (r *RCFBLR) Bits(mask CFBLR) CFBLR { return CFBLR(r.U32.Bits(uint32(mask))) }
func (r *RCFBLR) StoreBits(mask, b CFBLR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCFBLR) SetBits(mask CFBLR) { r.U32.SetBits(uint32(mask)) }
func (r *RCFBLR) ClearBits(mask CFBLR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCFBLR) Load() CFBLR { return CFBLR(r.U32.Load()) }
func (r *RCFBLR) Store(b CFBLR) { r.U32.Store(uint32(b)) }
func (r *RCFBLR) AtomicStoreBits(mask, b CFBLR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCFBLR) AtomicSetBits(mask CFBLR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCFBLR) AtomicClearBits(mask CFBLR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCFBLR struct{ mmio.UM32 }
func (rm RMCFBLR) Load() CFBLR { return CFBLR(rm.UM32.Load()) }
func (rm RMCFBLR) Store(b CFBLR) { rm.UM32.Store(uint32(b)) }
type CFBLNR uint32
func (b CFBLNR) Field(mask CFBLNR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CFBLNR) J(v int) CFBLNR {
return CFBLNR(bits.MakeField32(v, uint32(mask)))
}
type RCFBLNR struct{ mmio.U32 }
func (r *RCFBLNR) Bits(mask CFBLNR) CFBLNR { return CFBLNR(r.U32.Bits(uint32(mask))) }
func (r *RCFBLNR) StoreBits(mask, b CFBLNR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCFBLNR) SetBits(mask CFBLNR) { r.U32.SetBits(uint32(mask)) }
func (r *RCFBLNR) ClearBits(mask CFBLNR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCFBLNR) Load() CFBLNR { return CFBLNR(r.U32.Load()) }
func (r *RCFBLNR) Store(b CFBLNR) { r.U32.Store(uint32(b)) }
func (r *RCFBLNR) AtomicStoreBits(mask, b CFBLNR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCFBLNR) AtomicSetBits(mask CFBLNR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCFBLNR) AtomicClearBits(mask CFBLNR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCFBLNR struct{ mmio.UM32 }
func (rm RMCFBLNR) Load() CFBLNR { return CFBLNR(rm.UM32.Load()) }
func (rm RMCFBLNR) Store(b CFBLNR) { rm.UM32.Store(uint32(b)) }
type CLUTWR uint32
func (b CLUTWR) Field(mask CLUTWR) int {
return bits.Field32(uint32(b), uint32(mask))
}
func (mask CLUTWR) J(v int) CLUTWR {
return CLUTWR(bits.MakeField32(v, uint32(mask)))
}
type RCLUTWR struct{ mmio.U32 }
func (r *RCLUTWR) Bits(mask CLUTWR) CLUTWR { return CLUTWR(r.U32.Bits(uint32(mask))) }
func (r *RCLUTWR) StoreBits(mask, b CLUTWR) { r.U32.StoreBits(uint32(mask), uint32(b)) }
func (r *RCLUTWR) SetBits(mask CLUTWR) { r.U32.SetBits(uint32(mask)) }
func (r *RCLUTWR) ClearBits(mask CLUTWR) { r.U32.ClearBits(uint32(mask)) }
func (r *RCLUTWR) Load() CLUTWR { return CLUTWR(r.U32.Load()) }
func (r *RCLUTWR) Store(b CLUTWR) { r.U32.Store(uint32(b)) }
func (r *RCLUTWR) AtomicStoreBits(mask, b CLUTWR) { r.U32.AtomicStoreBits(uint32(mask), uint32(b)) }
func (r *RCLUTWR) AtomicSetBits(mask CLUTWR) { r.U32.AtomicSetBits(uint32(mask)) }
func (r *RCLUTWR) AtomicClearBits(mask CLUTWR) { r.U32.AtomicClearBits(uint32(mask)) }
type RMCLUTWR struct{ mmio.UM32 }
func (rm RMCLUTWR) Load() CLUTWR { return CLUTWR(rm.UM32.Load()) }
func (rm RMCLUTWR) Store(b CLUTWR) { rm.UM32.Store(uint32(b)) }<|fim▁end|>
|
CACR RCACR
DCCR RDCCR
|
<|file_name|>handlers_test.go<|end_file_name|><|fim▁begin|>package request_test
import (
"reflect"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/awstesting/unit"
"github.com/aws/aws-sdk-go/service/s3"
)
// TestHandlerList checks that a handler pushed onto a HandlerList runs when
// the list is executed, and that it can mutate both captured state and the
// request's Data field.
func TestHandlerList(t *testing.T) {
	accumulated := ""
	req := &request.Request{}
	handlers := request.HandlerList{}
	handlers.PushBack(func(r *request.Request) {
		accumulated += "a"
		r.Data = accumulated
	})
	handlers.Run(req)
	if e, a := "a", accumulated; e != a {
		t.Errorf("expect %q update got %q", e, a)
	}
	if e, a := "a", req.Data.(string); e != a {
		t.Errorf("expect %q data update got %q", e, a)
	}
}
// TestMultipleHandlers verifies execution order: the PushFront handler runs
// before the PushBack handler, so the back handler has the final say on the
// request's Data field (which it sets to nil).
func TestMultipleHandlers(t *testing.T) {
	req := &request.Request{}
	handlers := request.HandlerList{}
	handlers.PushBack(func(r *request.Request) { r.Data = nil })
	handlers.PushFront(func(r *request.Request) { r.Data = aws.Bool(true) })
	handlers.Run(req)
	if req.Data != nil {
		t.Error("Expected handler to execute")
	}
}
// TestNamedHandlers verifies that named handlers can be registered multiple
// times and that Remove deletes every entry registered under the removed
// handler's name, leaving differently-named and anonymous handlers alone.
func TestNamedHandlers(t *testing.T) {
	l := request.HandlerList{}
	named := request.NamedHandler{Name: "Name", Fn: func(r *request.Request) {}}
	named2 := request.NamedHandler{Name: "NotName", Fn: func(r *request.Request) {}}
	l.PushBackNamed(named)
	l.PushBackNamed(named)
	l.PushBackNamed(named2)
	l.PushBack(func(r *request.Request) {})
	if e, a := 4, l.Len(); e != a {
		t.Errorf("expect %d list length, got %d", e, a)
	}
	// Removing "named" must drop both copies registered under "Name".
	l.Remove(named)
	if e, a := 2, l.Len(); e != a {
		t.Errorf("expect %d list length, got %d", e, a)
	}
}
// TestSwapHandlers verifies that SwapNamed replaces the handler registered
// under the matching name while leaving handlers with other names untouched:
// after the swap, the swapped-out function never runs and the swapped-in one
// runs in its place.
func TestSwapHandlers(t *testing.T) {
	firstHandlerCalled := 0
	swappedOutHandlerCalled := 0
	swappedInHandlerCalled := 0
	l := request.HandlerList{}
	named := request.NamedHandler{Name: "Name", Fn: func(r *request.Request) {
		firstHandlerCalled++
	}}
	named2 := request.NamedHandler{Name: "SwapOutName", Fn: func(r *request.Request) {
		swappedOutHandlerCalled++
	}}
	l.PushBackNamed(named)
	l.PushBackNamed(named2)
	l.PushBackNamed(named)
	l.SwapNamed(request.NamedHandler{Name: "SwapOutName", Fn: func(r *request.Request) {
		swappedInHandlerCalled++
	}})
	l.Run(&request.Request{})
	// "Name" was registered twice, so it should fire twice.
	if e, a := 2, firstHandlerCalled; e != a {
		t.Errorf("expect first handler to be called %d, was called %d times", e, a)
	}
	if n := swappedOutHandlerCalled; n != 0 {
		t.Errorf("expect swapped out handler to not be called, was called %d times", n)
	}
	if e, a := 1, swappedInHandlerCalled; e != a {
		t.Errorf("expect swapped in handler to be called %d, was called %d times", e, a)
	}
}
// TestSetBackNamed_Exists verifies that SetBackNamed replaces an existing
// handler with the same name in place: the old function never runs, the new
// one runs once, and other handlers are unaffected.
func TestSetBackNamed_Exists(t *testing.T) {
	firstHandlerCalled := 0
	swappedOutHandlerCalled := 0
	swappedInHandlerCalled := 0
	l := request.HandlerList{}
	named := request.NamedHandler{Name: "Name", Fn: func(r *request.Request) {
		firstHandlerCalled++
	}}
	named2 := request.NamedHandler{Name: "SwapOutName", Fn: func(r *request.Request) {
		swappedOutHandlerCalled++
	}}
	l.PushBackNamed(named)
	l.PushBackNamed(named2)
	l.SetBackNamed(request.NamedHandler{Name: "SwapOutName", Fn: func(r *request.Request) {
		swappedInHandlerCalled++
	}})
	l.Run(&request.Request{})
	if e, a := 1, firstHandlerCalled; e != a {
		t.Errorf("expect first handler to be called %d, was called %d times", e, a)
	}
	if n := swappedOutHandlerCalled; n != 0 {
		t.Errorf("expect swapped out handler to not be called, was called %d times", n)
	}
	if e, a := 1, swappedInHandlerCalled; e != a {
		t.Errorf("expect swapped in handler to be called %d, was called %d times", e, a)
	}
}
// TestSetBackNamed_NotExists verifies that SetBackNamed appends the handler
// when no handler with that name is registered: the existing handlers still
// run once each and the new handler runs as well.
func TestSetBackNamed_NotExists(t *testing.T) {
	firstHandlerCalled := 0
	secondHandlerCalled := 0
	swappedInHandlerCalled := 0
	l := request.HandlerList{}
	named := request.NamedHandler{Name: "Name", Fn: func(r *request.Request) {
		firstHandlerCalled++
	}}
	named2 := request.NamedHandler{Name: "OtherName", Fn: func(r *request.Request) {
		secondHandlerCalled++
	}}
	l.PushBackNamed(named)
	l.PushBackNamed(named2)
	l.SetBackNamed(request.NamedHandler{Name: "SwapOutName", Fn: func(r *request.Request) {
		swappedInHandlerCalled++
	}})
	l.Run(&request.Request{})
	if e, a := 1, firstHandlerCalled; e != a {
		t.Errorf("expect first handler to be called %d, was called %d times", e, a)
	}
	if e, a := 1, secondHandlerCalled; e != a {
		t.Errorf("expect second handler to be called %d, was called %d times", e, a)
	}
	if e, a := 1, swappedInHandlerCalled; e != a {
		t.Errorf("expect swapped in handler to be called %d, was called %d times", e, a)
	}
}
func TestLoggedHandlers(t *testing.T) {
expectedHandlers := []string{"name1", "name2"}
l := request.HandlerList{}
loggedHandlers := []string{}
l.AfterEachFn = request.HandlerListLogItem
cfg := aws.Config{Logger: aws.LoggerFunc(func(args ...interface{}) {
loggedHandlers = append(loggedHandlers, args[2].(string))
})}
named1 := request.NamedHandler{Name: "name1", Fn: func(r *request.Request) {}}
named2 := request.NamedHandler{Name: "name2", Fn: func(r *request.Request) {}}
l.PushBackNamed(named1)<|fim▁hole|> t.Errorf("expect handlers executed %v to match logged handlers, %v",
expectedHandlers, loggedHandlers)
}
}
// TestStopHandlers verifies that an AfterEachFn returning false aborts the
// run: handlers up to and including the stopping index execute, handlers
// after it never do.
func TestStopHandlers(t *testing.T) {
	l := request.HandlerList{}
	stopAt := 1
	l.AfterEachFn = func(item request.HandlerListRunItem) bool {
		// Keep running only while the just-executed handler's index is not stopAt.
		return item.Index != stopAt
	}
	called := 0
	l.PushBackNamed(request.NamedHandler{Name: "name1", Fn: func(r *request.Request) {
		called++
	}})
	l.PushBackNamed(request.NamedHandler{Name: "name2", Fn: func(r *request.Request) {
		called++
	}})
	l.PushBackNamed(request.NamedHandler{Name: "name3", Fn: func(r *request.Request) {
		t.Fatalf("third handler should not be called")
	}})
	l.Run(&request.Request{})
	if e, a := 2, called; e != a {
		t.Errorf("expect %d handlers called, got %d", e, a)
	}
}
// BenchmarkNewRequest measures the cost of constructing an S3 GetObject
// request (which copies the client's handler lists) per iteration.
func BenchmarkNewRequest(b *testing.B) {
	svc := s3.New(unit.Session)
	for i := 0; i < b.N; i++ {
		r, _ := svc.GetObjectRequest(nil)
		if r == nil {
			b.Fatal("r should not be nil")
		}
	}
}
// BenchmarkHandlersCopy measures Handlers.Copy on a set of lists populated
// with two handlers per phase, sanity-checking that the copy preserves length.
func BenchmarkHandlersCopy(b *testing.B) {
	handlers := request.Handlers{}
	handlers.Validate.PushBack(func(r *request.Request) {})
	handlers.Validate.PushBack(func(r *request.Request) {})
	handlers.Build.PushBack(func(r *request.Request) {})
	handlers.Build.PushBack(func(r *request.Request) {})
	handlers.Send.PushBack(func(r *request.Request) {})
	handlers.Send.PushBack(func(r *request.Request) {})
	handlers.Unmarshal.PushBack(func(r *request.Request) {})
	handlers.Unmarshal.PushBack(func(r *request.Request) {})
	for i := 0; i < b.N; i++ {
		h := handlers.Copy()
		if e, a := handlers.Validate.Len(), h.Validate.Len(); e != a {
			b.Fatalf("expected %d handlers got %d", e, a)
		}
	}
}
// BenchmarkHandlersPushBack measures copying a Handlers set and appending
// four handlers to the copy's Validate list per iteration.
func BenchmarkHandlersPushBack(b *testing.B) {
	handlers := request.Handlers{}
	for i := 0; i < b.N; i++ {
		h := handlers.Copy()
		h.Validate.PushBack(func(r *request.Request) {})
		h.Validate.PushBack(func(r *request.Request) {})
		h.Validate.PushBack(func(r *request.Request) {})
		h.Validate.PushBack(func(r *request.Request) {})
	}
}
// BenchmarkHandlersPushFront measures copying a Handlers set and prepending
// four handlers to the copy's Validate list per iteration.
func BenchmarkHandlersPushFront(b *testing.B) {
	handlers := request.Handlers{}
	for i := 0; i < b.N; i++ {
		h := handlers.Copy()
		h.Validate.PushFront(func(r *request.Request) {})
		h.Validate.PushFront(func(r *request.Request) {})
		h.Validate.PushFront(func(r *request.Request) {})
		h.Validate.PushFront(func(r *request.Request) {})
	}
}
// BenchmarkHandlersClear measures copying a Handlers set, populating its
// Validate list, and then clearing all handler lists per iteration.
func BenchmarkHandlersClear(b *testing.B) {
	handlers := request.Handlers{}
	for i := 0; i < b.N; i++ {
		h := handlers.Copy()
		h.Validate.PushFront(func(r *request.Request) {})
		h.Validate.PushFront(func(r *request.Request) {})
		h.Validate.PushFront(func(r *request.Request) {})
		h.Validate.PushFront(func(r *request.Request) {})
		h.Clear()
	}
}
//Added a line for testing
//Adding another line for Git event testing part 2
//Adding another line for Git event testing part 2.1
//Adding another line for Git event testing part 2.2<|fim▁end|>
|
l.PushBackNamed(named2)
l.Run(&request.Request{Config: cfg})
if !reflect.DeepEqual(expectedHandlers, loggedHandlers) {
|
<|file_name|>ButtonAnim.hpp<|end_file_name|><|fim▁begin|>/*
* A 2D video game in C++ with SFML where platforms are actually Maths Curves resulting of equations you write
* Copyright (C) 2013,2017 Gomez Guillaume, Jarretier Adrien
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.*
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact us on Github : https://github.com/guillaume-gomez/MathGame
* or https://github.com/AdrienJarretier
*/
#ifndef BUTTONANIM_H
#define BUTTONANIM_H
#ifdef DEBUG
#include <iostream>
#endif // DEBUG
#include "AniSprite.hpp"
#include "StaticButton.hpp"
class ButtonAnim : public StaticButton
{
public:
ButtonAnim(const char* _filename = "", int _widthFrame=0, int _heigtFrame=0);
virtual ~ButtonAnim();
void switchTile();
void handle_input(sf::Event& event, sf::RenderTarget& target);
void Launch();
<|fim▁hole|> bool m_changing;
AniSprite m_spriteList;
sf::Texture m_texture;
};
#endif // BUTTONANIM_H<|fim▁end|>
|
private:
//to launch animation for instance;
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
use sql::{Connection, Transaction};
use sql_construct::{SqlConstruct, SqlConstructFromMetadataDatabaseConfig};
use sql_ext::SqlConnections;
use anyhow::{anyhow, Error};
use async_trait::async_trait;
use auto_impl::auto_impl;
use context::{CoreContext, PerfCounterType};
use metaconfig_types::CommitSyncConfigVersion;
use mononoke_types::{ChangesetId, RepositoryId};
use sql::mysql_async::{
prelude::{ConvIr, FromValue},
FromValueError, Value,
};
use sql::{mysql, queries};
use stats::prelude::*;
use thiserror::Error;
/// Errors surfaced by the synced-commit-mapping storage layer when an insert
/// conflicts with data already persisted in the database.
#[derive(Debug, Eq, Error, PartialEq)]
pub enum ErrorKind {
    #[error(
        "tried to insert inconsistent small bcs id {actual_bcs_id:?} version {actual_config_version:?}, while db has {expected_bcs_id:?} version {expected_config_version:?}"
    )]
    InconsistentWorkingCopyEntry {
        expected_bcs_id: Option<ChangesetId>,
        expected_config_version: Option<CommitSyncConfigVersion>,
        actual_bcs_id: Option<ChangesetId>,
        actual_config_version: Option<CommitSyncConfigVersion>,
    },
    #[error(
        "tried to insert inconsistent version for {large_cs_id} in repo {large_repo_id}: tried to insert {expected_version_name}, found {actual_version_name}"
    )]
    InconsistentLargeRepoCommitVersion {
        large_repo_id: RepositoryId,
        large_cs_id: ChangesetId,
        expected_version_name: CommitSyncConfigVersion,
        actual_version_name: CommitSyncConfigVersion,
    },
}
// TODO(simonfar): Once we've proven the concept, we want to cache these
define_stats! {
prefix = "mononoke.synced_commit_mapping";
gets: timeseries(Rate, Sum),
gets_master: timeseries(Rate, Sum),
adds: timeseries(Rate, Sum),
add_many_in_txn: timeseries(Rate, Sum),
add_bulks: timeseries(Rate, Sum),
insert_working_copy_eqivalence: timeseries(Rate, Sum),
get_equivalent_working_copy: timeseries(Rate, Sum),
}
/// Repo that originally contained the synced commit — i.e. which side of the
/// large/small repo pair the commit was first created in.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, mysql::OptTryFromRowField)]
pub enum SyncedCommitSourceRepo {
    Large,
    Small,
}
// SQL deserialization: map the textual column values b"large"/b"small" to
// the corresponding enum variant; anything else is a conversion error.
impl ConvIr<SyncedCommitSourceRepo> for SyncedCommitSourceRepo {
    fn new(v: Value) -> Result<Self, FromValueError> {
        use SyncedCommitSourceRepo::*;
        match v {
            Value::Bytes(ref b) if b == b"large" => Ok(Large),
            Value::Bytes(ref b) if b == b"small" => Ok(Small),
            // Unrecognized value: hand it back wrapped in the error.
            v => Err(FromValueError(v)),
        }
    }
    fn commit(self) -> SyncedCommitSourceRepo {
        self
    }
    fn rollback(self) -> Value {
        self.into()
    }
}
// Use the enum itself as the ConvIr intermediate for FromValue conversions.
impl FromValue for SyncedCommitSourceRepo {
    type Intermediate = SyncedCommitSourceRepo;
}
// SQL serialization: the inverse of ConvIr::new, writing b"small"/b"large".
impl From<SyncedCommitSourceRepo> for Value {
    fn from(source_repo: SyncedCommitSourceRepo) -> Self {
        use SyncedCommitSourceRepo::*;
        match source_repo {
            Small => Value::Bytes(b"small".to_vec()),
            Large => Value::Bytes(b"large".to_vec()),
        }
    }
}
/// A single large-repo <-> small-repo commit mapping row.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct SyncedCommitMappingEntry {
    pub large_repo_id: RepositoryId,
    pub large_bcs_id: ChangesetId,
    pub small_repo_id: RepositoryId,
    pub small_bcs_id: ChangesetId,
    // Commit sync config version used when producing this mapping, if recorded.
    pub version_name: Option<CommitSyncConfigVersion>,
    // Which repo the commit originated in (large or small), if recorded.
    pub source_repo: Option<SyncedCommitSourceRepo>,
}
impl SyncedCommitMappingEntry {
    /// Builds an entry where both the config version and the source repo are known.
    pub fn new(
        large_repo_id: RepositoryId,
        large_bcs_id: ChangesetId,
        small_repo_id: RepositoryId,
        small_bcs_id: ChangesetId,
        version_name: CommitSyncConfigVersion,
        source_repo: SyncedCommitSourceRepo,
    ) -> Self {
        Self {
            large_repo_id,
            large_bcs_id,
            small_repo_id,
            small_bcs_id,
            version_name: Some(version_name),
            source_repo: Some(source_repo),
        }
    }
    // Converts this mapping into its equivalent-working-copy form: the small
    // changeset becomes Some(..) and the source_repo field is dropped.
    fn into_equivalent_working_copy_entry(self) -> EquivalentWorkingCopyEntry {
        let Self {
            large_repo_id,
            large_bcs_id,
            small_repo_id,
            small_bcs_id,
            version_name,
            source_repo: _,
        } = self;
        EquivalentWorkingCopyEntry {
            large_repo_id,
            large_bcs_id,
            small_repo_id,
            small_bcs_id: Some(small_bcs_id),
            version_name,
        }
    }
}
/// A row recording which small-repo working copy (possibly none) is
/// equivalent to a given large-repo changeset.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct EquivalentWorkingCopyEntry {
    pub large_repo_id: RepositoryId,
    pub large_bcs_id: ChangesetId,
    pub small_repo_id: RepositoryId,
    // None means the large commit has no matching working copy in the small repo.
    pub small_bcs_id: Option<ChangesetId>,
    pub version_name: Option<CommitSyncConfigVersion>,
}
/// Result of an equivalent-working-copy lookup.
#[derive(Debug, PartialEq, Eq)]
pub enum WorkingCopyEquivalence {
    /// There's no matching working copy. It can happen if a pre-big-merge commit from one small
    /// repo is mapped into another small repo
    NoWorkingCopy(CommitSyncConfigVersion),
    /// ChangesetId of matching working copy and CommitSyncConfigVersion that was used for mapping
    WorkingCopy(ChangesetId, CommitSyncConfigVersion),
}
/// Storage abstraction for commit mappings between a (large repo, small repo)
/// pair, including "equivalent working copy" entries for commits that have no
/// direct mapping.
#[async_trait]
#[auto_impl(Arc)]
pub trait SyncedCommitMapping: Send + Sync {
    /// Given the full large, small mapping, store it in the DB.
    /// Future resolves to true if the mapping was saved, false otherwise
    async fn add(&self, ctx: &CoreContext, entry: SyncedCommitMappingEntry) -> Result<bool, Error>;
    /// Bulk insert a set of large, small mappings
    /// This is meant for blobimport and similar
    async fn add_bulk(
        &self,
        ctx: &CoreContext,
        entries: Vec<SyncedCommitMappingEntry>,
    ) -> Result<u64, Error>;
    /// Find all the mapping entries for a given source commit and target repo
    async fn get(
        &self,
        ctx: &CoreContext,
        source_repo_id: RepositoryId,
        bcs_id: ChangesetId,
        target_repo_id: RepositoryId,
    ) -> Result<
        Vec<(
            ChangesetId,
            Option<CommitSyncConfigVersion>,
            Option<SyncedCommitSourceRepo>,
        )>,
        Error,
    >;
    /// Inserts equivalent working copy of a large bcs id. It's similar to mapping entry,
    /// however there are a few differences:
    /// 1) For (large repo, small repo) pair, many large commits can map to the same small commit
    /// 2) Small commit can be null
    ///
    /// If there's a mapping between small and large commits, then equivalent working copy is
    /// the same as the mapping.
    async fn insert_equivalent_working_copy(
        &self,
        ctx: &CoreContext,
        entry: EquivalentWorkingCopyEntry,
    ) -> Result<bool, Error>;
    /// Same as previous command, but it overwrites existing value.
    /// This is not intended to be used in production, but just as a debug tool
    async fn overwrite_equivalent_working_copy(
        &self,
        ctx: &CoreContext,
        entry: EquivalentWorkingCopyEntry,
    ) -> Result<bool, Error>;
    /// Finds equivalent working copy
    async fn get_equivalent_working_copy(
        &self,
        ctx: &CoreContext,
        source_repo_id: RepositoryId,
        source_bcs_id: ChangesetId,
        target_repo_id: RepositoryId,
    ) -> Result<Option<WorkingCopyEquivalence>, Error>;
    /// Get version for large repo commit
    async fn get_large_repo_commit_version(
        &self,
        ctx: &CoreContext,
        large_repo_id: RepositoryId,
        large_repo_cs_id: ChangesetId,
    ) -> Result<Option<CommitSyncConfigVersion>, Error>;
}
/// SQL-backed implementation of `SyncedCommitMapping`.
///
/// Keeps separate connections for writes, replica reads, and master reads so
/// that lookups can fall back to master when the replica returns nothing.
#[derive(Clone)]
pub struct SqlSyncedCommitMapping {
    write_connection: Connection,
    read_connection: Connection,
    read_master_connection: Connection,
}
queries! {
    // Idempotent insert of full large<->small mapping rows.
    write InsertMapping(values: (
        large_repo_id: RepositoryId,
        large_bcs_id: ChangesetId,
        small_repo_id: RepositoryId,
        small_bcs_id: ChangesetId,
        sync_map_version_name: Option<CommitSyncConfigVersion>,
        source_repo: Option<SyncedCommitSourceRepo>,
    )) {
        insert_or_ignore,
        "{insert_or_ignore} INTO synced_commit_mapping (large_repo_id, large_bcs_id, small_repo_id, small_bcs_id, sync_map_version_name, source_repo) VALUES {values}"
    }
    // Look up a mapping in either direction: the source commit may be on the
    // large or the small side of the row.
    read SelectMapping(
        source_repo_id: RepositoryId,
        bcs_id: ChangesetId,
        target_repo_id: RepositoryId,
    ) -> (RepositoryId, ChangesetId, RepositoryId, ChangesetId, Option<CommitSyncConfigVersion>, Option<SyncedCommitSourceRepo>) {
        "SELECT large_repo_id, large_bcs_id, small_repo_id, small_bcs_id, sync_map_version_name, source_repo
         FROM synced_commit_mapping
         WHERE (large_repo_id = {source_repo_id} AND large_bcs_id = {bcs_id} AND small_repo_id = {target_repo_id}) OR
         (small_repo_id = {source_repo_id} AND small_bcs_id = {bcs_id} AND large_repo_id = {target_repo_id})"
    }
    // Insert-or-ignore: an existing row wins; callers verify consistency afterwards.
    write InsertWorkingCopyEquivalence(values: (
        large_repo_id: RepositoryId,
        large_bcs_id: ChangesetId,
        small_repo_id: RepositoryId,
        small_bcs_id: Option<ChangesetId>,
        sync_map_version_name: Option<CommitSyncConfigVersion>,
    )) {
        insert_or_ignore,
        "{insert_or_ignore}
         INTO synced_working_copy_equivalence
         (large_repo_id, large_bcs_id, small_repo_id, small_bcs_id, sync_map_version_name)
         VALUES {values}"
    }
    // REPLACE variant used by the debug-only overwrite path.
    write ReplaceWorkingCopyEquivalence(values: (
        large_repo_id: RepositoryId,
        large_bcs_id: ChangesetId,
        small_repo_id: RepositoryId,
        small_bcs_id: Option<ChangesetId>,
        sync_map_version_name: Option<CommitSyncConfigVersion>,
    )) {
        none,
        "REPLACE
         INTO synced_working_copy_equivalence
         (large_repo_id, large_bcs_id, small_repo_id, small_bcs_id, sync_map_version_name)
         VALUES {values}"
    }
    // Bidirectional lookup; the oldest row (lowest mapping_id) wins.
    read SelectWorkingCopyEquivalence(
        source_repo_id: RepositoryId,
        bcs_id: ChangesetId,
        target_repo_id: RepositoryId,
    ) -> (RepositoryId, ChangesetId, RepositoryId, Option<ChangesetId>, Option<CommitSyncConfigVersion>) {
        "SELECT large_repo_id, large_bcs_id, small_repo_id, small_bcs_id, sync_map_version_name
         FROM synced_working_copy_equivalence
         WHERE (large_repo_id = {source_repo_id} AND small_repo_id = {target_repo_id} AND large_bcs_id = {bcs_id})
         OR (large_repo_id = {target_repo_id} AND small_repo_id = {source_repo_id} AND small_bcs_id = {bcs_id})
         ORDER BY mapping_id ASC
         LIMIT 1
         "
    }
    // Record which sync-config version produced a large-repo commit (idempotent).
    write InsertVersionForLargeRepoCommit(values: (
        large_repo_id: RepositoryId,
        large_bcs_id: ChangesetId,
        sync_map_version_name: CommitSyncConfigVersion,
    )) {
        insert_or_ignore,
        "{insert_or_ignore}
         INTO version_for_large_repo_commit
         (large_repo_id, large_bcs_id, sync_map_version_name)
         VALUES {values}"
    }
    // REPLACE variant used by the debug-only overwrite path.
    write ReplaceVersionForLargeRepoCommit(values: (
        large_repo_id: RepositoryId,
        large_bcs_id: ChangesetId,
        sync_map_version_name: CommitSyncConfigVersion,
    )) {
        none,
        "REPLACE
         INTO version_for_large_repo_commit
         (large_repo_id, large_bcs_id, sync_map_version_name)
         VALUES {values}"
    }
    read SelectVersionForLargeRepoCommit(
        large_repo_id: RepositoryId,
        cs_id: ChangesetId,
    ) -> (CommitSyncConfigVersion,) {
        "SELECT sync_map_version_name
         FROM version_for_large_repo_commit
         WHERE large_repo_id = {large_repo_id} AND large_bcs_id = {cs_id}"
    }
}
impl SqlConstruct for SqlSyncedCommitMapping {
    const LABEL: &'static str = "synced_commit_mapping";
    // Sqlite schema applied when the store is constructed from scratch.
    const CREATION_QUERY: &'static str =
        include_str!("../schemas/sqlite-synced-commit-mapping.sql");
    /// Build the mapping store from an already-established connection set.
    fn from_sql_connections(connections: SqlConnections) -> Self {
        Self {
            write_connection: connections.write_connection,
            read_connection: connections.read_connection,
            read_master_connection: connections.read_master_connection,
        }
    }
}
// Marker impl: allows construction from the common metadata database config.
impl SqlConstructFromMetadataDatabaseConfig for SqlSyncedCommitMapping {}
/// Private helpers shared by the `SyncedCommitMapping` trait implementation.
impl SqlSyncedCommitMapping {
    /// Insert `entries` in a single transaction on the write connection.
    /// Returns the affected-row count reported by `add_many_in_txn`.
    async fn add_many(
        &self,
        ctx: &CoreContext,
        entries: Vec<SyncedCommitMappingEntry>,
    ) -> Result<u64, Error> {
        ctx.perf_counters()
            .increment_counter(PerfCounterType::SqlWrites);
        let txn = self.write_connection.start_transaction().await?;
        let (txn, affected_rows) = add_many_in_txn(txn, entries).await?;
        txn.commit().await?;
        Ok(affected_rows)
    }
    /// Shared body of `insert_equivalent_working_copy` and
    /// `overwrite_equivalent_working_copy`.
    ///
    /// Without `should_overwrite` the insert is insert-or-ignore; when the row
    /// already exists the stored entry is re-read and compared against
    /// `entry`, and a mismatch is returned as
    /// `ErrorKind::InconsistentWorkingCopyEntry`.
    /// Resolves to true iff a row was actually written.
    async fn insert_or_overwrite_equivalent_working_copy(
        &self,
        ctx: &CoreContext,
        entry: EquivalentWorkingCopyEntry,
        should_overwrite: bool,
    ) -> Result<bool, Error> {
        STATS::insert_working_copy_eqivalence.add_value(1);
        let EquivalentWorkingCopyEntry {
            large_repo_id,
            large_bcs_id,
            small_repo_id,
            small_bcs_id,
            version_name,
        } = entry;
        ctx.perf_counters()
            .increment_counter(PerfCounterType::SqlWrites);
        if let Some(ref version_name) = version_name {
            // TODO(stash): make version non-optional
            // NOTE(review): this version write and the equivalence write below
            // are not in one transaction — confirm a partial write is acceptable.
            self.insert_version_for_large_repo_commit(
                &ctx,
                &self.write_connection,
                large_repo_id,
                large_bcs_id,
                version_name,
                should_overwrite,
            )
            .await?;
        }
        let result = if should_overwrite {
            ReplaceWorkingCopyEquivalence::query(
                &self.write_connection,
                &[(
                    &large_repo_id,
                    &large_bcs_id,
                    &small_repo_id,
                    &small_bcs_id,
                    &version_name,
                )],
            )
            .await?
        } else {
            InsertWorkingCopyEquivalence::query(
                &self.write_connection,
                &[(
                    &large_repo_id,
                    &large_bcs_id,
                    &small_repo_id,
                    &small_bcs_id,
                    &version_name,
                )],
            )
            .await?
        };
        if result.affected_rows() >= 1 {
            Ok(true)
        } else {
            if !should_overwrite {
                // Check that db stores consistent entry
                let maybe_equivalent_wc = self
                    .get_equivalent_working_copy(ctx, large_repo_id, large_bcs_id, small_repo_id)
                    .await?;
                if let Some(equivalent_wc) = maybe_equivalent_wc {
                    use WorkingCopyEquivalence::*;
                    let (expected_bcs_id, expected_version) = match equivalent_wc {
                        WorkingCopy(wc, mapping) => (Some(wc), mapping),
                        NoWorkingCopy(mapping) => (None, mapping),
                    };
                    let expected_version = Some(expected_version);
                    if (expected_bcs_id != small_bcs_id) || (expected_version != version_name) {
                        let err = ErrorKind::InconsistentWorkingCopyEntry {
                            expected_bcs_id,
                            expected_config_version: expected_version,
                            actual_bcs_id: small_bcs_id,
                            actual_config_version: version_name,
                        };
                        return Err(err.into());
                    }
                }
            }
            Ok(false)
        }
    }
    /// Record the sync-config version of a large-repo commit
    /// (insert-or-ignore, or REPLACE when `should_overwrite`).
    ///
    /// When an insert is ignored, the stored version is re-read and compared;
    /// a mismatch becomes `ErrorKind::InconsistentLargeRepoCommitVersion`.
    async fn insert_version_for_large_repo_commit(
        &self,
        ctx: &CoreContext,
        write_connection: &Connection,
        large_repo_id: RepositoryId,
        large_cs_id: ChangesetId,
        version_name: &CommitSyncConfigVersion,
        should_overwrite: bool,
    ) -> Result<bool, Error> {
        let result = if should_overwrite {
            ReplaceVersionForLargeRepoCommit::query(
                &write_connection,
                &[(&large_repo_id, &large_cs_id, &version_name)],
            )
            .await?
        } else {
            InsertVersionForLargeRepoCommit::query(
                &write_connection,
                &[(&large_repo_id, &large_cs_id, &version_name)],
            )
            .await?
        };
        if result.affected_rows() >= 1 {
            Ok(true)
        } else {
            if !should_overwrite {
                // Check that db stores consistent entry
                let maybe_large_repo_version = self
                    .get_large_repo_commit_version(ctx, large_repo_id, large_cs_id)
                    .await?;
                if let Some(actual_version_name) = maybe_large_repo_version {
                    if &actual_version_name != version_name {
                        let err = ErrorKind::InconsistentLargeRepoCommitVersion {
                            large_repo_id,
                            large_cs_id,
                            expected_version_name: version_name.clone(),
                            actual_version_name,
                        };
                        return Err(err.into());
                    }
                }
            }
            Ok(false)
        }
    }
}
#[async_trait]
impl SyncedCommitMapping for SqlSyncedCommitMapping {
async fn add(&self, ctx: &CoreContext, entry: SyncedCommitMappingEntry) -> Result<bool, Error> {
STATS::adds.add_value(1);
self.add_many(&ctx, vec![entry])
.await
.map(|count| count == 1)
}
    /// Bulk insert of mapping entries (one transaction); resolves to the
    /// affected-row count reported by `add_many`.
    async fn add_bulk(
        &self,
        ctx: &CoreContext,
        entries: Vec<SyncedCommitMappingEntry>,
    ) -> Result<u64, Error> {
        STATS::add_bulks.add_value(1);
        self.add_many(&ctx, entries).await
    }
    /// Fetch all mappings for `bcs_id` towards `target_repo_id`.
    ///
    /// Reads the replica first and falls back to master when the replica
    /// returns no rows. Each returned tuple contains the changeset on the
    /// `target_repo_id` side of the row, plus the optional mapping version
    /// and source-repo tag.
    async fn get(
        &self,
        ctx: &CoreContext,
        source_repo_id: RepositoryId,
        bcs_id: ChangesetId,
        target_repo_id: RepositoryId,
    ) -> Result<
        Vec<(
            ChangesetId,
            Option<CommitSyncConfigVersion>,
            Option<SyncedCommitSourceRepo>,
        )>,
        Error,
    > {
        STATS::gets.add_value(1);
        ctx.perf_counters()
            .increment_counter(PerfCounterType::SqlReadsReplica);
        let rows = SelectMapping::query(
            &self.read_connection,
            &source_repo_id,
            &bcs_id,
            &target_repo_id,
        )
        .await?;
        // Replica may lag behind: retry on master when nothing was found.
        let rows = if rows.is_empty() {
            STATS::gets_master.add_value(1);
            ctx.perf_counters()
                .increment_counter(PerfCounterType::SqlReadsMaster);
            SelectMapping::query(
                &self.read_master_connection,
                &source_repo_id,
                &bcs_id,
                &target_repo_id,
            )
            .await?
        } else {
            rows
        };
        Ok(rows
            .into_iter()
            .map(|row| {
                let (
                    large_repo_id,
                    large_bcs_id,
                    _small_repo_id,
                    small_bcs_id,
                    maybe_version_name,
                    maybe_source_repo,
                ) = row;
                // Orient the row: return whichever side belongs to the target repo.
                if target_repo_id == large_repo_id {
                    (large_bcs_id, maybe_version_name, maybe_source_repo)
                } else {
                    (small_bcs_id, maybe_version_name, maybe_source_repo)
                }
            })
            .collect())
    }
    /// Insert-or-ignore an equivalence row; an existing, conflicting row
    /// is reported as an error by the shared helper.
    async fn insert_equivalent_working_copy(
        &self,
        ctx: &CoreContext,
        entry: EquivalentWorkingCopyEntry,
    ) -> Result<bool, Error> {
        self.insert_or_overwrite_equivalent_working_copy(
            ctx, entry, false, /* should overwrite */
        )
        .await
    }
    /// REPLACE an equivalence row unconditionally — debug tooling only
    /// (see trait docs).
    async fn overwrite_equivalent_working_copy(
        &self,
        ctx: &CoreContext,
        entry: EquivalentWorkingCopyEntry,
    ) -> Result<bool, Error> {
        self.insert_or_overwrite_equivalent_working_copy(
            ctx, entry, true, /* should overwrite */
        )
        .await
    }
async fn get_equivalent_working_copy(
&self,
ctx: &CoreContext,
source_repo_id: RepositoryId,
source_bcs_id: ChangesetId,
target_repo_id: RepositoryId,
) -> Result<Option<WorkingCopyEquivalence>, Error> {
STATS::get_equivalent_working_copy.add_value(1);
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsReplica);
let rows = SelectWorkingCopyEquivalence::query(
&self.read_connection,<|fim▁hole|> &target_repo_id,
)
.await?;
let maybe_row = if !rows.is_empty() {
rows.get(0).cloned()
} else {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
SelectWorkingCopyEquivalence::query(
&self.read_master_connection,
&source_repo_id,
&source_bcs_id,
&target_repo_id,
)
.await
.map(|rows| rows.get(0).cloned())?
};
Ok(match maybe_row {
Some(row) => {
let (
large_repo_id,
large_bcs_id,
_small_repo_id,
maybe_small_bcs_id,
maybe_mapping,
) = row;
let mapping = maybe_mapping.ok_or_else(|| {
anyhow!(
"unexpected empty mapping for {}, {}->{}",
source_bcs_id,
source_repo_id,
target_repo_id
)
})?;
if target_repo_id == large_repo_id {
Some(WorkingCopyEquivalence::WorkingCopy(large_bcs_id, mapping))
} else {
match maybe_small_bcs_id {
Some(small_bcs_id) => {
Some(WorkingCopyEquivalence::WorkingCopy(small_bcs_id, mapping))
}
None => Some(WorkingCopyEquivalence::NoWorkingCopy(mapping)),
}
}
}
None => None,
})
}
async fn get_large_repo_commit_version(
&self,
ctx: &CoreContext,
large_repo_id: RepositoryId,
large_repo_cs_id: ChangesetId,
) -> Result<Option<CommitSyncConfigVersion>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsReplica);
let maybe_version = SelectVersionForLargeRepoCommit::query(
&self.read_connection,
&large_repo_id,
&large_repo_cs_id,
)
.await?
.pop()
.map(|x| x.0);
if let Some(version) = maybe_version {
return Ok(Some(version));
}
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
Ok(SelectVersionForLargeRepoCommit::query(
&self.read_master_connection,
&large_repo_id,
&large_repo_cs_id,
)
.await?
.pop()
.map(|x| x.0))
}
}
/// Insert `entries` into all three tables inside the caller's transaction:
/// the mapping rows, the per-large-commit version rows (only for entries
/// carrying a version), and the working-copy-equivalence rows.
///
/// The returned count is the number of rows affected by the final
/// working-copy-equivalence insert.
pub async fn add_many_in_txn(
    txn: Transaction,
    entries: Vec<SyncedCommitMappingEntry>,
) -> Result<(Transaction, u64), Error> {
    STATS::add_many_in_txn.add_value(1);
    let insert_entries: Vec<_> = entries
        .iter()
        .map(|entry| {
            (
                &entry.large_repo_id,
                &entry.large_bcs_id,
                &entry.small_repo_id,
                &entry.small_bcs_id,
                &entry.version_name,
                &entry.source_repo,
            )
        })
        .collect();
    let (txn, _result) = InsertMapping::query_with_transaction(txn, &insert_entries).await?;
    let owned_entries: Vec<_> = entries
        .into_iter()
        .map(|entry| entry.into_equivalent_working_copy_entry())
        .collect();
    // Only entries that carry a version get a version_for_large_repo_commit row.
    let mut large_repo_commit_versions = vec![];
    for entry in &owned_entries {
        if let Some(version_name) = &entry.version_name {
            large_repo_commit_versions.push((
                &entry.large_repo_id,
                &entry.large_bcs_id,
                version_name,
            ));
        }
    }
    let (txn, _result) =
        InsertVersionForLargeRepoCommit::query_with_transaction(txn, &large_repo_commit_versions)
            .await?;
    let ref_entries: Vec<_> = owned_entries
        .iter()
        .map(|entry| {
            (
                &entry.large_repo_id,
                &entry.large_bcs_id,
                &entry.small_repo_id,
                &entry.small_bcs_id,
                &entry.version_name,
            )
        })
        .collect();
    let (txn, result) =
        InsertWorkingCopyEquivalence::query_with_transaction(txn, &ref_entries).await?;
    Ok((txn, result.affected_rows()))
}<|fim▁end|>
|
&source_repo_id,
&source_bcs_id,
|
<|file_name|>inception.py<|end_file_name|><|fim▁begin|># coding: utf-8
# pylint: disable= arguments-differ
"""Inception, implemented in Gluon."""
__all__ = ['Inception3', 'inception_v3']
from ....context import cpu
from ...block import HybridBlock
from ... import nn
from ..custom_layers import HybridConcurrent
# Helpers
def _make_basic_conv(**kwargs):
    """Return a Conv2D(no bias) -> BatchNorm -> ReLU block.

    All keyword arguments are forwarded to ``nn.Conv2D``.
    """
    block = nn.HybridSequential(prefix='')
    for layer in (nn.Conv2D(use_bias=False, **kwargs),
                  nn.BatchNorm(epsilon=0.001),
                  nn.Activation('relu')):
        block.add(layer)
    return block
def _make_branch(use_pool, *conv_settings):
    """Build one inception branch: optional pooling followed by conv blocks.

    Each element of ``conv_settings`` is a ``(channels, kernel_size, strides,
    padding)`` tuple; ``None`` entries are omitted from the Conv2D kwargs.
    """
    branch = nn.HybridSequential(prefix='')
    if use_pool == 'avg':
        branch.add(nn.AvgPool2D(pool_size=3, strides=1, padding=1))
    elif use_pool == 'max':
        branch.add(nn.MaxPool2D(pool_size=3, strides=2))
    setting_names = ['channels', 'kernel_size', 'strides', 'padding']
    for setting in conv_settings:
        kwargs = {name: value
                  for name, value in zip(setting_names, setting)
                  if value is not None}
        branch.add(_make_basic_conv(**kwargs))
    return branch
def _make_A(pool_features, prefix):
    """Inception-A block: 1x1, 5x5, double-3x3 and avg-pooled 1x1 branches,
    concatenated along the channel axis (concat_dim=1)."""
    out = HybridConcurrent(concat_dim=1, prefix=prefix)
    with out.name_scope():
        # 1x1 branch
        out.add(_make_branch(None,
                             (64, 1, None, None)))
        # 1x1 -> 5x5 branch
        out.add(_make_branch(None,
                             (48, 1, None, None),
                             (64, 5, None, 2)))
        # 1x1 -> 3x3 -> 3x3 branch
        out.add(_make_branch(None,
                             (64, 1, None, None),
                             (96, 3, None, 1),
                             (96, 3, None, 1)))
        # avg-pool -> 1x1 branch
        out.add(_make_branch('avg',
                             (pool_features, 1, None, None)))
    return out
def _make_B(prefix):
    """Inception-B downsampling block: stride-2 3x3 branch, 1x1 -> 3x3 ->
    stride-2 3x3 branch, and a stride-2 max-pool branch, concatenated."""
    out = HybridConcurrent(concat_dim=1, prefix=prefix)
    with out.name_scope():
        # stride-2 3x3 branch
        out.add(_make_branch(None,
                             (384, 3, 2, None)))
        # 1x1 -> 3x3 -> stride-2 3x3 branch
        out.add(_make_branch(None,
                             (64, 1, None, None),
                             (96, 3, None, 1),
                             (96, 3, 2, None)))
        # pooling-only branch
        out.add(_make_branch('max'))
    return out
def _make_C(channels_7x7, prefix):
    """Inception-C block with factorized 7x7 convolutions (1x7 and 7x1 pairs);
    `channels_7x7` sets the width of the intermediate factorized convs."""
    out = HybridConcurrent(concat_dim=1, prefix=prefix)
    with out.name_scope():
        # 1x1 branch
        out.add(_make_branch(None,
                             (192, 1, None, None)))
        # 1x1 -> 1x7 -> 7x1 branch
        out.add(_make_branch(None,
                             (channels_7x7, 1, None, None),
                             (channels_7x7, (1, 7), None, (0, 3)),
                             (192, (7, 1), None, (3, 0))))
        # 1x1 -> (7x1 -> 1x7) x2 branch
        out.add(_make_branch(None,
                             (channels_7x7, 1, None, None),
                             (channels_7x7, (7, 1), None, (3, 0)),
                             (channels_7x7, (1, 7), None, (0, 3)),
                             (channels_7x7, (7, 1), None, (3, 0)),
                             (192, (1, 7), None, (0, 3))))
        # avg-pool -> 1x1 branch
        out.add(_make_branch('avg',
                             (192, 1, None, None)))
    return out
def _make_D(prefix):
    """Inception-D downsampling block: two stride-2 conv branches plus a
    stride-2 max-pool branch, concatenated."""
    out = HybridConcurrent(concat_dim=1, prefix=prefix)
    with out.name_scope():
        # 1x1 -> stride-2 3x3 branch
        out.add(_make_branch(None,
                             (192, 1, None, None),
                             (320, 3, 2, None)))
        # 1x1 -> 1x7 -> 7x1 -> stride-2 3x3 branch
        out.add(_make_branch(None,
                             (192, 1, None, None),
                             (192, (1, 7), None, (0, 3)),
                             (192, (7, 1), None, (3, 0)),
                             (192, 3, 2, None)))
        # pooling-only branch
        out.add(_make_branch('max'))
    return out
def _make_E(prefix):
    """Inception-E block: 1x1 branch, two branches that split into parallel
    1x3 / 3x1 convolutions (concatenated), and an avg-pooled 1x1 branch."""
    out = HybridConcurrent(concat_dim=1, prefix=prefix)
    with out.name_scope():
        # 1x1 branch
        out.add(_make_branch(None,
                             (320, 1, None, None)))
        # 1x1 followed by a parallel 1x3 / 3x1 split
        branch_3x3 = nn.HybridSequential(prefix='')
        out.add(branch_3x3)
        branch_3x3.add(_make_branch(None,
                                    (384, 1, None, None)))
        branch_3x3_split = HybridConcurrent(concat_dim=1, prefix='')
        branch_3x3_split.add(_make_branch(None,
                                          (384, (1, 3), None, (0, 1))))
        branch_3x3_split.add(_make_branch(None,
                                          (384, (3, 1), None, (1, 0))))
        branch_3x3.add(branch_3x3_split)
        # 1x1 -> 3x3 followed by a parallel 1x3 / 3x1 split
        branch_3x3dbl = nn.HybridSequential(prefix='')
        out.add(branch_3x3dbl)
        branch_3x3dbl.add(_make_branch(None,
                                       (448, 1, None, None),
                                       (384, 3, None, 1)))
        branch_3x3dbl_split = HybridConcurrent(concat_dim=1, prefix='')
        branch_3x3dbl.add(branch_3x3dbl_split)
        branch_3x3dbl_split.add(_make_branch(None,
                                             (384, (1, 3), None, (0, 1))))
        branch_3x3dbl_split.add(_make_branch(None,
                                             (384, (3, 1), None, (1, 0))))
        # avg-pool -> 1x1 branch
        out.add(_make_branch('avg',
                             (192, 1, None, None)))
    return out
def make_aux(classes):
    """Auxiliary classifier head: 5x5 avg-pool, 1x1 and 5x5 conv blocks,
    then a dense layer producing `classes` logits."""
    out = nn.HybridSequential(prefix='')
    out.add(nn.AvgPool2D(pool_size=5, strides=3))
    out.add(_make_basic_conv(channels=128, kernel_size=1))
    out.add(_make_basic_conv(channels=768, kernel_size=5))
    out.add(nn.Flatten())
    out.add(nn.Dense(classes))
    return out
# Net
class Inception3(HybridBlock):
r"""Inception v3 model from
`"Rethinking the Inception Architecture for Computer Vision"
<http://arxiv.org/abs/1512.00567>`_ paper.
Parameters
----------
classes : int, default 1000
Number of classification classes.
"""
def __init__(self, classes=1000, **kwargs):
super(Inception3, self).__init__(**kwargs)
# self.use_aux_logits = use_aux_logits
with self.name_scope():
self.features = nn.HybridSequential(prefix='')
self.features.add(_make_basic_conv(channels=32, kernel_size=3, strides=2))
self.features.add(_make_basic_conv(channels=32, kernel_size=3))
self.features.add(_make_basic_conv(channels=64, kernel_size=3, padding=1))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2))
self.features.add(_make_basic_conv(channels=80, kernel_size=1))
self.features.add(_make_basic_conv(channels=192, kernel_size=3))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2))
self.features.add(_make_A(32, 'A1_'))
self.features.add(_make_A(64, 'A2_'))
self.features.add(_make_A(64, 'A3_'))
self.features.add(_make_B('B_'))
self.features.add(_make_C(128, 'C1_'))
self.features.add(_make_C(160, 'C2_'))
self.features.add(_make_C(160, 'C3_'))
self.features.add(_make_C(192, 'C4_'))<|fim▁hole|> self.classifier.add(_make_D('D_'))
self.classifier.add(_make_E('E1_'))
self.classifier.add(_make_E('E2_'))
self.classifier.add(nn.AvgPool2D(pool_size=8))
self.classifier.add(nn.Dropout(0.5))
self.classifier.add(nn.Flatten())
self.classifier.add(nn.Dense(classes))
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.classifier(x)
return x
# Constructor
def inception_v3(pretrained=False, ctx=cpu(), **kwargs):
    r"""Inception v3 model from
    `"Rethinking the Inception Architecture for Computer Vision"
    <http://arxiv.org/abs/1512.00567>`_ paper.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    """
    net = Inception3(**kwargs)
    if pretrained:
        # Fetch cached weight file and load it onto the requested context.
        from ..model_store import get_model_file
        net.load_params(get_model_file('inceptionv3'), ctx=ctx)
    return net<|fim▁end|>
|
self.classifier = nn.HybridSequential(prefix='')
|
<|file_name|>coalition.py<|end_file_name|><|fim▁begin|>import sys
from typing import Dict, Union, List, TYPE_CHECKING
import dcs.countries as countries
from dcs.mapping import Point
import dcs.unitgroup as unitgroup
import dcs.planes as planes
import dcs.helicopters as helicopters
import dcs.ships as ships
from dcs.unit import Vehicle, Static, Ship, FARP, SingleHeliPad
from dcs.flyingunit import Plane, Helicopter
from dcs.point import MovingPoint, StaticPoint
from dcs.country import Country
from dcs.status_message import StatusMessage, MessageType, MessageSeverity
if TYPE_CHECKING:
from . import Mission
class Coalition:
    def __init__(self, name, bullseye=None):
        """A mission coalition holding its countries and bullseye.

        :param name: coalition name
        :param bullseye: optional bullseye data, serialized as-is by dict()
        """
        self.name = name
        self.countries = {}  # type: Dict[str, Country]
        self.bullseye = bullseye
        self.nav_points = []  # TODO
@staticmethod
def _sort_keys(points):
keys = []
for imp_point_idx in points:
keys.append(int(imp_point_idx))
keys.sort()
return keys
    @staticmethod
    def _import_moving_point(mission, group: unitgroup.Group, imp_group) -> unitgroup.Group:
        """Append the serialized route points of `imp_group` to `group`
        as MovingPoint objects, in ascending index order."""
        keys = Coalition._sort_keys(imp_group["route"]["points"])
        for imp_point_idx in keys:
            imp_point = imp_group["route"]["points"][imp_point_idx]
            point = MovingPoint(Point(0, 0, mission.terrain))
            point.load_from_dict(imp_point, mission.translation)
            group.add_point(point)
        return group
    @staticmethod
    def _import_static_point(mission, group: unitgroup.Group, imp_group) -> unitgroup.Group:
        """Append the serialized route points of `imp_group` to `group`
        as StaticPoint objects, in ascending index order.

        NOTE(review): identical to _import_moving_point except for the point
        class — consider factoring out if a shared helper fits the design.
        """
        keys = Coalition._sort_keys(imp_group["route"]["points"])
        for imp_point_idx in keys:
            imp_point = imp_group["route"]["points"][imp_point_idx]
            point = StaticPoint(Point(0, 0, mission.terrain))
            point.load_from_dict(imp_point, mission.translation)
            group.add_point(point)
        return group
    @staticmethod
    def _park_unit_on_airport(
            mission: 'Mission',
            group: unitgroup.Group,
            unit: Union[Plane, Helicopter]) -> List[StatusMessage]:
        """Resolve the parking slot for a unit that starts on an airport.

        If the imported slot id is invalid for the airport, the next free
        slot for the unit type is used (WARN message); if no slot is free the
        unit is left without one (ERROR message). Returns the collected
        warnings/errors.
        """
        ret: List[StatusMessage] = []
        if group.points[0].airdrome_id is not None and unit.parking is not None:
            airport = mission.terrain.airport_by_id(group.points[0].airdrome_id)
            slot = airport.parking_slot(unit.parking)
            if slot is not None:
                unit.set_parking(slot)
            else:
                # Imported slot id unknown on this airport: warn, then fall
                # back to the first free slot that fits the unit type.
                msg = "Parking slot id '{i}' for unit '{u}' in group '{p}' on airport '{a}' " \
                      "not valid, placing on next free".format(i=unit.parking, u=unit.name,
                                                               a=airport.name, p=group.name)
                print("WARN", msg, file=sys.stderr)
                ret.append(StatusMessage(msg, MessageType.PARKING_SLOT_NOT_VALID, MessageSeverity.WARN))
                slot = airport.free_parking_slot(unit.unit_type)
                if slot is not None:
                    unit.set_parking(slot)
                else:
                    msg = "No free parking slots for unit '{u}' in unit group '{p}' on airport '{a}', ignoring"\
                        .format(u=unit.name, a=airport.name, p=group.name)
                    print("ERRO", msg, file=sys.stderr)
                    ret.append(StatusMessage(msg, MessageType.PARKING_SLOTS_FULL, MessageSeverity.ERROR))
        return ret
@staticmethod
def get_name(mission: "Mission", name: str) -> str:
# Group, unit names are not localized for missions are created in 2.7.
if mission.version < 19:
return str(mission.translation.get_string(name))
else:
return name
def load_from_dict(self, mission, d) -> List[StatusMessage]:
status: List[StatusMessage] = []
for country_idx in d["country"]:
imp_country = d["country"][country_idx]
_country = countries.get_by_id(imp_country["id"])
if "vehicle" in imp_country:
for vgroup_idx in imp_country["vehicle"]["group"]:
vgroup = imp_country["vehicle"]["group"][vgroup_idx]
vg = unitgroup.VehicleGroup(vgroup["groupId"], self.get_name(mission, vgroup["name"]),
vgroup["start_time"])
vg.load_from_dict(vgroup, mission.terrain)
mission.current_group_id = max(mission.current_group_id, vg.id)
Coalition._import_moving_point(mission, vg, vgroup)
# units
for imp_unit_idx in vgroup["units"]:
imp_unit = vgroup["units"][imp_unit_idx]
unit = Vehicle(
mission.terrain,
id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]),
_type=imp_unit["type"])
unit.load_from_dict(imp_unit)
mission.current_unit_id = max(mission.current_unit_id, unit.id)
vg.add_unit(unit)
_country.add_vehicle_group(vg)
if "ship" in imp_country:
for group_idx in imp_country["ship"]["group"]:
imp_group = imp_country["ship"]["group"][group_idx]
ship_group = unitgroup.ShipGroup(imp_group["groupId"], self.get_name(mission, imp_group["name"]),
imp_group["start_time"])
ship_group.load_from_dict(imp_group, mission.terrain)
mission.current_group_id = max(mission.current_group_id, ship_group.id)
Coalition._import_moving_point(mission, ship_group, imp_group)
# units
for imp_unit_idx in imp_group["units"]:
imp_unit = imp_group["units"][imp_unit_idx]
ship = Ship(
mission.terrain,
id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]),
_type=ships.ship_map[imp_unit["type"]])
ship.load_from_dict(imp_unit)
mission.current_unit_id = max(mission.current_unit_id, ship.id)
ship_group.add_unit(ship)
_country.add_ship_group(ship_group)
if "plane" in imp_country:
for pgroup_idx in imp_country["plane"]["group"]:
pgroup = imp_country["plane"]["group"][pgroup_idx]
plane_group = unitgroup.PlaneGroup(pgroup["groupId"],
self.get_name(mission, pgroup["name"]),
pgroup["start_time"])
plane_group.load_from_dict(pgroup, mission.terrain)
mission.current_group_id = max(mission.current_group_id, plane_group.id)
Coalition._import_moving_point(mission, plane_group, pgroup)
# units
for imp_unit_idx in pgroup["units"]:
imp_unit = pgroup["units"][imp_unit_idx]
plane = Plane(
mission.terrain,
_id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]),
_type=planes.plane_map[imp_unit["type"]],
_country=_country)
plane.load_from_dict(imp_unit)
if _country.reserve_onboard_num(plane.onboard_num):
msg = "{c} Plane '{p}' already using tail number: {t}".format(
c=self.name.upper(), p=plane.name, t=plane.onboard_num)
status.append(StatusMessage(msg, MessageType.ONBOARD_NUM_DUPLICATE, MessageSeverity.WARN))
print("WARN:", msg, file=sys.stderr)
status += self._park_unit_on_airport(mission, plane_group, plane)
mission.current_unit_id = max(mission.current_unit_id, plane.id)
plane_group.add_unit(plane)
# check runway start
# if plane_group.points[0].airdrome_id is not None and plane_group.units[0].parking is None:
# airport = mission.terrain.airport_by_id(plane_group.points[0].airdrome_id)
# airport.occupy_runway(plane_group)
_country.add_plane_group(plane_group)
if "helicopter" in imp_country:
for pgroup_idx in imp_country["helicopter"]["group"]:
pgroup = imp_country["helicopter"]["group"][pgroup_idx]
helicopter_group = unitgroup.HelicopterGroup(
pgroup["groupId"],
self.get_name(mission, pgroup["name"]),
pgroup["start_time"])
helicopter_group.load_from_dict(pgroup, mission.terrain)
mission.current_group_id = max(mission.current_group_id, helicopter_group.id)
Coalition._import_moving_point(mission, helicopter_group, pgroup)
# units
for imp_unit_idx in pgroup["units"]:
imp_unit = pgroup["units"][imp_unit_idx]
heli = Helicopter(
mission.terrain,
_id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]),
_type=helicopters.helicopter_map[imp_unit["type"]],
_country=_country)
heli.load_from_dict(imp_unit)
if _country.reserve_onboard_num(heli.onboard_num):
msg = "{c} Helicopter '{h}' already using tail number: {t}".format(
c=self.name.upper(), h=heli.name, t=heli.onboard_num)
status.append(StatusMessage(msg, MessageType.ONBOARD_NUM_DUPLICATE, MessageSeverity.WARN))
print("WARN:", msg, file=sys.stderr)
status += self._park_unit_on_airport(mission, helicopter_group, heli)
mission.current_unit_id = max(mission.current_unit_id, heli.id)
helicopter_group.add_unit(heli)
# check runway start
# if helicopter_group.points[0].airdrome_id is not None and helicopter_group.units[0].parking is None:
# airport = mission.terrain.airport_by_id(helicopter_group.points[0].airdrome_id)
# airport.occupy_runway(helicopter_group)
_country.add_helicopter_group(helicopter_group)
if "static" in imp_country:
for sgroup_idx in imp_country["static"]["group"]:
sgroup = imp_country["static"]["group"][sgroup_idx]
static_group = unitgroup.StaticGroup(sgroup["groupId"],<|fim▁hole|> static_group.load_from_dict(sgroup, mission.terrain)
mission.current_group_id = max(mission.current_group_id, static_group.id)
Coalition._import_static_point(mission, static_group, sgroup)
# units
for imp_unit_idx in sgroup["units"]:
imp_unit = sgroup["units"][imp_unit_idx]
static: Static
if imp_unit["type"] == "FARP":
static = FARP(
mission.terrain,
unit_id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]))
elif imp_unit["type"] == "SINGLE_HELIPAD":
static = SingleHeliPad(
mission.terrain,
unit_id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]))
else:
static = Static(
unit_id=imp_unit["unitId"],
name=self.get_name(mission, imp_unit["name"]),
_type=imp_unit["type"],
terrain=mission.terrain)
static.load_from_dict(imp_unit)
mission.current_unit_id = max(mission.current_unit_id, static.id)
static_group.add_unit(static)
_country.add_static_group(static_group)
self.add_country(_country)
return status
    def set_bullseye(self, bulls):
        """Set the coalition bullseye (stored as given, serialized by dict())."""
        self.bullseye = bulls
    def add_country(self, country):
        """Register a country in this coalition (keyed by name) and return it."""
        self.countries[country.name] = country
        return country
    def remove_country(self, name):
        """Remove and return the named country; raises KeyError if absent."""
        return self.countries.pop(name)
    def swap_country(self, coalition, name):
        """Move the named country from this coalition to `coalition`; returns it."""
        return coalition.add_country(self.remove_country(name))
def country(self, country_name: str):
return self.countries.get(country_name, None)
def country_by_id(self, _id: int):
for cn in self.countries:
c = self.countries[cn]
if c.id == _id:
return c
return None
def find_group(self, group_name, search="exact"):
for c in self.countries:
g = self.countries[c].find_group(group_name, search)
if g:
return g
return None
def dict(self):
d = {"name": self.name}
if self.bullseye:
d["bullseye"] = self.bullseye
d["country"] = {}
i = 1
for country in sorted(self.countries.keys()):
d["country"][i] = self.country(country).dict()
i += 1
d["nav_points"] = {}
return d<|fim▁end|>
|
self.get_name(mission, sgroup["name"]))
|
<|file_name|>submatrix.rs<|end_file_name|><|fim▁begin|>use crate::math::{Determinant, Matrix, Matrix2, Matrix3, Matrix4, Scalar};
/// Submatrix extraction plus the minor/cofactor helpers derived from it.
pub trait Submatrix : Matrix {
    type Output: Matrix;
    /// Matrix obtained by deleting row `y` and column `x`.
    fn submatrix(&self, y: usize, x: usize) -> <Self as Submatrix>::Output;
    /// Determinant of the submatrix at (`y`, `x`).
    fn minor(&self, y: usize, x: usize) -> Scalar {
        self.submatrix(y, x).determinant()
    }
    /// Signed minor: negated when `x + y` is odd (checkerboard sign pattern).
    fn cofactor(&self, y: usize, x: usize) -> Scalar {
        if (x + y) % 2 == 0 {
            self.minor(y, x)
        } else {
            -self.minor(y, x)
        }
    }
}
impl Submatrix for Matrix3 {
type Output = Matrix2;
fn submatrix(&self, y: usize, x: usize) -> Matrix2 {
let mut data = [[0.0; 2]; 2];
for ny in 0..2 {
for nx in 0..2 {
let oy = if ny >= y { ny + 1 } else { ny };
let ox = if nx >= x { nx + 1 } else { nx };
data[ny][nx] = self[oy][ox];
}<|fim▁hole|> }
Matrix2::new(data)
}
}
impl Submatrix for Matrix4 {
type Output = Matrix3;
fn submatrix(&self, y: usize, x: usize) -> Matrix3 {
let mut data = [[0.0; 3]; 3];
for ny in 0..3 {
for nx in 0..3 {
let oy = if ny >= y { ny + 1 } else { ny };
let ox = if nx >= x { nx + 1 } else { nx };
data[ny][nx] = self[oy][ox];
}
}
Matrix3::new(data)
}
}<|fim▁end|>
| |
<|file_name|>EndpointControlResource.java<|end_file_name|><|fim▁begin|>package com.example.mesh;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
@Path("/")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public class EndpointControlResource {
/**
* @see <a href="https://relay.bluejeans.com/docs/mesh.html#capabilities">https://relay.bluejeans.com/docs/mesh.html#capabilities</a>
*/
@GET
@Path("{ipAddress}/capabilities")
public Map<String, Boolean> capabilities(@PathParam("ipAddress") final String ipAddress,
@QueryParam("port") final Integer port,
@QueryParam("name") final String name) {
System.out.println("Received capabilities request");
System.out.println(" ipAddress = " + ipAddress);
System.out.println(" port = " + port);
System.out.println(" name = " + name);
final Map<String, Boolean> capabilities = new HashMap<>();
capabilities.put("JOIN", true);
capabilities.put("HANGUP", true);
capabilities.put("STATUS", true);
capabilities.put("MUTEMICROPHONE", true);
return capabilities;
}
/**
* @see <a href="https://relay.bluejeans.com/docs/mesh.html#status">https://relay.bluejeans.com/docs/mesh.html#status</a>
*/
@GET
@Path("{ipAddress}/status")
public Map<String, Boolean> status(@PathParam("ipAddress") final String ipAddress, @QueryParam("port") final Integer port,
@QueryParam("name") final String name) {
System.out.println("Received status request");
System.out.println(" ipAddress = " + ipAddress);
System.out.println(" port = " + port);
System.out.println(" name = " + name);<|fim▁hole|> return status;
}
/**
 * Asks the endpoint at {@code ipAddress} to dial into a meeting.
 * This sample implementation only logs the request parameters; a real
 * controller would drive the device here.
 *
 * @see <a href="https://relay.bluejeans.com/docs/mesh.html#join">https://relay.bluejeans.com/docs/mesh.html#join</a>
 */
@POST
@Path("{ipAddress}/join")
public void join(@PathParam("ipAddress") final String ipAddress, @QueryParam("dialString") final String dialString,
        @QueryParam("meetingId") final String meetingId, @QueryParam("passcode") final String passcode,
        @QueryParam("bridgeAddress") final String bridgeAddress, final Endpoint endpoint) {
    System.out.println("Received join request");
    System.out.println("  ipAddress = " + ipAddress);
    System.out.println("  dialString = " + dialString);
    System.out.println("  meetingId = " + meetingId);
    System.out.println("  passcode = " + passcode);
    System.out.println("  bridgeAddress = " + bridgeAddress);
    System.out.println("  endpoint = " + endpoint);
}
/**
 * Asks the endpoint at {@code ipAddress} to leave its current call.
 * Stub implementation: logs the request only.
 *
 * @see <a href="https://relay.bluejeans.com/docs/mesh.html#hangup">https://relay.bluejeans.com/docs/mesh.html#hangup</a>
 */
@POST
@Path("{ipAddress}/hangup")
public void hangup(@PathParam("ipAddress") final String ipAddress, final Endpoint endpoint) {
    System.out.println("Received hangup request");
    System.out.println("  ipAddress = " + ipAddress);
    System.out.println("  endpoint = " + endpoint);
}
/**
 * Asks the endpoint at {@code ipAddress} to mute its microphone.
 * Stub implementation: logs the request only.
 *
 * @see <a href="https://relay.bluejeans.com/docs/mesh.html#mutemicrophone">https://relay.bluejeans.com/docs/mesh.html#mutemicrophone</a>
 */
@POST
@Path("{ipAddress}/mutemicrophone")
public void muteMicrophone(@PathParam("ipAddress") final String ipAddress, final Endpoint endpoint) {
    System.out.println("Received mutemicrophone request");
    System.out.println("  ipAddress = " + ipAddress);
    System.out.println("  endpoint = " + endpoint);
}
/**
 * Asks the endpoint at {@code ipAddress} to unmute its microphone.
 * Stub implementation: logs the request only.
 *
 * NOTE(review): the @see anchor below points at #mutemicrophone — looks like
 * a copy-paste from the method above; confirm whether the docs define an
 * #unmutemicrophone section and update the link if so.
 *
 * @see <a href="https://relay.bluejeans.com/docs/mesh.html#mutemicrophone">https://relay.bluejeans.com/docs/mesh.html#mutemicrophone</a>
 */
@POST
@Path("{ipAddress}/unmutemicrophone")
public void unmuteMicrophone(@PathParam("ipAddress") final String ipAddress, final Endpoint endpoint) {
    System.out.println("Received unmutemicrophone request");
    System.out.println("  ipAddress = " + ipAddress);
    System.out.println("  endpoint = " + endpoint);
}
}<|fim▁end|>
|
final Map<String, Boolean> status = new HashMap<>();
status.put("callActive", false);
status.put("microphoneMuted", false);
|
<|file_name|>player-svg.js<|end_file_name|><|fim▁begin|><|fim▁hole|> <circle cx="${playerSpec.width/2}" cy="${playerSpec.height/2}" r="${playerSpec.height/4}" class="mapSvg__player" stroke="${playerSpec[player].color}" fill="${playerSpec[player].color}"/> \
</svg>`
)
);
};<|fim▁end|>
|
module.exports = function(player, playerSpec) {
return ('data:image/svg+xml;utf-8,' +
encodeURIComponent(
`<svg width="${playerSpec.width}" height="${playerSpec.height}" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 ${playerSpec.width} ${playerSpec.height}" version="1.1" class="mapSvg"> \
|
<|file_name|>PThreadMutex.cpp<|end_file_name|><|fim▁begin|>/* -*-c++-*- OpenThreads library, Copyright (C) 2002 - 2007 The Open Thread Group
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
//
// PThreadMutex.c++ - C++ Mutex class built on top of posix threads.
// ~~~~~~~~~~~~~~~~
//
#include <unistd.h>
#include <pthread.h>
#include <OpenThreads/Mutex>
#include "PThreadMutexPrivateData.h"
using namespace OpenThreads;
// ----------------------------------------------------------------------------
//
// Description: Constructor
//
// Use: public.
//
// Builds the pthread mutex held in the opaque private-data block.
// MUTEX_RECURSIVE maps to PTHREAD_MUTEX_RECURSIVE; any other type gets
// PTHREAD_MUTEX_ERRORCHECK where available (skipped on pre-NPTL Linux).
// NOTE(review): mutex_attr is never pthread_mutexattr_destroy()ed after
// init — harmless on common implementations, but confirm for the targets
// this library supports.
Mutex::Mutex(MutexType type) :
    _mutexType(type)
{
    pthread_mutexattr_t mutex_attr;
    pthread_mutexattr_init(&mutex_attr);

    // Private data owns the actual pthread_mutex_t; freed in ~Mutex().
    PThreadMutexPrivateData *pd = new PThreadMutexPrivateData();

    if (type == MUTEX_RECURSIVE)
    {
        pthread_mutexattr_settype(&mutex_attr, PTHREAD_MUTEX_RECURSIVE);
    }
    else
    {
#ifndef __linux__ // (not available until NPTL) [
        pthread_mutexattr_settype(&mutex_attr, PTHREAD_MUTEX_ERRORCHECK);
#endif // ] __linux__
    }

#ifdef ALLOW_PRIORITY_SCHEDULING // [

#ifdef __sun // [
    pthread_mutexattr_setprotocol(&mutex_attr, PTHREAD_PRIO_NONE);
#endif // ] __sun

    // -------------------------------------------------------------------------
    // Initialization is a bit tricky, since we have to be able to be aware
    // that on many-to-many execution vehicle systems, we may run into
    // priority inversion deadlocks if a mutex is shared between threads
    // of differing priorities.  Systems that do this should provide the
    // following protocol attributes to prevent deadlocks.  Check at runtime.
    //
    //  PRIO_INHERIT causes any thread locking the mutex to temporarily become
    //  the same priority as the highest thread also blocked on the mutex.
    //  Although more expensive, this is the preferred method.
    //
    //  PRIO_PROTECT causes any thread locking the mutex to assume the priority
    //  specified by setprioceiling.  pthread_mutex_lock will fail if
    //  the priority ceiling is lower than the thread's priority.   Therefore,
    //  the priority ceiling must be set to the max priority in order to
    //  guarantee no deadlocks will occur.
    //
#if defined (_POSIX_THREAD_PRIO_INHERIT) || defined (_POSIX_THREAD_PRIO_PROTECT) // [

    if (sysconf(_POSIX_THREAD_PRIO_INHERIT))
    {
        pthread_mutexattr_setprotocol(&mutex_attr, PTHREAD_PRIO_INHERIT);
    }
    else if (sysconf(_POSIX_THREAD_PRIO_PROTECT))
    {
        // Ceiling must be the scheduling policy's maximum priority — see
        // the PRIO_PROTECT discussion above.
        int th_policy;
        struct sched_param th_param;
        pthread_getschedparam(pthread_self(), &th_policy, &th_param);
        pthread_mutexattr_setprotocol(&mutex_attr, PTHREAD_PRIO_PROTECT);
        pthread_mutexattr_setprioceiling(&mutex_attr,
                                         sched_get_priority_max(th_policy));
    }

#endif // ] Priority Scheduling.

#endif // ] ALLOW_PRIORITY_SCHEDULING

    pthread_mutex_init(&pd->mutex, &mutex_attr);
    _prvData = static_cast<void*>(pd);
}
// ----------------------------------------------------------------------------
//
// Description: Destructor
//
// Use: public.
//
Mutex::~Mutex()
{
    // Tear down in reverse order of construction: destroy the pthread
    // mutex first, then free the private-data holder that owned it.
    PThreadMutexPrivateData* data =
        static_cast<PThreadMutexPrivateData*>(_prvData);
    pthread_mutex_destroy(&data->mutex);
    delete data;
}
// ----------------------------------------------------------------------------
//
// Description: lock the mutex
//
// Use: public.
//
int Mutex::lock()
{
    // Forward straight to pthreads; the private data block owns the
    // real mutex. Returns the pthread_mutex_lock() error code (0 = ok).
    return pthread_mutex_lock(
        &static_cast<PThreadMutexPrivateData*>(_prvData)->mutex);
}
// ----------------------------------------------------------------------------
//
// Description: unlock the mutex
//
// Use: public.
//
int Mutex::unlock()
{
    // Forward straight to pthreads; returns pthread_mutex_unlock()'s
    // error code (0 = ok).
    return pthread_mutex_unlock(
        &static_cast<PThreadMutexPrivateData*>(_prvData)->mutex);
}
// ----------------------------------------------------------------------------
//<|fim▁hole|>// Decription: test if the mutex may be locked
//
// Use: public.
//
int Mutex::trylock()
{
    // Non-blocking acquire: returns 0 on success, EBUSY (or another
    // pthread error code) when the mutex is already held.
    return pthread_mutex_trylock(
        &static_cast<PThreadMutexPrivateData*>(_prvData)->mutex);
}
| |
<|file_name|>app-init.ts<|end_file_name|><|fim▁begin|>/*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
// first, load the customize endpoint → check if any images are customized and render banner accordingly. also use the proper name.
// if the endpoint doesn't work, we have a problem. report back accordingly…
declare const CUSTOMIZE: any;
declare const COCALC_ASSETS: string;
let HELP_EMAIL = "[email protected]";
// Renders the current help address as a mailto link (HELP_EMAIL is
// overwritten by style() once the customize payload arrives).
function email() {
  const addr = HELP_EMAIL;
  return `<a href="mailto:${addr}" target="_blank" rel="noopener">${addr}</a>`;
}
// Last-resort failure path for the customize <script>: replace the whole
// page with a retry notice and halt all further loading (window.stop
// aborts pending requests and script execution).
function script_error() {
  document.body.innerHTML =
    "<h1 style='text-align:center;margin-top:10vh'>Initialization problem. Please try again in a minute ...</h1>";
  window.stop();
}
// compact throttling with a trailing call, used for updating the progress bars
// credits: https://codeburst.io/throttling-and-debouncing-in-javascript-b01cad5c8edf
// Returns a wrapper that invokes `fn` at most once per `limit` ms. A call
// arriving inside the window is deferred to the trailing edge (replacing any
// previously deferred call), so the most recent update is never dropped.
function throttle(fn, limit) {
  let lastFn;   // pending trailing-edge timer
  let lastRan;  // timestamp of the last actual invocation
  return function (...args) {
    if (!lastRan) {
      // first call: run immediately
      fn(...args);
      lastRan = Date.now();
    } else {
      // re-arm the trailing call with only the latest arguments
      clearTimeout(lastFn);
      lastFn = setTimeout(() => {
        if (Date.now() - lastRan >= limit) {
          fn(...args);
          lastRan = Date.now();
        }
      }, limit - (Date.now() - lastRan));
    }
  };
}
// Apply the fetched CUSTOMIZE settings to the static startup page: set the
// document title, remember the help address used by email(), extend the
// fallback error message, and show either the customized logos (banner2)
// or the default CoCalc banner (banner1).
function style() {
  const NAME = CUSTOMIZE.site_name || "CoCalc";
  HELP_EMAIL = CUSTOMIZE.help_email || "[email protected]";
  const SITE_DESCR = CUSTOMIZE.site_description || "";
  document.title = `${NAME} – ${SITE_DESCR}`;
  const msg = document.getElementById("cc-message");
  if (msg == null) {
    // happens when loading is very quick and message is already removed
    return;
  }
  msg.innerHTML += `
Problem while loading ${NAME}.
<br/>
Try hitting shift and reload the page, restart your browser, or <a target="_blank" rel="noopener" href="https://doc.cocalc.com/howto/connectivity-issues.html">follow these steps</a>.
If the problem persists, email ${email()}.`;
  if (CUSTOMIZE.logo_square || CUSTOMIZE.logo_rectangular) {
    // customized branding: show the two-logo banner
    const banner = document.getElementById("cc-banner2");
    if (banner == null) return;
    banner.style.display = "block";
    banner.innerHTML = `<img class="logo-square" src="${CUSTOMIZE.logo_square}">`;
    banner.innerHTML += `<img class="logo-rectangular" src="${CUSTOMIZE.logo_rectangular}">`;
  } else {
    // default branding
    const banner = document.getElementById("cc-banner1");
    if (banner == null) return;
    banner.style.display = "block";
  }
}
// load customization once the DOM exists.
// then start downloading all cocalc assets...
document.addEventListener("DOMContentLoaded", function () {
const customizeScript = document.createElement("script");
customizeScript.onerror = script_error;
customizeScript.onload = function () {
style();
load_assets();
};
document.head.appendChild(customizeScript);<|fim▁hole|>});
// Builds the HTML fragment shown for an uncaught application error.
// `show_explanation` adds reporting instructions (true for errors during
// startup; the in-app react banner passes false).
// FIX: corrected user-visible typos in the explanation text
// ("In the mean time" -> "In the meantime", "upating" -> "updating").
function error_msg({ msg, lineNo, columnNo, url, stack, show_explanation }) {
  const explanation = show_explanation
    ? `<div>
Please report the full error, your browser and operating system to ${email()}.
In the meantime, try switching to another browser or updating to the
latest version of your browser.
</div>`
    : "";
  return `<div><strong>Application Error:</strong> <code>${msg} @ ${lineNo}/${columnNo} of ${url}</code></div>
${explanation}
<pre>
${stack}
</pre>`;
}
/* We do "window.onerror = null" below for the following reason.
 When I merged this, the following always results in nonstop 100% cpu usage:
 1. Open cocalc
 2. Open a project.
 3. Boom!
 With the profiler on, it's this onerror that is being called repeatedly.
 Maybe there is a bug in it that causes it to call itself and crash things.
 That seems likely. I've thus rewritten it so that is impossible, e.g., by
 making it so that if it is triggered, it disables itself after running once.
*/
// Global error hook: render the error into the startup banner (or, once the
// app is up, into the react error banner), then deregister itself so a bug
// in this handler can never re-trigger it in a loop (see note above).
function handle_window_error(msg, url, lineNo, columnNo, error) {
  if (error == null) {
    // Sometimes this window.onerror gets called with error null.
    // We ignore that here.  E.g., this happens when you open
    // a project sometimes with this input:
    // {msg: "ResizeObserver loop limit exceeded", url: "https://cocalc.com/45f...44a1-b842-6eaf5ee07f8f/files/?session=default", lineNo: 0, columnNo: 0, error: null}
    return;
  }
  console.log("handle_window_error", { msg, url, lineNo, columnNo, error });
  // one-shot: disable ourselves before doing anything that could throw
  window.onerror = null;
  let errorbox = document.getElementById("cocalc-error-report-startup");
  let show_explanation = true;
  if (errorbox == null) {
    // app did startup, hence the banner is removed from the DOM
    // instead, check if there is the react error report banner and insert it there!
    errorbox = document.getElementById("cocalc-error-report-react");
    show_explanation = false;
    if (errorbox == null) return;
  }
  errorbox.style.display = "block";
  const stack = error != null ? error.stack : "<no stacktrace>";
  errorbox.innerHTML = error_msg({
    msg,
    lineNo,
    columnNo,
    url,
    stack,
    show_explanation,
  });
}
window.onerror = handle_window_error;
// magic code to load all webpack assets
// order matters!
const asset_names = ["fill", "css", "pdf.worker", "vendor", "smc"];
const asset_width = Math.max(...asset_names.map((x) => x.length));
// Space-pad `s` to width `w`; "left" alignment appends the padding,
// "right" prepends it. Strings already >= w come back unchanged.
function pad(s: string, w: number, align: "left" | "right" = "left") {
  return align == "left" ? s.padEnd(w, " ") : s.padStart(w, " ");
}
// Promise-based sleep: resolves (with no value) after `t` milliseconds.
function delay(t): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, t));
}
// Per-asset bookkeeping behind the startup progress display.
interface Loading {
  err?: string;                       // set once anything fails; freezes further updates
  size: { [key: string]: number };    // expected byte size per asset (from webpack metadata)
  loaded: { [key: string]: number };  // bytes seen so far (compressed transfer)
  done: { [key: string]: boolean };   // true = evaluated ok, false = failed
}
// initialize…
const loading: Loading = {
  size: {},
  loaded: {},
  done: {},
};
// will be a div element ...
let loading_output: HTMLElement | null = null;
// one progress bar (and its fill span) per asset name
const progress_bars: { [key: string]: HTMLElement } = {};
const progress_bars_span: { [key: string]: HTMLElement } = {};
let loading_msg: HTMLElement | null = null;
// Surface an asset-loading failure: record it in `loading.err`, replace the
// progress area with the error text, restyle the banners as failed, and then
// stop all script execution so the broken startup does not continue.
async function show_error(err) {
  if (typeof err === "string") {
    loading.err = `Error: ${err}`;
  } else {
    // this is a broken promise: most likely, load_asset failed. We tell the user about this.
    loading.err = `Error ${err.status}: ${err.statusText}`;
  }
  if (loading_output == null) return;
  loading_output.innerHTML = `Problem loading assets.\n${loading.err}`;
  loading_output.style["white-space"] = "pre-wrap";
  const err_box = document.querySelector(
    "#smc-startup-banner div.banner-error"
  );
  // https://github.com/microsoft/TypeScript/issues/3263#issuecomment-277894602
  if (err_box instanceof HTMLElement) {
    // we know for sure it is there
    err_box.style.opacity = "1";
  }
  ["cc-banner1", "cc-banner2"].forEach((id) => {
    const banner = document.getElementById(id);
    if (banner == null) return;
    banner.style.opacity = "1";
    banner.classList.add("banner-error");
  });
  const bottom_status = document.getElementById("smc-startup-banner-status");
  if (bottom_status != null) {
    bottom_status.innerHTML = "Error: aborting startup initialization";
  }
  // give it a sec to render, then abort all of this …
  await delay(10);
  window.stop(); // stops javascript
}
const DEFLATE_FACTOR = 2; // picked by observing in production, no deep insight
// calculate a progress bar for each asset
// the problem is, we use compression, nginx's streaming, or cloudflare, ... ?
// in any case, there is no header for the total content size. hence the browser
// only knows the bytes it did transfer. that's why we multiply the known "info" value by DEFLATE_FACTOR.
// that's more or less ok.
// also, for chrome, be aware of https://bugs.chromium.org/p/chromium/issues/detail?id=463622
// and chrome, firefox and others might have subtle differences ...
// Repaint every progress bar from `loading`, and switch the status line to
// "Starting ..." once all assets report done. No-op after the first error.
async function show_loading() {
  if (loading.err != null) return;
  for (const name of asset_names) {
    const size = loading.size[name];
    const info = DEFLATE_FACTOR * loading.loaded[name];
    let msg = `${pad(name, asset_width + 1)} `;
    let pct = 0;
    if (size != null && size != 0 && info != null && !isNaN(info)) {
      if (loading.done[name] != null) {
        // set to 100% ... it's not accurate what webpack tells us
        if (loading.done[name]) loading.loaded[name] = loading.size[name];
        msg += loading.done[name] ? "DONE" : "FAIL";
        pct = 100;
      } else {
        pct = Math.min(100, Math.round((100 * info) / size));
        const pct_str = `${pct}%`;
        msg += `${pad(pct_str, 4, "right")}`;
      }
    }
    progress_bars[name].setAttribute("data-label", msg);
    progress_bars_span[name].style.width = `${pct}%`;
    if (loading_msg != null) {
      const dones = Object.values(loading.done);
      const all_done = asset_names.length == dones.length;
      if (all_done && dones.every((v) => !!v)) {
        const NAME = CUSTOMIZE.site_name || "CoCalc";
        loading_msg.innerHTML = `Starting ${NAME} ...`;
      }
    }
    // yield to the event loop so the DOM actually repaints between bars
    await delay(1);
  }
}
// create the progress bars
// Build one labelled progress bar per asset key inside `loading_output`,
// plus the trailing status line (`loading_msg`). The bar elements are
// cached in progress_bars / progress_bars_span for show_loading().
function init_loading_output(keys: string[]) {
  if (loading_output == null) return;
  for (const key of keys) {
    // inner span is the filled portion; its width is the percentage
    const span = document.createElement("span");
    span.className = "value";
    span.style.width = "0%";
    const bar = document.createElement("div");
    bar.className = "progress";
    bar.setAttribute("data-label", "0%");
    bar.appendChild(span);
    progress_bars[key] = bar;
    progress_bars_span[key] = span;
    loading_output.appendChild(bar);
  }
  loading_msg = document.createElement("pre");
  loading_output.appendChild(loading_msg);
}
// Record the transferred byte count reported for one asset and repaint.
function _update_loading(name, info) {
  // sometimes, the progress indicator says 0
  if (info == 0) return;
  // console.log("update_loading", name, info, "size", loading.size[name]);
  loading.loaded[name] = info;
  show_loading();
}
// Throttled variant used from XHR progress events, which can fire very often.
const update_loading = throttle(_update_loading, 10);
// Fetch one webpack asset as text via XHR (hash appended for cache busting).
// Resolves with the source code on HTTP 2xx; rejects with {status,
// statusText} otherwise, after marking the asset as failed. Progress events
// feed the throttled progress-bar updater.
function load_asset(name, url, hash): Promise<string> {
  return new Promise(function (done, err) {
    const req = new XMLHttpRequest();
    req.open("GET", `${url}?${hash}`);
    req.onload = function () {
      if (this.status >= 200 && this.status < 300) {
        // report 100%
        update_loading(name, req.responseText.length);
        done(req.responseText);
      } else {
        loading.done[name] = false;
        err({
          status: this.status,
          statusText: req.statusText,
        });
      }
    };
    req.onerror = function () {
      // network-level failure (no HTTP status at all)
      loading.done[name] = false;
      err({
        status: this.status,
        statusText: req.statusText,
      });
    };
    req.addEventListener("progress", function (_e) {
      // e.total is 0 if it isn't reported (no surprise with compression),
      // but we happen to know the size from webpack anyways ...
      // e.loaded gives us the bytes so far, but compressed ...
      update_loading(name, req.responseText.length);
    });
    req.send();
  });
}
type Chunks = { [key: string]: { size: number; entry: string; hash: string } };
// load_assets is called after the customization script is loaded
// the point is: there is a potential race condition starting cocalc very quickly, before this is defined
async function load_assets() {
const chunks: Chunks = JSON.parse(COCALC_ASSETS);
delete window["COCALC_ASSETS"];
loading_output = document.getElementById("cocalc-assets-loading");
// loading them in parallel ...
const code: { [key: string]: Promise<string | void> } = {};
try {
init_loading_output(Object.keys(chunks));
for (const [name, chunk] of Object.entries(chunks)) {
loading.size[name] = chunk.size;
loading.loaded[name] = 0;
code[name] = load_asset(name, chunk.entry, chunk.hash).catch((err) => {
loading.done[name] = false;
show_error(err);
});
}
// we eval them in a well defined order: i.e. fill, then css, then vendor?, ...
const names = Object.keys(code);
// safety check
names.forEach((n) => {
if (asset_names.indexOf(n) == -1) throw new Error(`unknown asset ${n}`);
});
await names.forEach(async (name) => {
const source_code = await code[name];
if (loading.err != null) return;
if (typeof source_code === "string") {
loading.done[name] = true;
await show_loading();
await eval(source_code);
} else {
loading.done[name] = false;
}
});
} catch (err) {
show_error(err);
}
}<|fim▁end|>
|
customizeScript.src = `${window.app_base_url}/customize?type=full`;
|
<|file_name|>context.cc<|end_file_name|><|fim▁begin|>#include "rr.h"
namespace rr {
void Context::Init() {
ClassBuilder("Context").
defineSingletonMethod("New", &New).
defineSingletonMethod("GetCurrent", &GetCurrent).<|fim▁hole|> defineSingletonMethod("GetCalling", &GetCalling).
defineSingletonMethod("InContext", &InContext).
defineMethod("Dispose", &Dispose).
defineMethod("Global", &Global).
defineMethod("DetachGlobal", &Global).
defineMethod("ReattachGlobal", &ReattachGlobal).
defineMethod("SetSecurityToken", &SetSecurityToken).
defineMethod("UseDefaultSecurityToken", &UseDefaultSecurityToken).
defineMethod("GetSecurityToken", &GetSecurityToken).
defineMethod("HasOutOfMemoryException", &HasOutOfMemoryException).
defineMethod("SetEmbedderData", &SetEmbedderData).
defineMethod("GetEmbedderData", &GetEmbedderData).
defineMethod("AllowCodeGenerationFromStrings", &AllowCodeGenerationFromStrings).
defineMethod("IsCodeGenerationFromStringsAllowed", &IsCodeGenerationFromStringsAllowed).
defineMethod("Enter", &Enter).
defineMethod("Exit", &Exit).
store(&Class);
ClassBuilder("ExtensionConfiguration").
defineSingletonMethod("new", &ExtensionConfiguration::initialize).
store(&ExtensionConfiguration::Class);
}
// ---------------------------------------------------------------------------
// Thin Ruby <-> V8 delegation shims. Each wrapper unwraps the Ruby receiver
// into a v8::Context handle, forwards the call, and converts the result back
// using the helpers from rr.h (Void/Bool/Value/Object/Context).
// NOTE(review): Void(...) appears to evaluate its argument and return nil to
// Ruby — confirm against the macro definition in rr.h.
// ---------------------------------------------------------------------------
VALUE Context::Dispose(VALUE self) {
  Void(Context(self).dispose())
}

VALUE Context::Global(VALUE self) {
  return Object(Context(self)->Global());
}

VALUE Context::DetachGlobal(VALUE self) {
  Void(Context(self)->DetachGlobal());
}

VALUE Context::ReattachGlobal(VALUE self, VALUE global) {
  Void(Context(self)->ReattachGlobal(Object(global)));
}

// The next three are static v8::Context queries; the Ruby receiver is unused.
VALUE Context::GetEntered(VALUE self) {
  return Context(v8::Context::GetEntered());
}

VALUE Context::GetCurrent(VALUE self) {
  return Context(v8::Context::GetCurrent());
}

VALUE Context::GetCalling(VALUE self) {
  return Context(v8::Context::GetCalling());
}

VALUE Context::SetSecurityToken(VALUE self, VALUE token) {
  Void(Context(self)->SetSecurityToken(Value(token)));
}

VALUE Context::UseDefaultSecurityToken(VALUE self) {
  Void(Context(self)->UseDefaultSecurityToken());
}

VALUE Context::GetSecurityToken(VALUE self) {
  return Value(Context(self)->GetSecurityToken());
}

VALUE Context::HasOutOfMemoryException(VALUE self) {
  return Bool(Context(self)->HasOutOfMemoryException());
}

// Static query; receiver unused.
VALUE Context::InContext(VALUE self) {
  return Bool(v8::Context::InContext());
}

VALUE Context::SetEmbedderData(VALUE self, VALUE index, VALUE data) {
  Void(Context(self)->SetEmbedderData(NUM2INT(index), Value(data)));
}
// Returns the embedder-data slot at `index` for this context.
// BUG FIX: the original wrapped the call in Void(), which discarded the
// returned v8 handle, so Ruby always got nil back. Wrap it in Value()
// instead (mirroring GetSecurityToken above) so the stored data is
// actually returned to the caller.
VALUE Context::GetEmbedderData(VALUE self, VALUE index) {
  return Value(Context(self)->GetEmbedderData(NUM2INT(index)));
}
// Toggle whether eval()/Function-from-string is permitted in this context.
VALUE Context::AllowCodeGenerationFromStrings(VALUE self, VALUE allow) {
  Void(Context(self)->AllowCodeGenerationFromStrings(RTEST(allow)));
}

VALUE Context::IsCodeGenerationFromStringsAllowed(VALUE self) {
  return Bool(Context(self)->IsCodeGenerationFromStringsAllowed());
}
// Builds a v8::ExtensionConfiguration from a Ruby Array of extension names.
// NOTE(review): `const char* array[length]` is a variable-length array — a
// gcc/clang extension, not standard C++. The pointers alias the Ruby string
// buffers and are only valid while `names` (and its elements) stay alive;
// confirm that v8::ExtensionConfiguration copies what it needs.
VALUE ExtensionConfiguration::initialize(VALUE self, VALUE names) {
  int length = RARRAY_LENINT(names);
  const char* array[length];

  for (int i = 0; i < length; i++) {
    array[i] = RSTRING_PTR(rb_ary_entry(names, i));
  }

  return ExtensionConfiguration(new v8::ExtensionConfiguration(length, array));
}
// Context.New(extensions = nil, global_template = nil, global_object = nil)
// Creates a fresh V8 context. v8::Context::New yields a Persistent handle;
// we wrap it in an rr Context reference and then Dispose() the local
// Persistent. NOTE(review): this assumes `Context reference(context)` takes
// its own handle before the Dispose — confirm in the Context wrapper.
VALUE Context::New(int argc, VALUE argv[], VALUE self) {
  VALUE extension_configuration; VALUE global_template; VALUE global_object;
  // "03": all three arguments are optional
  rb_scan_args(argc, argv, "03", &extension_configuration, &global_template, &global_object);

  v8::Persistent<v8::Context> context(v8::Context::New(
    ExtensionConfiguration(extension_configuration),
    *ObjectTemplate(global_template),
    *Object(global_object)
  ));

  Context reference(context);
  context.Dispose();

  return reference;
}
// Enter/Exit make this context the current one for subsequent V8 calls;
// they must be balanced by the caller.
VALUE Context::Enter(VALUE self) {
  Void(Context(self)->Enter());
}

VALUE Context::Exit(VALUE self) {
  Void(Context(self)->Exit());
}

// Specialization used by the Pointer<> helper to pull the wrapped
// v8::ExtensionConfiguration back out of a Ruby Data object.
template <> void Pointer<v8::ExtensionConfiguration>::unwrap(VALUE value) {
  Data_Get_Struct(value, class v8::ExtensionConfiguration, pointer);
}
}<|fim▁end|>
|
defineSingletonMethod("GetEntered", &GetEntered).
|
<|file_name|>JoinSheetMapper.java<|end_file_name|><|fim▁begin|>package org.simpleflatmapper.poi.impl;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.simpleflatmapper.map.ConsumerErrorHandler;
import org.simpleflatmapper.map.ContextualSourceFieldMapper;
import org.simpleflatmapper.map.SourceFieldMapper;
import org.simpleflatmapper.map.MappingContext;
import org.simpleflatmapper.map.MappingException;
import org.simpleflatmapper.map.context.MappingContextFactory;
import org.simpleflatmapper.map.mapper.JoinMapperEnumerable;
import org.simpleflatmapper.poi.RowMapper;
import org.simpleflatmapper.util.CheckedConsumer;
import java.util.Iterator;
import org.simpleflatmapper.util.Enumerable;
import org.simpleflatmapper.util.EnumerableIterator;
//IFJAVA8_START
import org.simpleflatmapper.util.EnumerableSpliterator;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
//IFJAVA8_END
public class JoinSheetMapper<T> implements RowMapper<T> {
private final ContextualSourceFieldMapper<Row, T> mapper;
private final int startRow = 0;
private final ConsumerErrorHandler consumerErrorHandler;
private final MappingContextFactory<? super Row> mappingContextFactory;
public JoinSheetMapper(ContextualSourceFieldMapper<Row, T> mapper, ConsumerErrorHandler consumerErrorHandler, MappingContextFactory<? super Row> mappingContextFactory) {
this.mapper = mapper;<|fim▁hole|> }
/** Iterates mapped objects over the whole sheet, starting at the default first row. */
@Override
public Iterator<T> iterator(Sheet sheet) {
    return iterator(startRow, sheet);
}

/** Iterates mapped objects starting at {@code startRow}. */
@Override
public Iterator<T> iterator(int startRow, Sheet sheet) {
    return new EnumerableIterator<T>(enumerable(startRow, sheet, newMappingContext()));
}

/** Enumerates mapped objects over the whole sheet, starting at the default first row. */
@Override
public Enumerable<T> enumerate(Sheet sheet) {
    return enumerate(startRow, sheet);
}

/** Enumerates mapped objects starting at {@code startRow}. */
@Override
public Enumerable<T> enumerate(int startRow, Sheet sheet) {
    return enumerable(startRow, sheet, newMappingContext());
}

// Core factory: wraps the sheet's rows in a join-aware enumerable so that
// consecutive rows belonging to the same joined object are folded together.
private Enumerable<T> enumerable(int startRow, Sheet sheet, MappingContext<? super Row> mappingContext) {
    return new JoinMapperEnumerable<Row, T>(mapper, mappingContext, new RowEnumerable(startRow, sheet));
}

/** Pushes every mapped object into {@code consumer}, starting at the default first row. */
@Override
public <RH extends CheckedConsumer<? super T>> RH forEach(Sheet sheet, RH consumer) {
    return forEach(startRow, sheet, consumer);
}
/**
 * Pushes every mapped object into {@code consumer}, starting at
 * {@code startRow}. A failure while consuming one object is routed to the
 * configured error handler instead of aborting the sweep.
 */
@Override
public <RH extends CheckedConsumer<? super T>> RH forEach(int startRow, Sheet sheet, RH consumer) {
    final MappingContext<? super Row> context = newMappingContext();
    final Enumerable<T> rows = enumerable(startRow, sheet, context);
    while (rows.next()) {
        try {
            consumer.accept(rows.currentValue());
        } catch (Exception e) {
            consumerErrorHandler.handlerError(e, rows.currentValue());
        }
    }
    return consumer;
}
//IFJAVA8_START
/** Streams mapped objects over the whole sheet, starting at the default first row. */
@Override
public Stream<T> stream(Sheet sheet) {
    return stream(startRow, sheet);
}

/** Streams mapped objects (sequential, non-parallel) starting at {@code startRow}. */
@Override
public Stream<T> stream(int startRow, Sheet sheet) {
    return StreamSupport.stream(new EnumerableSpliterator<T>(enumerable(startRow, sheet, newMappingContext())), false);
}
//IFJAVA8_END

/** Maps a single row with a fresh implicit context. */
@Override
public T map(Row source) throws MappingException {
    return mapper.map(source);
}

/** Maps a single row with the caller-supplied context. */
@Override
public T map(Row source, MappingContext<? super Row> context) throws MappingException {
    return mapper.map(source, context);
}

// One context per traversal: the join logic accumulates state in it.
private MappingContext<? super Row> newMappingContext() {
    return mappingContextFactory.newContext();
}
}<|fim▁end|>
|
this.consumerErrorHandler = consumerErrorHandler;
this.mappingContextFactory = mappingContextFactory;
|
<|file_name|>unique_index_violation_check.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
class UniqueIndexViolationCheck:
unique_indexes_query = """
select table_oid, index_name, table_name, array_agg(attname) as column_names
from pg_attribute, (
select pg_index.indrelid as table_oid, index_class.relname as index_name, table_class.relname as table_name, unnest(pg_index.indkey) as column_index
from pg_index, pg_class index_class, pg_class table_class
where pg_index.indisunique='t'
and index_class.relnamespace = (select oid from pg_namespace where nspname = 'pg_catalog')<|fim▁hole|> and index_class.oid = pg_index.indexrelid
and table_class.oid = pg_index.indrelid
) as unique_catalog_index_columns
where attnum = column_index
and attrelid = table_oid
group by table_oid, index_name, table_name;
"""
def __init__(self):
    # Template with 8 positional %s slots, filled by
    # get_violated_segments_query(). Each UNION branch finds
    # (segment, key-columns) groups that occur more than once — i.e.
    # unique-index violations — once via gp_dist_random (primary segment
    # data) and once via the plain table; NULL keys are excluded because
    # SQL UNIQUE permits duplicate NULLs.
    self.violated_segments_query = """
        select distinct(gp_segment_id) from (
            (select gp_segment_id, %s
            from gp_dist_random('%s')
            where (%s) is not null
            group by gp_segment_id, %s
            having count(*) > 1)
            union
            (select gp_segment_id, %s
            from %s
            where (%s) is not null
            group by gp_segment_id, %s
            having count(*) > 1)
        ) as violations
    """
def runCheck(self, db_connection):
    """Return a list of unique-index violations found across segments.

    Each entry is a dict with table_oid, table_name, index_name,
    column_names (comma-joined) and the violating gp_segment_ids.
    ``db_connection`` is expected to expose a pygresql-style
    ``query(...).getresult()`` API.
    """
    unique_indexes = db_connection.query(self.unique_indexes_query).getresult()
    violations = []

    for (table_oid, index_name, table_name, column_names) in unique_indexes:
        # getresult() yields the indexed columns as an array; flatten it
        # into a comma-separated list for SQL interpolation.
        column_names = ",".join(column_names)
        sql = self.get_violated_segments_query(table_name, column_names)
        violated_segments = db_connection.query(sql).getresult()
        if violated_segments:
            violations.append(dict(table_oid=table_oid,
                                   table_name=table_name,
                                   index_name=index_name,
                                   column_names=column_names,
                                   violated_segments=[row[0] for row in violated_segments]))

    return violations
def get_violated_segments_query(self, table_name, column_names):
    """Render the duplicate-detection SQL for one table and column list.

    The template has eight positional slots: the same
    (columns, table, columns, columns) group repeated once per UNION
    branch of the query.
    """
    branch = (column_names, table_name, column_names, column_names)
    return self.violated_segments_query % (branch + branch)
|
and index_class.relkind = 'i'
|
<|file_name|>element_impl.hpp<|end_file_name|><|fim▁begin|>//
// Copyright 2012 Josh Blum
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#ifndef INCLUDED_LIBTSBE_ELEMENT_IMPL_HPP
#define INCLUDED_LIBTSBE_ELEMENT_IMPL_HPP
#include <tsbe/thread_pool.hpp>
#include <tsbe_impl/common_impl.hpp>
#include <vector><|fim▁hole|>namespace tsbe
{
//! ElementImpl is both a topology and a block to allow interconnection
struct ElementImpl
{
ElementImpl(void)
{
//NOP
}
~ElementImpl(void)
{
this->actor.reset();
}
bool block;
bool is_block(void)
{
return block;
}
boost::shared_ptr<Actor> actor;
boost::shared_ptr<Theron::Framework> framework;
ThreadPool thread_pool;
};
} //namespace tsbe
#endif /*INCLUDED_LIBTSBE_ELEMENT_IMPL_HPP*/<|fim▁end|>
|
#include <queue>
#include <iostream>
|
<|file_name|>run_evaluation.py<|end_file_name|><|fim▁begin|>import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.dirname(BASE_DIR))
from global_variables import *
from evaluation_helper import *
cls_names = g_shape_names
img_name_file_list = [os.path.join(g_real_images_voc12val_det_bbox_folder, name+'.txt') for name in cls_names]
det_bbox_mat_file_list = [os.path.join(g_detection_results_folder, x.rstrip()) for x in open(g_rcnn_detection_bbox_mat_filelist)]
result_folder = os.path.join(BASE_DIR, 'avp_test_results')<|fim▁hole|>test_avp_nv(cls_names, img_name_file_list, det_bbox_mat_file_list, result_folder)
img_name_file_list = [os.path.join(g_real_images_voc12val_easy_gt_bbox_folder, name+'.txt') for name in cls_names]
view_label_folder = g_real_images_voc12val_easy_gt_bbox_folder
result_folder = os.path.join(BASE_DIR, 'vp_test_results')
test_vp_acc(cls_names, img_name_file_list, result_folder, view_label_folder)<|fim▁end|>
| |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Django settings for testautoslug project.
import os
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(PROJECT_ROOT, 'dev.db'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '44mxeh8nkm^ycwef-eznwgk&8_lwc!j9r)h3y_^ypz1iom18pa'<|fim▁hole|># List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'testautoslug.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'testapp',
)<|fim▁end|>
| |
<|file_name|>StdAfx.cpp<|end_file_name|><|fim▁begin|>// stdafx.cpp : source file that includes just the standard includes
// GetFolderTime.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
<|fim▁hole|>
#include "stdafx.h"<|fim▁end|>
| |
<|file_name|>school-session-types-list-test.js<|end_file_name|><|fim▁begin|>import EmberObject from '@ember/object';
import { htmlSafe } from '@ember/string';
import RSVP from 'rsvp';
import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import {
render,
settled,
find,
click
} from '@ember/test-helpers';
import hbs from 'htmlbars-inline-precompile';
const { resolve } = RSVP;
module('Integration | Component | school session types list', function(hooks) {
setupRenderingTest(hooks);
test('it renders', async function(assert) {
assert.expect(19);
let assessmentOption = EmberObject.create({
id: 1,
name: 'formative'
});
let sessionType1 = EmberObject.create({
id: 1,
school: 1,
title: 'not needed anymore',
assessment: false,
assessmentOption: resolve(null),
safeCalendarColor: htmlSafe('#ffffff'),
sessionCount: 2,
active: false,
});
let sessionType2 = EmberObject.create({
id: 2,
school: 1,
title: 'second',
assessment: true,
assessmentOption: resolve(assessmentOption),
safeCalendarColor: htmlSafe('#123456'),
sessionCount: 0,
active: true,
});
let sessionType3 = EmberObject.create({
id: 2,
school: 1,
title: 'first',
assessment: false,
assessmentOption: resolve(null),
safeCalendarColor: htmlSafe('#cccccc'),
sessionCount: 2,
active: true,
});
this.set('sessionTypes', [sessionType1, sessionType2, sessionType3]);
this.set('nothing', parseInt);
await render(hbs`{{school-session-types-list
sessionTypes=sessionTypes
manageSessionType=(action nothing)
}}`);
const rows = 'table tbody tr';
const firstSessionType = `${rows}:nth-of-type(1)`;
const firstTitle = `${firstSessionType} td:nth-of-type(1)`;
const firstSessionCount = `${firstSessionType} td:nth-of-type(2)`;
const firstAssessment = `${firstSessionType} td:nth-of-type(3) svg`;
const firstAssessmentOption = `${firstSessionType} td:nth-of-type(4)`;
const firstColorBox = `${firstSessionType} td:nth-of-type(6) .box`;
const secondSessionType = `${rows}:nth-of-type(2)`;
const secondTitle = `${secondSessionType} td:nth-of-type(1)`;
const secondSessionCount = `${secondSessionType} td:nth-of-type(2)`;
const secondAssessment = `${secondSessionType} td:nth-of-type(3) svg`;
const secondAssessmentOption = `${secondSessionType} td:nth-of-type(4)`;
const secondColorBox = `${secondSessionType} td:nth-of-type(6) .box`;
const thirdSessionType = `${rows}:nth-of-type(3)`;
const thirdTitle = `${thirdSessionType} td:nth-of-type(1)`;
const thirdSessionCount = `${thirdSessionType} td:nth-of-type(2)`;
const thirdAssessment = `${thirdSessionType} td:nth-of-type(3) svg`;
const thirdAssessmentOption = `${thirdSessionType} td:nth-of-type(4)`;
const thirdColorBox = `${thirdSessionType} td:nth-of-type(6) .box`;
assert.dom(firstTitle).hasText('first');
assert.dom(firstSessionCount).hasText('2');
assert.dom(firstAssessment).hasClass('no');
assert.dom(firstAssessment).hasClass('fa-ban');
assert.dom(firstAssessmentOption).hasText('');
assert.equal(find(firstColorBox).style.getPropertyValue('background-color').trim(), ('rgb(204, 204, 204)'));
assert.dom(secondTitle).hasText('second');
assert.dom(secondSessionCount).hasText('0');
assert.dom(secondAssessment).hasClass('yes');
assert.dom(secondAssessment).hasClass('fa-check');
assert.dom(secondAssessmentOption).hasText('formative');
assert.equal(find(secondColorBox).style.getPropertyValue('background-color').trim(), ('rgb(18, 52, 86)'));
<|fim▁hole|> assert.ok(find(thirdTitle).textContent.trim().startsWith('not needed anymore'));
assert.ok(find(thirdTitle).textContent.trim().endsWith('(inactive)'));
assert.dom(thirdSessionCount).hasText('2');
assert.dom(thirdAssessment).hasClass('no');
assert.dom(thirdAssessment).hasClass('fa-ban');
assert.dom(thirdAssessmentOption).hasText('');
assert.equal(find(thirdColorBox).style.getPropertyValue('background-color').trim(), ('rgb(255, 255, 255)'));
});
test('clicking edit fires action', async function(assert) {
assert.expect(1);
let sessionType = EmberObject.create({
id: 1,
school: 1,
title: 'first',
assessment: false,
assessmentOption: resolve(null),
calendarColor: '#fff'
});
this.set('sessionTypes', [sessionType]);
this.set('manageSessionType', sessionTypeId => {
assert.equal(sessionTypeId, 1);
});
await render(hbs`{{school-session-types-list
sessionTypes=sessionTypes
manageSessionType=(action manageSessionType)
}}`);
await settled();
const rows = 'table tbody tr';
const edit = `${rows}:nth-of-type(1) td:nth-of-type(7) .fa-edit`;
await click(edit);
});
test('clicking title fires action', async function(assert) {
assert.expect(1);
let sessionType = EmberObject.create({
id: 1,
school: 1,
title: 'first',
assessment: false,
assessmentOption: resolve(null),
calendarColor: '#fff'
});
this.set('sessionTypes', [sessionType]);
this.set('manageSessionType', sessionTypeId => {
assert.equal(sessionTypeId, 1);
});
await render(hbs`{{school-session-types-list
sessionTypes=sessionTypes
manageSessionType=(action manageSessionType)
}}`);
await settled();
const rows = 'table tbody tr';
const title = `${rows}:nth-of-type(1) td:nth-of-type(1) a`;
await click(title);
});
test('session types without sessions can be deleted', async function(assert) {
assert.expect(4);
let unlinkedSessionType = EmberObject.create({
id: 1,
school: 1,
title: 'unlinked',
active: true,
assessment: false,
assessmentOption: resolve(null),
calendarColor: '#fff',
sessionCount: 0,
deleteRecord(){
assert.ok(true, 'was deleted');
return resolve();
}
});
let linkedSessionType = EmberObject.create({
id: 1,
school: 1,
title: 'linked',
active: true,
assessment: false,
assessmentOption: resolve(null),
calendarColor: '#fff',
sessionCount: 5,
deleteRecord(){
assert.ok(true, 'was deleted');
return resolve();
}
});
this.set('sessionTypes', [linkedSessionType, unlinkedSessionType]);
this.set('nothing', parseInt);
await render(hbs`{{school-session-types-list
sessionTypes=sessionTypes
manageSessionType=(action nothing)
canDelete=true
}}`);
await settled();
const rows = 'table tbody tr';
const linkedTitle = `${rows}:nth-of-type(1) td:nth-of-type(1)`;
const unlinkedTitle = `${rows}:nth-of-type(2) td:nth-of-type(1)`;
const linkedTrash = `${rows}:nth-of-type(1) td:nth-of-type(7) .fa-trash.disabled`;
const unlinkedTrash = `${rows}:nth-of-type(2) td:nth-of-type(7) .fa-trash.enabled`;
assert.dom(linkedTitle).hasText('linked', 'linked is first');
assert.dom(unlinkedTitle).hasText('unlinked', 'unlinked is second');
assert.dom(linkedTrash).exists({ count: 1 }, 'linked has a disabled trash can');
assert.dom(unlinkedTrash).exists({ count: 1 }, 'unlinked has an enabled trash can');
});
test('clicking delete deletes the record', async function(assert) {
assert.expect(2);
let sessionType = EmberObject.create({
id: 1,
school: 1,
title: 'first',
assessment: false,
assessmentOption: resolve(null),
calendarColor: '#fff',
sessionCount: 0,
deleteRecord(){
assert.ok(true, 'was deleted');
},
save(){
assert.ok(true, 'was deleted');
return resolve();
},
});
this.set('sessionTypes', [sessionType]);
this.set('nothing', parseInt);
await render(hbs`{{school-session-types-list
sessionTypes=sessionTypes
manageSessionType=(action nothing)
canDelete=true
}}`);
await settled();
const rows = 'table tbody tr';
const trash = `${rows}:nth-of-type(1) td:nth-of-type(7) .fa-trash`;
await click(trash);
await settled();
});
});<|fim▁end|>
| |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export { validatePath } from "./validate-path"<|fim▁hole|>export {
applyTrailingSlashOption,
TrailingSlash,
} from "./apply-trailing-slash-option"<|fim▁end|>
|
export { createPath } from "./create-path"
export { ignorePath, IPathIgnoreOptions } from "./ignore-path"
export { watchDirectory } from "./watch-directory"
|
<|file_name|>mock-response.ts<|end_file_name|><|fim▁begin|>/* Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
/* eslint-env serviceworker, browser */
import {newResponse, Proxy, skipWaitingAndClaim} from "../proxy";
const CACHE = "MYCACHE";
// The response for `/quote.txt`.
function getEntries(): { [k: string]: Response } {
const body = [
"The great roe is a mythological beast with the head",
"of a lion and the body of a lion, though not the same",
"lion."<|fim▁hole|> const res = new Response(body, {
headers: {
"cache-control": "max-age=86400",
"content-type": "text/plain",
"date": new Date().toUTCString()
},
status: 200,
statusText: "OK"
});
return {
"/quote.txt": res
};
}
skipWaitingAndClaim(self);
// On "install", inject responses into cache.
self.addEventListener("install", (event: ExtendableEvent) => {
const entries = getEntries();
event.waitUntil(
caches.open(CACHE).then((cache) => {
return Promise.all(Object.keys(entries).reduce((acc: Array<Promise<void>>, url: string) => {
acc.push(cache.put(url, entries[url]));
return acc;
}, []));
})
);
});
self.addEventListener("fetch", (event: FetchEvent) => {
const proxy = new Proxy(CACHE);
event.respondWith(proxy.fetch(event.request));
});<|fim▁end|>
|
].join(" ");
|
<|file_name|>mif.py<|end_file_name|><|fim▁begin|>import json
import objects
from mifkit.util.mif_encoder import MifEncoder
from mifkit.util.case import keys_to_snake_case
from mifkit.util.case import to_capitalized_camel_case
def dump(mif_object, fp, **kwargs):
    """
    Convert this object into a JSON-encoded string and save it in a file.
    :param mif_object: Object to serialize.
    :type mif_object: Single MifObject-type object or list of MifObject-type objects.
    :param fp: Object to write the serialization to.
    :type fp: File-like object supporting .write() method.
    :param kwargs: Any options available to json.dump().
    :return: None (json.dump writes to fp and returns None).
    """
    # MifEncoder knows how to serialize MifObject instances to plain JSON.
    return json.dump(mif_object, fp, cls=MifEncoder, **kwargs)
def dumps(mif_object, **kwargs):
"""
Convert this object into a JSON-encoded string.
:param mif_object: Object to serialize.
:type mif_object: Single MifObject-type object or list of MifObject-type objects.
:param kwargs: Any options available to json.dumps().<|fim▁hole|> return json.dumps(mif_object, cls=MifEncoder, **kwargs)
def load(fp, **kwargs):
    """
    Convert content in a JSON-encoded string to a Mif object.
    :param fp: Object to deserialize from.
    :type fp: File-like object supporting .read() method.
    :param kwargs: Any options available to json.load().
    :return: Single MifObject-type object or list of MifObject-type objects.
    :raises ValueError: If the decoded JSON is not a list or dictionary.
    """
    return _to_mif_object(json.load(fp, **kwargs))
def loads(s, **kwargs):
    """
    Convert content in a JSON-encoded string to a Mif object.
    :param s: String to deserialize from.
    :type s: String.
    :param kwargs: Any options available to json.loads().
    :return: Single MifObject-type object or list of MifObject-type objects.
    :raises ValueError: If the decoded JSON is not a list or dictionary.
    """
    return _to_mif_object(json.loads(s, **kwargs))
def from_dict(obj):
    """
    Convert content in a list or dictionary to MifObject instances.
    :param obj: Python object to convert to MifObject type.
    :type obj: List or dictionary.
    :return: Single MifObject-type object or list of MifObject-type objects.
    :raises ValueError: If obj is not a list or dictionary.
    """
    return _to_mif_object(obj)
def _to_mif_object(obj):
"""
Convert a dictionary or list of a single or multiple MifObject objects.
:param obj: Object to convert.
:type obj: Dictionary or list.
:return: A single MifObject object or a list of MifObject objects.
"""
if isinstance(obj, list):
return [_dict_to_mif_object(i) for i in obj]
elif isinstance(obj, dict):
return [_dict_to_mif_object(obj)]
else:
raise ValueError('expecting list or dictionary as outermost structure')
def _dict_to_mif_object(obj):
"""
Convert a dictionary to a MifObject object based on its name.
:param obj: Object to convert to a MifObject object.
:type obj: Dictionary.
:return: MifObject with the content of obj.
"""
if len(obj) != 1:
raise ValueError('Top-level mif object must contain exactly one key')
key = obj.keys()[0]
value = obj[key]
if not isinstance(value, dict):
raise ValueError(key + ' must have a value that is a dictionary')
return getattr(objects, to_capitalized_camel_case(key))(**keys_to_snake_case(value))
class Mif(object):
"""
Legacy class. Don't use this. It's only here to prevent old scripts from breaking.
"""
def __init__(self, sample=None):
"""
Constructor.
:param sample: Samples to sample.
:type sample: Sample object or list of Sample objects.
"""
super(Mif, self).__init__()
self.sample = sample
def to_json(self, indent=None):
"""
Convert this object into a JSON-encoded string.
:param indent: Indent to apply to the json string.
:returns: JSON-encoded string with the content of this object.
"""
return json.dumps(self.sample) if indent is None else json.dumps(self.sample, indent=indent)<|fim▁end|>
|
"""
|
<|file_name|>RegionConstants.java<|end_file_name|><|fim▁begin|>package tencentcloud.constant;
/**
* @author fanwh
* @version v1.0
* @decription
* @create on 2017/11/10 16:09
*/
public class RegionConstants {
/**
* 北京
*/
public static final String PEKING = "ap-beijing";
/**
* 上海
*/
public static final String SHANGHAI = "ap-shanghai";
/**
* 香港
*/<|fim▁hole|> /**
* 多伦多
*/
public static final String TORONTO = "na-toronto";
/**
* 硅谷
*/
public static final String SILICON_VALLEY = "na-siliconvalley";
/**
* 新加坡
*/
public static final String SINGAPORE = "ap-singapore";
/**
* 上海金融
*/
public static final String SHANGHAI_FSI = "ap-shanghai-fsi";
/**
* 广州open专区
*/
public static final String GUANGZHOU_OPEN = "ap-guangzhou-open";
/**
* 深圳金融
*/
public static final String SHENZHEN_FSI = "ap-shenzhen-fsi";
}<|fim▁end|>
|
public static final String HONGKONG = "ap-hongkong";
|
<|file_name|>DownloadStateListener.java<|end_file_name|><|fim▁begin|>package com.seafile.seadroid2.transfer;
/**<|fim▁hole|> void onFileDownloadProgress(int taskID);
void onFileDownloaded(int taskID);
void onFileDownloadFailed(int taskID);
}<|fim▁end|>
|
* Download state listener
*
*/
public interface DownloadStateListener {
|
<|file_name|>apple_clang.py<|end_file_name|><|fim▁begin|># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import re
import shutil
import llnl.util.tty as tty
import llnl.util.lang
import spack.compiler
import spack.compilers.clang
import spack.util.executable
import spack.version
class AppleClang(spack.compilers.clang.Clang):
openmp_flag = "-Xpreprocessor -fopenmp"
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
ver = 'unknown'
match = re.search(
# Apple's LLVM compiler has its own versions, so suffix them.
r'^Apple (?:LLVM|clang) version ([^ )]+)',
output,
# Multi-line, since 'Apple clang' may not be on the first line
# in particular, when run as gcc, it seems to output
# "Configured with: --prefix=..." as the first line
re.M,
)
if match:
ver = match.group(match.lastindex)
return ver
@property
def cxx11_flag(self):
# Adapted from CMake's AppleClang-CXX rules
# Spack's AppleClang detection only valid from Xcode >= 4.6
if self.version < spack.version.ver('4.0.0'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"<|fim▁hole|> return "-std=c++11"
    @property
    def cxx14_flag(self):
        """Compiler flag selecting the C++14 standard.

        Raises UnsupportedCompilerFlag for Xcode releases predating
        C++14 support.
        """
        # Adapted from CMake's rules for AppleClang
        if self.version < spack.version.ver('5.1.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
            )
        elif self.version < spack.version.ver('6.1.0'):
            # Xcode 5.1 through 6.0 only understand the provisional '1y' name.
            return "-std=c++1y"
        return "-std=c++14"
    @property
    def cxx17_flag(self):
        """Compiler flag selecting the C++17 standard.

        Raises UnsupportedCompilerFlag for Xcode releases predating
        C++17 support.
        """
        # Adapted from CMake's rules for AppleClang
        if self.version < spack.version.ver('6.1.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
            )
        # Only the provisional '1z' spelling is emitted here.
        return "-std=c++1z"
def setup_custom_environment(self, pkg, env):
"""Set the DEVELOPER_DIR environment for the Xcode toolchain.
On macOS, not all buildsystems support querying CC and CXX for the
compilers to use and instead query the Xcode toolchain for what
compiler to run. This side-steps the spack wrappers. In order to inject
spack into this setup, we need to copy (a subset of) Xcode.app and
replace the compiler executables with symlinks to the spack wrapper.
Currently, the stage is used to store the Xcode.app copies. We then set
the 'DEVELOPER_DIR' environment variables to cause the xcrun and
related tools to use this Xcode.app.
"""
super(AppleClang, self).setup_custom_environment(pkg, env)
if not pkg.use_xcode:
# if we do it for all packages, we get into big troubles with MPI:
# filter_compilers(self) will use mockup XCode compilers on macOS
# with Clang. Those point to Spack's compiler wrappers and
# consequently render MPI non-functional outside of Spack.
return
# Use special XCode versions of compiler wrappers when using XCode
# Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
xcrun = spack.util.executable.Executable('xcrun')
xcode_clang = xcrun('-f', 'clang', output=str).strip()
xcode_clangpp = xcrun('-f', 'clang++', output=str).strip()
env.set('SPACK_CC', xcode_clang, force=True)
env.set('SPACK_CXX', xcode_clangpp, force=True)
xcode_select = spack.util.executable.Executable('xcode-select')
# Get the path of the active developer directory
real_root = xcode_select('--print-path', output=str).strip()
# The path name can be used to determine whether the full Xcode suite
# or just the command-line tools are installed
if real_root.endswith('Developer'):
# The full Xcode suite is installed
pass
else:
if real_root.endswith('CommandLineTools'):
# Only the command-line tools are installed
msg = 'It appears that you have the Xcode command-line tools '
msg += 'but not the full Xcode suite installed.\n'
else:
# Xcode is not installed
msg = 'It appears that you do not have Xcode installed.\n'
msg += 'In order to use Spack to build the requested application, '
msg += 'you need the full Xcode suite. It can be installed '
msg += 'through the App Store. Make sure you launch the '
msg += 'application and accept the license agreement.\n'
raise OSError(msg)
real_root = os.path.dirname(os.path.dirname(real_root))
developer_root = os.path.join(spack.stage.get_stage_root(),
'xcode-select',
self.name,
str(self.version))
xcode_link = os.path.join(developer_root, 'Xcode.app')
if not os.path.exists(developer_root):
tty.warn('Copying Xcode from %s to %s in order to add spack '
'wrappers to it. Please do not interrupt.'
% (real_root, developer_root))
# We need to make a new Xcode.app instance, but with symlinks to
# the spack wrappers for the compilers it ships. This is necessary
# because some projects insist on just asking xcrun and related
# tools where the compiler runs. These tools are very hard to trick
# as they do realpath and end up ignoring the symlinks in a
# "softer" tree of nothing but symlinks in the right places.
shutil.copytree(
real_root, developer_root, symlinks=True,
ignore=shutil.ignore_patterns(
'AppleTV*.platform', 'Watch*.platform', 'iPhone*.platform',
'Documentation', 'swift*'
))
real_dirs = [
'Toolchains/XcodeDefault.xctoolchain/usr/bin',
'usr/bin',
]
bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']
for real_dir in real_dirs:
dev_dir = os.path.join(developer_root,
'Contents',
'Developer',
real_dir)
for fname in os.listdir(dev_dir):
if fname in bins:
os.unlink(os.path.join(dev_dir, fname))
os.symlink(
os.path.join(spack.paths.build_env_path, 'cc'),
os.path.join(dev_dir, fname))
os.symlink(developer_root, xcode_link)
env.set('DEVELOPER_DIR', xcode_link)<|fim▁end|>
|
)
|
<|file_name|>cpu_timing.rs<|end_file_name|><|fim▁begin|>// This file is part of zinc64.
// Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved.
// Licensed under the GPLv3. See LICENSE file in the project root for full license text.
use std::cell::{Cell, RefCell};
use std::rc::Rc;
use zinc64_core::{Addressable, Cpu, IoPort, IrqLine, Pin, Ram, TickFn};
use zinc64_emu::cpu::Cpu6510;
/// Minimal `Addressable` implementation backed by plain RAM, used to
/// drive the CPU in isolation for the timing test below.
struct MockMemory {
    ram: Ram,
}
impl MockMemory {
    /// Wrap the given RAM as the CPU's entire address space.
    pub fn new(ram: Ram) -> Self {
        MockMemory { ram }
    }
}
impl Addressable for MockMemory {
    /// Read one byte; forwards directly to the underlying RAM.
    fn read(&self, address: u16) -> u8 {
        self.ram.read(address)
    }
    /// Write one byte; forwards directly to the underlying RAM.
    fn write(&mut self, address: u16, value: u8) {
        self.ram.write(address, value);
    }
}
/// Build a `Cpu6510` wired to 64 KiB of mock RAM with fresh BA, I/O
/// port, IRQ and NMI lines; no other peripherals are attached.
fn setup_cpu() -> Cpu6510 {
    let ba_line = Rc::new(RefCell::new(Pin::new_high()));
    let cpu_io_port = Rc::new(RefCell::new(IoPort::new(0x00, 0xff)));
    let cpu_irq = Rc::new(RefCell::new(IrqLine::new("irq")));
    let cpu_nmi = Rc::new(RefCell::new(IrqLine::new("nmi")));
    // 0x10000 bytes = the full 16-bit address space.
    let mem = Rc::new(RefCell::new(MockMemory::new(Ram::new(0x10000))));
    Cpu6510::new(mem, cpu_io_port, ba_line, cpu_irq, cpu_nmi)
}
// Based on 65xx Processor Data from http://www.romhacking.net/documents/318/
const OPCODE_TIMING: [u8; 256] = [
7, // 00 BRK #$ab
6, // 01 ORA ($ab,X)
0, // 02 HLT*
0, // 03 ASO* ($ab,X)
0, // 04 SKB* $ab
3, // 05 ORA $ab
5, // 06 ASL $ab
0, // 07 ASO* $ab
3, // 08 PHP
2, // 09 ORA #$ab
2, // 0A ASL A
0, // 0B ANC* #$ab
0, // 0C SKW* $abcd
4, // 0D ORA $abcd
6, // 0E ASL $abcd
0, // 0F ASO* $abcd
2, // 10 BPL nearlabel
5, // 11 ORA ($ab),Y
0, // 12 HLT*
0, // 13 ASO* ($ab),Y
0, // 14 SKB* $ab,X
4, // 15 ORA $ab,X
6, // 16 ASL $ab,X
0, // 17 ASO* $ab,X
2, // 18 CLC
4, // 19 ORA $abcd,Y
0, // 1A NOP*
0, // 1B ASO* $abcd,Y
0, // 1C SKW* $abcd,X
4, // 1D ORA $abcd,X
7, // 1E ASL $abcd,X
0, // 1F ASO* $abcd,X
6, // 20 JSR $abcd
6, // 21 AND ($ab,X)
0, // 22 HLT*
0, // 23 RLA* ($ab,X)
3, // 24 BIT $ab
3, // 25 AND $ab
5, // 26 ROL $ab
0, // 27 RLA* $ab
4, // 28 PLP
2, // 29 AND #$ab
2, // 2A ROL A
0, // 2B ANC* #$ab
4, // 2C BIT $abcd
4, // 2D AND $abcd
6, // 2E ROL $abcd
0, // 2F RLA* $abcd
2, // 30 BMI nearlabel
5, // 31 AND ($ab),Y
0, // 32 HLT*
0, // 33 RLA* ($ab),Y
0, // 34 SKB* $ab,X
4, // 35 AND $ab,X
6, // 36 ROL $ab,X
0, // 37 RLA* $ab,X
2, // 38 SEC
4, // 39 AND $abcd,Y
0, // 3A NOP*
0, // 3B RLA* $abcd,Y
0, // 3C SKW* $abcd,X
4, // 3D AND $abcd,X
7, // 3E ROL $abcd,X
0, // 3F RLA* $abcd,X
6, // 40 RTI
6, // 41 EOR ($ab,X)
0, // 42 HLT*
8, // 43 LSE* ($ab,X)
0, // 44 SKB* $ab
3, // 45 EOR $ab
5, // 46 LSR $ab
5, // 47 LSE* $ab
3, // 48 PHA
2, // 49 EOR #$ab
2, // 4A LSR A
2, // 4B ALR* #$ab
3, // 4C JMP $abcd
4, // 4D EOR $abcd
6, // 4E LSR $abcd
6, // 4F LSE* $abcd
2, // 50 BVC nearlabel
5, // 51 EOR ($ab),Y
0, // 52 HLT*
8, // 53 LSE* ($ab),Y
0, // 54 SKB* $ab,X
4, // 55 EOR $ab,X
6, // 56 LSR $ab,X
6, // 57 LSE* $ab,X
2, // 58 CLI
4, // 59 EOR $abcd,Y
0, // 5A NOP*
7, // 5B LSE* $abcd,Y
0, // 5C SKW* $abcd,X
4, // 5D EOR $abcd,X
7, // 5E LSR $abcd,X
7, // 5F LSE* $abcd,X
6, // 60 RTS
6, // 61 ADC ($ab,X)
0, // 62 HLT*
0, // 63 RRA* ($ab,X)
0, // 64 SKB* $ab
3, // 65 ADC $ab
5, // 66 ROR $ab
0, // 67 RRA* $ab
4, // 68 PLA
2, // 69 ADC #$ab
2, // 6A ROR A
0, // 6B ARR* #$ab
5, // 6C JMP ($abcd)
4, // 6D ADC $abcd
6, // 6E ROR $abcd
0, // 6F RRA* $abcd
2, // 70 BVS nearlabel
5, // 71 ADC ($ab),Y
0, // 72 HLT*
0, // 73 RRA* ($ab),Y
0, // 74 SKB* $ab,X
4, // 75 ADC $ab,X
6, // 76 ROR $ab,X
0, // 77 RRA* $ab,X
2, // 78 SEI
4, // 79 ADC $abcd,Y
0, // 7A NOP*
0, // 7B RRA* $abcd,Y
0, // 7C SKW* $abcd,X
4, // 7D ADC $abcd,X
7, // 7E ROR $abcd,X
0, // 7F RRA* $abcd,X
0, // 80 SKB* #$ab
6, // 81 STA ($ab,X)
0, // 82 SKB* #$ab
0, // 83 SAX* ($ab,X)
3, // 84 STY $ab
3, // 85 STA $ab
3, // 86 STX $ab
0, // 87 SAX* $ab
2, // 88 DEY
0, // 89 SKB* #$ab
2, // 8A TXA
2, // 8B ANE* #$ab
4, // 8C STY $abcd
4, // 8D STA $abcd
4, // 8E STX $abcd
0, // 8F SAX* $abcd
2, // 90 BCC nearlabel
6, // 91 STA ($ab),Y
0, // 92 HLT*
0, // 93 SHA* ($ab),Y
4, // 94 STY $ab,X
4, // 95 STA $ab,X
4, // 96 STX $ab,Y
0, // 97 SAX* $ab,Y
2, // 98 TYA
5, // 99 STA $abcd,Y
2, // 9A TXS
0, // 9B SHS* $abcd,Y
0, // 9C SHY* $abcd,X
5, // 9D STA $abcd,X
0, // 9E SHX* $abcd,Y
0, // 9F SHA* $abcd,Y
2, // A0 LDY #$ab
6, // A1 LDA ($ab,X)
2, // A2 LDX #$ab
6, // A3 LAX* ($ab,X)
3, // A4 LDY $ab
3, // A5 LDA $ab
3, // A6 LDX $ab
3, // A7 LAX* $ab
2, // A8 TAY
2, // A9 LDA #$ab
2, // AA TAX
2, // AB ANX* #$ab
4, // AC LDY $abcd
4, // AD LDA $abcd
4, // AE LDX $abcd
4, // AF LAX* $abcd
2, // B0 BCS nearlabel<|fim▁hole|> 5, // B1 LDA ($ab),Y
0, // B2 HLT*
5, // B3 LAX* ($ab),Y
4, // B4 LDY $ab,X
4, // B5 LDA $ab,X
4, // B6 LDX $ab,Y
4, // B7 LAX* $ab,Y
2, // B8 CLV
4, // B9 LDA $abcd,Y
2, // BA TSX
0, // BB LAS* $abcd,Y
4, // BC LDY $abcd,X
4, // BD LDA $abcd,X
4, // BE LDX $abcd,Y
4, // BF LAX* $abcd,Y
2, // C0 CPY #$ab
6, // C1 CMP ($ab,X)
0, // C2 SKB* #$ab
0, // C3 DCM* ($ab,X)
3, // C4 CPY $ab
3, // C5 CMP $ab
5, // C6 DEC $ab
0, // C7 DCM* $ab
2, // C8 INY
2, // C9 CMP #$ab
2, // CA DEX
2, // CB SBX* #$ab
4, // CC CPY $abcd
4, // CD CMP $abcd
6, // CE DEC $abcd
0, // CF DCM* $abcd
2, // D0 BNE nearlabel
5, // D1 CMP ($ab),Y
0, // D2 HLT*
0, // D3 DCM* ($ab),Y
0, // D4 SKB* $ab,X
4, // D5 CMP $ab,X
6, // D6 DEC $ab,X
0, // D7 DCM* $ab,X
2, // D8 CLD
4, // D9 CMP $abcd,Y
0, // DA NOP*
0, // DB DCM* $abcd,Y
0, // DC SKW* $abcd,X
4, // DD CMP $abcd,X
7, // DE DEC $abcd,X
0, // DF DCM* $abcd,X
2, // E0 CPX #$ab
6, // E1 SBC ($ab,X)
0, // E2 SKB* #$ab
0, // E3 INS* ($ab,X)
3, // E4 CPX $ab
3, // E5 SBC $ab
5, // E6 INC $ab
0, // E7 INS* $ab
2, // E8 INX
2, // E9 SBC #$ab
2, // EA NOP
0, // EB SBC* #$ab
4, // EC CPX $abcd
4, // ED SBC $abcd
6, // EE INC $abcd
0, // EF INS* $abcd
2, // F0 BEQ nearlabel
5, // F1 SBC ($ab),Y
0, // F2 HLT*
0, // F3 INS* ($ab),Y
0, // F4 SKB* $ab,X
4, // F5 SBC $ab,X
6, // F6 INC $ab,X
0, // F7 INS* $ab,X
2, // F8 SED
4, // F9 SBC $abcd,Y
0, // FA NOP*
0, // FB INS* $abcd,Y
0, // FC SKW* $abcd,X
4, // FD SBC $abcd,X
7, // FE INC $abcd,X
0, // FF INS* $abcd,X
];
#[test]
fn opcode_timing() {
let mut cpu = setup_cpu();
for opcode in 0..256 {
let cycles = OPCODE_TIMING[opcode];
if cycles > 0 {
let clock = Rc::new(Cell::new(0u8));
let clock_clone = clock.clone();
let tick_fn: TickFn = Rc::new(move || {
clock_clone.set(clock_clone.get().wrapping_add(1));
});
cpu.write(0x1000, opcode as u8);
cpu.write(0x1001, 0x00);
cpu.write(0x1002, 0x10);
cpu.set_pc(0x1000);
cpu.step(&tick_fn);
assert_eq!(
cycles,
clock.get(),
"opcode {:02x} timing failed",
opcode as u8
);
}
}
}<|fim▁end|>
| |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import argparse
import collections
from functools import wraps
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)<|fim▁hole|>
class CommandLine(object):
"""Create a command line interface for the application.
Can call any core method as action.
Careful: The function defintion order is reflected in the cli.
Can be reorder using the *weight* flag of the initializer.
'lighter' Arguments will go first
"""
arguments = collections.OrderedDict()
@classmethod
def handle(self, core, name):
"""Handle the command line arguments.
Returns true if the gui is to be shown, this is controlled
through the 'batch' argument."""
call_buckets = collections.defaultdict(list)
# Build the ArgumentParser
arg_parser = argparse.ArgumentParser(name)
for name, arg in self.arguments.iteritems():
arg_parser.add_argument(
"--{}".format(name),
**{key: val for key, val in filter(lambda e: e is not None, [
("nargs", len(arg.args)) if len(arg.args) > 0 else None,
("metavar", arg.args) if arg.action == "store" else None,
("type", arg.type) if arg.action == "store" else None,
("default", arg.default),
("action", arg.action),
("help", arg.help)
])}
)
call_buckets[arg.weight].append(arg)
# Add batch argument to suppress gui
arg_parser.add_argument("--batch", "-b", "--no-gui",
help="Run in batch mode (Don't show the gui)",
action="store_true",
default=sys.flags.interactive)
# Parse all arguments
args = arg_parser.parse_args()
# Check all actions
logger.debug(call_buckets)
call_order = sorted(call_buckets.keys())
for weight in call_order:
for arg in call_buckets[weight]:
params = getattr(args, arg.name.replace("-", "_"))
method = getattr(core, arg.method)
if params is not None and params != arg.default:
if isinstance(params, list):
method(*params)
else:
method()
return not args.batch
def __init__(self, name, *args, **flags):
"""The constructor for the CommandLine object.
Accepts the same flags as the add_argument function of the
ArgumentParser class.
The *weight* flag can be used to reorder the execution of
arguments. 'lighter' commands will go first."""
self.name = name
self.args = args
self.help = flags.get("help", "")
self.type = flags.get("type", str)
self.default = flags.get("default", None)
self.action = flags.get("action", "store")
self.weight = flags.get("weight", 0)
if self.name in CommandLine.arguments:
raise KeyError(self.name)
CommandLine.arguments[self.name] = self
    def __call__(self, func):
        """Decorator entry point: bind *func* to this argument.

        Uses the function's docstring as the argument help text when no
        explicit help was given, and remembers the function's name so
        handle() can look the method up on the core object later.
        """
        if self.help == "":
            self.help = func.__doc__
        self.method = func.__name__
        # Return a transparent wrapper so the decorated method keeps its
        # name/docstring and behaves exactly like the original.
        @wraps(func)
        def wrapper(instance, *args, **kwargs):
            return func(instance, *args, **kwargs)
        return wrapper
    def __str__(self):
        # Human-readable form, e.g. "--load -> open_file('path')".
        return "--{} -> {}('{}')".format(self.name, self.method, "', '".join(self.args))
    # repr mirrors str; these objects are mostly printed for debugging.
    __repr__ = __str__
| |
<|file_name|>bgc-learn.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import straight.plugin
import numpy as np
import pkg_resources
from os import path
from core import utils
from core import argparser
from core import log
from core import parser
def main():
## Parse arguments
ap = argparser.init_arg_parser()
options = ap.parse_args()
## Collect input gbks from folder
input_files = []
if not path.isdir(options["input_folder"]):
log.error("Specified folder didn't exist '%s'" % (options["input_folder"]))
sys.exit(1)
else:
for filename in os.listdir(options["input_folder"]):
filepath = path.join(options["input_folder"], filename)
if not path.isdir(filepath):
ext = path.splitext(filepath)[1][1:]
if ext in ["gbk"]:
input_files.append(filename)
## Initial check parameters
metadata = {}
if options["mode"] == "train":
## check and load metadata file
if not path.exists(options["training_metadata"]):
log.error("Specified file didn't exist '%s'" % (options["training_metadata"]))
sys.exit(1)
else:
metadata = parser.parse_training_metadata(options["training_metadata"])
options["single_values"] = [[]] * len(input_files)
options["train_set"] = []
options["test_set"] = []
# remove GBKs not listed in metadata
input_files[:] = [bgc for bgc in input_files if utils.get_bgc_name(bgc) in metadata["bgc"]]
# features
if "features" not in options:
if "features" not in metadata:
options["features"] = [{"name": plugin.name, "params": [], "subs": [sub for sub in plugin.features]} for plugin in utils.load_plugins("feature_extraction")]
else:
options["features"] = metadata["features"]
# algorithm mode (classification / regression)
if metadata["mode"] == "CLASSIFICATION":
options["algo_mode"] = "classification"
if "algorithm" not in options:
if "algorithm" not in metadata:
options["algorithm"] = {"name": "svm", "params": []}
else:
options["algorithm"] = metadata["algorithm"]
elif metadata["mode"] == "REGRESSION":
options["algo_mode"] = "regression"
if "algorithm" not in options:
if "algorithm" not in metadata:
options["algorithm"] = {"name": "linear_regression", "params": []}
else:
options["algorithm"] = metadata["algorithm"]
else:
log.error("Incorrect metadata file format '%s'" % (options["training_metadata"]))
sys.exit(1)
# single values (from right hand side of data column) & train/test set distribution
for i, fp in enumerate(input_files):
bgc_id = utils.get_bgc_name(fp)
if bgc_id in metadata["bgc"]:
idx_meta = metadata["bgc"].index(bgc_id)
options["single_values"][i] = metadata["single_values"][idx_meta]
if idx_meta in metadata["train_set"]:
options["train_set"].append(i)
if idx_meta in metadata["test_set"]:
options["test_set"].append(i)
else:
log.error("'%s' is not included in your metadata" % (bgc_id))
sys.exit(1)
# pair values for training set (from its own table from the metadata)
options["train_pair_values"] = [[None] * len(options["train_set"]) for _ in range(len(options["train_set"]))]
for i, idx1 in enumerate(options["train_set"]):
for j, idx2 in enumerate(options["train_set"]):
if len(metadata["train_pair_values"]) > i and len(metadata["train_pair_values"][i]) > j:
options["train_pair_values"][i][j] = metadata["train_pair_values"][i][j]
# pair values for test set (from its own table from the metadata)
options["test_pair_values"] = [[None] * len(options["test_set"]) for _ in range(len(options["test_set"]))]
for i, idx1 in enumerate(options["test_set"]):
for j, idx2 in enumerate(options["test_set"]):
if len(metadata["test_pair_values"]) > i and len(metadata["test_pair_values"][i]) > j:
options["test_pair_values"][i][j] = metadata["test_pair_values"][i][j]
if options["mode"] == "predict":
## check and load model file
print "..."
## further checks..
algo_type = utils.get_algo_type(options["algorithm"]["name"])
if algo_type not in ["classification", "regression"]:
log.error("Selected algorithm '%s' did not exist" % (algo["name"]))
sys.exit(1)
if options["algo_mode"] != algo_type:
log.error("Selected algorithm '%s' is for %s, but the provided data is for %s." % (options["algorithm"]["name"], algo_type, options["algo_mode"]))
sys.exit(1)
options["features_scope"] = ""
for idx, feature in enumerate(options["features"]):
for plugin in utils.load_plugins("feature_extraction"):
if plugin.name == feature["name"]:
if len(options["features_scope"]) > 0 and plugin.scope != options["features_scope"]:
log.error("You selected features of different scope ('%s:%s', '%s:%s'). Please select only combination of features with the same scope." % (feature["name"], plugin.scope, options["features"][idx - 1]["name"], options["features_scope"]))
sys.exit(1)
options["features_scope"] = plugin.scope
break
if len(feature["subs"]) < 1:
for plugin in utils.load_plugins("feature_extraction"):
if plugin.name == feature["name"]:
feature["subs"].extend(plugin.features)
break
for sub in feature["subs"]:
for plugin in utils.load_plugins("feature_extraction"):
if plugin.name == feature["name"]:
if sub not in plugin.features:
log.error("Feature unknown: '%s'" % sub)
sys.exit(1)
## Check output folder
if not options["output_folder"]:
options["output_folder"] = path.join(os.getcwd(), path.basename(options["input_folder"]))
if path.isdir(options["output_folder"]):
# output folder exist, probable disrupted job
if not options["continue"] and not options["overwrite"]:
log.error("Output folder '%s' exist. Previous run? use --continue to continue, or --overwrite to start over." % options["output_folder"])
sys.exit(1)
elif options["overwrite"]:
shutil.rmtree(options["output_folder"])
os.makedirs(options["output_folder"])
elif options["reset_preprocesses"]:
bgcjsonpath = path.join(options["output_folder"], "bgcjson")
if path.exists(bgcjsonpath):
shutil.rmtree(bgcjsonpath)
else:
os.makedirs(options["output_folder"])
## Parse gbks
## TODO: multi-threading?
log.info("Started preprocessing input files..")
utils.print_progress(0, len(input_files), prefix='Preprocessing input GBKs..', suffix='', decimals=1)
for i, filename in enumerate(input_files):
filepath = path.join(options["input_folder"], filename)
if not (path.exists(path.join(options["output_folder"], "bgcjson", "%s.bgcjson" % utils.get_bgc_name(filepath)))):
bgc = parser.parse_gbk(filepath)
if bgc is not None:<|fim▁hole|>
## Do feature extraction
# step 1: make folder structure & index file
feature_folder = utils.create_feature_folder(input_files, options["output_folder"])
# step 2: traverse FE modules and run algorithms, then save the results
feature_extraction_plugins = []
for plugin in utils.load_plugins("feature_extraction"):
if ("features" not in options) or (plugin.name in [feature["name"] for feature in options["features"]]):
feature_extraction_plugins.append(plugin)
# calculate features
options["feature_values"] = {}
if options["features_scope"] == "pair":
log.info("Started feature extraction for all BGC pairs..")
nrcomb = len(input_files) * (len(input_files) - 1) / 2
count = 0
utils.print_progress(0, nrcomb, prefix='Feature extraction..', suffix='', decimals=1)
for i, fn1 in enumerate(input_files):
for j, fn2 in enumerate(input_files):
if i < j:
bgc1 = parser.parse_bgcjson(path.join(options["output_folder"], "bgcjson", "%s.bgcjson" % utils.get_bgc_name(fn1)))
bgc2 = parser.parse_bgcjson(path.join(options["output_folder"], "bgcjson", "%s.bgcjson" % utils.get_bgc_name(fn2)))
for plugin in feature_extraction_plugins:
if plugin.name not in options["feature_values"]:
options["feature_values"][plugin.name] = {}
results = plugin.calculate(bgc1, bgc2)
options["feature_values"][plugin.name]["%d+%d" % (i, j)] = [float(result) for result in results]
count += 1
utils.print_progress(count, nrcomb, prefix='Feature extraction..', suffix='', decimals=1)
elif options["features_scope"] == "single":
log.info("Started feature extraction for all BGCs..")
count = 0
utils.print_progress(0, len(input_files), prefix='Feature extraction..', suffix='', decimals=1)
for i, fn in enumerate(input_files):
bgc = parser.parse_bgcjson(path.join(options["output_folder"], "bgcjson", "%s.bgcjson" % utils.get_bgc_name(fn)))
for plugin in feature_extraction_plugins:
if plugin.name not in options["feature_values"]:
options["feature_values"][plugin.name] = {}
results = plugin.calculate(bgc)
options["feature_values"][plugin.name]["%d" % (i)] = [float(result) for result in results]
count += 1
utils.print_progress(count, len(input_files), prefix='Feature extraction..', suffix='', decimals=1)
else:
log.error("Invalid features scope: '%s'" % options["features_scope"])
sys.exit(1)
## Load features & value matrix
features_rows = []
if options["features_scope"] == "pair":
for i, fn1 in enumerate(input_files):
for j, fn2 in enumerate(input_files):
if i < j:
features_rows.append([i, j])
elif options["features_scope"] == "single":
for i in xrange(0, len(input_files)):
features_rows.append([i])
else:
log.error("Invalid features scope: '%s'" % options["features_scope"])
sys.exit(1)
if "features_columns" not in options:
options["features_columns"] = []
for feature in options["features"]:
for sub in feature["subs"]:
options["features_columns"].append("%s.%s" % (feature["name"], sub))
features_matrix = {}
for row_ids in ["+".join([str(row_id) for row_id in row_ids]) for row_ids in features_rows]:
row = [None] * len(options["features_columns"])
for plugin in feature_extraction_plugins:
plugin_folder = path.join(feature_folder, plugin.name)
values = options["feature_values"][plugin.name][row_ids]
if (len(values) != len(plugin.features)):
# technically impossible to reach this, unless output from calculate != #of results expected
log.error("...")
sys.exit(1)
else:
for n, col in enumerate(plugin.features):
colname = ("%s.%s" % (plugin.name, col))
if colname in options["features_columns"]:
row[options["features_columns"].index(colname)] = values[n]
features_matrix[row_ids] = row
## Execute algorithms & save results
if options["mode"] == "train":
## Fetch feature & values training matrix
training_matrix = []
training_target = []
training_rownames = []
if options["features_scope"] == "pair":
for i, idx1 in enumerate(options["train_set"]):
for j, idx2 in enumerate(options["train_set"]):
if idx1 < idx2:
training_matrix.append(features_matrix["%d+%d" % (idx1, idx2)])
training_rownames.append("%s+%s" % (utils.get_bgc_name(input_files[idx1]), utils.get_bgc_name(input_files[idx2])))
if options["algo_mode"] == "classification":
class1 = options["single_values"][idx1].split(",")
class2 = options["single_values"][idx2].split(",")
training_target.append(int(len(set(class1) & set(class2)) > 0))
elif options["algo_mode"] == "regression":
training_target.append(float(options["train_pair_values"][i][j]))
elif options["features_scope"] == "single":
for idx in options["train_set"]:
training_matrix.append(features_matrix["%d" % (idx)])
training_rownames.append("%s" % (utils.get_bgc_name(input_files[idx1])))
training_target.append(options["single_values"][idx])
training_matrix = np.array(training_matrix)
training_target = np.array(training_target)
## Fetch feature & values testing matrix
testing_matrix = []
testing_target = []
testing_rownames = []
if options["features_scope"] == "pair":
for i, idx1 in enumerate(options["test_set"]):
for j, idx2 in enumerate(options["test_set"]):
if idx1 < idx2:
testing_matrix.append(features_matrix["%d+%d" % (idx1, idx2)])
testing_rownames.append("%s+%s" % (utils.get_bgc_name(input_files[idx1]), utils.get_bgc_name(input_files[idx2])))
if options["algo_mode"] == "classification":
class1 = options["single_values"][idx1].split(",")
class2 = options["single_values"][idx2].split(",")
testing_target.append(int(len(set(class1) & set(class2)) > 0))
elif options["algo_mode"] == "regression":
testing_target.append(float(options["test_pair_values"][i][j]))
elif options["features_scope"] == "single":
for idx in options["test_set"]:
testing_matrix.append(features_matrix["%d" % (idx)])
testing_rownames.append("%s" % (utils.get_bgc_name(input_files[idx1])))
testing_target.append(options["single_values"][idx])
testing_matrix = np.array(testing_matrix)
testing_target = np.array(testing_target)
## Load the training model
module = None
for plugin in utils.load_plugins(options["algo_mode"]):
if plugin.name == options["algorithm"]["name"]:
module = plugin
break
if module == None:
log.error("Failed to load module: '%s.%s'" % (options["algo_mode"], options["algorithm"]["name"]))
sys.exit(1)
else:
log.info("Training model...")
classifier = module.train(training_matrix, training_target, options["algorithm"]["params"])
# save model & its metadata to file
model_metadata = {
"mode": options["algo_mode"],
"algorithm": options["algorithm"],
"features": options["features"],
"columns": options["features_columns"],
"training_data_count": len(training_matrix),
"environment": {
"bgc-learn": utils.get_version(),
"scikit-learn": pkg_resources.get_distribution("scikit-learn").version,
"numpy": pkg_resources.get_distribution("numpy").version,
"scipy": pkg_resources.get_distribution("scipy").version,
}
}
save_name = utils.save_result_model(classifier, model_metadata, options["output_folder"])
# calculate accuracies & save summaries
result_training = ({}, [])
if len(training_matrix) > 0:
result_training = module.test(training_matrix, training_target, classifier)
utils.save_result_testing("training-%s" % (save_name), training_rownames, options["features_columns"], training_matrix, training_target, result_training, options["output_folder"])
result_testing = ({}, [])
if len(testing_matrix) > 0:
result_testing = module.test(testing_matrix, testing_target, classifier)
utils.save_result_testing("testing-%s" % (save_name), testing_rownames, options["features_columns"], testing_matrix, testing_target, result_testing, options["output_folder"])
elif options["mode"] == "predict":
print "..."
## Cleanup
log.info("Cleaning up..")
shutil.rmtree(feature_folder) # remove feature folder
## done
log.info("Analysis done. your result is available inside the folder '%s'." % options["output_folder"])
if __name__ == "__main__":
main()<|fim▁end|>
|
utils.save_bgcjson(bgc, options["output_folder"])
utils.print_progress(i + 1, len(input_files), prefix='Preprocessing input GBKs..', suffix='', decimals=1, bar_length=100)
log.info("Finished preprocessing input files..")
|
<|file_name|>test_mean.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.<|fim▁hole|>"""Arm(R) Ethos(TM)-N integration mean tests"""
import numpy as np
import tvm
from tvm import relay
from tvm.testing import requires_ethosn
from . import infrastructure as tei
def _get_model(shape, axis, keepdims, input_zp, input_sc, output_zp, output_sc, dtype):
    """Return a Relay graph computing a quantized mean.

    The input is cast up to int32 so the reduction cannot overflow,
    averaged over *axis*, then requantized back to *dtype* using the
    given zero points and scales.

    Args:
        shape: input tensor shape.
        axis: axes to reduce over.
        keepdims: keep reduced axes with size 1 when True.
        input_zp, input_sc: quantization zero point and scale of the input.
        output_zp, output_sc: quantization zero point and scale of the output.
        dtype: storage dtype of the input and of the requantized output.
    """
    data = relay.var("a", shape=shape, dtype=dtype)
    averaged = relay.mean(relay.op.cast(data, "int32"), axis, keepdims)
    return relay.qnn.op.requantize(
        averaged,
        input_scale=relay.const(input_sc, "float32"),
        input_zero_point=relay.const(input_zp, "int32"),
        output_scale=relay.const(output_sc, "float32"),
        output_zero_point=relay.const(output_zp, "int32"),
        out_dtype=dtype,
    )
@requires_ethosn
def test_mean():
    """Check a quantized mean over H/W matches between host and Ethos-N NPU."""
    # Shapes to exercise; axis [1, 2] assumes NHWC layout -- TODO confirm.
    trials = [(1, 7, 7, 2048), (1, 8, 8)]
    np.random.seed(0)
    for shape in trials:
        inputs = {
            "a": tvm.nd.array(np.random.randint(0, high=255, size=shape, dtype="uint8")),
        }
        outputs = []
        # Build and run the same model twice: reference (npu=False) and
        # offloaded (npu=True); tei.verify then compares the two results.
        for npu in [False, True]:
            model = _get_model(shape, [1, 2], True, 128, 0.0784314, 128, 0.0784314, "uint8")
            mod = tei.make_module(model, [])
            outputs.append(tei.build_and_run(mod, inputs, 1, {}, npu=npu))
        tei.verify(outputs, "uint8", 1)<|fim▁end|>
| |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from crispy_forms.helper import FormHelper
from crispy_forms.layout import *
from crispy_forms.bootstrap import *
from crispy_forms.layout import Layout, Submit, Reset, Div
from django import forms
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from silo.models import TolaUser
from django.contrib.auth.models import User
class RegistrationForm(UserChangeForm):
"""
Form for registering a new account.
"""
def __init__(self, *args, **kwargs):
user = kwargs.pop('initial')
super(RegistrationForm, self).__init__(*args, **kwargs)
del self.fields['password']
print user['username'].is_superuser
# allow country access change for now until we know how we will use this GWL 012617
# if they aren't a super user or User Admin don't let them change countries form field
# if 'User Admin' not in user['username'].groups.values_list('name', flat=True) and not user['username'].is_superuser:
# self.fields['country'].widget.attrs['disabled'] = "disabled"
self.fields['created'].widget.attrs['disabled'] = "disabled"
class Meta:
model = TolaUser
fields = '__all__'
<|fim▁hole|> helper.form_class = 'form-horizontal'
helper.label_class = 'col-sm-2'
helper.field_class = 'col-sm-6'
helper.form_error_title = 'Form Errors'
helper.error_text_inline = True
helper.help_text_inline = True
helper.html5_required = True
helper.layout = Layout(Fieldset('','title', 'name',
'country'),
Submit('submit', 'Submit', css_class='btn-default'),
Reset('reset', 'Reset', css_class='btn-warning'))
class NewUserRegistrationForm(UserCreationForm):
    """
    Form for registering a new account.

    Collects the basic identity fields for a Django auth User; the
    crispy-forms helper configures Bootstrap "horizontal" rendering.
    """
    class Meta:
        model = User
        fields = ['first_name', 'last_name','email','username']
    def __init__(self, *args, **kwargs):
        super(NewUserRegistrationForm, self).__init__(*args, **kwargs)
        # NOTE(review): `helper` is a local variable and is never assigned
        # to self.helper, so crispy-forms may never see this configuration
        # -- confirm whether `self.helper = helper` was intended.
        helper = FormHelper()
        helper.form_method = 'post'
        helper.form_class = 'form-horizontal'
        helper.label_class = 'col-sm-2'
        helper.field_class = 'col-sm-6'
        helper.form_error_title = 'Form Errors'
        helper.error_text_inline = True
        helper.help_text_inline = True
        helper.html5_required = True
        # form_tag=False: the surrounding template supplies the <form> tag.
        helper.form_tag = False
class NewTolaUserRegistrationForm(forms.ModelForm):
    """
    Form for registering a new account.

    Captures the TolaUser profile fields (title, country, privacy
    acceptance) that accompany the Django auth User record.
    """
    class Meta:
        model = TolaUser
        fields = ['title', 'country', 'privacy_disclaimer_accepted']
    def __init__(self, *args, **kwargs):
        super(NewTolaUserRegistrationForm, self).__init__(*args, **kwargs)
        # NOTE(review): `helper` is a local variable and is never assigned
        # to self.helper, so crispy-forms may never see this configuration
        # -- confirm whether `self.helper = helper` was intended.
        helper = FormHelper()
        helper.form_method = 'post'
        helper.form_class = 'form-horizontal'
        helper.label_class = 'col-sm-2'
        helper.field_class = 'col-sm-6'
        helper.form_error_title = 'Form Errors'
        helper.error_text_inline = True
        helper.help_text_inline = True
        helper.html5_required = True
        # form_tag=False: rendered inside a larger <form> in the template.
        helper.form_tag = False
        # Group the profile fields and the privacy checkbox separately.
        helper.layout = Layout(
            Fieldset('Information','title', 'country'),
            Fieldset('Privacy Statement','privacy_disclaimer_accepted',),
        )
|
helper = FormHelper()
helper.form_method = 'post'
|
<|file_name|>server.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import logging
import sys
import os
import signal
import conf
import core
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocolAcceleratedFactory
from thrift.server import TServer
from rpc import RndNodeApi
logger = logging.getLogger(__name__)
class RndProcessHandler(object):
    def runTask(self, rtc):
        """Launch the task described by *rtc* (run-task Thrift struct).

        Delegates to the process manager; the debug lines bracket the
        call so slow launches are visible in the log.
        """
        logger.debug("starting core.ProcessMgr.runProcess(rtc): %s", rtc.taskId)
        core.ProcessMgr.runProcess(rtc)
        logger.debug("finished core.ProcessMgr.runProcess(rtc): %s", rtc.taskId)
    def killRunningTask(self, procId, reason):
        """Kill the running task identified by *procId*, recording *reason*."""
        core.ProcessMgr.killRunningTask(procId, reason)
    def getRunningTasks(self):
        """Return the currently running tasks from the process manager."""
        logger.debug("starting core.ProcessMgr.getRunningTasks()")
        tasks = core.ProcessMgr.getRunningTasks()
        logger.debug("finished core.ProcessMgr.getRunningTasks()")
        return tasks
    def reboot(self, now=False):
        # Request a host reboot; now=False presumably defers until running
        # tasks complete -- TODO confirm against core.ProcessMgr.reboot.
        core.ProcessMgr.reboot(now)<|fim▁hole|>
ping.isReboot = core.ProcessMgr.isReboot
if withTasks:
ping.tasks = self.getRunningTasks()
return ping
def get_server(api, handler, port, **kwargs):
    """Build a threaded Thrift server exposing *api* on TCP *port*.

    *handler* implements the API's methods. Optional keyword overrides:
    'transport' and 'protocol' supply custom Thrift factories; the
    defaults are framed transport and the accelerated binary protocol.
    """
    transport_factory = kwargs.get('transport') or TTransport.TFramedTransportFactory()
    protocol_factory = kwargs.get('protocol') or TBinaryProtocolAcceleratedFactory()
    server = TServer.TThreadPoolServer(
        api.Processor(handler),
        TSocket.TServerSocket(port=port),
        transport_factory,
        protocol_factory)
    # Fixed-size worker pool; eight concurrent RPCs.
    server.setNumThreads(8)
    return server
def exit_handler(*args):
    """SIGTERM handler: shut down the process manager, then exit at once.

    *args* absorbs the (signum, frame) pair the signal module passes in.
    """
    logger.info("Caught SIGTERM. Shutting down Process Manager...")
    core.ProcessMgr.shutdown()
    logger.info("Process Manager finished shutting down")
    # os._exit skips interpreter cleanup (atexit hooks, thread unwinding)
    # -- intentional: terminate immediately after the manager is down.
    os._exit(0)
# Register the shutdown handler for service-manager initiated stops.
signal.signal(signal.SIGTERM, exit_handler)
def start():
    """Start the render node daemon and serve Thrift RPCs until stopped.

    Blocks in server.serve(); Ctrl-C (KeyboardInterrupt) runs the same
    shutdown path as SIGTERM before exiting.
    """
    # Fixed typo in the log message: "Staring" -> "Starting".
    logger.info("Starting Render Node Daemon on TCP port %d" % conf.NETWORK_PORT)
    server = get_server(RndNodeApi, RndProcessHandler(), conf.NETWORK_PORT)
    try:
        server.serve()
    except KeyboardInterrupt:
        exit_handler()
        sys.exit(0)
|
def pingPong(self, withTasks=False):
|
<|file_name|>gl_context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use super::webgl_thread::{GLState, WebGLImpl};
use canvas_traits::webgl::{
GLContextAttributes, GLFormats, GLLimits, WebGLCommand, WebGLCommandBacktrace, WebGLVersion,
};
use euclid::default::Size2D;
use offscreen_gl_context::{<|fim▁hole|>use offscreen_gl_context::{GLFormats as RawGLFormats, GLLimits as RawGLLimits, GLVersion};
use offscreen_gl_context::{NativeGLContext, NativeGLContextHandle, NativeGLContextMethods};
use offscreen_gl_context::{OSMesaContext, OSMesaContextHandle};
use sparkle::gl;
/// A `GLContextDispatcher` that can also be duplicated into a fresh
/// boxed dispatcher. `GLContextFactory` clones the dispatcher for each
/// shared context it creates.
pub trait CloneableDispatcher: GLContextDispatcher {
    /// Returns a boxed duplicate of this dispatcher.
    fn clone(&self) -> Box<dyn GLContextDispatcher>;
}
/// The GLContextFactory is used to create shared GL contexts with the main thread GL context.
/// Currently, shared textures are used to render WebGL textures into the WR compositor.
/// In order to create a shared context, the GLContextFactory stores the handle of the main GL context.
pub enum GLContextFactory {
    /// Platform-native backend: the main context handle, an optional
    /// main-thread dispatcher (only kept on Windows), and the GL API type.
    Native(
        NativeGLContextHandle,
        Option<Box<dyn CloneableDispatcher + Send>>,
        gl::GlType,
    ),
    /// Software OSMesa backend, identified by its context handle.
    OSMesa(OSMesaContextHandle),
}
impl GLContextFactory {
    /// Creates a new GLContextFactory that uses the currently bound GL context to create shared contexts.
    /// Returns `None` when no native GL context is current on this thread.
    pub fn current_native_handle(
        dispatcher: Box<dyn CloneableDispatcher + Send>,
        api_type: gl::GlType,
    ) -> Option<GLContextFactory> {
        let dispatcher = if cfg!(target_os = "windows") {
            // Used to dispatch functions from the GLContext thread to the main thread's
            // event loop. Required to allow WGL GLContext sharing in Windows.
            Some(dispatcher)
        } else {
            None
        };
        // FIXME(emilio): This assumes a single GL backend per platform which is
        // not true on Linux, we probably need a third `Egl` variant or abstract
        // it a bit more...
        NativeGLContext::current_handle()
            .map(|handle| GLContextFactory::Native(handle, dispatcher, api_type))
    }
    /// Creates a new GLContextFactory that uses the currently bound OSMesa context to create shared contexts.
    pub fn current_osmesa_handle() -> Option<GLContextFactory> {
        OSMesaContext::current_handle().map(GLContextFactory::OSMesa)
    }
    /// Creates a new shared GLContext with the main GLContext. Errors are
    /// static strings coming from the `offscreen_gl_context` crate.
    pub fn new_shared_context(
        &self,
        webgl_version: WebGLVersion,
        size: Size2D<u32>,
        attributes: GLContextAttributes,
    ) -> Result<GLContextWrapper, &'static str> {
        let attributes = map_attrs(attributes);
        Ok(match *self {
            GLContextFactory::Native(ref handle, ref dispatcher, ref api_type) => {
                GLContextWrapper::Native(GLContext::new_shared_with_dispatcher(
                    // FIXME(nox): Why are those i32 values?
                    size.to_i32(),
                    attributes,
                    ColorAttachmentType::Texture,
                    *api_type,
                    Self::gl_version(webgl_version),
                    Some(handle),
                    // Each shared context gets its own clone of the dispatcher.
                    dispatcher.as_ref().map(|d| (**d).clone()),
                )?)
            },
            GLContextFactory::OSMesa(ref handle) => {
                GLContextWrapper::OSMesa(GLContext::new_shared_with_dispatcher(
                    // FIXME(nox): Why are those i32 values?
                    size.to_i32(),
                    attributes,
                    ColorAttachmentType::Texture,
                    gl::GlType::Gl,
                    Self::gl_version(webgl_version),
                    Some(handle),
                    None,
                )?)
            },
        })
    }
    /// Creates a new non-shared GLContext (no main-context handle and no
    /// dispatcher are passed to the backend).
    pub fn new_context(
        &self,
        webgl_version: WebGLVersion,
        size: Size2D<u32>,
        attributes: GLContextAttributes,
    ) -> Result<GLContextWrapper, &'static str> {
        let attributes = map_attrs(attributes);
        Ok(match *self {
            GLContextFactory::Native(_, _, ref api_type) => {
                GLContextWrapper::Native(GLContext::new_shared_with_dispatcher(
                    // FIXME(nox): Why are those i32 values?
                    size.to_i32(),
                    attributes,
                    ColorAttachmentType::Texture,
                    *api_type,
                    Self::gl_version(webgl_version),
                    None,
                    None,
                )?)
            },
            GLContextFactory::OSMesa(_) => {
                GLContextWrapper::OSMesa(GLContext::new_shared_with_dispatcher(
                    // FIXME(nox): Why are those i32 values?
                    size.to_i32(),
                    attributes,
                    ColorAttachmentType::Texture,
                    gl::GlType::Gl,
                    Self::gl_version(webgl_version),
                    None,
                    None,
                )?)
            },
        })
    }
    /// Maps a WebGL version to the OpenGL major version requested from
    /// the backend (WebGL 1 -> GL 2, WebGL 2 -> GL 3).
    fn gl_version(webgl_version: WebGLVersion) -> GLVersion {
        match webgl_version {
            WebGLVersion::WebGL1 => GLVersion::Major(2),
            WebGLVersion::WebGL2 => GLVersion::Major(3),
        }
    }
}
/// GLContextWrapper used to abstract NativeGLContext and OSMesaContext types
pub enum GLContextWrapper {
    /// Hardware-accelerated, platform-native context.
    Native(GLContext<NativeGLContext>),
    /// Software (OSMesa) context.
    OSMesa(GLContext<OSMesaContext>),
}
impl GLContextWrapper {
    /// Makes this context current on the calling thread.
    /// Panics if the underlying backend call fails.
    pub fn make_current(&self) {
        match *self {
            GLContextWrapper::Native(ref ctx) => {
                ctx.make_current().unwrap();
            },
            GLContextWrapper::OSMesa(ref ctx) => {
                ctx.make_current().unwrap();
            },
        }
    }
    /// Executes one WebGL command against this context.
    /// `use_apple_vertex_array` only applies to the native backend; the
    /// OSMesa path always passes `false`.
    pub fn apply_command(
        &self,
        cmd: WebGLCommand,
        use_apple_vertex_array: bool,
        backtrace: WebGLCommandBacktrace,
        state: &mut GLState,
    ) {
        match *self {
            GLContextWrapper::Native(ref ctx) => {
                WebGLImpl::apply(ctx, state, use_apple_vertex_array, cmd, backtrace);
            },
            GLContextWrapper::OSMesa(ref ctx) => {
                WebGLImpl::apply(ctx, state, false, cmd, backtrace);
            },
        }
    }
    /// Returns the GL function table for this context.
    pub fn gl(&self) -> &gl::Gl {
        match *self {
            GLContextWrapper::Native(ref ctx) => ctx.gl(),
            GLContextWrapper::OSMesa(ref ctx) => ctx.gl(),
        }
    }
    /// Returns (draw buffer size, bound texture id, limits, formats).
    /// Panics if the context has no draw buffer or no bound texture.
    pub fn get_info(&self) -> (Size2D<i32>, u32, GLLimits, GLFormats) {
        match *self {
            GLContextWrapper::Native(ref ctx) => {
                let (real_size, texture_id) = {
                    let draw_buffer = ctx.borrow_draw_buffer().unwrap();
                    (
                        draw_buffer.size(),
                        draw_buffer.get_bound_texture_id().unwrap(),
                    )
                };
                let limits = ctx.borrow_limits().clone();
                let formats = map_formats(ctx.borrow_formats());
                (real_size, texture_id, map_limits(limits), formats)
            },
            GLContextWrapper::OSMesa(ref ctx) => {
                let (real_size, texture_id) = {
                    let draw_buffer = ctx.borrow_draw_buffer().unwrap();
                    (
                        draw_buffer.size(),
                        draw_buffer.get_bound_texture_id().unwrap(),
                    )
                };
                let limits = ctx.borrow_limits().clone();
                let formats = map_formats(ctx.borrow_formats());
                (real_size, texture_id, map_limits(limits), formats)
            },
        }
    }
    /// Resizes the context's draw buffer, returning the new buffer on
    /// success or a static error string from the backend.
    pub fn resize(&mut self, size: Size2D<u32>) -> Result<DrawBuffer, &'static str> {
        match *self {
            GLContextWrapper::Native(ref mut ctx) => {
                // FIXME(nox): Why are those i32 values?
                ctx.resize(size.to_i32())
            },
            GLContextWrapper::OSMesa(ref mut ctx) => {
                // FIXME(nox): Why are those i32 values?
                ctx.resize(size.to_i32())
            },
        }
    }
}
/// Convert the `offscreen_gl_context` limits struct into the
/// `canvas_traits` `GLLimits` wire type, copying it field by field.
fn map_limits(limits: RawGLLimits) -> GLLimits {
    GLLimits {
        max_tex_size: limits.max_tex_size,
        max_cube_map_tex_size: limits.max_cube_map_tex_size,
        max_renderbuffer_size: limits.max_renderbuffer_size,
        max_vertex_attribs: limits.max_vertex_attribs,
        max_vertex_texture_image_units: limits.max_vertex_texture_image_units,
        max_vertex_uniform_vectors: limits.max_vertex_uniform_vectors,
        max_texture_image_units: limits.max_texture_image_units,
        max_combined_texture_image_units: limits.max_combined_texture_image_units,
        max_fragment_uniform_vectors: limits.max_fragment_uniform_vectors,
        max_varying_vectors: limits.max_varying_vectors,
    }
}
/// Translate script-side `GLContextAttributes` into the raw
/// `offscreen_gl_context` attribute struct (identical fields, new type).
pub fn map_attrs(attrs: GLContextAttributes) -> RawGLContextAttributes {
    RawGLContextAttributes {
        antialias: attrs.antialias,
        alpha: attrs.alpha,
        depth: attrs.depth,
        stencil: attrs.stencil,
        preserve_drawing_buffer: attrs.preserve_drawing_buffer,
        premultiplied_alpha: attrs.premultiplied_alpha,
    }
}
/// Inverse of `map_attrs`: translate the raw `offscreen_gl_context`
/// attributes back into the script-side `GLContextAttributes` type.
pub fn map_attrs_to_script_attrs(attrs: RawGLContextAttributes) -> GLContextAttributes {
    GLContextAttributes {
        antialias: attrs.antialias,
        alpha: attrs.alpha,
        depth: attrs.depth,
        stencil: attrs.stencil,
        preserve_drawing_buffer: attrs.preserve_drawing_buffer,
        premultiplied_alpha: attrs.premultiplied_alpha,
    }
}
/// Extract the texture format/type pair from the raw backend formats
/// into the `canvas_traits` `GLFormats` wire type.
fn map_formats(formats: &RawGLFormats) -> GLFormats {
    GLFormats {
        texture_format: formats.texture,
        texture_type: formats.texture_type,
    }
}
|
ColorAttachmentType, DrawBuffer, GLContext, GLContextAttributes as RawGLContextAttributes,
GLContextDispatcher,
};
|
<|file_name|>getLoudMusicComp.py<|end_file_name|><|fim▁begin|>import pyspark
import operator
import sys
#311 call 2010 to present csv
#0 Unique Key,Created Date,Closed Date,Agency,Agency Name,
#5 Complaint Type,Descriptor,Location Type,Incident Zip,Incident Address,
#10 Street Name,Cross Street 1,Cross Street 2,Intersection Street 1,
#14 Intersection Street 2,Address Type,City,Landmark,Facility Type,Status,
#20 Due Date,Resolution Description,Resolution Action Updated Date,<|fim▁hole|>#32 School Code,School Phone Number,School Address,School City,School State,
#37 School Zip,School Not Found,School or Citywide Complaint,Vehicle Type,
#41 Taxi Company Borough,Taxi Pick Up Location,Bridge Highway Name,
#44 Bridge Highway Direction,Road Ramp,Bridge Highway Segment,Garage Lot Name,
#48 Ferry Direction,Ferry Terminal Name,Latitude,Longitude,Location
def mapToLots(records):
    """Map 311 CSV rows to (tax lot, 1) pairs for loud-music complaints.

    Runs once per Spark partition: *records* is an iterator of raw CSV
    lines.  Rows whose complaint type contains NOISE and whose
    descriptor contains LOUD MUSIC/PARTY are located in an rtree
    spatial index over PLUTO tax lots; one (lot, 1) pair is yielded
    for every lot intersecting the complaint's coordinates.
    """
    # Local imports so they resolve on the Spark workers, not the driver.
    import rtree
    import csv
    import os
    # Make the shipped rtree index files readable/writable before rtree
    # opens them (0777 is the Python 2 octal literal).
    os.chmod('plutoindex.idx',0777)
    os.chmod('plutoindex.dat',0777)
    file_index = rtree.index.Rtree('plutoindex')
    for record in records:
        list_record=[]
        # csv.reader handles quoted commas inside fields; each record is
        # a single line, so this loop runs at most once.
        for line in csv.reader([record.encode('utf-8')]):
            list_record = line
        # Rows too short to contain the coordinate columns (25/26): skip.
        if len(list_record) < 27:
            continue
        compType = list_record[5].upper()
        descriptor = list_record[6].upper()
        # Keep only noise complaints about loud music/parties.
        if compType.count('NOISE') < 1 or descriptor.count('LOUD MUSIC/PARTY') < 1:
            continue
        # Columns 25/26: X/Y State Plane coordinates; may be empty strings.
        xcoord = list_record[25].strip()
        ycoord = list_record[26].strip()
        if all((xcoord,ycoord)):
            #check intersection
            xcoord = int(xcoord)
            ycoord = int(ycoord)
            # Point query against the lot index at the complaint location.
            for lot in file_index.intersection((xcoord,ycoord),objects = True):
                yield (lot.object,1)
def mapResUnits(pairs):
    """Normalise complaint counts by residential units.

    *pairs* is an iterator of (lot_id, complaint_count) tuples.  Each
    lot is looked up in the pickled 'plutodict' table, whose entries
    are indexed as [0] = residential units, [1] = property value.
    Yields (property_value, complaints_per_residential_unit); lots with
    no residential units are dropped.
    """
    import pickle
    with open('plutodict', 'rb') as handle:
        lot_table = pickle.load(handle)
    for lot_id, complaint_count in pairs:
        entry = lot_table[lot_id]
        units = entry[0]
        # Cannot normalise by zero (or negative) residential units.
        if units < 1:
            continue
        # float() keeps the division true division under Python 2.
        yield (entry[1], complaint_count / float(units))
if __name__=='__main__':
    # Usage: spark-submit getLoudMusicComp.py <input files> <output path>
    if len(sys.argv)<3:
        print "Usage: <input files> <output path>"
        sys.exit(-1)
    sc = pyspark.SparkContext()
    calls311 = sc.textFile(sys.argv[1])
    # Pipeline: rows -> (lot, 1) -> summed counts per lot ->
    # (property value, complaints per residential unit).
    output = calls311.mapPartitions(mapToLots).reduceByKey(operator.add).\
        mapPartitions(mapResUnits)
    output.saveAsTextFile(sys.argv[-1])<|fim▁end|>
|
#23 Community Board,Borough,X Coordinate (State Plane),Y Coordinate (State Plane),
#27 Park Facility Name,Park Borough,School Name,School Number,School Region,
|
<|file_name|>practice50.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python<|fim▁hole|>import random
print random.uniform(10, 30)<|fim▁end|>
|
# -*- coding: UTF-8 -*-
|
<|file_name|>_pcolormesh.py<|end_file_name|><|fim▁begin|>__author__ = 'olga'
import numpy as np
from prettyplotlib.colors import blue_red, blues_r, reds
from prettyplotlib.utils import remove_chartjunk, maybe_get_fig_ax
def pcolormesh(*args, **kwargs):
"""
Use for large datasets<|fim▁hole|> - yticklables, which will put y tick labels exactly aligned in the center
of the heatmap block
- xticklabels_rotation, which can be either 'horizontal' or 'vertical'
depending on how you want the xticklabels rotated. The default is
'horizontal', but if you have xticklabels that are longer, you may want
to do 'vertical' so they don't overlap.
- yticklabels_rotation, which can also be either 'horizontal' or
'vertical'. The default is 'horizontal' and in most cases,
that's what you'll want to stick with. But the option is there if you
want.
- center_value, which will be the centered value for a divergent
colormap, for example if you have data above and below zero, but you want
the white part of the colormap to be equal to 10 rather than 0,
then specify 'center_value=10'.
"""
# Deal with arguments in kwargs that should be there, or need to be taken
# out
fig, ax, args, kwargs = maybe_get_fig_ax(*args, **kwargs)
# If x and y axis are passed in arguments, gets correct data
# Ticks will work with x and y data, although it would be pointless to use
# both x/y and custom ticks
if len(args) == 3:
x = args[0]
y = args[1]
data = args[2]
elif len(args) == 1:
data = args[0]
kwargs.setdefault('vmax', data.max())
kwargs.setdefault('vmin', data.min())
center_value = kwargs.pop('center_value', 0)
# If
divergent_data = False
if kwargs['vmax'] > 0 and kwargs['vmin'] < 0:
divergent_data = True
kwargs['vmax'] += center_value
kwargs['vmin'] += center_value
# If we have both negative and positive values, use a divergent colormap
if 'cmap' not in kwargs:
# Check if this is divergent
if divergent_data:
kwargs['cmap'] = blue_red
elif kwargs['vmax'] <= 0:
kwargs['cmap'] = blues_r
elif kwargs['vmax'] > 0:
kwargs['cmap'] = reds
if 'xticklabels' in kwargs:
xticklabels = kwargs['xticklabels']
kwargs.pop('xticklabels')
else:
xticklabels = None
if 'yticklabels' in kwargs:
yticklabels = kwargs['yticklabels']
kwargs.pop('yticklabels')
else:
yticklabels = None
if 'xticklabels_rotation' in kwargs:
xticklabels_rotation = kwargs['xticklabels_rotation']
kwargs.pop('xticklabels_rotation')
else:
xticklabels_rotation = 'horizontal'
if 'yticklabels_rotation' in kwargs:
yticklabels_rotation = kwargs['yticklabels_rotation']
kwargs.pop('yticklabels_rotation')
else:
yticklabels_rotation = 'horizontal'
ax_colorbar = kwargs.pop('ax_colorbar', None)
orientation_colorbar = kwargs.pop('orientation_colorbar', 'vertical')
p = ax.pcolormesh(*args, **kwargs)
# ax.set_ylim(0, x.shape[0])
# ax.set_xlim(0, x.shape[1])
# Get rid of ALL axes
remove_chartjunk(ax, ['top', 'right', 'left', 'bottom'])
if xticklabels is not None and any(xticklabels):
if len(args) == 1:
xticks = np.arange(0.5, data.shape[1] + 0.5)
else:
xticks = []
for i in np.arange(len(x) - 1):
half = float(x[i + 1] - x[i]) / 2. + x[i]
xticks.append(half)
xticks = np.array(xticks)
ax.set_xticks(xticks)
ax.set_xticklabels(xticklabels, rotation=xticklabels_rotation)
if yticklabels is not None and any(yticklabels):
if len(args) == 1:
yticks = np.arange(0.5, data.shape[1] + 0.5)
else:
yticks = []
for i in np.arange(len(y) - 1):
half = float(y[i + 1] - y[i]) / 2. + y[i]
yticks.append(half)
yticks = np.array(yticks)
ax.set_yticks(yticks)
ax.set_yticklabels(yticklabels, rotation=yticklabels_rotation)
# Show the scale of the colorbar
cbar = fig.colorbar(p, cax=ax_colorbar, use_gridspec=True,
orientation=orientation_colorbar)
return p, cbar<|fim▁end|>
|
Non-traditional `pcolormesh` kwargs are:
- xticklabels, which will put x tick labels exactly in the center of the
heatmap block
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>import SerialNumber from './SerialNumber.jsx';
export default SerialNumber;<|fim▁end|>
| |
<|file_name|>MenuListComposition.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import Button from '@material-ui/core/Button';
import ClickAwayListener from '@material-ui/core/ClickAwayListener';
import Grow from '@material-ui/core/Grow';
import Paper from '@material-ui/core/Paper';
import Popper from '@material-ui/core/Popper';
import MenuItem from '@material-ui/core/MenuItem';
import MenuList from '@material-ui/core/MenuList';
import Stack from '@material-ui/core/Stack';
export default function MenuListComposition() {
const [open, setOpen] = React.useState(false);
const anchorRef = React.useRef<HTMLButtonElement>(null);
const handleToggle = () => {
setOpen((prevOpen) => !prevOpen);
};
const handleClose = (event: Event | React.SyntheticEvent) => {
if (
anchorRef.current &&
anchorRef.current.contains(event.target as HTMLElement)
) {
return;
}
setOpen(false);
};
function handleListKeyDown(event: React.KeyboardEvent) {
if (event.key === 'Tab') {
event.preventDefault();
setOpen(false);
} else if (event.key === 'Escape') {
setOpen(false);
}
}
// return focus to the button when we transitioned from !open -> open
const prevOpen = React.useRef(open);
React.useEffect(() => {
if (prevOpen.current === true && open === false) {
anchorRef.current!.focus();
}
prevOpen.current = open;
}, [open]);
return (
<Stack direction="row" spacing={2}>
<Paper>
<MenuList>
<MenuItem>Profile</MenuItem>
<MenuItem>My account</MenuItem>
<MenuItem>Logout</MenuItem>
</MenuList>
</Paper>
<div>
<Button
ref={anchorRef}
id="composition-button"
aria-controls={open ? 'composition-menu' : undefined}
aria-expanded={open ? 'true' : undefined}
aria-haspopup="true"
onClick={handleToggle}
>
Dashboard
</Button>
<Popper
open={open}
anchorEl={anchorRef.current}
role={undefined}
placement="bottom-start"
transition
disablePortal
>
{({ TransitionProps, placement }) => (
<Grow
{...TransitionProps}
style={{
transformOrigin:
placement === 'bottom-start' ? 'left top' : 'left bottom',
}}
>
<Paper>
<ClickAwayListener onClickAway={handleClose}>
<MenuList
autoFocusItem={open}
id="composition-menu"
aria-labelledby="composition-button"
onKeyDown={handleListKeyDown}
>
<MenuItem onClick={handleClose}>Profile</MenuItem>
<MenuItem onClick={handleClose}>My account</MenuItem>
<MenuItem onClick={handleClose}>Logout</MenuItem>
</MenuList><|fim▁hole|> </Paper>
</Grow>
)}
</Popper>
</div>
</Stack>
);
}<|fim▁end|>
|
</ClickAwayListener>
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
flask.ext.acl
=============
This extension provides an Access Control implementation for `tipfy <http://www.tipfy.org/>`_.
Links
-----
* `Documentation <http://www.tipfy.org/wiki/extensions/acl/>`_
* `Source Code Repository <http://code.google.com/p/tipfy-ext-acl/>`_
* `Issue Tracker <http://code.google.com/p/tipfy-ext-acl/issues/list>`_
About tipfy
-----------
* `Home page <http://www.tipfy.org/>`_
* `Extension list <http://www.tipfy.org/wiki/extensions/>`_
* `Discussion Group <http://groups.google.com/group/tipfy>`_
"""
from setuptools import setup
setup(
name = 'flask.ext.acl',
version = '0.6',
license = 'BSD',<|fim▁hole|> long_description = __doc__,
author = 'guotie',
author_email = '[email protected]',
zip_safe = False,
platforms = 'any',
packages = [
'flask',
'flask.ext',
],
namespace_packages = [
'flask',
'flask.ext',
],
include_package_data = True,
install_requires = [
'flask',
'flask.ext.sqlalchemy',
'flask.ext.cache',
],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)<|fim▁end|>
|
url = 'https://github.com/guotie/flask-acl',
description = 'Access Control extension for flask',
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
"""
Sql support for multilingual models
"""
|
<|file_name|>server.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------
# Threaded, Gevent and Prefork Servers
#-----------------------------------------------------------
import datetime
import errno
import logging
import os
import os.path
import platform
import random
import select
import signal
import socket
import subprocess
import sys
import threading
import time
import unittest2
import werkzeug.serving
from werkzeug.debug import DebuggedApplication
if os.name == 'posix':
# Unix only for workers
import fcntl
import resource
import psutil
else:
# Windows shim
signal.SIGHUP = -1
# Optional process names for workers
try:
from setproctitle import setproctitle
except ImportError:
setproctitle = lambda x: None
import openerp
from openerp.modules.registry import RegistryManager
from openerp.release import nt_service_name
import openerp.tools.config as config
from openerp.tools import stripped_sys_argv, dumpstacks, log_ormcache_stats
_logger = logging.getLogger(__name__)
try:
import watchdog
from watchdog.observers import Observer
from watchdog.events import FileCreatedEvent, FileModifiedEvent, FileMovedEvent
except ImportError:
watchdog = None
SLEEP_INTERVAL = 60 # 1 min
def memory_info(process):
""" psutil < 2.0 does not have memory_info, >= 3.0 does not have
get_memory_info """
return (getattr(process, 'memory_info', None) or process.get_memory_info)()
#----------------------------------------------------------
# Werkzeug WSGI servers patched
#----------------------------------------------------------
class LoggingBaseWSGIServerMixIn(object):
def handle_error(self, request, client_address):
t, e, _ = sys.exc_info()
if t == socket.error and e.errno == errno.EPIPE:
# broken pipe, ignore error
return
_logger.exception('Exception happened during processing of request from %s', client_address)
class BaseWSGIServerNoBind(LoggingBaseWSGIServerMixIn, werkzeug.serving.BaseWSGIServer):
""" werkzeug Base WSGI Server patched to skip socket binding. PreforkServer
use this class, sets the socket and calls the process_request() manually
"""
def __init__(self, app):
werkzeug.serving.BaseWSGIServer.__init__(self, "1", "1", app)
def server_bind(self):
# we dont bind beause we use the listen socket of PreforkServer#socket
# instead we close the socket
if self.socket:
self.socket.close()
def server_activate(self):
# dont listen as we use PreforkServer#socket
pass
class RequestHandler(werkzeug.serving.WSGIRequestHandler):
def setup(self):
# flag the current thread as handling a http request
super(RequestHandler, self).setup()
me = threading.currentThread()
me.name = 'openerp.service.http.request.%s' % (me.ident,)
# _reexec() should set LISTEN_* to avoid connection refused during reload time. It
# should also work with systemd socket activation. This is currently untested
# and not yet used.
class ThreadedWSGIServerReloadable(LoggingBaseWSGIServerMixIn, werkzeug.serving.ThreadedWSGIServer):
""" werkzeug Threaded WSGI Server patched to allow reusing a listen socket
given by the environement, this is used by autoreload to keep the listen
socket open when a reload happens.
"""
def __init__(self, host, port, app):
super(ThreadedWSGIServerReloadable, self).__init__(host, port, app,
handler=RequestHandler)
def server_bind(self):
envfd = os.environ.get('LISTEN_FDS')
if envfd and os.environ.get('LISTEN_PID') == str(os.getpid()):
self.reload_socket = True
self.socket = socket.fromfd(int(envfd), socket.AF_INET, socket.SOCK_STREAM)
# should we os.close(int(envfd)) ? it seem python duplicate the fd.
else:
self.reload_socket = False
super(ThreadedWSGIServerReloadable, self).server_bind()
def server_activate(self):
if not self.reload_socket:
super(ThreadedWSGIServerReloadable, self).server_activate()
#----------------------------------------------------------
# FileSystem Watcher for autoreload and cache invalidation
#----------------------------------------------------------
class FSWatcher(object):
def __init__(self):
self.observer = Observer()
for path in openerp.modules.module.ad_paths:
_logger.info('Watching addons folder %s', path)
self.observer.schedule(self, path, recursive=True)
def dispatch(self, event):
if isinstance(event, (FileCreatedEvent, FileModifiedEvent, FileMovedEvent)):
if not event.is_directory:
path = getattr(event, 'dest_path', event.src_path)
if path.endswith('.py'):
try:
source = open(path, 'rb').read() + '\n'
compile(source, path, 'exec')
except SyntaxError:
_logger.error('autoreload: python code change detected, SyntaxError in %s', path)
else:
_logger.info('autoreload: python code updated, autoreload activated')
restart()
def start(self):
self.observer.start()
_logger.info('AutoReload watcher running')
def stop(self):
self.observer.stop()
self.observer.join()
#----------------------------------------------------------
# Servers: Threaded, Gevented and Prefork
#----------------------------------------------------------
class CommonServer(object):
def __init__(self, app):
# TODO Change the xmlrpc_* options to http_*
self.app = app
# config
self.interface = config['xmlrpc_interface'] or '0.0.0.0'
self.port = config['xmlrpc_port']
# runtime
self.pid = os.getpid()
def close_socket(self, sock):
""" Closes a socket instance cleanly
:param sock: the network socket to close
:type sock: socket.socket
"""
try:
sock.shutdown(socket.SHUT_RDWR)
except socket.error, e:
if e.errno == errno.EBADF:
# Werkzeug > 0.9.6 closes the socket itself (see commit
# https://github.com/mitsuhiko/werkzeug/commit/4d8ca089)
return
# On OSX, socket shutdowns both sides if any side closes it
# causing an error 57 'Socket is not connected' on shutdown
# of the other side (or something), see
# http://bugs.python.org/issue4397
# note: stdlib fixed test, not behavior
if e.errno != errno.ENOTCONN or platform.system() not in ['Darwin', 'Windows']:
raise
sock.close()
class ThreadedServer(CommonServer):
def __init__(self, app):
super(ThreadedServer, self).__init__(app)
self.main_thread_id = threading.currentThread().ident
# Variable keeping track of the number of calls to the signal handler defined
# below. This variable is monitored by ``quit_on_signals()``.
self.quit_signals_received = 0
#self.socket = None
self.httpd = None
def signal_handler(self, sig, frame):
if sig in [signal.SIGINT, signal.SIGTERM]:
# shutdown on kill -INT or -TERM
self.quit_signals_received += 1
if self.quit_signals_received > 1:
# logging.shutdown was already called at this point.
sys.stderr.write("Forced shutdown.\n")
os._exit(0)
elif sig == signal.SIGHUP:
# restart on kill -HUP
openerp.phoenix = True
self.quit_signals_received += 1
def cron_thread(self, number):
while True:
time.sleep(SLEEP_INTERVAL + number) # Steve Reich timing style
registries = openerp.modules.registry.RegistryManager.registries
_logger.debug('cron%d polling for jobs', number)
for db_name, registry in registries.iteritems():
while registry.ready:
acquired = openerp.addons.base.ir.ir_cron.ir_cron._acquire_job(db_name)
if not acquired:
break
def cron_spawn(self):
""" Start the above runner function in a daemon thread.
The thread is a typical daemon thread: it will never quit and must be
terminated when the main process exits - with no consequence (the processing
threads it spawns are not marked daemon).
"""
# Force call to strptime just before starting the cron thread
# to prevent time.strptime AttributeError within the thread.
# See: http://bugs.python.org/issue7980
datetime.datetime.strptime('2012-01-01', '%Y-%m-%d')
for i in range(openerp.tools.config['max_cron_threads']):
def target():
self.cron_thread(i)
t = threading.Thread(target=target, name="openerp.service.cron.cron%d" % i)
t.setDaemon(True)
t.start()
_logger.debug("cron%d started!" % i)
def http_thread(self):
def app(e, s):
return self.app(e, s)
self.httpd = ThreadedWSGIServerReloadable(self.interface, self.port, app)
self.httpd.serve_forever()
def http_spawn(self):
t = threading.Thread(target=self.http_thread, name="openerp.service.httpd")
t.setDaemon(True)
t.start()
_logger.info('HTTP service (werkzeug) running on %s:%s', self.interface, self.port)
def start(self, stop=False):
_logger.debug("Setting signal handlers")
if os.name == 'posix':
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
signal.signal(signal.SIGCHLD, self.signal_handler)
signal.signal(signal.SIGHUP, self.signal_handler)
signal.signal(signal.SIGQUIT, dumpstacks)
signal.signal(signal.SIGUSR1, log_ormcache_stats)
elif os.name == 'nt':
import win32api
win32api.SetConsoleCtrlHandler(lambda sig: self.signal_handler(sig, None), 1)
test_mode = config['test_enable'] or config['test_file']
if test_mode or (config['xmlrpc'] and not stop):
# some tests need the http deamon to be available...
self.http_spawn()
if not stop:
# only relevant if we are not in "--stop-after-init" mode
self.cron_spawn()
def stop(self):
""" Shutdown the WSGI server. Wait for non deamon threads.
"""
_logger.info("Initiating shutdown")
_logger.info("Hit CTRL-C again or send a second signal to force the shutdown.")
if self.httpd:
self.httpd.shutdown()
self.close_socket(self.httpd.socket)
# Manually join() all threads before calling sys.exit() to allow a second signal
# to trigger _force_quit() in case some non-daemon threads won't exit cleanly.
# threading.Thread.join() should not mask signals (at least in python 2.5).
me = threading.currentThread()
_logger.debug('current thread: %r', me)
for thread in threading.enumerate():
_logger.debug('process %r (%r)', thread, thread.isDaemon())
if thread != me and not thread.isDaemon() and thread.ident != self.main_thread_id:
while thread.isAlive():
_logger.debug('join and sleep')
# Need a busyloop here as thread.join() masks signals
# and would prevent the forced shutdown.
thread.join(0.05)
time.sleep(0.05)
_logger.debug('--')
openerp.modules.registry.RegistryManager.delete_all()
logging.shutdown()
def run(self, preload=None, stop=False):
""" Start the http server and the cron thread then wait for a signal.
The first SIGINT or SIGTERM signal will initiate a graceful shutdown while
a second one if any will force an immediate exit.
"""
self.start(stop=stop)
rc = preload_registries(preload)
if stop:
self.stop()
return rc
# Wait for a first signal to be handled. (time.sleep will be interrupted
# by the signal handler.) The try/except is for the win32 case.
try:
while self.quit_signals_received == 0:
time.sleep(60)
except KeyboardInterrupt:
pass
self.stop()
def reload(self):
os.kill(self.pid, signal.SIGHUP)
class GeventServer(CommonServer):
def __init__(self, app):
super(GeventServer, self).__init__(app)
self.port = config['longpolling_port']
self.httpd = None
def watch_parent(self, beat=4):
import gevent
ppid = os.getppid()
while True:
if ppid != os.getppid():
pid = os.getpid()
_logger.info("LongPolling (%s) Parent changed", pid)
# suicide !!
os.kill(pid, signal.SIGTERM)
return
gevent.sleep(beat)
def start(self):
import gevent
from gevent.wsgi import WSGIServer
if os.name == 'posix':
signal.signal(signal.SIGQUIT, dumpstacks)
signal.signal(signal.SIGUSR1, log_ormcache_stats)
gevent.spawn(self.watch_parent)
self.httpd = WSGIServer((self.interface, self.port), self.app)
_logger.info('Evented Service (longpolling) running on %s:%s', self.interface, self.port)
try:
self.httpd.serve_forever()
except:
_logger.exception("Evented Service (longpolling): uncaught error during main loop")
raise
def stop(self):
import gevent
self.httpd.stop()
gevent.shutdown()
def run(self, preload, stop):
self.start()
self.stop()
class PreforkServer(CommonServer):
""" Multiprocessing inspired by (g)unicorn.
PreforkServer (aka Multicorn) currently uses accept(2) as dispatching
method between workers but we plan to replace it by a more intelligent
dispatcher to will parse the first HTTP request line.
"""
def __init__(self, app):
# config
self.address = (config['xmlrpc_interface'] or '0.0.0.0', config['xmlrpc_port'])
self.population = config['workers']
self.timeout = config['limit_time_real']
self.limit_request = config['limit_request']
# working vars
self.beat = 4
self.app = app
self.pid = os.getpid()
self.socket = None
self.workers_http = {}
self.workers_cron = {}
self.workers = {}
self.generation = 0
self.queue = []
self.long_polling_pid = None
def pipe_new(self):
pipe = os.pipe()
for fd in pipe:
# non_blocking
flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
# close_on_exec
flags = fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
return pipe
def pipe_ping(self, pipe):
try:
os.write(pipe[1], '.')
except IOError, e:
if e.errno not in [errno.EAGAIN, errno.EINTR]:
raise
def signal_handler(self, sig, frame):
if len(self.queue) < 5 or sig == signal.SIGCHLD:
self.queue.append(sig)
self.pipe_ping(self.pipe)
else:
_logger.warn("Dropping signal: %s", sig)
def worker_spawn(self, klass, workers_registry):
self.generation += 1
worker = klass(self)
pid = os.fork()
if pid != 0:
worker.pid = pid
self.workers[pid] = worker
workers_registry[pid] = worker
return worker
else:
worker.run()
sys.exit(0)
def long_polling_spawn(self):
nargs = stripped_sys_argv()
cmd = nargs[0]
cmd = os.path.join(os.path.dirname(cmd), "openerp-gevent")
nargs[0] = cmd
popen = subprocess.Popen([sys.executable] + nargs)
self.long_polling_pid = popen.pid
def worker_pop(self, pid):
if pid == self.long_polling_pid:
self.long_polling_pid = None
if pid in self.workers:
_logger.debug("Worker (%s) unregistered", pid)
try:
self.workers_http.pop(pid, None)
self.workers_cron.pop(pid, None)
u = self.workers.pop(pid)
u.close()
except OSError:
return
def worker_kill(self, pid, sig):<|fim▁hole|> os.kill(pid, sig)
except OSError, e:
if e.errno == errno.ESRCH:
self.worker_pop(pid)
def process_signals(self):
while len(self.queue):
sig = self.queue.pop(0)
if sig in [signal.SIGINT, signal.SIGTERM]:
raise KeyboardInterrupt
elif sig == signal.SIGHUP:
# restart on kill -HUP
openerp.phoenix = True
raise KeyboardInterrupt
elif sig == signal.SIGQUIT:
# dump stacks on kill -3
self.dumpstacks()
elif sig == signal.SIGUSR1:
# log ormcache stats on kill -SIGUSR1
log_ormcache_stats()
elif sig == signal.SIGTTIN:
# increase number of workers
self.population += 1
elif sig == signal.SIGTTOU:
# decrease number of workers
self.population -= 1
def process_zombie(self):
# reap dead workers
while 1:
try:
wpid, status = os.waitpid(-1, os.WNOHANG)
if not wpid:
break
if (status >> 8) == 3:
msg = "Critial worker error (%s)"
_logger.critical(msg, wpid)
raise Exception(msg % wpid)
self.worker_pop(wpid)
except OSError, e:
if e.errno == errno.ECHILD:
break
raise
def process_timeout(self):
now = time.time()
for (pid, worker) in self.workers.items():
if worker.watchdog_timeout is not None and \
(now - worker.watchdog_time) >= worker.watchdog_timeout:
_logger.error("Worker (%s) timeout", pid)
self.worker_kill(pid, signal.SIGKILL)
def process_spawn(self):
if config['xmlrpc']:
while len(self.workers_http) < self.population:
self.worker_spawn(WorkerHTTP, self.workers_http)
if not self.long_polling_pid:
self.long_polling_spawn()
while len(self.workers_cron) < config['max_cron_threads']:
self.worker_spawn(WorkerCron, self.workers_cron)
def sleep(self):
try:
# map of fd -> worker
fds = dict([(w.watchdog_pipe[0], w) for k, w in self.workers.items()])
fd_in = fds.keys() + [self.pipe[0]]
# check for ping or internal wakeups
ready = select.select(fd_in, [], [], self.beat)
# update worker watchdogs
for fd in ready[0]:
if fd in fds:
fds[fd].watchdog_time = time.time()
try:
# empty pipe
while os.read(fd, 1):
pass
except OSError, e:
if e.errno not in [errno.EAGAIN]:
raise
except select.error, e:
if e[0] not in [errno.EINTR]:
raise
def start(self):
# wakeup pipe, python doesnt throw EINTR when a syscall is interrupted
# by a signal simulating a pseudo SA_RESTART. We write to a pipe in the
# signal handler to overcome this behaviour
self.pipe = self.pipe_new()
# set signal handlers
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
signal.signal(signal.SIGHUP, self.signal_handler)
signal.signal(signal.SIGCHLD, self.signal_handler)
signal.signal(signal.SIGTTIN, self.signal_handler)
signal.signal(signal.SIGTTOU, self.signal_handler)
signal.signal(signal.SIGQUIT, dumpstacks)
signal.signal(signal.SIGUSR1, log_ormcache_stats)
# listen to socket
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.setblocking(0)
self.socket.bind(self.address)
self.socket.listen(8 * self.population)
def stop(self, graceful=True):
if self.long_polling_pid is not None:
# FIXME make longpolling process handle SIGTERM correctly
self.worker_kill(self.long_polling_pid, signal.SIGKILL)
self.long_polling_pid = None
if graceful:
_logger.info("Stopping gracefully")
limit = time.time() + self.timeout
for pid in self.workers.keys():
self.worker_kill(pid, signal.SIGINT)
while self.workers and time.time() < limit:
try:
self.process_signals()
except KeyboardInterrupt:
_logger.info("Forced shutdown.")
break
self.process_zombie()
time.sleep(0.1)
else:
_logger.info("Stopping forcefully")
for pid in self.workers.keys():
self.worker_kill(pid, signal.SIGTERM)
self.socket.close()
def run(self, preload, stop):
self.start()
rc = preload_registries(preload)
if stop:
self.stop()
return rc
# Empty the cursor pool, we dont want them to be shared among forked workers.
openerp.sql_db.close_all()
_logger.debug("Multiprocess starting")
while 1:
try:
#_logger.debug("Multiprocess beat (%s)",time.time())
self.process_signals()
self.process_zombie()
self.process_timeout()
self.process_spawn()
self.sleep()
except KeyboardInterrupt:
_logger.debug("Multiprocess clean stop")
self.stop()
break
except Exception, e:
_logger.exception(e)
self.stop(False)
return -1
class Worker(object):
""" Workers """
def __init__(self, multi):
self.multi = multi
self.watchdog_time = time.time()
self.watchdog_pipe = multi.pipe_new()
# Can be set to None if no watchdog is desired.
self.watchdog_timeout = multi.timeout
self.ppid = os.getpid()
self.pid = None
self.alive = True
# should we rename into lifetime ?
self.request_max = multi.limit_request
self.request_count = 0
def setproctitle(self, title=""):
setproctitle('openerp: %s %s %s' % (self.__class__.__name__, self.pid, title))
def close(self):
os.close(self.watchdog_pipe[0])
os.close(self.watchdog_pipe[1])
def signal_handler(self, sig, frame):
self.alive = False
def sleep(self):
try:
select.select([self.multi.socket], [], [], self.multi.beat)
except select.error, e:
if e[0] not in [errno.EINTR]:
raise
def process_limit(self):
# If our parent changed sucide
if self.ppid != os.getppid():
_logger.info("Worker (%s) Parent changed", self.pid)
self.alive = False
# check for lifetime
if self.request_count >= self.request_max:
_logger.info("Worker (%d) max request (%s) reached.", self.pid, self.request_count)
self.alive = False
# Reset the worker if it consumes too much memory (e.g. caused by a memory leak).
rss, vms = memory_info(psutil.Process(os.getpid()))
if vms > config['limit_memory_soft']:
_logger.info('Worker (%d) virtual memory limit (%s) reached.', self.pid, vms)
self.alive = False # Commit suicide after the request.
# VMS and RLIMIT_AS are the same thing: virtual memory, a.k.a. address space
soft, hard = resource.getrlimit(resource.RLIMIT_AS)
resource.setrlimit(resource.RLIMIT_AS, (config['limit_memory_hard'], hard))
# SIGXCPU (exceeded CPU time) signal handler will raise an exception.
r = resource.getrusage(resource.RUSAGE_SELF)
cpu_time = r.ru_utime + r.ru_stime
def time_expired(n, stack):
_logger.info('Worker (%d) CPU time limit (%s) reached.', config['limit_time_cpu'])
# We dont suicide in such case
raise Exception('CPU time limit exceeded.')
signal.signal(signal.SIGXCPU, time_expired)
soft, hard = resource.getrlimit(resource.RLIMIT_CPU)
resource.setrlimit(resource.RLIMIT_CPU, (cpu_time + config['limit_time_cpu'], hard))
def process_work(self):
pass
def start(self):
self.pid = os.getpid()
self.setproctitle()
_logger.info("Worker %s (%s) alive", self.__class__.__name__, self.pid)
# Reseed the random number generator
random.seed()
# Prevent fd inherientence close_on_exec
flags = fcntl.fcntl(self.multi.socket, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
fcntl.fcntl(self.multi.socket, fcntl.F_SETFD, flags)
# reset blocking status
self.multi.socket.setblocking(0)
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
def stop(self):
pass
def run(self):
try:
self.start()
while self.alive:
self.process_limit()
self.multi.pipe_ping(self.watchdog_pipe)
self.sleep()
self.process_work()
_logger.info("Worker (%s) exiting. request_count: %s, registry count: %s.",
self.pid, self.request_count,
len(openerp.modules.registry.RegistryManager.registries))
self.stop()
except Exception:
_logger.exception("Worker (%s) Exception occured, exiting..." % self.pid)
# should we use 3 to abort everything ?
sys.exit(1)
class WorkerHTTP(Worker):
""" HTTP Request workers """
def process_request(self, client, addr):
client.setblocking(1)
client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Prevent fd inherientence close_on_exec
flags = fcntl.fcntl(client, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
fcntl.fcntl(client, fcntl.F_SETFD, flags)
# do request using BaseWSGIServerNoBind monkey patched with socket
self.server.socket = client
# tolerate broken pipe when the http client closes the socket before
# receiving the full reply
try:
self.server.process_request(client, addr)
except IOError, e:
if e.errno != errno.EPIPE:
raise
self.request_count += 1
def process_work(self):
try:
client, addr = self.multi.socket.accept()
self.process_request(client, addr)
except socket.error, e:
if e[0] not in (errno.EAGAIN, errno.ECONNABORTED):
raise
def start(self):
Worker.start(self)
self.server = BaseWSGIServerNoBind(self.multi.app)
class WorkerCron(Worker):
""" Cron workers """
def __init__(self, multi):
super(WorkerCron, self).__init__(multi)
# process_work() below process a single database per call.
# The variable db_index is keeping track of the next database to
# process.
self.db_index = 0
def sleep(self):
# Really sleep once all the databases have been processed.
if self.db_index == 0:
interval = SLEEP_INTERVAL + self.pid % 10 # chorus effect
time.sleep(interval)
def _db_list(self):
if config['db_name']:
db_names = config['db_name'].split(',')
else:
db_names = openerp.service.db.list_dbs(True)
return db_names
def process_work(self):
rpc_request = logging.getLogger('openerp.netsvc.rpc.request')
rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
_logger.debug("WorkerCron (%s) polling for jobs", self.pid)
db_names = self._db_list()
if len(db_names):
self.db_index = (self.db_index + 1) % len(db_names)
db_name = db_names[self.db_index]
self.setproctitle(db_name)
if rpc_request_flag:
start_time = time.time()
start_rss, start_vms = memory_info(psutil.Process(os.getpid()))
import openerp.addons.base as base
base.ir.ir_cron.ir_cron._acquire_job(db_name)
openerp.modules.registry.RegistryManager.delete(db_name)
# dont keep cursors in multi database mode
if len(db_names) > 1:
openerp.sql_db.close_db(db_name)
if rpc_request_flag:
run_time = time.time() - start_time
end_rss, end_vms = memory_info(psutil.Process(os.getpid()))
vms_diff = (end_vms - start_vms) / 1024
logline = '%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % \
(db_name, run_time, start_vms / 1024, end_vms / 1024, vms_diff)
_logger.debug("WorkerCron (%s) %s", self.pid, logline)
self.request_count += 1
if self.request_count >= self.request_max and self.request_max < len(db_names):
_logger.error("There are more dabatases to process than allowed "
"by the `limit_request` configuration variable: %s more.",
len(db_names) - self.request_max)
else:
self.db_index = 0
def start(self):
os.nice(10) # mommy always told me to be nice with others...
Worker.start(self)
self.multi.socket.close()
#----------------------------------------------------------
# start/stop public api
#----------------------------------------------------------
server = None
def load_server_wide_modules():
    """Import every module listed in ``openerp.conf.server_wide_modules``.

    A failure to load one module is logged (with a hint for the common
    `web` case) but does not prevent the remaining modules from loading.
    """
    for module_name in openerp.conf.server_wide_modules:
        try:
            openerp.modules.module.load_openerp_module(module_name)
        except Exception:
            msg = ''
            if module_name == 'web':
                msg = """
The `web` module is provided by the addons found in the `openerp-web` project.
Maybe you forgot to add those addons in your addons_path configuration."""
            _logger.exception('Failed to load server-wide module `%s`.%s', module_name, msg)
def _reexec(updated_modules=None):
    """reexecute openerp-server process with (nearly) the same arguments

    On Windows, when running as an NT service, the service is bounced with
    `net stop`/`net start` instead.  Otherwise the current process image is
    replaced in place via os.execv(), which does not return.
    """
    if openerp.tools.osutil.is_running_as_nt_service():
        subprocess.call('net stop {0} && net start {0}'.format(nt_service_name), shell=True)
    exe = os.path.basename(sys.executable)
    # presumably strips options that must not be repeated on restart --
    # see stripped_sys_argv for the exact list
    args = stripped_sys_argv()
    if updated_modules:
        args += ["-u", ','.join(updated_modules)]
    # execv expects argv[0] to be the executable name
    if not args or args[0] != exe:
        args.insert(0, exe)
    os.execv(sys.executable, args)
def load_test_file_yml(registry, test_file):
    """Run a YAML test file against the given registry.

    The file is imported in 'test' mode.  The transaction is committed
    only when the ``test_commit`` configuration option is set; otherwise
    it is rolled back so the test leaves no trace in the database.
    """
    with registry.cursor() as cr:
        # open() instead of the Python-2-only file() builtin
        openerp.tools.convert_yaml_import(cr, 'base', open(test_file), 'test', {}, 'init')
        if config['test_commit']:
            _logger.info('test %s has been committed', test_file)
            cr.commit()
        else:
            _logger.info('test %s has been rolled back', test_file)
            cr.rollback()
def load_test_file_py(registry, test_file):
    """Run the unit tests of an already-imported python test file.

    The module is located by matching the file path against sys.modules;
    nothing happens if the file was never imported.
    """
    # Locate python module based on its filename and run the tests
    test_path, _ = os.path.splitext(os.path.abspath(test_file))
    for mod_name, mod_mod in sys.modules.items():
        if mod_mod:
            mod_path, _ = os.path.splitext(getattr(mod_mod, '__file__', ''))
            if test_path == mod_path:
                # collect every test defined in the matching module
                suite = unittest2.TestSuite()
                for t in unittest2.TestLoader().loadTestsFromModule(mod_mod):
                    suite.addTest(t)
                _logger.log(logging.INFO, 'running tests %s.', mod_mod.__name__)
                stream = openerp.modules.module.TestStream()
                result = unittest2.TextTestRunner(verbosity=2, stream=stream).run(suite)
                success = result.wasSuccessful()
                # report_result is optional on the assertion report
                if hasattr(registry._assertion_report, 'report_result'):
                    registry._assertion_report.report_result(success)
                if not success:
                    _logger.error('%s: at least one error occurred in a test', test_file)
def preload_registries(dbnames):
    """ Preload a registries, possibly run a test file.

    Returns 0 on success, the number of databases whose tests reported
    failures, or -1 as soon as one database fails to initialize.
    """
    # TODO: move all config checks to args dont check tools.config here
    config = openerp.tools.config
    test_file = config['test_file']
    dbnames = dbnames or []
    rc = 0
    for dbname in dbnames:
        try:
            update_module = config['init'] or config['update']
            registry = RegistryManager.new(dbname, update_module=update_module)
            # run test_file if provided
            if test_file:
                _logger.info('loading test file %s', test_file)
                with openerp.api.Environment.manage():
                    if test_file.endswith('yml'):
                        load_test_file_yml(registry, test_file)
                    elif test_file.endswith('py'):
                        load_test_file_py(registry, test_file)
            # count databases whose test run reported failures
            if registry._assertion_report.failures:
                rc += 1
        except Exception:
            _logger.critical('Failed to initialize database `%s`.', dbname, exc_info=True)
            return -1
    return rc
def start(preload=None, stop=False):
    """ Start the openerp http server and cron processor.
    """
    global server
    load_server_wide_modules()
    # pick the server flavour matching the configuration: gevent when
    # evented mode is on, prefork when workers are configured, else a
    # simple threaded server
    if openerp.evented:
        server = GeventServer(openerp.service.wsgi_server.application)
    elif config['workers']:
        server = PreforkServer(openerp.service.wsgi_server.application)
    else:
        server = ThreadedServer(openerp.service.wsgi_server.application)
    watcher = None
    if config['dev_mode']:
        if watchdog:
            # watch source files and reload the server on change
            watcher = FSWatcher()
            watcher.start()
        else:
            _logger.warning("'watchdog' module not installed. Code autoreload feature is disabled")
        # werkzeug interactive debugger for unhandled exceptions
        server.app = DebuggedApplication(server.app, evalex=True)
    rc = server.run(preload, stop)
    # like the legend of the phoenix, all ends with beginnings
    if getattr(openerp, 'phoenix', False):
        if watcher:
            watcher.stop()
        # replace the current process with a fresh server (does not return)
        _reexec()
    return rc if rc else 0
def restart():
""" Restart the server
"""
if os.name == 'nt':
# run in a thread to let the current thread return response to the caller.
threading.Thread(target=_reexec).start()
else:
os.kill(server.pid, signal.SIGHUP)<|fim▁end|>
|
try:
|
<|file_name|>keys.js<|end_file_name|><|fim▁begin|>const _ = require('lodash');
const utils = require('./utils');
/*
 * Default key object settings for lando.keys cache
 *
 * Fallback connection settings used when a key entry does not specify
 * its own host/port (see parseKey below).
 */
const keyDefaults = {
  host: 'lagoon.amazeeio.cloud', // default Lagoon SSH endpoint
  port: '32222',                 // Lagoon SSH port (kept as a string)
  user: 'lagoon-pending',        // pending-key placeholder user
  date: _.toInteger(_.now() / 1000), // creation time, epoch seconds
};
/*
* Generates a new lagoon-pending key.
*/
exports.generateKey = (lando, method = 'new') => {<|fim▁hole|>};
/*
* Put keys into inquierer format
*/
exports.getKeys = (keys = []) => _(keys)
.map(key => ({name: key.email, value: key.key}))
.thru(keys => keys.concat([{name: 'add a new key', value: 'more'}]))
.compact()
.value();
// Helper to get preferred key
exports.getPreferredKey = answers => {
return answers['lagoon-auth-generate'] || answers['auth-generate'] || answers['lagoon-auth'] || answers['auth'];
};
/*
* Parses a key over defaults to extact key/host/port info
*/
exports.parseKey = (key = {}) => {
// Split the "key" and get it
const splitter = key.split('@');
const keyPath = _.first(splitter);
// Now handle the host part
const lagoon = keyDefaults;
// If host part of splitter exists lets update things
if (splitter[1]) {
const parts = splitter[1].split(':');
if (parts[0]) lagoon.host = parts[0];
if (parts[1]) lagoon.port = parts[1];
}
return {keyPath, host: lagoon.host, port: lagoon.port};
};
/*
 * Sort keys by most recent
 *
 * Flattens all sources into one list, drops nulls, and de-duplicates by
 * `key`, keeping the most recently dated entry for each key.
 */
exports.sortKeys = (...sources) => _(_.flatten([...sources]))
  .filter(key => key !== null)
  .sortBy('date')
  .groupBy('key')
  .map(keys => _.last(keys))
  .value();
|
const cmd = '/helpers/lagoon-generate-key.sh lagoon-pending lando';
return utils.run(lando, cmd, null, false);
|
<|file_name|>Base.tpl.py<|end_file_name|><|fim▁begin|>{% block meta %}
name: Base
description: SMACH base template.
language: Python
framework: SMACH
type: Base
tags: [core]
includes: []
extends: []
variables:
- - manifest:
description: ROS manifest name.
type: str
- - node_name:
description: ROS node name for the state machine.
type: str
- outcomes:
description: A list of possible outcomes of the state machine.
type: list
- - userdata:
description: Definitions for userdata needed by child states.
type: dict
- - function_name:
description: A name for the main executable state machine function.
type: str
input_keys: []
output_keys: []
{% endblock meta %}
{% from "Utils.tpl.py" import import_module, render_outcomes, render_userdata %}
{% set defined_headers = [] %}
{% set local_vars = [] %}
{% block base_header %}
#!/usr/bin/env python
{{ base_header }}
{% endblock base_header %}
{% block imports %}
{{ import_module(defined_headers, 'smach') }}
{{ imports }}
{% endblock imports %}
{% block defs %}
{{ defs }}
{% endblock defs %}
{% block class_defs %}<|fim▁hole|>{% block cb_defs %}
{{ cb_defs }}
{% endblock cb_defs %}
{% if name is defined %}{% set sm_name = name | lower() %}{% else %}{% set sm_name = 'sm' %}{% endif %}
{% block main_def %}
def {% if function_name is defined %}{{ function_name | lower() }}{% else %}main{% endif %}():
{{ main_def | indent(4) }}
{% endblock main_def %}
{% block body %}
{{ sm_name }} = smach.StateMachine({{ render_outcomes(outcomes) }})
{# Container header insertion variable indexed by container state name #}
{% if name in header %}{{ header[name] | indent(4, true) }}{% endif %}
{# Render container userdata #}
{% if userdata is defined %}{{ render_userdata(name | lower(), userdata) | indent(4) }}{% endif %}
{# Render state userdata #}
{% if name in header_userdata %}{{ header_userdata[name] | indent(4, true) }}{% endif %}
with {{ sm_name }}:
{# Container body insertion variable #}
{{ body | indent(8) }}
{% endblock body %}
{% block footer %}
{{ footer | indent(8) }}
{% endblock footer %}
{% block execute %}
{{ execute | indent(4) }}
outcome = {{ sm_name }}.execute()
{% endblock execute %}
{% block base_footer %}
{{ base_footer | indent(4) }}
{% endblock base_footer %}
{% block main %}
if __name__ == '__main__':
{{ '' | indent(4, true) }}{% if function_name is defined %}{{ function_name | lower() }}{% else %}main{% endif %}()
{% endblock main %}<|fim▁end|>
|
{{ class_defs }}
{% endblock class_defs %}
|
<|file_name|>win_wao.cpp<|end_file_name|><|fim▁begin|>/***********************************************************************************
Copyright (C) 2014-2015 Ahmet Öztürk ([email protected])
This file is part of Lifeograph.
Lifeograph is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lifeograph is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lifeograph. If not, see <http://www.gnu.org/licenses/>.
***********************************************************************************/
#include <winsock2.h> // just to silence the compiler
#include <windows.h>
#include <commctrl.h>
#include "win_wao.hpp"
LRESULT CALLBACK
waoWC_button0Proc( HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam )
{
static TRACKMOUSEEVENT tME;
switch( msg )
{
case WM_CREATE:
{
tME.cbSize = sizeof( TRACKMOUSEEVENT );
//tME.dwFlags = TME_HOVER;
tME.dwHoverTime = 0;
SendMessage( GetParent (hwnd), WAOM_TBBTNCREATED,
MAKEWPARAM( WAOM_TBBTNCREATED_LW,
GetWindowLong( hwnd, GWL_ID ) ), ( LPARAM ) hwnd );
return FALSE;
}
case WM_ENABLE:
if( GetWindowLong( hwnd, GWL_USERDATA ) & WAO_TBBS_CHECKED )
SetWindowLong( hwnd, GWL_USERDATA,
wParam ? ( WAO_TBBS_NORMAL | WAO_TBBS_CHECKED ) :
( WAO_TBBS_DISABLED | WAO_TBBS_CHECKED ) );
else
SetWindowLong( hwnd, GWL_USERDATA, wParam ? WAO_TBBS_NORMAL : WAO_TBBS_DISABLED );
RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
return FALSE;
case WM_MOUSEMOVE:
if( GetWindowLong( hwnd, GWL_USERDATA ) != WAO_TBBS_NORMAL )
return FALSE;
SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_HOVERED );
RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
tME.dwFlags = TME_LEAVE;
tME.hwndTrack = hwnd;
TrackMouseEvent( &tME );
return FALSE;
case WM_MOUSELEAVE:
SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_NORMAL );
RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
return FALSE;
case WM_RBUTTONDOWN:
if( GetWindowLong( hwnd, GWL_STYLE ) & BS_RIGHTBUTTON )
{
long btnStt = GetWindowLong( hwnd, GWL_USERDATA );
SetWindowLong( hwnd, GWL_USERDATA,
WAO_TBBS_CLICKED | ( btnStt & WAO_TBBS_CHECKED ) );
}
return FALSE;
case WM_LBUTTONDOWN:
{
long btnStt = GetWindowLong( hwnd, GWL_USERDATA );
SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_CLICKED | ( btnStt & WAO_TBBS_CHECKED ) );
RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
if( GetWindowLong( hwnd, GWL_STYLE ) & BS_NOTIFY )
SetTimer( hwnd, WAO_TMR_TBRPW, WAO_TBBN_RPWTIME, NULL );
return FALSE;
}
case WM_LBUTTONUP:
// should be clicked first:
if( !( GetWindowLong( hwnd, GWL_USERDATA ) & WAO_TBBS_CLICKED ) )
return FALSE;
case WM_CLEAR: // tweak to check with one internal message
{
SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_NORMAL );
RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
DLGPROC parProc = ( DLGPROC ) GetWindowLong ( GetParent( hwnd ), DWL_DLGPROC );
if( msg != WM_CLEAR ) // what about making this a seperate thread?
parProc( GetParent ( hwnd ), WM_COMMAND,
MAKEWPARAM( GetWindowLong( hwnd, GWL_ID ),
WAO_TBBN_LCLCKED ),
( LPARAM ) hwnd );
if( GetWindowLong( hwnd, GWL_STYLE ) & BS_NOTIFY )
{
KillTimer( hwnd, WAO_TMR_TBRPW );
KillTimer( hwnd, WAO_TMR_TBRPT );
}
return FALSE;
}
case WM_RBUTTONUP:
// should be clicked first:
if( !( GetWindowLong( hwnd, GWL_USERDATA ) & WAO_TBBS_CLICKED ) )
return FALSE;
if( GetWindowLong( hwnd, GWL_STYLE ) & BS_RIGHTBUTTON )
{
DLGPROC parProc = ( DLGPROC ) GetWindowLong( GetParent( hwnd ), DWL_DLGPROC );
parProc( GetParent( hwnd ),
WM_COMMAND,
MAKEWPARAM( GetWindowLong ( hwnd, GWL_ID ), WAO_TBBN_RCLCKED ),
( LPARAM ) hwnd );
}
return FALSE;
case WM_TIMER:
{
if( wParam == WAO_TMR_TBRPW ) // repeat wait
{
KillTimer( hwnd, WAO_TMR_TBRPW );
SetTimer( hwnd, WAO_TMR_TBRPT, WAO_TBBN_RPTTIME, NULL );
}
DLGPROC parProc = ( DLGPROC ) GetWindowLong( GetParent( hwnd ), DWL_DLGPROC );
parProc( GetParent( hwnd ),
WM_COMMAND,
MAKEWPARAM( GetWindowLong( hwnd, GWL_ID ), WAO_TBBN_LCLCKED ),
( LPARAM ) hwnd );
return FALSE;
}
case WM_PAINT:
{
PAINTSTRUCT paintStruct;
BeginPaint( hwnd, &paintStruct );
RECT rcWnd;
GetClientRect( hwnd, &rcWnd ); // should this be calculated every time?
<|fim▁hole|> {
FillRect( paintStruct.hdc, &rcWnd, CreateSolidBrush( 0x99ffff ) );
DrawEdge( paintStruct.hdc, &rcWnd, BDR_SUNKENOUTER, BF_RECT );
}
else if( btnStt == WAO_TBBS_CLICKED )
{
DrawEdge( paintStruct.hdc, &rcWnd, BDR_SUNKENOUTER, BF_RECT );
}
else if( btnStt == WAO_TBBS_HOVERED )
{
DrawEdge( paintStruct.hdc, &rcWnd, BDR_RAISEDINNER, BF_RECT );
}
// drawing icon
if( GetWindowLong( hwnd, GWL_STYLE ) & BS_ICON ) // maybe later bitmap too
{
HICON hIco = ( HICON ) GetPropA( hwnd, WAO_PROP_ICON );
DrawIconEx( paintStruct.hdc,
( rcWnd.right - rcWnd.left - 16 ) / 2,
( rcWnd.bottom - rcWnd.top - 16 ) / 2,
hIco, 16, 16, 0, NULL, DI_NORMAL );
}
// drawing text
else
{
int tmpLen = GetWindowTextLength( hwnd );
wchar_t buffer[ tmpLen + 1 ];
SIZE tmpSze;
GetWindowText( hwnd, buffer, tmpLen + 1 );
SetBkMode( paintStruct.hdc, TRANSPARENT );
SelectObject( paintStruct.hdc, GetStockObject( DEFAULT_GUI_FONT ) );
GetTextExtentPoint32( paintStruct.hdc, buffer, tmpLen, &tmpSze );
DrawState( paintStruct.hdc, NULL, NULL,
( LPARAM ) buffer, tmpLen,
( rcWnd.right-rcWnd.left-tmpSze.cx ) / 2,
( rcWnd.bottom-rcWnd.top-tmpSze.cy ) / 2,
tmpSze.cx, tmpSze.cy, DST_TEXT|(
( btnStt & WAO_TBBS_DISABLED ) ? DSS_DISABLED : 0 ) );
}
EndPaint( hwnd, &paintStruct );
return FALSE;
}
default:
return DefWindowProc( hwnd, msg, wParam, lParam );
}
}
// Window procedure for checkable toolbar buttons ("wao_BUTTON_Chk").
// The check/hover/click state lives in GWL_USERDATA as WAO_TBBS_* flags;
// anything not handled here is delegated to waoWC_button0Proc (default case).
LRESULT CALLBACK
waoWC_buttonChkProc( HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam )
{
    static TRACKMOUSEEVENT tME;
    switch( msg )
    {
        case WM_CREATE:
        {
            tME.cbSize = sizeof( TRACKMOUSEEVENT );
            tME.dwHoverTime = 0;
            // announce the freshly created button to the parent toolbar
            SendMessage( GetParent( hwnd ),
                         WAOM_TBBTNCREATED,
                         MAKEWPARAM( WAOM_TBBTNCREATED_LW, GetWindowLong( hwnd, GWL_ID ) ),
                         ( LPARAM ) hwnd );
        }
        // NOTE(review): no break -- WM_CREATE falls through into
        // WM_MOUSELEAVE, presumably to initialize the visual state; confirm.
        case WM_MOUSELEAVE:
            if( !( GetWindowLong( hwnd, GWL_USERDATA ) & WAO_TBBS_CHECKED ) )
            {
                // not checked: revert to the plain state and repaint
                SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_NORMAL );
                RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
            }
            else
                // keep the checked flag, clear hover/click bits
                SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_CHECKED | WAO_TBBS_NORMAL );
            break;
        case WM_LBUTTONUP:
        {
            // should be clicked first:
            if( !( GetWindowLong( hwnd, GWL_USERDATA ) & WAO_TBBS_CLICKED ) )
                return FALSE;
            if( GetWindowLong( hwnd, GWL_USERDATA ) & WAO_TBBS_CHECKED )
            {
                // already checked: plain check buttons toggle back off;
                // auto-radio buttons stay checked
                if( !( GetWindowLong( hwnd, GWL_STYLE ) & BS_AUTORADIOBUTTON ) )
                {
                    SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_HOVERED );
                    tME.dwFlags = TME_LEAVE;
                    tME.hwndTrack = hwnd;
                    TrackMouseEvent( &tME );
                }
            }
            else
            {
                SetWindowLong( hwnd, GWL_USERDATA, WAO_TBBS_CHECKED );
                if( GetWindowLong( hwnd, GWL_STYLE ) & BS_RADIOBUTTON )
                {
                    // radio behaviour: uncheck sibling check-buttons of the
                    // same group, scanning both directions in Z-order; stop
                    // at a different class, a non-radio style, or WS_GROUP
                    long wStyle;
                    HWND t_hndWnd = hwnd;
                    while( ( t_hndWnd = GetWindow( t_hndWnd, GW_HWNDPREV ) ) )
                    {
                        wchar_t buffer[ 16 ];
                        GetClassName( t_hndWnd, buffer, 15 );
                        if( lstrcmp( buffer, L"wao_BUTTON_Chk" ) )
                            break;
                        wStyle = GetWindowLong( t_hndWnd, GWL_STYLE );
                        if( !( wStyle & BS_RADIOBUTTON ) )
                            break;
                        SetWindowLong( t_hndWnd, GWL_USERDATA, WAO_TBBS_NORMAL );
                        RedrawWindow( t_hndWnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
                        if( wStyle & WS_GROUP )
                            break;
                    }
                    t_hndWnd = hwnd;
                    while( ( t_hndWnd = GetWindow( t_hndWnd, GW_HWNDNEXT ) ) )
                    {
                        wchar_t buffer[ 16 ];
                        GetClassName( t_hndWnd, buffer, 15 );
                        if( lstrcmp( buffer, L"wao_BUTTON_Chk" ) )
                            break;
                        wStyle = GetWindowLong( t_hndWnd, GWL_STYLE );
                        if( !( wStyle & BS_RADIOBUTTON ) )
                            break;
                        SetWindowLong( t_hndWnd, GWL_USERDATA, WAO_TBBS_NORMAL );
                        RedrawWindow( t_hndWnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
                        if( wStyle & WS_GROUP )
                            break;
                    }
                }
            }
            RedrawWindow( hwnd, NULL, NULL, RDW_ERASE | RDW_INVALIDATE );
            // RADIO BUTTON
            // notify the parent dialog of the left click
            DLGPROC parProc = ( DLGPROC ) GetWindowLong( GetParent( hwnd ), DWL_DLGPROC );
            parProc( GetParent( hwnd ),
                     WM_COMMAND,
                     MAKEWPARAM( GetWindowLong( hwnd, GWL_ID ), WAO_TBBN_LCLCKED ),
                     ( LPARAM ) hwnd );
        }
        break;
        default:
            return( waoWC_button0Proc( hwnd, msg, wParam, lParam ) );
    }
    return 0; // just to silence the compiler
}
// Subclass procedure for edit controls: notifies the parent when the user
// confirms (Enter) or aborts (Escape) an edit.  Every other message --
// including other keystrokes -- is forwarded to the default subclass
// procedure so the edit keeps handling navigation keys (arrows, Home/End,
// Delete, ...), which the previous version silently swallowed.
LRESULT CALLBACK
WAO_advanced_edit_proc( HWND hwnd, UINT msg, WPARAM wparam, LPARAM lparam,
                        UINT_PTR uIdSubclass, DWORD_PTR dwRefData )
{
    if( msg == WM_KEYDOWN )
    {
        if( wparam == VK_RETURN )
        {
            SendMessage( GetParent( hwnd ),
                         WAOM_EDITACTIVATED,
                         MAKEWPARAM( GetWindowLong( hwnd, GWL_ID ), WAOM_EDITACTIVATED_HW ),
                         ( LPARAM ) hwnd );
            return 0;   // consumed
        }
        else if( wparam == VK_ESCAPE )
        {
            SendMessage( GetParent( hwnd ),
                         WAOM_EDITABORTED,
                         MAKEWPARAM( GetWindowLong( hwnd, GWL_ID ), WAOM_EDITACTIVATED_HW ),
                         ( LPARAM ) hwnd );
            return 0;   // consumed
        }
        // any other key falls through to default processing
    }
    return DefSubclassProc( hwnd, msg, wparam, lparam );
}
#include <ctime>
// Register the two custom toolbar-button window classes used by WAO.
// Returns false (after a message box) if either registration fails.
bool
WAO_init()
{
    WNDCLASS wcPlain;
    wcPlain.style         = CS_OWNDC | CS_BYTEALIGNWINDOW | CS_PARENTDC;
    wcPlain.lpfnWndProc   = waoWC_button0Proc;
    wcPlain.cbClsExtra    = 0;
    wcPlain.cbWndExtra    = 0; // state bits?
    wcPlain.hInstance     = GetModuleHandle( NULL );
    wcPlain.hIcon         = NULL;
    wcPlain.hCursor       = LoadCursor( NULL, IDC_HAND );
    wcPlain.hbrBackground = CreateSolidBrush( GetSysColor( COLOR_BTNFACE ) );
    wcPlain.lpszMenuName  = NULL;
    wcPlain.lpszClassName = L"wao_BUTTON_0";

    // the checkable variant shares everything but the procedure and name
    WNDCLASS wcCheck      = wcPlain;
    wcCheck.lpfnWndProc   = waoWC_buttonChkProc;
    wcCheck.lpszClassName = L"wao_BUTTON_Chk";

    if( !RegisterClass( &wcPlain ) )
    {
        MessageBoxA( NULL, "waoWC_button0 Registration Failed!", "WAO", MB_OK );
        return false;
    }
    if( !RegisterClass( &wcCheck ) )
    {
        MessageBoxA( NULL, "waoWC_buttonChk Registration Failed!", "WAO", MB_OK );
        return false;
    }
    return true;
}
// Placeholder dialog procedure for the toolbar window; leaves every
// message to default processing.
BOOL CALLBACK
WAO_toolbar_proc( HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam )
{
    return FALSE;
}
// Attach an icon to a WAO button as a window property and force a repaint
// so the button picks it up immediately.  Returns SetPropA's result.
BOOL
WAO_set_icon( HWND hwnd, HICON hIcon )
{
    const BOOL stored = SetPropA( hwnd, WAO_PROP_ICON, hIcon );
    InvalidateRect( hwnd, NULL, TRUE );
    return stored;
}
// INPUT DIALOG ====================================================================================
// Stores the dialog title and the initial/edited text; m_result is set
// while the dialog runs (see proc()).
WAO_InputDlg::WAO_InputDlg( const Wstring& title, const Wstring& text )
    : m_text( text ), m_title( title )
{
}
// Run the input dialog modally and report whether the user confirmed.
// m_result is initialized up front: if DialogBox fails (returns -1) the
// dialog procedure never runs, and m_result would otherwise be read
// uninitialized.
bool
WAO_InputDlg::launch( HWND hWPar, DLGPROC redirProc )
{
    m_result = false;
    DialogBox( GetModuleHandle( NULL ), MAKEINTRESOURCE( WAO_IDD_INPUT ), hWPar, redirProc );
    return m_result;
}
// Dialog procedure: shows m_title/m_text, and on OK stores the edited
// text back into m_text and sets m_result to true.
bool
WAO_InputDlg::proc( HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam )
{
    switch( msg )
    {
        case WM_INITDIALOG:
            m_hwnd = hwnd;
            SetWindowText( m_hwnd, m_title.c_str() );
            SetDlgItemText( m_hwnd, WAO_IDE_INPUT, m_text.c_str() );
            m_result = false;
            return TRUE;
        case WM_COMMAND:
            switch( LOWORD( wParam ) )
            {
                case IDOK:
                {
                    m_result = true;
                    wchar_t buffer[ 1024 ];
                    // NOTE(review): input beyond 1023 chars is silently truncated
                    GetDlgItemText( m_hwnd, WAO_IDE_INPUT, buffer, 1023 );
                    m_text = buffer;
                    EndDialog( m_hwnd, 0 );
                }
                return FALSE;
            }
            return FALSE;
        case WM_DESTROY:
            m_hwnd = NULL; // whenever window is destroyed hW is nullified
            return FALSE;
        default:
            return FALSE;
    }
}
// MENU ============================================================================================
// Starts without an underlying Win32 menu; init() creates one.
WAO_Menu::WAO_Menu()
:   m_hmenu( NULL )
{
}
// (Re)create the popup menu handle and hand it back to the caller.
HMENU
WAO_Menu::init()
{
    m_hmenu = CreatePopupMenu();
    return m_hmenu;
}
// APPEND
// Add one item to the popup menu; see Win32 AppendMenu for flag meanings.
bool
WAO_Menu::append( UINT flags, UINT newItm, LPCTSTR content )
{
    return AppendMenu( m_hmenu, flags, newItm, content );
}
// TRACK MENU
// Display the popup menu at (posX, posY); a negative posX means "at the
// current cursor position".  The return value is TrackPopupMenu's (its
// meaning depends on the flags passed).
int
WAO_Menu::track( UINT flags, HWND hWOwn, int posX, int posY )
{
    //hMenuAct = hMenu;
    //hMenu = NULL; // to avoid recreation during append()
    if( posX < 0 )
    {
        POINT pnt;
        GetCursorPos (&pnt);
        posX = pnt.x;
        posY = pnt.y;
    }
    return TrackPopupMenu( m_hmenu, flags, posX, posY, 0, hWOwn, NULL );
}
// SET DEFAULT ITEM
// Make `itm` the menu's default item; `byPos` selects whether itm is a
// position index or a command id (see SetMenuDefaultItem).
bool
WAO_Menu::set_default_item( UINT itm, UINT byPos )
{
    return SetMenuDefaultItem( m_hmenu, itm, byPos );
}
// WHETHER THE MENU IS ACTIVE
// A non-null handle means init() has been called and destroy() hasn't.
bool
WAO_Menu::is_active()
{
    return m_hmenu != NULL;
}
// DESTROY MENU IF IT IS ACTIVE
bool
WAO_Menu::destroy()
{
if( m_hmenu )
{
DestroyMenu( m_hmenu );
m_hmenu = 0;
return true; // it was active
}
else
return false; // it was already inactive
}<|fim▁end|>
|
int btnStt = GetWindowLong( hwnd, GWL_USERDATA );
if( btnStt & WAO_TBBS_CHECKED )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.