prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>attr.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::AttrInfo;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding::{self, AttrMethods};
use dom::bindings::codegen::InheritTypes::NodeCast;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, MutNullableHeap};
use dom::bindings::js::{LayoutJS, Root, RootedReference};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::element::{AttributeMutation, Element};
use dom::virtualmethods::vtable_for;<|fim▁hole|>use std::mem;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use util::str::{DOMString, parse_unsigned_integer, split_html_space_chars, str_join};
#[derive(JSTraceable, PartialEq, Clone, HeapSizeOf)]
pub enum AttrValue {
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Atom(Atom),
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from_slice)
.fold(vec![], |mut acc, atom| {
if !acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
let tokens = str_join(&atoms, "\x20");
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > 2147483647 {
default
} else {
result
};
AttrValue::UInt(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > 2147483647 {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
let value = Atom::from_slice(&string);
AttrValue::Atom(value)
}
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
// https://dom.spec.whatwg.org/#interface-attr
#[dom_struct]
pub struct Attr {
reflector_: Reflector,
local_name: Atom,
value: DOMRefCell<AttrValue>,
name: Atom,
namespace: Namespace,
prefix: Option<Atom>,
/// the element that owns this attribute.
owner: MutNullableHeap<JS<Element>>,
}
impl Attr {
fn new_inherited(local_name: Atom, value: AttrValue, name: Atom, namespace: Namespace,
prefix: Option<Atom>, owner: Option<&Element>) -> Attr {
Attr {
reflector_: Reflector::new(),
local_name: local_name,
value: DOMRefCell::new(value),
name: name,
namespace: namespace,
prefix: prefix,
owner: MutNullableHeap::new(owner.map(JS::from_ref)),
}
}
pub fn new(window: &Window, local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<Atom>, owner: Option<&Element>) -> Root<Attr> {
reflect_dom_object(
box Attr::new_inherited(local_name, value, name, namespace, prefix, owner),
GlobalRef::Window(window),
AttrBinding::Wrap)
}
#[inline]
pub fn name(&self) -> &Atom {
&self.name
}
#[inline]
pub fn namespace(&self) -> &Namespace {
&self.namespace
}
#[inline]
pub fn prefix(&self) -> &Option<Atom> {
&self.prefix
}
}
impl AttrMethods for Attr {
// https://dom.spec.whatwg.org/#dom-attr-localname
fn LocalName(&self) -> DOMString {
(**self.local_name()).to_owned()
}
// https://dom.spec.whatwg.org/#dom-attr-value
fn Value(&self) -> DOMString {
(**self.value()).to_owned()
}
// https://dom.spec.whatwg.org/#dom-attr-value
fn SetValue(&self, value: DOMString) {
match self.owner() {
None => *self.value.borrow_mut() = AttrValue::String(value),
Some(owner) => {
let value = owner.r().parse_attribute(&self.namespace, self.local_name(), value);
self.set_value(value, owner.r());
}
}
}
// https://dom.spec.whatwg.org/#dom-attr-textcontent
fn TextContent(&self) -> DOMString {
self.Value()
}
// https://dom.spec.whatwg.org/#dom-attr-textcontent
fn SetTextContent(&self, value: DOMString) {
self.SetValue(value)
}
// https://dom.spec.whatwg.org/#dom-attr-nodevalue
fn NodeValue(&self) -> DOMString {
self.Value()
}
// https://dom.spec.whatwg.org/#dom-attr-nodevalue
fn SetNodeValue(&self, value: DOMString) {
self.SetValue(value)
}
// https://dom.spec.whatwg.org/#dom-attr-name
fn Name(&self) -> DOMString {
(*self.name).to_owned()
}
// https://dom.spec.whatwg.org/#dom-attr-namespaceuri
fn GetNamespaceURI(&self) -> Option<DOMString> {
let Namespace(ref atom) = self.namespace;
match &**atom {
"" => None,
url => Some(url.to_owned()),
}
}
// https://dom.spec.whatwg.org/#dom-attr-prefix
fn GetPrefix(&self) -> Option<DOMString> {
self.prefix().as_ref().map(|p| (**p).to_owned())
}
// https://dom.spec.whatwg.org/#dom-attr-ownerelement
fn GetOwnerElement(&self) -> Option<Root<Element>> {
self.owner()
}
// https://dom.spec.whatwg.org/#dom-attr-specified
fn Specified(&self) -> bool {
true // Always returns true
}
}
impl Attr {
pub fn set_value(&self, mut value: AttrValue, owner: &Element) {
assert!(Some(owner) == self.owner().r());
mem::swap(&mut *self.value.borrow_mut(), &mut value);
if self.namespace == ns!("") {
vtable_for(NodeCast::from_ref(owner)).attribute_mutated(
self, AttributeMutation::Set(Some(&value)));
}
}
pub fn value(&self) -> Ref<AttrValue> {
self.value.borrow()
}
pub fn local_name(&self) -> &Atom {
&self.local_name
}
/// Sets the owner element. Should be called after the attribute is added
/// or removed from its older parent.
pub fn set_owner(&self, owner: Option<&Element>) {
let ref ns = self.namespace;
match (self.owner().r(), owner) {
(None, Some(new)) => {
// Already in the list of attributes of new owner.
assert!(new.get_attribute(&ns, &self.local_name) == Some(Root::from_ref(self)))
}
(Some(old), None) => {
// Already gone from the list of attributes of old owner.
assert!(old.get_attribute(&ns, &self.local_name).is_none())
}
(old, new) => assert!(old == new)
}
self.owner.set(owner.map(JS::from_ref))
}
pub fn owner(&self) -> Option<Root<Element>> {
self.owner.get().map(Root::from_rooted)
}
pub fn summarize(&self) -> AttrInfo {
let Namespace(ref ns) = self.namespace;
AttrInfo {
namespace: (**ns).to_owned(),
name: self.Name(),
value: self.Value(),
}
}
}
#[allow(unsafe_code)]
pub trait AttrHelpersForLayout {
unsafe fn value_forever(&self) -> &'static AttrValue;
unsafe fn value_ref_forever(&self) -> &'static str;
unsafe fn value_atom_forever(&self) -> Option<Atom>;
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]>;
unsafe fn local_name_atom_forever(&self) -> Atom;
unsafe fn value_for_layout(&self) -> &AttrValue;
}
#[allow(unsafe_code)]
impl AttrHelpersForLayout for LayoutJS<Attr> {
#[inline]
unsafe fn value_forever(&self) -> &'static AttrValue {
// This transmute is used to cheat the lifetime restriction.
mem::transmute::<&AttrValue, &AttrValue>((*self.unsafe_get()).value.borrow_for_layout())
}
#[inline]
unsafe fn value_ref_forever(&self) -> &'static str {
&**self.value_forever()
}
#[inline]
unsafe fn value_atom_forever(&self) -> Option<Atom> {
let value = (*self.unsafe_get()).value.borrow_for_layout();
match *value {
AttrValue::Atom(ref val) => Some(val.clone()),
_ => None,
}
}
#[inline]
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]> {
// This transmute is used to cheat the lifetime restriction.
match *self.value_forever() {
AttrValue::TokenList(_, ref tokens) => Some(tokens),
_ => None,
}
}
#[inline]
unsafe fn local_name_atom_forever(&self) -> Atom {
(*self.unsafe_get()).local_name.clone()
}
#[inline]
unsafe fn value_for_layout(&self) -> &AttrValue {
(*self.unsafe_get()).value.borrow_for_layout()
}
}<|fim▁end|> | use dom::window::Window;
use std::borrow::ToOwned;
use std::cell::Ref; |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import demo<|fim▁hole|><|fim▁end|> |
demo.main() |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod raster;
mod shader;
mod samplers;
use std::ops::Add;
use std::ops::Mul;
use std::f32;
// A vector in 4-space.
pub struct Vector([f32; 4]);
// A 4x4 matrix.
pub struct Matrix([f32; 16]);
impl Vector {
pub fn new(x: f32, y: f32, z: f32) -> Vector {
Vector([x, y, z, 1.])
}
pub fn zero() -> Vector {
Vector([0., 0., 0., 0.])
}
fn nth(&self, idx: usize) -> Option<f32> {
match (self, idx) {
(&Vector(ref data), 0...3) => Some(data[idx]),
_ => None
}
}
fn x(&self) -> f32 {
match self.nth(0) {
Some(s) => s,
_ => panic!()
}
}
fn y(&self) -> f32 {
match self.nth(1) {
Some(s) => s,
_ => panic!()
}
}
fn z(&self) -> f32 {
match self.nth(2) {
Some(s) => s,
_ => panic!()
}
}
fn w(&self) -> f32 {
match self.nth(3) {
Some(s) => s,
_ => panic!()
}
}
fn dot(&self, vec: &Vector) -> f32 {
match (self, vec) {
(&Vector(a), &Vector(b)) => {
a.iter().zip(b.iter()).fold(0., |sum, (i, j)| sum + (i * j))
}
}
}
fn sub(&self, vec: &Vector) -> Vector {
Vector([self.x() - vec.x(),
self.y() - vec.y(),
self.z() - vec.z(),
self.w() - vec.w()])
}
}
impl Matrix {
fn identity() -> Matrix {
Matrix([1., 0., 0., 0.,
0., 1., 0., 0.,
0., 0., 1., 0.,
0., 0., 0., 1.])
}
fn translate(pos: &Vector) -> Matrix {
Matrix([1., 0., 0., pos.x(),
0., 1., 0., pos.y(),
0., 0., 1., pos.z(),
0., 0., 0., pos.w()])
}
fn scale(scale: &Vector) -> Matrix {
Matrix([scale.x(), 0., 0., 0.,
0., scale.y(), 0., 0.,
0., 0., scale.z(), 0.,
0., 0., 0., scale.w()])
}
fn apply(&self, vec: &Vector) -> Vector {
let mut data: [f32; 4] = [0.; 4];
for i in 0..3 {
data[i] = self.row(i).dot(vec);
}
Vector(data)
}
fn row(&self, row: usize) -> Vector {
match self {
&Matrix(ref data) => {
Vector([data[row * 4],
data[1 + (row * 4)],
data[2 + (row * 4)],
data[3 + (row * 4)]])
}
}
}
fn col(&self, col: usize) -> Vector {
match (self) {
&Matrix(ref data) => {
Vector([data[col],
data[col + 4],
data[col + 8],
data[col + 12]])
}
}
}
}
impl Mul for Matrix {
type Output = Matrix;
// Produces the matrix AB.
fn mul(self, rhs: Matrix) -> Matrix {
let mut out: [f32; 16] = [0.; 16];
for j in 0..3 {
for i in 0..3 {
out[i * j] = self.row(j).dot(&rhs.col(i));
}
}
Matrix(out)
}
}
pub struct Rect {
pub top: f32,
pub bottom: f32,
pub left: f32,
pub right: f32,
}
// A primitive triangle.
pub struct Triangle(Vector, Vector, Vector);
impl Triangle {
pub fn new(a: Vector, b: Vector, c: Vector) -> Triangle {
Triangle(a, b, c)
}
fn vertices(&self) -> Vec<&Vector> {
match self {
&Triangle(ref a, ref b, ref c) => vec![a, b, c]
}
}
// Returns a bounding box encapsulating the triangle in the XY-plane.
fn bounds(&self) -> Rect {
let &Triangle(ref a, ref b, ref c) = self;
let mut rect = Rect {
top: f32::MAX,
bottom: f32::MIN,
left: f32::MAX,
right: f32::MIN,
};
for i in [a, b, c].iter() {
rect.top = rect.top.min(i.x());
rect.bottom = rect.bottom.max(i.x());
rect.left = rect.left.min(i.y());
rect.right = rect.right.max(i.y());
}
rect
}
}
pub struct Mesh(Vec<Triangle>);
impl Mesh {
pub fn new(tris: Vec<Triangle>) -> Mesh {
Mesh(tris)
}
}
pub struct Model {
mesh: Mesh,
pos: Vector,
scale: Vector,
rot: Vector,
}
impl Model {
pub fn new(mesh: Mesh) -> Model {
Model {
mesh: mesh,
pos: Vector::zero(),
scale: Vector::new(1., 1., 1.),
rot: Vector::zero(),
}
}
fn rotate(&mut self, rotation: &Vector) {
// TODO
}
fn translate(&mut self, translation: &Vector) {
// TODO
}
fn scale(&mut self, scale: &Vector) {
// TODO
}
fn get_transform(&self) -> Matrix {
let translate: Matrix = Matrix::translate(&self.pos);
// TODO(acomminos): other transforms
translate
}
}
// A perspective camera.
pub struct Camera {
pos: Vector,
rot: Vector,
z_near: f32, // The near z-clipping plane.
z_far: f32, // The far z-clipping plane.
fov: f32, // The horizontal field of view, in radians.
ratio: f32, // Screen aspect ratio of width/height.
}
impl Camera {
pub fn new(pos: Vector, rot: Vector, aspect: f32, fov: f32, near: f32, far: f32) -> Camera {
Camera {
pos: pos,
rot: rot,
ratio: aspect,
fov: fov,
z_near: near,
z_far: far
}
}
// Projects the vector into normalized screen coordinates.
// Does not perform any clipping.
// TODO: replace this with a simple function returning a matrix to be used
// in a homogenous coordinate system
fn project_vector(&self, v: &Vector) -> Vector {
let x = v.x()/(self.ratio * (self.fov / 2.).tan() * v.z());
let y = v.y()/v.z();
let z = (v.z() - self.z_near)/(self.z_far - self.z_near);
Vector([x, y, z, 1.])
}
fn project_triangle(&self, tri: &Triangle) -> Triangle {
match tri {
&Triangle(ref a, ref b, ref c) => {
Triangle(self.project_vector(a),
self.project_vector(b),
self.project_vector(c))
}
}
}
fn contains_point(&self, (x, y, z): (f32, f32, f32)) -> bool {
x >= -1. && x <= 1. &&
y >= -1. && y <= 1. &&
z >= -1. && z <= 1.
}
}
pub struct Scene {
camera: Camera,
models: Vec<Model>,
}
impl Scene {
pub fn new(camera: Camera) -> Scene {
Scene {
camera: camera,
models: vec![]
}
}
pub fn camera<'a>(&'a self) -> &'a Camera {
&self.camera
}
pub fn add_model(&mut self, model: Model) {
self.models.push(model);
}
pub fn render(&self, rt: &mut RenderTarget) {
for m in &self.models {
let model_transform = &m.get_transform();
let &Mesh(ref triangles) = &m.mesh;
for t in triangles {
// FIXME(acomminos): placeholder
let ph_shader = shader::SolidColorShader(Color::white());
let sampler = samplers::SimpleMultiSampler(2);
// TODO(acomminos): use model_transform
let t_proj = self.camera.project_triangle(t);
raster::rasterize_barycentric_ccw(&t_proj, rt, &self.camera, &sampler, &ph_shader);
}
}
}
}
pub struct Buffer<T> {
width: usize,
height: usize,
data: Vec<T>,
}
impl <T> Buffer<T> where T: Clone {
pub fn new(width: usize, height: usize, initial: T) -> Buffer<T> {
let mut data: Vec<T> = Vec::with_capacity(width * height);
// FIXME(acomminos): find more idiomatic way to do this
for i in 0..(width * height) {
data.push(initial.clone());
}
Buffer {
width: width,
height: height,
data: data,
}
}
pub fn put(&mut self, (x, y): (usize, usize), val: T) {
self.data[x + (y * self.width)] = val;
}
pub fn get(&self, x: usize, y: usize) -> &T {
&self.data[x + (y * self.width)]
}
}
// Pixel blend modes.
pub enum CompositeMode {
SourceOver,
}
// A 32-bit ARGB colour.
// Use premultiplied alpha for consistency.
#[derive(Copy, Clone)]
pub struct Color {
r: f32,
g: f32,
b: f32,
a: f32
}
impl Color {
fn white() -> Color {
Color::new(1., 1., 1., 1.)
}
fn zero() -> Color {
Color::new(0., 0., 0., 0.)
}
// Create
fn new(r: f32, g: f32, b: f32, a: f32) -> Color {
Color { r: r, g: g, b: b, a: a }
}
fn from_rgba32(rgba: &u32) -> Color {
let max = u8::max_value() as f32;
Color::new((((rgba >> 24) & 0xFFu32) as f32)/max,
(((rgba >> 16) & 0xFFu32) as f32)/max,
(((rgba >> 8) & 0xFFu32) as f32)/max,
(((rgba >> 0) & 0xFFu32) as f32)/max)
}
fn to_rgba32(&self) -> (u8, u8, u8, u8) {
((self.r * (u8::max_value() as f32)) as u8,
(self.g * (u8::max_value() as f32)) as u8,
(self.b * (u8::max_value() as f32)) as u8,
(self.a * (u8::max_value() as f32)) as u8)
}
fn unpremultiply(&self) -> Color {
Color {
r: self.r / self.a,
g: self.g / self.a,
b: self.b / self.a,
a: self.a,
}
}
fn multiply(&self, val: f32) -> Color {
Color::new(self.r * val, self.g * val, self.b * val, self.a * val)
}
}
impl Add for Color {
type Output = Color;
fn add(self, rhs: Color) -> Color {
Color::new(self.r + rhs.r, self.g + rhs.g, self.b + rhs.b, self.a + rhs.a)
}
}
// A standard render target with a ARGB color buffer and floating point depth
// buffer.
pub struct RenderTarget {
width: usize,
height: usize,
color: Buffer<u32>,
depth: Buffer<f32>,
}
impl RenderTarget {
pub fn new(width: usize, height: usize) -> RenderTarget {
RenderTarget {
width: width,
height: height,
color: Buffer::<u32>::new(width, height, 0u32),
depth: Buffer::<f32>::new(width, height, 1.),
}
}
// Toy painting function to paint the pixel at (x, y) with the 32-bit RGBA
// colour provided.
pub fn paint(&mut self, (x, y): (usize, usize), src: &Color, op: CompositeMode) {
let dest = Color::from_rgba32(self.color.get(x, y));
let color = match op {
// note: colors here are premultiplied
SourceOver => dest.multiply(1. - src.a) + *src
};
let (r, g, b, a) = color.to_rgba32();
self.color.put((x, y), ((r as u32) << 24) | ((g as u32) << 16) | ((b as u32) << 8) | a as u32)
}
// Checks to see if depth is less than the value stored in the depth buffer.
// If so, returns true and stores the depth value.
// The depth buffer stores floating-point values in the range [0, 1]. By
// default, it is initialized to 1.
pub fn check_depth(&mut self, (x, y): (usize, usize), depth: f32) -> bool {
if depth < *self.depth.get(x, y) {
self.depth.put((x, y), depth);
return true;
}
return false;
}
// Returns the ratio of width:height.
pub fn aspect(&self) -> f32 {
(self.width as f32) / (self.height as f32)
}
pub fn print_ascii(&self) {
print!["┌──"];
for _ in 1..(self.color.width - 1) {
print!["──"];
}
println!["──┐"];
for y in 0..self.color.height {
print!["│"];
for x in 0..self.color.width {
let color = Color::from_rgba32(self.color.get(x, y));
let a = color.a;
let block = if a == 0. {
" "
} else if a <= 0.25 {
"░░"
} else if a <= 0.5 {
"▒▒"
} else if a <= 0.75 {<|fim▁hole|> "██"
};
print!["{}", block];
}
println!["│"];
}
print!["└──"];
for _ in 1..(self.color.width - 1) {
print!["──"];
}
println!["──┘"];
}
}<|fim▁end|> | "▓▓"
} else { |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import ReactDOM from 'react-dom';<|fim▁hole|>import 'semantic-ui-css/semantic.min.css';
ReactDOM.render(<App />, document.getElementById('root'));
registerServiceWorker();<|fim▁end|> | import './index.css';
import App from './App';
import registerServiceWorker from './utils/registerServiceWorker'; |
<|file_name|>i_remote_shell.py<|end_file_name|><|fim▁begin|># proxy module
from __future__ import absolute_import<|fim▁hole|><|fim▁end|> | from envisage.plugins.remote_editor.i_remote_shell import * |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for lodash.eq 4.0
// Project: http://lodash.com/<|fim▁hole|>// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.1
import { eq } from "lodash";
export = eq;<|fim▁end|> | // Definitions by: Brian Zengel <https://github.com/bczengel>, Ilya Mochalov <https://github.com/chrootsu>, Stepan Mikhaylyuk <https://github.com/stepancar> |
<|file_name|>bottle.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.
Homepage and documentation: http://bottlepy.org/
Copyright (c) 2014, Marcel Hellkamp.
License: MIT (see LICENSE for details)
"""
from __future__ import with_statement
__author__ = 'Marcel Hellkamp'
__version__ = '0.13-dev'
__license__ = 'MIT'
# The gevent server adapter needs to patch some modules before they are imported
# This is why we parse the commandline parameters here but handle them later
if __name__ == '__main__':
from optparse import OptionParser
_cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
_opt = _cmd_parser.add_option
_opt("--version", action="store_true", help="show version number.")
_opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
_opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
_opt("-p", "--plugin", action="append", help="install additional plugin/s.")
_opt("--debug", action="store_true", help="start server in debug mode.")
_opt("--reload", action="store_true", help="auto-reload on file changes.")
_cmd_options, _cmd_args = _cmd_parser.parse_args()
if _cmd_options.server and _cmd_options.server.startswith('gevent'):
import gevent.monkey; gevent.monkey.patch_all()
import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
os, re, subprocess, sys, tempfile, threading, time, warnings
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from traceback import format_exc, print_exc
from inspect import getargspec
from unicodedata import normalize
try: from simplejson import dumps as json_dumps, loads as json_lds
except ImportError: # pragma: no cover
try: from json import dumps as json_dumps, loads as json_lds
except ImportError:
try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
except ImportError:
def json_dumps(data):
raise ImportError("JSON support requires Python 2.6 or simplejson.")
json_lds = json_dumps
# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
# It ain't pretty but it works... Sorry for the mess.
py = sys.version_info
py3k = py >= (3, 0, 0)
py25 = py < (2, 6, 0)
py31 = (3, 1, 0) <= py < (3, 2, 0)
# Workaround for the missing "as" keyword in py3k.
def _e(): return sys.exc_info()[1]
# Workaround for the "print is a keyword/function" Python 2/3 dilemma
# and a fallback for mod_wsgi (resticts stdout/err attribute access)
try:
_stdout, _stderr = sys.stdout.write, sys.stderr.write
except IOError:
_stdout = lambda x: sys.stdout.write(x)
_stderr = lambda x: sys.stderr.write(x)
# Lots of stdlib and builtin differences.
if py3k:
import http.client as httplib
import _thread as thread
from urllib.parse import urljoin, SplitResult as UrlSplitResult
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
urlunquote = functools.partial(urlunquote, encoding='latin1')
from http.cookies import SimpleCookie
from collections import MutableMapping as DictMixin
import pickle
from io import BytesIO
from configparser import ConfigParser
basestring = str
unicode = str
json_loads = lambda s: json_lds(touni(s))
callable = lambda x: hasattr(x, '__call__')
imap = map
def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
else: # 2.x
import httplib
import thread
from urlparse import urljoin, SplitResult as UrlSplitResult
from urllib import urlencode, quote as urlquote, unquote as urlunquote
from Cookie import SimpleCookie
from itertools import imap
import cPickle as pickle
from StringIO import StringIO as BytesIO
from ConfigParser import SafeConfigParser as ConfigParser
if py25:
msg = "Python 2.5 support may be dropped in future versions of Bottle."
warnings.warn(msg, DeprecationWarning)
from UserDict import DictMixin
def next(it): return it.next()
bytes = str
else: # 2.6, 2.7
from collections import MutableMapping as DictMixin
unicode = unicode
json_loads = json_lds
eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
# Some helpers for string/byte handling
def tob(s, enc='utf8'):
return s.encode(enc) if isinstance(s, unicode) else bytes(s)
def touni(s, enc='utf8', err='strict'):
if isinstance(s, bytes):
return s.decode(enc, err)
else:
return unicode(s or ("" if s is None else s))
tonat = touni if py3k else tob
# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
# 3.1 needs a workaround.
if py31:
from io import TextIOWrapper
class NCTextIOWrapper(TextIOWrapper):
def close(self): pass # Keep wrapped buffer open.
# A bug in functools causes it to break if the wrapper is an instance method
def update_wrapper(wrapper, wrapped, *a, **ka):
try: functools.update_wrapper(wrapper, wrapped, *a, **ka)
except AttributeError: pass
# These helpers are used at module level and need to be defined first.
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
def depr(message, hard=False):
warnings.warn(message, DeprecationWarning, stacklevel=3)
def makelist(data): # This is just to handy
if isinstance(data, (tuple, list, set, dict)): return list(data)
elif data: return [data]
else: return []
class DictProperty(object):
''' Property that maps to a key in a local dict-like attribute. '''
def __init__(self, attr, key=None, read_only=False):
self.attr, self.key, self.read_only = attr, key, read_only
def __call__(self, func):
functools.update_wrapper(self, func, updated=[])
self.getter, self.key = func, self.key or func.__name__
return self
def __get__(self, obj, cls):
if obj is None: return self
key, storage = self.key, getattr(obj, self.attr)
if key not in storage: storage[key] = self.getter(obj)
return storage[key]
def __set__(self, obj, value):
if self.read_only: raise AttributeError("Read-Only property.")
getattr(obj, self.attr)[self.key] = value
def __delete__(self, obj):
if self.read_only: raise AttributeError("Read-Only property.")
del getattr(obj, self.attr)[self.key]
class cached_property(object):
''' A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property. '''
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None: return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
class lazy_attribute(object):
''' A property that caches itself to the class object. '''
def __init__(self, func):
functools.update_wrapper(self, func, updated=[])
self.getter = func
def __get__(self, obj, cls):
value = self.getter(cls)
setattr(cls, self.__name__, value)
return value
###############################################################################
# Exceptions and Events ########################################################
###############################################################################
class BottleException(Exception):
""" A base class for exceptions used by bottle. """
pass
###############################################################################
# Routing ######################################################################
###############################################################################
class RouteError(BottleException):
""" This is a base class for all routing related exceptions """
class RouteReset(BottleException):
""" If raised by a plugin or request handler, the route is reset and all
plugins are re-applied. """
class RouterUnknownModeError(RouteError): pass
class RouteSyntaxError(RouteError):
""" The route parser found something not supported by this router. """
class RouteBuildError(RouteError):
""" The route could not be built. """
def _re_flatten(p):
''' Turn all capturing groups in a regular expression pattern into
non-capturing groups. '''
if '(' not in p: return p
return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
class Router(object):
''' A Router is an ordered collection of route->target pairs. It is used to
efficiently match WSGI requests against a number of routes and return
the first target that satisfies the request. The target may be anything,
usually a string, ID or callable object. A route consists of a path-rule
and a HTTP method.
The path-rule is either a static path (e.g. `/contact`) or a dynamic
path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
and details on the matching order are described in docs:`routing`.
'''
default_pattern = '[^/]+'
default_filter = 're'
#: The current CPython regexp implementation does not allow more
#: than 99 matching groups per regular expression.
_MAX_GROUPS_PER_PATTERN = 99
def __init__(self, strict=False):
self.rules = [] # All rules in order
self._groups = {} # index of regexes to find them in dyna_routes
self.builder = {} # Data structure for the url builder
self.static = {} # Search structure for static routes
self.dyna_routes = {}
self.dyna_regexes = {} # Search structure for dynamic routes
#: If true, static routes are no longer checked first.
self.strict_order = strict
self.filters = {
're': lambda conf:
(_re_flatten(conf or self.default_pattern), None, None),
'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
'path': lambda conf: (r'.+?', None, None)}
def add_filter(self, name, func):
''' Add a filter. The provided function is called with the configuration
string as parameter and must return a (regexp, to_python, to_url) tuple.
The first element is a string, the last two are callables or None. '''
self.filters[name] = func
rule_syntax = re.compile('(\\\\*)'\
'(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
'|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
'(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')
def _itertokens(self, rule):
offset, prefix = 0, ''
for match in self.rule_syntax.finditer(rule):
prefix += rule[offset:match.start()]
g = match.groups()
if len(g[0])%2: # Escaped wildcard
prefix += match.group(0)[len(g[0]):]
offset = match.end()
continue
if prefix:
yield prefix, None, None
name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
yield name, filtr or 'default', conf or None
offset, prefix = match.end(), ''
if offset <= len(rule) or prefix:
yield prefix+rule[offset:], None, None
def add(self, rule, method, target, name=None):
''' Add a new rule or replace the target for an existing rule. '''
anons = 0 # Number of anonymous wildcards found
keys = [] # Names of keys
pattern = '' # Regular expression pattern with named groups
filters = [] # Lists of wildcard input filters
builder = [] # Data structure for the URL builder
is_static = True
for key, mode, conf in self._itertokens(rule):
if mode:
is_static = False
if mode == 'default': mode = self.default_filter
mask, in_filter, out_filter = self.filters[mode](conf)
if not key:
pattern += '(?:%s)' % mask
key = 'anon%d' % anons
anons += 1
else:
pattern += '(?P<%s>%s)' % (key, mask)
keys.append(key)
if in_filter: filters.append((key, in_filter))
builder.append((key, out_filter or str))
elif key:
pattern += re.escape(key)
builder.append((None, key))
self.builder[rule] = builder
if name: self.builder[name] = builder
if is_static and not self.strict_order:
self.static.setdefault(method, {})
self.static[method][self.build(rule)] = (target, None)
return
try:
re_pattern = re.compile('^(%s)$' % pattern)
re_match = re_pattern.match
except re.error:
raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))
if filters:
def getargs(path):
url_args = re_match(path).groupdict()
for name, wildcard_filter in filters:
try:
url_args[name] = wildcard_filter(url_args[name])
except ValueError:
raise HTTPError(400, 'Path has wrong format.')
return url_args
elif re_pattern.groupindex:
def getargs(path):
return re_match(path).groupdict()
else:
getargs = None
flatpat = _re_flatten(pattern)
whole_rule = (rule, flatpat, target, getargs)
if (flatpat, method) in self._groups:
if DEBUG:
msg = 'Route <%s %s> overwrites a previously defined route'
warnings.warn(msg % (method, rule), RuntimeWarning)
self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
else:
self.dyna_routes.setdefault(method, []).append(whole_rule)
self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
self._compile(method)
def _compile(self, method):
all_rules = self.dyna_routes[method]
comborules = self.dyna_regexes[method] = []
maxgroups = self._MAX_GROUPS_PER_PATTERN
for x in range(0, len(all_rules), maxgroups):
some = all_rules[x:x+maxgroups]
combined = (flatpat for (_, flatpat, _, _) in some)
combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
combined = re.compile(combined).match
rules = [(target, getargs) for (_, _, target, getargs) in some]
comborules.append((combined, rules))
def build(self, _name, *anons, **query):
''' Build an URL by filling the wildcards in a rule. '''
builder = self.builder.get(_name)
if not builder: raise RouteBuildError("No route with that name.", _name)
try:
for i, value in enumerate(anons): query['anon%d'%i] = value
url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
return url if not query else url+'?'+urlencode(query)
except KeyError:
raise RouteBuildError('Missing URL argument: %r' % _e().args[0])
def match(self, environ):
''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). '''
verb = environ['REQUEST_METHOD'].upper()
path = environ['PATH_INFO'] or '/'
target = None
if verb == 'HEAD':
methods = ['PROXY', verb, 'GET', 'ANY']
else:
methods = ['PROXY', verb, 'ANY']
for method in methods:
if method in self.static and path in self.static[method]:
target, getargs = self.static[method][path]
return target, getargs(path) if getargs else {}
elif method in self.dyna_regexes:
for combined, rules in self.dyna_regexes[method]:
match = combined(path)
if match:
target, getargs = rules[match.lastindex - 1]
return target, getargs(path) if getargs else {}
# No matching route found. Collect alternative methods for 405 response
allowed = set([])
nocheck = set(methods)
for method in set(self.static) - nocheck:
if path in self.static[method]:
allowed.add(verb)
for method in set(self.dyna_regexes) - allowed - nocheck:
for combined, rules in self.dyna_regexes[method]:
match = combined(path)
if match:
allowed.add(method)
if allowed:
allow_header = ",".join(sorted(allowed))
raise HTTPError(405, "Method not allowed.", Allow=allow_header)
# No matching route and no alternative method found. We give up
raise HTTPError(404, "Not found: " + repr(path))
class Route(object):
    ''' This class wraps a route callback along with route specific metadata and
        configuration and applies Plugins on demand. It is also responsible for
        turning an URL path rule into a regular expression usable by the Router.
    '''
    def __init__(self, app, rule, method, callback, name=None,
                 plugins=None, skiplist=None, **config):
        #: The application this route is installed to.
        self.app = app
        #: The path-rule string (e.g. ``/wiki/:page``).
        self.rule = rule
        #: The HTTP method as a string (e.g. ``GET``).
        self.method = method
        #: The original callback with no plugins applied. Useful for introspection.
        self.callback = callback
        #: The name of the route (if specified) or ``None``.
        self.name = name or None
        #: A list of route-specific plugins (see :meth:`Bottle.route`).
        self.plugins = plugins or []
        #: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
        self.skiplist = skiplist or []
        #: Additional keyword arguments passed to the :meth:`Bottle.route`
        #: decorator are stored in this dictionary. Used for route-specific
        #: plugin configuration and meta-data.
        self.config = ConfigDict().load_dict(config)
    @cached_property
    def call(self):
        ''' The route callback with all plugins applied. This property is
            created on demand and then cached to speed up subsequent requests.'''
        return self._make_callback()
    def reset(self):
        ''' Forget any cached values. The next time :attr:`call` is accessed,
            all plugins are re-applied. '''
        self.__dict__.pop('call', None)
    def prepare(self):
        ''' Do all on-demand work immediately (useful for debugging).'''
        self.call
    def all_plugins(self):
        ''' Yield all Plugins affecting this route. '''
        unique = set()
        for p in reversed(self.app.plugins + self.plugins):
            if True in self.skiplist: break
            name = getattr(p, 'name', False)
            # Skip plugins that are explicitly skipped or already yielded
            # (route-level plugins shadow app-level plugins of the same name).
            if name and (name in self.skiplist or name in unique): continue
            if p in self.skiplist or type(p) in self.skiplist: continue
            if name: unique.add(name)
            yield p
    def _make_callback(self):
        # Apply every relevant plugin (innermost = installed first).
        callback = self.callback
        for plugin in self.all_plugins():
            try:
                if hasattr(plugin, 'apply'):
                    callback = plugin.apply(callback, self)
                else:
                    callback = plugin(callback)
            except RouteReset: # Try again with changed configuration.
                return self._make_callback()
        if callback is not self.callback:
            update_wrapper(callback, self.callback)
        return callback
    def get_undecorated_callback(self):
        ''' Return the callback. If the callback is a decorated function, try to
            recover the original function. '''
        func = self.callback
        func = getattr(func, '__func__' if py3k else 'im_func', func)
        closure_attr = '__closure__' if py3k else 'func_closure'
        # Walk down decorator closures until we hit the innermost function.
        while hasattr(func, closure_attr) and getattr(func, closure_attr):
            func = getattr(func, closure_attr)[0].cell_contents
        return func
    def get_callback_args(self):
        ''' Return a list of argument names the callback (most likely) accepts
            as keyword arguments. If the callback is a decorated function, try
            to recover the original function before inspection. '''
        return getargspec(self.get_undecorated_callback())[0]
    def get_config(self, key, default=None):
        ''' Lookup a config field and return its value, first checking the
            route.config, then route.app.config.'''
        # BUGFIX: was ``self.app.conifg`` (typo) — raised AttributeError
        # whenever the key was not found in the route-local config.
        for conf in (self.config, self.app.config):
            if key in conf: return conf[key]
        return default
    def __repr__(self):
        cb = self.get_undecorated_callback()
        return '<%s %r %r>' % (self.method, self.rule, cb)
###############################################################################
# Application Object ###########################################################
###############################################################################
class Bottle(object):
    """ Each Bottle object represents a single, distinct web application and
        consists of routes, callbacks, plugins, resources and configuration.
        Instances are callable WSGI applications.

        :param catchall: If true (default), handle all exceptions. Turn off to
                         let debugging middleware handle exceptions.
    """
    def __init__(self, catchall=True, autojson=True):
        #: A :class:`ConfigDict` for app specific configuration.
        self.config = ConfigDict()
        # Config changes fire the 'config' hook.
        self.config._on_change = functools.partial(self.trigger_hook, 'config')
        self.config.meta_set('autojson', 'validate', bool)
        self.config.meta_set('catchall', 'validate', bool)
        self.config['catchall'] = catchall
        self.config['autojson'] = autojson
        #: A :class:`ResourceManager` for application files
        self.resources = ResourceManager()
        self.routes = [] # List of installed :class:`Route` instances.
        self.router = Router() # Maps requests to :class:`Route` instances.
        self.error_handler = {}
        # Core plugins
        self.plugins = [] # List of installed plugins.
        if self.config['autojson']:
            self.install(JSONPlugin())
        self.install(TemplatePlugin())
    #: If true, most exceptions are caught and returned as :exc:`HTTPError`
    catchall = DictProperty('config', 'catchall')
    __hook_names = 'before_request', 'after_request', 'app_reset', 'config'
    # NOTE(review): hook names are tested against the string below with ``in``,
    # i.e. a substring match; this works because no other hook name is a
    # substring of 'after_request'.
    __hook_reversed = 'after_request'
    @cached_property
    def _hooks(self):
        # Maps each known hook name to its list of registered callbacks.
        return dict((name, []) for name in self.__hook_names)
    def add_hook(self, name, func):
        ''' Attach a callback to a hook. Three hooks are currently implemented:

            before_request
                Executed once before each request. The request context is
                available, but no routing has happened yet.
            after_request
                Executed once after each request regardless of its outcome.
            app_reset
                Called whenever :meth:`Bottle.reset` is called.
        '''
        if name in self.__hook_reversed:
            # Reversed hooks run newest-first (LIFO).
            self._hooks[name].insert(0, func)
        else:
            self._hooks[name].append(func)
    def remove_hook(self, name, func):
        ''' Remove a callback from a hook. Return True on success. '''
        if name in self._hooks and func in self._hooks[name]:
            self._hooks[name].remove(func)
            return True
    def trigger_hook(self, __name, *args, **kwargs):
        ''' Trigger a hook and return a list of results. '''
        # ``__name`` avoids clashing with a hook argument called 'name' in
        # **kwargs. Iterate over a copy so callbacks may add/remove hooks.
        return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
    def hook(self, name):
        """ Return a decorator that attaches a callback to a hook. See
            :meth:`add_hook` for details."""
        def decorator(func):
            self.add_hook(name, func)
            return func
        return decorator
    def mount(self, prefix, app, **options):
        ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific
            URL prefix. Example::

                root_app.mount('/admin/', admin_app)

            :param prefix: path prefix or `mount-point`. If it ends in a slash,
                that slash is mandatory.
            :param app: an instance of :class:`Bottle` or a WSGI application.

            All other parameters are passed to the underlying :meth:`route` call.
        '''
        segments = [p for p in prefix.split('/') if p]
        if not segments: raise ValueError('Empty path prefix.')
        path_depth = len(segments)
        def mountpoint_wrapper():
            try:
                # Move the mount prefix from PATH_INFO into SCRIPT_NAME for
                # the duration of the sub-application call.
                request.path_shift(path_depth)
                rs = HTTPResponse([])
                def start_response(status, headerlist, exc_info=None):
                    if exc_info:
                        try:
                            _raise(*exc_info)
                        finally:
                            exc_info = None
                    rs.status = status
                    for name, value in headerlist: rs.add_header(name, value)
                    return rs.body.append # The WSGI write() callable
                body = app(request.environ, start_response)
                if body and rs.body: body = itertools.chain(rs.body, body)
                rs.body = body or rs.body
                return rs
            finally:
                request.path_shift(-path_depth)
        options.setdefault('skip', True)
        options.setdefault('method', 'PROXY')
        options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
        options['callback'] = mountpoint_wrapper
        self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
        if not prefix.endswith('/'):
            self.route('/' + '/'.join(segments), **options)
    def merge(self, routes):
        ''' Merge the routes of another :class:`Bottle` application or a list of
            :class:`Route` objects into this application. The routes keep their
            'owner', meaning that the :data:`Route.app` attribute is not
            changed. '''
        if isinstance(routes, Bottle):
            routes = routes.routes
        for route in routes:
            self.add_route(route)
    def install(self, plugin):
        ''' Add a plugin to the list of plugins and prepare it for being
            applied to all routes of this application. A plugin may be a simple
            decorator or an object that implements the :class:`Plugin` API.
        '''
        if hasattr(plugin, 'setup'): plugin.setup(self)
        if not callable(plugin) and not hasattr(plugin, 'apply'):
            raise TypeError("Plugins must be callable or implement .apply()")
        self.plugins.append(plugin)
        self.reset()
        return plugin
    def uninstall(self, plugin):
        ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type
            object to remove all plugins that match that type, a string to remove
            all plugins with a matching ``name`` attribute or ``True`` to remove all
            plugins. Return the list of removed plugins. '''
        removed, remove = [], plugin
        # Iterate in reverse so deletions do not shift pending indices.
        for i, plugin in list(enumerate(self.plugins))[::-1]:
            if remove is True or remove is plugin or remove is type(plugin) \
            or getattr(plugin, 'name', True) == remove:
                removed.append(plugin)
                del self.plugins[i]
                if hasattr(plugin, 'close'): plugin.close()
        if removed: self.reset()
        return removed
    def reset(self, route=None):
        ''' Reset all routes (force plugins to be re-applied) and clear all
            caches. If an ID or route object is given, only that specific route
            is affected. '''
        if route is None: routes = self.routes
        elif isinstance(route, Route): routes = [route]
        else: routes = [self.routes[route]]
        for route in routes: route.reset()
        if DEBUG:
            # In debug mode, re-apply plugins eagerly to surface errors now.
            for route in routes: route.prepare()
        self.trigger_hook('app_reset')
    def close(self):
        ''' Close the application and all installed plugins. '''
        for plugin in self.plugins:
            if hasattr(plugin, 'close'): plugin.close()
        # NOTE(review): presumably inspected by server adapters — confirm
        # before relying on it.
        self.stopped = True
    def run(self, **kwargs):
        ''' Calls :func:`run` with the same parameters. '''
        run(self, **kwargs)
    def match(self, environ):
        """ Search for a matching route and return a (:class:`Route` , urlargs)
            tuple. The second value is a dictionary with parameters extracted
            from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
        return self.router.match(environ)
    def get_url(self, routename, **kargs):
        """ Return a string that matches a named route """
        scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
        location = self.router.build(routename, **kargs).lstrip('/')
        return urljoin(urljoin('/', scriptname), location)
    def add_route(self, route):
        ''' Add a route object, but do not change the :data:`Route.app`
            attribute.'''
        self.routes.append(route)
        self.router.add(route.rule, route.method, route, name=route.name)
        if DEBUG: route.prepare()
    def route(self, path=None, method='GET', callback=None, name=None,
              apply=None, skip=None, **config):
        """ A decorator to bind a function to a request URL. Example::

                @app.route('/hello/:name')
                def hello(name):
                    return 'Hello %s' % name

            The ``:name`` part is a wildcard. See :class:`Router` for syntax
            details.

            :param path: Request path or a list of paths to listen to. If no
              path is specified, it is automatically generated from the
              signature of the function.
            :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
              methods to listen to. (default: `GET`)
            :param callback: An optional shortcut to avoid the decorator
              syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
            :param name: The name for this route. (default: None)
            :param apply: A decorator or plugin or a list of plugins. These are
              applied to the route callback in addition to installed plugins.
            :param skip: A list of plugins, plugin classes or names. Matching
              plugins are not installed to this route. ``True`` skips all.

            Any additional keyword arguments are stored as route-specific
            configuration and passed to plugins (see :meth:`Plugin.apply`).
        """
        if callable(path): path, callback = None, path
        plugins = makelist(apply)
        skiplist = makelist(skip)
        def decorator(callback):
            # TODO: Documentation and tests
            if isinstance(callback, basestring): callback = load(callback)
            # One Route per (rule, verb) combination.
            for rule in makelist(path) or yieldroutes(callback):
                for verb in makelist(method):
                    verb = verb.upper()
                    route = Route(self, rule, verb, callback, name=name,
                                  plugins=plugins, skiplist=skiplist, **config)
                    self.add_route(route)
            return callback
        return decorator(callback) if callback else decorator
    def get(self, path=None, method='GET', **options):
        """ Equals :meth:`route`. """
        return self.route(path, method, **options)
    def post(self, path=None, method='POST', **options):
        """ Equals :meth:`route` with a ``POST`` method parameter. """
        return self.route(path, method, **options)
    def put(self, path=None, method='PUT', **options):
        """ Equals :meth:`route` with a ``PUT`` method parameter. """
        return self.route(path, method, **options)
    def delete(self, path=None, method='DELETE', **options):
        """ Equals :meth:`route` with a ``DELETE`` method parameter. """
        return self.route(path, method, **options)
    def error(self, code=500):
        """ Decorator: Register an output handler for a HTTP error code"""
        def wrapper(handler):
            self.error_handler[int(code)] = handler
            return handler
        return wrapper
    def default_error_handler(self, res):
        # Render the built-in error page template for unhandled HTTP errors.
        return tob(template(ERROR_PAGE_TEMPLATE, e=res))
    def _handle(self, environ):
        # Resolve the route for this request and invoke its callback.
        path = environ['bottle.raw_path'] = environ['PATH_INFO']
        if py3k:
            try:
                # PEP 3333: servers decode PATH_INFO as latin1 on Python 3;
                # re-encode and decode to recover the original UTF-8 path.
                environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
            except UnicodeError:
                return HTTPError(400, 'Invalid path string. Expected UTF-8')
        try:
            environ['bottle.app'] = self
            request.bind(environ)
            response.bind()
            try:
                self.trigger_hook('before_request')
                route, args = self.router.match(environ)
                # NOTE(review): 'route.handle' duplicates 'bottle.route';
                # presumably kept for backwards compatibility — confirm.
                environ['route.handle'] = route
                environ['bottle.route'] = route
                environ['route.url_args'] = args
                return route.call(**args)
            finally:
                self.trigger_hook('after_request')
        except HTTPResponse:
            return _e()
        except RouteReset:
            # Route configuration changed mid-call: re-apply plugins and retry.
            route.reset()
            return self._handle(environ)
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except Exception:
            if not self.catchall: raise
            stacktrace = format_exc()
            environ['wsgi.errors'].write(stacktrace)
            return HTTPError(500, "Internal Server Error", _e(), stacktrace)
    def _cast(self, out, peek=None):
        """ Try to convert the parameter into something WSGI compatible and set
            correct HTTP headers when possible.
            Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
            iterable of strings and iterable of unicodes
        """
        # Empty output is done here
        if not out:
            if 'Content-Length' not in response:
                response['Content-Length'] = 0
            return []
        # Join lists of byte or unicode strings. Mixed lists are NOT supported
        if isinstance(out, (tuple, list))\
        and isinstance(out[0], (bytes, unicode)):
            out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
        # Encode unicode strings
        if isinstance(out, unicode):
            out = out.encode(response.charset)
        # Byte Strings are just returned
        if isinstance(out, bytes):
            if 'Content-Length' not in response:
                response['Content-Length'] = len(out)
            return [out]
        # HTTPError or HTTPException (recursive, because they may wrap anything)
        # TODO: Handle these explicitly in handle() or make them iterable.
        if isinstance(out, HTTPError):
            out.apply(response)
            out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
            return self._cast(out)
        if isinstance(out, HTTPResponse):
            out.apply(response)
            return self._cast(out.body)
        # File-like objects.
        if hasattr(out, 'read'):
            if 'wsgi.file_wrapper' in request.environ:
                return request.environ['wsgi.file_wrapper'](out)
            elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
                return WSGIFileWrapper(out)
        # Handle Iterables. We peek into them to detect their inner type.
        try:
            iout = iter(out)
            first = next(iout)
            # Skip leading falsy chunks (e.g. empty strings) to find the type.
            while not first:
                first = next(iout)
        except StopIteration:
            return self._cast('')
        except HTTPResponse:
            first = _e()
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except Exception:
            if not self.catchall: raise
            first = HTTPError(500, 'Unhandled exception', _e(), format_exc())
        # These are the inner types allowed in iterator or generator objects.
        if isinstance(first, HTTPResponse):
            return self._cast(first)
        elif isinstance(first, bytes):
            new_iter = itertools.chain([first], iout)
        elif isinstance(first, unicode):
            encoder = lambda x: x.encode(response.charset)
            new_iter = imap(encoder, itertools.chain([first], iout))
        else:
            msg = 'Unsupported response type: %s' % type(first)
            return self._cast(HTTPError(500, msg))
        if hasattr(out, 'close'):
            # Make sure the original iterable is closed when exhausted.
            new_iter = _closeiter(new_iter, out.close)
        return new_iter
    def wsgi(self, environ, start_response):
        """ The bottle WSGI-interface. """
        try:
            out = self._cast(self._handle(environ))
            # rfc2616 section 4.3
            if response._status_code in (100, 101, 204, 304)\
            or environ['REQUEST_METHOD'] == 'HEAD':
                if hasattr(out, 'close'): out.close()
                out = []
            start_response(response._status_line, response.headerlist)
            return out
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except Exception:
            if not self.catchall: raise
            err = '<h1>Critical error while processing request: %s</h1>' \
                  % html_escape(environ.get('PATH_INFO', '/'))
            if DEBUG:
                err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
                       '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
                       % (html_escape(repr(_e())), html_escape(format_exc()))
            environ['wsgi.errors'].write(err)
            headers = [('Content-Type', 'text/html; charset=UTF-8')]
            start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
            return [tob(err)]
    def __call__(self, environ, start_response):
        ''' Each instance of :class:`Bottle` is a WSGI application. '''
        return self.wsgi(environ, start_response)
    def __enter__(self):
        ''' Use this application as default for all module-level shortcuts. '''
        default_app.push(self)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        default_app.pop()
###############################################################################
# HTTP and WSGI Tools ##########################################################
###############################################################################
class BaseRequest(object):
""" A wrapper for WSGI environment dictionaries that adds a lot of
convenient access methods and properties. Most of them are read-only.
Adding new attributes to a request actually adds them to the environ
dictionary (as 'bottle.request.ext.<name>'). This is the recommended
way to store and access request-specific data.
"""
__slots__ = ('environ')
#: Maximum size of memory buffer for :attr:`body` in bytes.
MEMFILE_MAX = 102400
def __init__(self, environ=None):
""" Wrap a WSGI environ dictionary. """
#: The wrapped WSGI environ dictionary. This is the only real attribute.
#: All other attributes actually are read-only properties.
self.environ = {} if environ is None else environ
self.environ['bottle.request'] = self
@DictProperty('environ', 'bottle.app', read_only=True)
def app(self):
''' Bottle application handling this request. '''
raise RuntimeError('This request is not connected to an application.')
@DictProperty('environ', 'bottle.route', read_only=True)
def route(self):
""" The bottle :class:`Route` object that matches this request. """
raise RuntimeError('This request is not connected to a route.')
@DictProperty('environ', 'route.url_args', read_only=True)
def url_args(self):
""" The arguments extracted from the URL. """
raise RuntimeError('This request is not connected to a route.')
@property
def path(self):
''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
broken clients and avoid the "empty path" edge case). '''
return '/' + self.environ.get('PATH_INFO','').lstrip('/')
@property
def method(self):
''' The ``REQUEST_METHOD`` value as an uppercase string. '''
return self.environ.get('REQUEST_METHOD', 'GET').upper()
@DictProperty('environ', 'bottle.request.headers', read_only=True)
def headers(self):
''' A :class:`WSGIHeaderDict` that provides case-insensitive access to
HTTP request headers. '''
return WSGIHeaderDict(self.environ)
def get_header(self, name, default=None):
''' Return the value of a request header, or a given default value. '''
return self.headers.get(name, default)
@DictProperty('environ', 'bottle.request.cookies', read_only=True)
def cookies(self):
""" Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
decoded. Use :meth:`get_cookie` if you expect signed cookies. """
cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
return FormsDict((c.key, c.value) for c in cookies)
def get_cookie(self, key, default=None, secret=None):
""" Return the content of a cookie. To read a `Signed Cookie`, the
`secret` must match the one used to create the cookie (see
:meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
cookie or wrong signature), return a default value. """
value = self.cookies.get(key)
if secret and value:
dec = cookie_decode(value, secret) # (key, value) tuple or None
return dec[1] if dec and dec[0] == key else default
return value or default
@DictProperty('environ', 'bottle.request.query', read_only=True)
def query(self):
''' The :attr:`query_string` parsed into a :class:`FormsDict`. These
values are sometimes called "URL arguments" or "GET parameters", but
not to be confused with "URL wildcards" as they are provided by the
:class:`Router`. '''
get = self.environ['bottle.get'] = FormsDict()
pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
for key, value in pairs:
get[key] = value
return get
@DictProperty('environ', 'bottle.request.forms', read_only=True)
def forms(self):
""" Form values parsed from an `url-encoded` or `multipart/form-data`
encoded POST or PUT request body. The result is returned as a
:class:`FormsDict`. All keys and values are strings. File uploads
are stored separately in :attr:`files`. """
forms = FormsDict()
for name, item in self.POST.allitems():
if not isinstance(item, FileUpload):
forms[name] = item
return forms
@DictProperty('environ', 'bottle.request.params', read_only=True)
def params(self):
""" A :class:`FormsDict` with the combined values of :attr:`query` and
:attr:`forms`. File uploads are stored in :attr:`files`. """
params = FormsDict()
for key, value in self.query.allitems():
params[key] = value
for key, value in self.forms.allitems():
params[key] = value
return params
@DictProperty('environ', 'bottle.request.files', read_only=True)
def files(self):
""" File uploads parsed from `multipart/form-data` encoded POST or PUT
request body. The values are instances of :class:`FileUpload`.
"""
files = FormsDict()
for name, item in self.POST.allitems():
if isinstance(item, FileUpload):
files[name] = item
return files
@DictProperty('environ', 'bottle.request.json', read_only=True)
def json(self):
''' If the ``Content-Type`` header is ``application/json``, this
property holds the parsed content of the request body. Only requests
smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
exhaustion. '''
if 'application/json' in self.environ.get('CONTENT_TYPE', ''):
return json_loads(self._get_body_string())
return None
def _iter_body(self, read, bufsize):
maxread = max(0, self.content_length)
while maxread:
part = read(min(maxread, bufsize))
if not part: break
yield part
maxread -= len(part)
def _iter_chunked(self, read, bufsize):
err = HTTPError(400, 'Error while parsing chunked transfer body.')
rn, sem, bs = tob('\r\n'), tob(';'), tob('')
while True:
header = read(1)
while header[-2:] != rn:
c = read(1)
header += c
if not c: raise err
if len(header) > bufsize: raise err
size, _, _ = header.partition(sem)
try:
maxread = int(tonat(size.strip()), 16)
except ValueError:
raise err
if maxread == 0: break
buff = bs
while maxread > 0:
if not buff:
buff = read(min(maxread, bufsize))
part, buff = buff[:maxread], buff[maxread:]
if not part: raise err
yield part
maxread -= len(part)
if read(2) != rn:
raise err
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
body_iter = self._iter_chunked if self.chunked else self._iter_body
read_func = self.environ['wsgi.input'].read
body, body_size, is_temp_file = BytesIO(), 0, False
for part in body_iter(read_func, self.MEMFILE_MAX):
body.write(part)
body_size += len(part)
if not is_temp_file and body_size > self.MEMFILE_MAX:
body, tmp = TemporaryFile(mode='w+b'), body
body.write(tmp.getvalue())
del tmp
is_temp_file = True
self.environ['wsgi.input'] = body
body.seek(0)
return body
def _get_body_string(self):
''' read body until content-length or MEMFILE_MAX into a string. Raise
HTTPError(413) on requests that are to large. '''
clen = self.content_length
if clen > self.MEMFILE_MAX:
raise HTTPError(413, 'Request to large')
if clen < 0: clen = self.MEMFILE_MAX + 1
data = self.body.read(clen)
if len(data) > self.MEMFILE_MAX: # Fail fast
raise HTTPError(413, 'Request to large')
return data
@property
def body(self):
""" The HTTP request body as a seek-able file-like object. Depending on
:attr:`MEMFILE_MAX`, this is either a temporary file or a
:class:`io.BytesIO` instance. Accessing this property for the first
time reads and replaces the ``wsgi.input`` environ variable.
Subsequent accesses just do a `seek(0)` on the file object. """
self._body.seek(0)
return self._body
@property
def chunked(self):
''' True if Chunked transfer encoding was. '''
return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
#: An alias for :attr:`query`.
GET = query
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
""" The values of :attr:`forms` and :attr:`files` combined into a single
:class:`FormsDict`. Values are either strings (form values) or
instances of :class:`cgi.FieldStorage` (file uploads).
"""
post = FormsDict()
# We default to application/x-www-form-urlencoded for everything that
# is not multipart and take the fast path (also: 3.1 workaround)
if not self.content_type.startswith('multipart/'):
pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
for key, value in pairs:
post[key] = value
return post
safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
if key in self.environ: safe_env[key] = self.environ[key]
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
if py31:
args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
newline='\n')
elif py3k:
args['encoding'] = 'utf8'
data = cgi.FieldStorage(**args)
data = data.list or []
for item in data:
if item.filename:
post[item.name] = FileUpload(item.file, item.name,
item.filename, item.headers)
else:
post[item.name] = item.value
return post
@property
def url(self):
""" The full request URI including hostname and scheme. If your app
lives behind a reverse proxy or load balancer and you get confusing
results, make sure that the ``X-Forwarded-Host`` header is set
correctly. """
return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
The tuple contains (scheme, host, path, query_string and fragment),
but the fragment is always empty because it is not visible to the
server. '''
env = self.environ
http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
if not host:
# HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
host = env.get('SERVER_NAME', '127.0.0.1')
port = env.get('SERVER_PORT')
if port and port != ('80' if http == 'http' else '443'):
host += ':' + port
path = urlquote(self.fullpath)
return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
""" Request path including :attr:`script_name` (if present). """
return urljoin(self.script_name, self.path.lstrip('/'))
@property
def query_string(self):
""" The raw :attr:`query` part of the URL (everything in between ``?``
and ``#``) as a string. """
return self.environ.get('QUERY_STRING', '')
@property
def script_name(self):
''' The initial portion of the URL's `path` that was removed by a higher
level (server or routing middleware) before the application was
called. This script path is returned with leading and tailing
slashes. '''
script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
return '/' + script_name + '/' if script_name else '/'
def path_shift(self, shift=1):
''' Shift path segments from :attr:`path` to :attr:`script_name` and
vice versa.
:param shift: The number of path segments to shift. May be negative
to change the shift direction. (default: 1)
'''
script = self.environ.get('SCRIPT_NAME','/')
self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)
@property
def content_length(self):
''' The request body length as an integer. The client is responsible to
set this header. Otherwise, the real length of the body is unknown
and -1 is returned. In this case, :attr:`body` will be empty. '''
return int(self.environ.get('CONTENT_LENGTH') or -1)
@property
def content_type(self):
''' The Content-Type header as a lowercase-string (default: empty). '''
return self.environ.get('CONTENT_TYPE', '').lower()
@property
def is_xhr(self):
''' True if the request was triggered by a XMLHttpRequest. This only
works with JavaScript libraries that support the `X-Requested-With`
header (most of the popular libraries do). '''
requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
return requested_with.lower() == 'xmlhttprequest'
@property
def is_ajax(self):
''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. '''
return self.is_xhr
@property
def auth(self):
""" HTTP authentication data as a (user, password) tuple. This
implementation currently supports basic (not digest) authentication
only. If the authentication happened at a higher level (e.g. in the
front web-server or a middleware), the password field is None, but
the user field is looked up from the ``REMOTE_USER`` environ
variable. On any errors, None is returned. """
basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
if basic: return basic
ruser = self.environ.get('REMOTE_USER')
if ruser: return (ruser, None)
return None
@property
def remote_route(self):
""" A list of all IPs that were involved in this request, starting with
the client IP and followed by zero or more proxies. This does only
work if all proxies support the ```X-Forwarded-For`` header. Note
that this information can be forged by malicious clients. """
proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
if proxy: return [ip.strip() for ip in proxy.split(',')]
remote = self.environ.get('REMOTE_ADDR')
return [remote] if remote else []
@property
def remote_addr(self):
""" The client IP as a string. Note that this information can be forged
by malicious clients. """
route = self.remote_route
return route[0] if route else None
def copy(self):
    """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """
    return Request(self.environ.copy())

# Dict-like read access to the underlying WSGI environ dictionary.
def get(self, value, default=None): return self.environ.get(value, default)
def __getitem__(self, key): return self.environ[key]
# Assign an empty string first so __setitem__ invalidates any dependent
# caches before the key itself is removed from the environ.
def __delitem__(self, key): self[key] = ""; del(self.environ[key])
def __iter__(self): return iter(self.environ)
def __len__(self): return len(self.environ)
def keys(self): return self.environ.keys()
def __setitem__(self, key, value):
    """ Change an environ value and clear all caches that depend on it. """
    if self.environ.get('bottle.request.readonly'):
        raise KeyError('The environ dictionary is read-only.')

    self.environ[key] = value
    # Map well-known environ keys to the cached request properties that
    # are derived from them and must be invalidated on change.
    todelete = ()

    if key == 'wsgi.input':
        todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
    elif key == 'QUERY_STRING':
        todelete = ('query', 'params')
    elif key.startswith('HTTP_'):
        todelete = ('headers', 'cookies')

    # Cached values live in the environ under a 'bottle.request.' prefix.
    for key in todelete:
        self.environ.pop('bottle.request.'+key, None)
def __repr__(self):
    ''' Short debugging representation: class name, method and URL. '''
    template = '<%s: %s %s>'
    return template % (self.__class__.__name__, self.method, self.url)
def __getattr__(self, name):
    ''' Search in self.environ for additional user defined attributes. '''
    try:
        var = self.environ['bottle.request.ext.%s'%name]
        # Extensions may be descriptors: if the stored object implements
        # __get__, evaluate it against this request instance.
        return var.__get__(self) if hasattr(var, '__get__') else var
    except KeyError:
        raise AttributeError('Attribute %r not defined.' % name)
def __setattr__(self, name, value):
    # 'environ' is the only real instance attribute; all other attributes
    # are stored inside the environ so thread-local subclasses (which swap
    # the environ per thread) carry their extensions with them.
    if name == 'environ': return object.__setattr__(self, name, value)
    self.environ['bottle.request.ext.%s'%name] = value
def _hkey(s):
return s.title().replace('_','-')
class HeaderProperty(object):
def __init__(self, name, reader=None, writer=str, default=''):
self.name, self.default = name, default
self.reader, self.writer = reader, writer
self.__doc__ = 'Current value of the %r header.' % name.title()
def __get__(self, obj, cls):
if obj is None: return self
value = obj.headers.get(self.name, self.default)
return self.reader(value) if self.reader else value
def __set__(self, obj, value):
obj.headers[self.name] = self.writer(value)
def __delete__(self, obj):
del obj.headers[self.name]
class BaseResponse(object):
    """ Storage class for a response body as well as headers and cookies.

        This class does support dict-like case-insensitive item-access to
        headers, but is NOT a dict. Most notably, iterating over a response
        yields parts of the body and not the headers.

        :param body: The response body as one of the supported types.
        :param status: Either an HTTP status code (e.g. 200) or a status line
                       including the reason phrase (e.g. '200 OK').
        :param headers: A dictionary or a list of name-value pairs.

        Additional keyword arguments are added to the list of headers.
        Underscores in the header name are replaced with dashes.
    """

    default_status = 200
    default_content_type = 'text/html; charset=UTF-8'

    # Header blacklist for specific response codes
    # (rfc2616 section 10.2.3 and 10.3.5)
    bad_headers = {
        204: set(('Content-Type',)),
        304: set(('Allow', 'Content-Encoding', 'Content-Language',
                  'Content-Length', 'Content-Range', 'Content-Type',
                  'Content-Md5', 'Last-Modified'))}

    def __init__(self, body='', status=None, headers=None, **more_headers):
        self._cookies = None
        # Maps Title-Cased header names to *lists* of string values.
        self._headers = {}
        self.body = body
        self.status = status or self.default_status
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            for name, value in headers:
                self.add_header(name, value)
        if more_headers:
            for name, value in more_headers.items():
                self.add_header(name, value)

    def copy(self, cls=None):
        ''' Returns a copy of self. Header value lists are copied, so the
            copy can be modified without affecting the original. '''
        cls = cls or BaseResponse
        assert issubclass(cls, BaseResponse)
        copy = cls()
        copy.status = self.status
        copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
        if self._cookies:
            copy._cookies = SimpleCookie()
            copy._cookies.load(self._cookies.output())
        return copy

    def __iter__(self):
        # Iterating a response yields body parts, not headers.
        return iter(self.body)

    def close(self):
        if hasattr(self.body, 'close'):
            self.body.close()

    @property
    def status_line(self):
        ''' The HTTP status line as a string (e.g. ``404 Not Found``).'''
        return self._status_line

    @property
    def status_code(self):
        ''' The HTTP status code as an integer (e.g. 404).'''
        return self._status_code

    def _set_status(self, status):
        if isinstance(status, int):
            code, status = status, _HTTP_STATUS_LINES.get(status)
        elif ' ' in status:
            status = status.strip()
            code = int(status.split()[0])
        else:
            raise ValueError('String status line without a reason phrase.')
        if not 100 <= code <= 999: raise ValueError('Status code out of range.')
        self._status_code = code
        self._status_line = str(status or ('%d Unknown' % code))

    def _get_status(self):
        return self._status_line

    status = property(_get_status, _set_status, None,
        ''' A writeable property to change the HTTP response status. It accepts
            either a numeric code (100-999) or a string with a custom reason
            phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
            :data:`status_code` are updated accordingly. The return value is
            always a status string. ''')
    del _get_status, _set_status

    @property
    def headers(self):
        ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like
            view on the response headers. The view shares storage with this
            response, so mutations are reflected here. '''
        hdict = HeaderDict()
        hdict.dict = self._headers
        return hdict

    # Dict-like, case-insensitive access to individual headers.
    def __contains__(self, name): return _hkey(name) in self._headers
    def __delitem__(self, name): del self._headers[_hkey(name)]
    def __getitem__(self, name): return self._headers[_hkey(name)][-1]
    def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)]

    def get_header(self, name, default=None):
        ''' Return the value of a previously defined header. If there is no
            header with that name, return a default value. '''
        return self._headers.get(_hkey(name), [default])[-1]

    def set_header(self, name, value):
        ''' Create a new response header, replacing any previously defined
            headers with the same name. '''
        self._headers[_hkey(name)] = [str(value)]

    def add_header(self, name, value):
        ''' Add an additional response header, not removing duplicates. '''
        self._headers.setdefault(_hkey(name), []).append(str(value))

    def iter_headers(self):
        ''' Yield (header, value) tuples, skipping headers that are not
            allowed with the current response status code. '''
        return self.headerlist

    @property
    def headerlist(self):
        ''' WSGI conform list of (header, value) tuples. '''
        out = []
        headers = list(self._headers.items())
        # _hkey() keys are already Title-Cased, so this lookup is exact.
        if 'Content-Type' not in self._headers:
            headers.append(('Content-Type', [self.default_content_type]))
        if self._status_code in self.bad_headers:
            bad_headers = self.bad_headers[self._status_code]
            headers = [h for h in headers if h[0] not in bad_headers]
        out += [(name, val) for name, vals in headers for val in vals]
        if self._cookies:
            for c in self._cookies.values():
                out.append(('Set-Cookie', c.OutputString()))
        return out

    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int)
    expires = HeaderProperty('Expires',
        reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
        writer=lambda x: http_date(x))

    @property
    def charset(self):
        """ Return the charset specified in the content-type header
            (default: UTF-8). """
        # The old signature ``charset(self, default='UTF-8')`` was
        # misleading: a property getter can never receive that argument,
        # so it is now a local constant. Behavior is unchanged.
        default = 'UTF-8'
        if 'charset=' in self.content_type:
            return self.content_type.split('charset=')[-1].split(';')[0].strip()
        return default

    def set_cookie(self, name, value, secret=None, **options):
        ''' Create a new cookie or replace an old one. If the `secret` parameter is
            set, create a `Signed Cookie` (described below).

            :param name: the name of the cookie.
            :param value: the value of the cookie.
            :param secret: a signature key required for signed cookies.

            Additionally, this method accepts all RFC 2109 attributes that are
            supported by :class:`cookie.Morsel`, including:

            :param max_age: maximum age in seconds. (default: None)
            :param expires: a datetime object or UNIX timestamp. (default: None)
            :param domain: the domain that is allowed to read the cookie.
              (default: current domain)
            :param path: limits the cookie to a given path (default: current path)
            :param secure: limit the cookie to HTTPS connections (default: off).
            :param httponly: prevents client-side javascript to read this cookie
              (default: off, requires Python 2.6 or newer).

            If neither `expires` nor `max_age` is set (default), the cookie will
            expire at the end of the browser session (as soon as the browser
            window is closed).

            Signed cookies may store any pickle-able object and are
            cryptographically signed to prevent manipulation. Keep in mind that
            cookies are limited to 4kb in most browsers.

            Warning: Signed cookies are not encrypted (the client can still see
            the content) and not copy-protected (the client can restore an old
            cookie). The main intention is to make pickling and unpickling
            save, not to store secret information at client side.
        '''
        if not self._cookies:
            self._cookies = SimpleCookie()

        if secret:
            value = touni(cookie_encode((name, value), secret))
        elif not isinstance(value, basestring):
            raise TypeError('Secret key missing for non-string Cookie.')

        # Most browsers refuse cookies larger than 4kb.
        if len(value) > 4096: raise ValueError('Cookie value too long.')
        self._cookies[name] = value

        for key, value in options.items():
            if key == 'max_age':
                if isinstance(value, timedelta):
                    value = value.seconds + value.days * 24 * 3600
            if key == 'expires':
                if isinstance(value, (datedate, datetime)):
                    value = value.timetuple()
                elif isinstance(value, (int, float)):
                    value = time.gmtime(value)
                value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
            # Morsel attributes use dashes, e.g. 'max-age'.
            self._cookies[name][key.replace('_', '-')] = value

    def delete_cookie(self, key, **kwargs):
        ''' Delete a cookie. Be sure to use the same `domain` and `path`
            settings as used to create the cookie. '''
        kwargs['max_age'] = -1
        kwargs['expires'] = 0
        self.set_cookie(key, '', **kwargs)

    def __repr__(self):
        out = ''
        for name, value in self.headerlist:
            out += '%s: %s\n' % (name.title(), value.strip())
        return out
def _local_property():
ls = threading.local()
def fget(self):
try: return ls.var
except AttributeError:
raise RuntimeError("Request context not initialized.")
def fset(self, value): ls.var = value
def fdel(self): del ls.var
return property(fget, fset, fdel, 'Thread-local property')
class LocalRequest(BaseRequest):
    ''' A thread-local subclass of :class:`BaseRequest` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`request`). If accessed during a
        request/response cycle, this instance always refers to the *current*
        request (even on a multithreaded server). '''
    # Re-binding (calling __init__ with a new environ) replaces this
    # thread's request state; the environ itself is thread-local.
    bind = BaseRequest.__init__
    environ = _local_property()
class LocalResponse(BaseResponse):
    ''' A thread-local subclass of :class:`BaseResponse` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`response`). Its attributes are used
        to build the HTTP response at the end of the request/response cycle.
    '''
    # All mutable response state is thread-local, so concurrent requests
    # cannot see each other's headers, cookies or body.
    bind = BaseResponse.__init__
    _status_line = _local_property()
    _status_code = _local_property()
    _cookies = _local_property()
    _headers = _local_property()
    body = _local_property()
#: Module-level aliases kept for backwards compatibility.
Request = BaseRequest
Response = BaseResponse
class HTTPResponse(Response, BottleException):
    ''' A :class:`Response` that is also an exception, so it can be raised
        from anywhere inside a handler to short-circuit request processing. '''

    def __init__(self, body='', status=None, headers=None, **more_headers):
        super(HTTPResponse, self).__init__(body, status, headers, **more_headers)

    def apply(self, response):
        # Transfer this exception's state onto the given response object
        # (typically the thread-local `response`).
        response._status_code = self._status_code
        response._status_line = self._status_line
        response._headers = self._headers
        response._cookies = self._cookies
        response.body = self.body
class HTTPError(HTTPResponse):
    ''' An :class:`HTTPResponse` used for error responses. Keeps a reference
        to the original exception and traceback (if any) for error handlers. '''
    default_status = 500

    def __init__(self, status=None, body=None, exception=None, traceback=None,
                 **options):
        self.exception = exception
        self.traceback = traceback
        super(HTTPError, self).__init__(body, status, **options)
###############################################################################
# Plugins ######################################################################
###############################################################################
class PluginError(BottleException): pass
class JSONPlugin(object):
    ''' Plugin that serializes handler return values of type dict to JSON
        and sets the Content-Type header accordingly. '''
    name = 'json'
    api = 2

    def __init__(self, json_dumps=json_dumps):
        # A falsy json_dumps disables the plugin entirely (see apply()).
        self.json_dumps = json_dumps

    def apply(self, callback, route):
        dumps = self.json_dumps
        if not dumps: return callback
        def wrapper(*a, **ka):
            try:
                rv = callback(*a, **ka)
            except HTTPError:
                # Treat a raised HTTPError like a returned one so its dict
                # body (if any) is serialized below.
                rv = _e()

            if isinstance(rv, dict):
                #Attempt to serialize, raises exception on failure
                json_response = dumps(rv)
                #Set content type only if serialization successful
                response.content_type = 'application/json'
                return json_response
            elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
                rv.body = dumps(rv.body)
                rv.content_type = 'application/json'
            return rv

        return wrapper
class TemplatePlugin(object):
    ''' This plugin applies the :func:`view` decorator to all routes with a
        `template` config parameter. If the parameter is a tuple, the second
        element must be a dict with additional options (e.g. `template_engine`)
        or default variables for the template. '''
    name = 'template'
    api = 2

    def apply(self, callback, route):
        conf = route.config.get('template')
        if isinstance(conf, (tuple, list)) and len(conf) == 2:
            tpl_name, tpl_opts = conf
            return view(tpl_name, **tpl_opts)(callback)
        if isinstance(conf, str):
            return view(conf)(callback)
        return callback
#: Not a plugin, but part of the plugin API. TODO: Find a better place.
class _ImportRedirect(object):

    def __init__(self, name, impmask):
        ''' Create a virtual package that redirects imports (see PEP 302). '''
        self.name = name
        self.impmask = impmask
        # Register (or reuse) the virtual package module and install this
        # object as a PEP 302 meta-path import hook.
        self.module = sys.modules.setdefault(name, imp.new_module(name))
        self.module.__dict__.update({'__file__': __file__, '__path__': [],
                                    '__all__': [], '__loader__': self})
        sys.meta_path.append(self)

    def find_module(self, fullname, path=None):
        # Only claim direct submodules of the virtual package.
        if '.' not in fullname: return
        packname = fullname.rsplit('.', 1)[0]
        if packname != self.name: return
        return self

    def load_module(self, fullname):
        if fullname in sys.modules: return sys.modules[fullname]
        modname = fullname.rsplit('.', 1)[1]
        # impmask is a format string mapping the short name to the real
        # module path, e.g. 'bottle_%s'.
        realname = self.impmask % modname
        __import__(realname)
        # Alias the real module under the virtual name as well.
        module = sys.modules[fullname] = sys.modules[realname]
        setattr(self.module, modname, module)
        module.__loader__ = self
        return module
###############################################################################
# Common Utilities #############################################################
###############################################################################
class MultiDict(DictMixin):
    """ This dict stores multiple values per key, but behaves exactly like a
        normal dict in that it returns only the newest value for any given key.
        There are special methods available to access the full list of values.
    """

    def __init__(self, *a, **k):
        # Internal storage maps each key to a *list* of values (newest last).
        self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())

    def __len__(self): return len(self.dict)
    def __iter__(self): return iter(self.dict)
    def __contains__(self, key): return key in self.dict
    def __delitem__(self, key): del self.dict[key]
    # Item access returns only the most recent value for a key.
    def __getitem__(self, key): return self.dict[key][-1]
    # Assignment appends rather than replacing (see :meth:`replace`).
    def __setitem__(self, key, value): self.append(key, value)
    def keys(self): return self.dict.keys()

    # On Python 3 the iterator-based methods are the canonical ones and the
    # iter* names are aliases; on Python 2 it is the other way around.
    if py3k:
        def values(self): return (v[-1] for v in self.dict.values())
        def items(self): return ((k, v[-1]) for k, v in self.dict.items())
        def allitems(self):
            return ((k, v) for k, vl in self.dict.items() for v in vl)
        iterkeys = keys
        itervalues = values
        iteritems = items
        iterallitems = allitems

    else:
        def values(self): return [v[-1] for v in self.dict.values()]
        def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
        def iterkeys(self): return self.dict.iterkeys()
        def itervalues(self): return (v[-1] for v in self.dict.itervalues())
        def iteritems(self):
            return ((k, v[-1]) for k, v in self.dict.iteritems())
        def iterallitems(self):
            return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
        def allitems(self):
            return [(k, v) for k, vl in self.dict.iteritems() for v in vl]

    def get(self, key, default=None, index=-1, type=None):
        ''' Return the most recent value for a key.

            :param default: The default value to be returned if the key is not
                   present or the type conversion fails.
            :param index: An index for the list of available values.
            :param type: If defined, this callable is used to cast the value
                    into a specific type. Exception are suppressed and result in
                    the default value to be returned.
        '''
        try:
            val = self.dict[key][index]
            return type(val) if type else val
        except Exception:
            # Missing key, bad index or failed conversion all yield default.
            pass
        return default

    def append(self, key, value):
        ''' Add a new value to the list of values for this key. '''
        self.dict.setdefault(key, []).append(value)

    def replace(self, key, value):
        ''' Replace the list of values with a single value. '''
        self.dict[key] = [value]

    def getall(self, key):
        ''' Return a (possibly empty) list of values for a key. '''
        return self.dict.get(key) or []

    #: Aliases for WTForms to mimic other multi-dict APIs (Django)
    getone = get
    getlist = getall
class FormsDict(MultiDict):
    ''' This :class:`MultiDict` subclass is used to store request form data.
        Additionally to the normal dict-like item access methods (which return
        unmodified data as native strings), this container also supports
        attribute-like access to its values. Attributes are automatically de-
        or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
        attributes default to an empty string. '''

    #: Encoding used for attribute values.
    input_encoding = 'utf8'
    #: If true (default), unicode strings are first encoded with `latin1`
    #: and then decoded to match :attr:`input_encoding`.
    recode_unicode = True

    def _fix(self, s, encoding=None):
        # Re-decode a value to the configured encoding, handling both the
        # Python 3 (latin1-decoded unicode) and Python 2 (bytes) WSGI cases.
        if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
            return s.encode('latin1').decode(encoding or self.input_encoding)
        elif isinstance(s, bytes): # Python 2 WSGI
            return s.decode(encoding or self.input_encoding)
        else:
            return s

    def decode(self, encoding=None):
        ''' Returns a copy with all keys and values de- or recoded to match
            :attr:`input_encoding`. Some libraries (e.g. WTForms) want a
            unicode dictionary. '''
        copy = FormsDict()
        enc = copy.input_encoding = encoding or self.input_encoding
        # The copy already holds decoded text, so disable re-decoding there.
        copy.recode_unicode = False
        for key, value in self.allitems():
            copy.append(self._fix(key, enc), self._fix(value, enc))
        return copy

    def getunicode(self, name, default=None, encoding=None):
        ''' Return the value as a unicode string, or the default. '''
        try:
            return self._fix(self[name], encoding)
        except (UnicodeError, KeyError):
            return default

    def __getattr__(self, name, default=unicode()):
        # Without this guard, pickle generates a cryptic TypeError:
        if name.startswith('__') and name.endswith('__'):
            return super(FormsDict, self).__getattr__(name)
        return self.getunicode(name, default=default)
class HeaderDict(MultiDict):
    """ A case-insensitive version of :class:`MultiDict` that defaults to
        replace the old value instead of appending it. """

    def __init__(self, *a, **ka):
        self.dict = {}
        if a or ka: self.update(*a, **ka)

    # All lookups go through _hkey() so header names are matched
    # case-insensitively (stored in canonical Title-Case form).
    def __contains__(self, key): return _hkey(key) in self.dict
    def __delitem__(self, key): del self.dict[_hkey(key)]
    def __getitem__(self, key): return self.dict[_hkey(key)][-1]
    def __setitem__(self, key, value): self.dict[_hkey(key)] = [str(value)]

    def append(self, key, value):
        self.dict.setdefault(_hkey(key), []).append(str(value))

    def replace(self, key, value): self.dict[_hkey(key)] = [str(value)]
    def getall(self, key): return self.dict.get(_hkey(key)) or []

    def get(self, key, default=None, index=-1):
        return MultiDict.get(self, _hkey(key), default, index)

    def filter(self, names):
        # Remove every header listed in `names` (case-insensitive).
        for name in [_hkey(n) for n in names]:
            if name in self.dict:
                del self.dict[name]
class WSGIHeaderDict(DictMixin):
    ''' This dict-like class wraps a WSGI environ dict and provides convenient
        access to HTTP_* fields. Keys and values are native strings
        (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
        environment contains non-native string values, these are de- or encoded
        using a lossless 'latin1' character set.

        The API will remain stable even on changes to the relevant PEPs.
        Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
        that uses non-native strings.)
    '''
    #: List of keys that do not have a ``HTTP_`` prefix.
    cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')

    def __init__(self, environ):
        self.environ = environ

    def _ekey(self, key):
        ''' Translate header field name to CGI/WSGI environ key. '''
        key = key.replace('-','_').upper()
        if key in self.cgikeys:
            return key
        return 'HTTP_' + key

    def raw(self, key, default=None):
        ''' Return the header value as is (may be bytes or unicode). '''
        return self.environ.get(self._ekey(key), default)

    def __getitem__(self, key):
        # latin1 is lossless for the byte values WSGI headers may contain.
        return tonat(self.environ[self._ekey(key)], 'latin1')

    def __setitem__(self, key, value):
        raise TypeError("%s is read-only." % self.__class__)

    def __delitem__(self, key):
        raise TypeError("%s is read-only." % self.__class__)

    def __iter__(self):
        # Yield header names in dashed Title-Case, derived from environ keys.
        for key in self.environ:
            if key[:5] == 'HTTP_':
                yield key[5:].replace('_', '-').title()
            elif key in self.cgikeys:
                yield key.replace('_', '-').title()

    def keys(self): return [x for x in self]
    def __len__(self): return len(self.keys())
    def __contains__(self, key): return self._ekey(key) in self.environ
class ConfigDict(dict):
    ''' A dict-like configuration storage with additional support for
        namespaces, validators, meta-data, on_change listeners and more.
    '''

    __slots__ = ('_meta', '_on_change')

    def __init__(self):
        self._meta = {}
        # Change hook; the owning application may replace this no-op to
        # react to configuration updates.
        self._on_change = lambda name, value: None

    def load_config(self, filename):
        ''' Load values from an *.ini style config file.

            If the config file contains sections, their names are used as
            namespaces for the values within. The two special sections
            ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
        '''
        conf = ConfigParser()
        conf.read(filename)
        for section in conf.sections():
            for key, value in conf.items(section):
                if section not in ('DEFAULT', 'bottle'):
                    key = section + '.' + key
                self[key] = value
        return self

    def load_dict(self, source, namespace=''):
        ''' Load values from a dictionary structure. Nesting can be used to
            represent namespaces.

            >>> c.load_dict({'some': {'namespace': {'key': 'value'} } })
            {'some.namespace.key': 'value'}
        '''
        for key, value in source.items():
            if isinstance(key, str):
                nskey = (namespace + '.' + key).strip('.')
                if isinstance(value, dict):
                    self.load_dict(value, namespace=nskey)
                else:
                    self[nskey] = value
            else:
                raise TypeError('Key has type %r (not a string)' % type(key))
        return self

    def update(self, *a, **ka):
        ''' If the first parameter is a string, all keys are prefixed with this
            namespace. Apart from that it works just as the usual dict.update().
            Example: ``update('some.namespace', key='value')`` '''
        prefix = ''
        if a and isinstance(a[0], str):
            prefix = a[0].strip('.') + '.'
            a = a[1:]
        for key, value in dict(*a, **ka).items():
            self[prefix+key] = value

    def setdefault(self, key, value):
        if key not in self:
            self[key] = value

    def __setitem__(self, key, value):
        if not isinstance(key, str):
            raise TypeError('Key has type %r (not a string)' % type(key))
        # Apply an optional 'filter' meta callable before storing the value.
        value = self.meta_get(key, 'filter', lambda x: x)(value)
        # Identity check: skip the change hook if the exact same object is
        # re-assigned.
        if key in self and self[key] is value:
            return
        self._on_change(key, value)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self._on_change(key, None)
        dict.__delitem__(self, key)

    def meta_get(self, key, metafield, default=None):
        ''' Return the value of a meta field for a key. '''
        return self._meta.get(key, {}).get(metafield, default)

    def meta_set(self, key, metafield, value):
        ''' Set the meta field for a key to a new value. This triggers the
            on-change handler for existing keys. '''
        self._meta.setdefault(key, {})[metafield] = value
        if key in self:
            # Re-assign to re-run filters and notify listeners.
            self[key] = self[key]

    def meta_list(self, key):
        ''' Return an iterable of meta field names defined for a key. '''
        return self._meta.get(key, {}).keys()
class AppStack(list):
    """ A stack-like list. Calling it returns the head of the stack. """

    def __call__(self):
        """ Return the current default application. """
        return self[-1]

    def push(self, value=None):
        """ Add a new :class:`Bottle` instance to the stack. Non-Bottle
            values (including the default None) are replaced by a fresh
            application instance. Returns the pushed instance. """
        if not isinstance(value, Bottle):
            value = Bottle()
        self.append(value)
        return value
class WSGIFileWrapper(object):
    ''' Iterate over a file(-like) object in fixed-size chunks, mirroring
        common file methods onto the wrapper when the underlying object
        provides them (as expected by WSGI servers). '''

    def __init__(self, fp, buffer_size=1024*64):
        self.fp = fp
        self.buffer_size = buffer_size
        # Expose the file object's own methods directly where available.
        for name in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
            if hasattr(fp, name):
                setattr(self, name, getattr(fp, name))

    def __iter__(self):
        chunk_size = self.buffer_size
        read = self.read
        while True:
            chunk = read(chunk_size)
            if not chunk:
                return
            yield chunk
class _closeiter(object):
    ''' This only exists to be able to attach a .close method to iterators that
        do not support attribute assignment (most of itertools). '''

    def __init__(self, iterator, close=None):
        self.iterator = iterator
        # makelist() normalizes a single callable (or None) into a list.
        self.close_callbacks = makelist(close)

    def __iter__(self):
        return iter(self.iterator)

    def close(self):
        for func in self.close_callbacks:
            func()
class ResourceManager(object):
    ''' This class manages a list of search paths and helps to find and open
        application-bound resources (files).

        :param base: default value for :meth:`add_path` calls.
        :param opener: callable used to open resources.
        :param cachemode: controls which lookups are cached. One of 'all',
                          'found' or 'none'.
    '''

    def __init__(self, base='./', opener=open, cachemode='all'):
        # Store the *parameter*: the previous code assigned the builtin
        # `open`, silently discarding any user-supplied opener.
        self.opener = opener
        self.base = base
        self.cachemode = cachemode

        #: A list of search paths. See :meth:`add_path` for details.
        self.path = []
        #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
        self.cache = {}

    def add_path(self, path, base=None, index=None, create=False):
        ''' Add a new path to the list of search paths. Return False if the
            path does not exist.

            :param path: The new search path. Relative paths are turned into
                an absolute and normalized form. If the path looks like a file
                (not ending in `/`), the filename is stripped off.
            :param base: Path used to absolutize relative search paths.
                Defaults to :attr:`base` which defaults to ``os.getcwd()``.
            :param index: Position within the list of search paths. Defaults
                to last index (appends to the list).

            The `base` parameter makes it easy to reference files installed
            along with a python module or package::

                res.add_path('./resources/', __file__)
        '''
        base = os.path.abspath(os.path.dirname(base or self.base))
        path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
        path += os.sep
        # Re-adding a known path moves it to the requested position.
        if path in self.path:
            self.path.remove(path)
        if create and not os.path.isdir(path):
            os.makedirs(path)
        if index is None:
            self.path.append(path)
        else:
            self.path.insert(index, path)
        self.cache.clear()
        return os.path.exists(path)

    def __iter__(self):
        ''' Iterate over all existing files in all registered paths. '''
        # Walk each registered path recursively; subdirectories are pushed
        # onto the work list as they are discovered.
        search = self.path[:]
        while search:
            path = search.pop()
            if not os.path.isdir(path): continue
            for name in os.listdir(path):
                full = os.path.join(path, name)
                if os.path.isdir(full): search.append(full)
                else: yield full

    def lookup(self, name):
        ''' Search for a resource and return an absolute file path, or `None`.

            The :attr:`path` list is searched in order. The first match is
            returend. Symlinks are followed. The result is cached to speed up
            future lookups. '''
        # In DEBUG mode the cache is bypassed so edits are picked up.
        if name not in self.cache or DEBUG:
            for path in self.path:
                fpath = os.path.join(path, name)
                if os.path.isfile(fpath):
                    if self.cachemode in ('all', 'found'):
                        self.cache[name] = fpath
                    return fpath
            if self.cachemode == 'all':
                self.cache[name] = None
        return self.cache[name]

    def open(self, name, mode='r', *args, **kwargs):
        ''' Find a resource and return a file object, or raise IOError. '''
        fname = self.lookup(name)
        if not fname: raise IOError("Resource %r not found." % name)
        return self.opener(fname, mode=mode, *args, **kwargs)
class FileUpload(object):

    def __init__(self, fileobj, name, filename, headers=None):
        ''' Wrapper for file uploads. '''
        #: Open file(-like) object (BytesIO buffer or temporary file)
        self.file = fileobj
        #: Name of the upload form field
        self.name = name
        #: Raw filename as sent by the client (may contain unsafe characters)
        self.raw_filename = filename
        #: A :class:`HeaderDict` with additional headers (e.g. content-type)
        self.headers = HeaderDict(headers) if headers else HeaderDict()

    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int, default=-1)

    @cached_property
    def filename(self):
        ''' Name of the file on the client file system, but normalized to ensure
            file system compatibility. An empty filename is returned as 'empty'.

            Only ASCII letters, digits, dashes, underscores and dots are
            allowed in the final filename. Accents are removed, if possible.
            Whitespace is replaced by a single dash. Leading or tailing dots
            or dashes are removed. The filename is limited to 255 characters.
        '''
        fname = self.raw_filename
        if not isinstance(fname, unicode):
            fname = fname.decode('utf8', 'ignore')
        # Decompose accented characters and drop the non-ASCII code points.
        fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
        # Strip client-supplied directory components (guards the stored name
        # against path traversal).
        fname = os.path.basename(fname.replace('\\', os.path.sep))
        fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
        fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
        return fname[:255] or 'empty'

    def _copy_file(self, fp, chunk_size=2**16):
        # Stream the upload in chunks, then restore the original position so
        # the upload can be read again afterwards.
        read, write, offset = self.file.read, fp.write, self.file.tell()
        while 1:
            buf = read(chunk_size)
            if not buf: break
            write(buf)
        self.file.seek(offset)

    def save(self, destination, overwrite=False, chunk_size=2**16):
        ''' Save file to disk or copy its content to an open file(-like) object.
            If *destination* is a directory, :attr:`filename` is added to the
            path. Existing files are not overwritten by default (IOError).

            :param destination: File path, directory or file(-like) object.
            :param overwrite: If True, replace existing files. (default: False)
            :param chunk_size: Bytes to read at a time. (default: 64kb)
        '''
        if isinstance(destination, basestring): # Except file-likes here
            if os.path.isdir(destination):
                destination = os.path.join(destination, self.filename)
            if not overwrite and os.path.exists(destination):
                raise IOError('File exists.')
            with open(destination, 'wb') as fp:
                self._copy_file(fp, chunk_size)
        else:
            self._copy_file(destination, chunk_size)
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error.'):
    """ Abort request handling by raising an :exc:`HTTPError`.

        :param code: HTTP status code (default: 500)
        :param text: error text used as the response body
    """
    raise HTTPError(code, text)
def redirect(url, code=None):
    """ Aborts execution and causes a 303 or 302 redirect, depending on
        the HTTP protocol version. """
    if not code:
        # 303 See Other exists only since HTTP/1.1; use 302 for HTTP/1.0.
        code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
    # Build the redirect on a copy of the current thread-local response so
    # headers/cookies set so far are preserved.
    res = response.copy(cls=HTTPResponse)
    res.status = code
    res.body = ""
    res.set_header('Location', urljoin(request.url, url))
    raise res
def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
if not part: break
bytes -= len(part)
yield part
def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
    """ Open a file in a safe way and return :exc:`HTTPResponse` with status
        code 200, 305, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
        ``Content-Length`` and ``Last-Modified`` headers are set if possible.
        Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
        requests.

        :param filename: Name or path of the file to send.
        :param root: Root path for file lookups. Should be an absolute directory
            path.
        :param mimetype: Defines the content-type header (default: guess from
            file extension)
        :param download: If True, ask the browser to open a `Save as...` dialog
            instead of opening the file with the associated program. You can
            specify a custom filename as a string. If not specified, the
            original filename is used (default: False).
        :param charset: The charset to use for files with a ``text/*``
            mime-type. (default: UTF-8)
    """
    root = os.path.abspath(root) + os.sep
    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
    headers = dict()
    # Directory-traversal guard: the resolved path must stay below root.
    if not filename.startswith(root):
        return HTTPError(403, "Access denied.")
    if not os.path.exists(filename) or not os.path.isfile(filename):
        return HTTPError(404, "File does not exist.")
    if not os.access(filename, os.R_OK):
        return HTTPError(403, "You do not have permission to access this file.")
    if mimetype == 'auto':
        # guess_type may also detect a transfer encoding (e.g. gzip).
        mimetype, encoding = mimetypes.guess_type(filename)
        if encoding: headers['Content-Encoding'] = encoding
    if mimetype:
        if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
            mimetype += '; charset=%s' % charset
        headers['Content-Type'] = mimetype
    if download:
        # download may be True (use basename) or a replacement filename.
        download = os.path.basename(filename if download == True else download)
        headers['Content-Disposition'] = 'attachment; filename="%s"' % download
    stats = os.stat(filename)
    headers['Content-Length'] = clen = stats.st_size
    lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
    headers['Last-Modified'] = lm
    # Conditional GET: answer 304 if the client's copy is still fresh.
    ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
    if ims:
        ims = parse_date(ims.split(";")[0].strip())
    if ims is not None and ims >= int(stats.st_mtime):
        headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
        return HTTPResponse(status=304, **headers)
    # HEAD requests get headers only; GET gets an open file handle.
    body = '' if request.method == 'HEAD' else open(filename, 'rb')
    headers["Accept-Ranges"] = "bytes"
    # NOTE(review): this assignment is immediately shadowed inside the
    # branch below; it is effectively dead except as a membership probe.
    ranges = request.environ.get('HTTP_RANGE')
    if 'HTTP_RANGE' in request.environ:
        ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
        if not ranges:
            return HTTPError(416, "Requested Range Not Satisfiable")
        # Only the first requested range is honored (no multipart ranges).
        offset, end = ranges[0]
        headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
        headers["Content-Length"] = str(end-offset)
        if body: body = _file_iter_range(body, offset, end-offset)
        return HTTPResponse(body, status=206, **headers)
    return HTTPResponse(body, **headers)
###############################################################################
# HTTP Utilities and MISC (TODO) ###############################################
###############################################################################
def debug(mode=True):
    """ Change the global debug level. There is only one debug level
        supported at the moment. Enabling it also un-hides
        DeprecationWarnings. """
    global DEBUG
    if mode:
        warnings.simplefilter('default')
    DEBUG = bool(mode)
def http_date(value):
    """ Format a datetime/date, struct_time or epoch number as an RFC 1123
        date string. Strings are returned unchanged. """
    if isinstance(value, (datedate, datetime)):
        struct = value.utctimetuple()
    elif isinstance(value, (int, float)):
        struct = time.gmtime(value)
    else:
        struct = value
    if isinstance(struct, basestring):
        return struct
    return time.strftime("%a, %d %b %Y %H:%M:%S GMT", struct)
def parse_date(ims):
""" Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
try:
ts = email.utils.parsedate_tz(ims)
return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
except (TypeError, ValueError, IndexError, OverflowError):
return None
def parse_auth(header):
    """ Parse an rfc2617 HTTP Basic authentication header string and return
        a (user, password) tuple, or None if it cannot be parsed. """
    try:
        method, data = header.split(None, 1)
        if method.lower() != 'basic':
            return None
        user, pwd = touni(base64.b64decode(tob(data))).split(':', 1)
        return user, pwd
    except (KeyError, ValueError):
        return None
def parse_range_header(header, maxlen=0):
    ''' Yield (start, end) index pairs parsed from an HTTP Range header,
        skipping unsatisfiable ranges. End indices are non-inclusive. '''
    if not header or header[:6] != 'bytes=':
        return
    for part in header[6:].split(','):
        if '-' not in part:
            continue
        first, last = part.split('-', 1)
        try:
            if not first:
                # bytes=-100 -> the last 100 bytes
                start, end = max(0, maxlen - int(last)), maxlen
            elif not last:
                # bytes=100- -> everything starting at offset 100
                start, end = int(first), maxlen
            else:
                # bytes=100-200 -> bytes 100..200 inclusive
                start, end = int(first), min(int(last) + 1, maxlen)
            if 0 <= start < end <= maxlen:
                yield start, end
        except ValueError:
            pass
def _parse_qsl(qs):
    # Split a query string on '&' (and ';') and return a list of decoded
    # (key, value) pairs. Missing '=' yields an empty value.
    pairs = []
    for field in qs.replace(';', '&').split('&'):
        if not field:
            continue
        nv = field.split('=', 1)
        if len(nv) != 2:
            nv.append('')
        key = urlunquote(nv[0].replace('+', ' '))
        value = urlunquote(nv[1].replace('+', ' '))
        pairs.append((key, value))
    return pairs
def _lscmp(a, b):
''' Compares two strings in a cryptographically safe way:
Runtime is not affected by length of common prefix. '''
return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
def cookie_encode(data, key):
    ''' Pickle, base64-encode and HMAC-sign a pickle-able object. Returns
        a byte string of the form b'!signature?payload'. '''
    payload = base64.b64encode(pickle.dumps(data, -1))
    signature = base64.b64encode(hmac.new(tob(key), payload).digest())
    return tob('!') + signature + tob('?') + payload
def cookie_decode(data, key):
    ''' Verify the HMAC signature of an encoded cookie and unpickle its
        payload. Return the object, or None if verification fails. '''
    data = tob(data)
    if not cookie_is_encoded(data):
        return None
    sig, msg = data.split(tob('?'), 1)
    expected = base64.b64encode(hmac.new(tob(key), msg).digest())
    # Constant-time compare to avoid leaking signature prefixes.
    if _lscmp(sig[1:], expected):
        return pickle.loads(base64.b64decode(msg))
    return None
def cookie_is_encoded(data):
    ''' Return True if the argument looks like a signed cookie
        (b'!signature?payload'). '''
    has_prefix = data.startswith(tob('!'))
    return bool(has_prefix and tob('?') in data)
def html_escape(string):
    ''' Escape HTML special characters ``&<>`` and quotes ``'"``.

        The ampersand is replaced first so already-produced entities are
        not double-escaped. (The previous text contained mangled identity
        replacements -- e.g. ``replace('&','&')`` -- which escaped nothing
        at all; the proper HTML entities are restored here.)
    '''
    return string.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')\
                 .replace('"', '&quot;').replace("'", '&#039;')
def html_quote(string):
    ''' Escape and double-quote a string for use as an HTML attribute value.

        Newlines, carriage returns and tabs are replaced by numeric character
        references so they survive inside the quoted attribute. (The previous
        text contained mangled replacements that substituted plain spaces;
        the proper ``&#10;``/``&#13;``/``&#9;`` entities are restored here.)
    '''
    return '"%s"' % html_escape(string).replace('\n', '&#10;')\
                    .replace('\r', '&#13;').replace('\t', '&#9;')
def yieldroutes(func):
    """ Return a generator for routes that match the signature (name, args)
        of the func parameter. This may yield more than one route if the
        function takes optional keyword arguments. The output is best
        described by example::

            a()          -> '/a'
            b(x, y)      -> '/b/<x>/<y>'
            c(x, y=5)    -> '/c/<x>' and '/c/<x>/<y>'
            d(x=5, y=6)  -> '/d' and '/d/<x>' and '/d/<x>/<y>'
    """
    import inspect
    path = '/' + func.__name__.replace('__', '/').lstrip('/')
    # inspect.getargspec was removed in Python 3.11; getfullargspec has the
    # same (args, varargs, varkw, defaults) layout in its first four fields.
    if hasattr(inspect, 'getfullargspec'):
        spec = inspect.getfullargspec(func)
    else:  # Python 2 fallback
        spec = getargspec(func)
    argc = len(spec[0]) - len(spec[3] or [])
    path += ('/<%s>' * argc) % tuple(spec[0][:argc])
    yield path
    # Each optional argument adds one more, longer route.
    for arg in spec[0][argc:]:
        path += '/<%s>' % arg
        yield path
def path_shift(script_name, path_info, shift=1):
    ''' Move path fragments between PATH_INFO and SCRIPT_NAME.

        :param script_name: The SCRIPT_NAME path.
        :param path_info: The PATH_INFO path.
        :param shift: The number of path fragments to shift. May be negative
            to change the shift direction. (default: 1)
        :return: The modified (script_name, path_info) pair.
        :raises AssertionError: if there are not enough fragments to shift.
    '''
    if shift == 0:
        return script_name, path_info
    pathlist = path_info.strip('/').split('/')
    scriptlist = script_name.strip('/').split('/')
    # An empty path splits into [''] -- normalize to an empty list.
    if pathlist == ['']:
        pathlist = []
    if scriptlist == ['']:
        scriptlist = []
    if 0 < shift <= len(pathlist):
        # Move leading PATH_INFO fragments onto SCRIPT_NAME.
        scriptlist = scriptlist + pathlist[:shift]
        pathlist = pathlist[shift:]
    elif -len(scriptlist) <= shift < 0:
        # Move trailing SCRIPT_NAME fragments back onto PATH_INFO.
        pathlist = scriptlist[shift:] + pathlist
        scriptlist = scriptlist[:shift]
    else:
        side = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
        raise AssertionError("Cannot shift. Nothing left from %s" % side)
    new_script_name = '/' + '/'.join(scriptlist)
    new_path_info = '/' + '/'.join(pathlist)
    # Preserve a trailing slash on the remaining PATH_INFO.
    if path_info.endswith('/') and pathlist:
        new_path_info += '/'
    return new_script_name, new_path_info
def auth_basic(check, realm="private", text="Access denied"):
    ''' Callback decorator that protects a route with HTTP basic
        authentication. *check* is called with (user, password) and must
        return True to grant access.
        TODO: Add route(check_auth=...) parameter. '''
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*a, **ka):
            user, password = request.auth or (None, None)
            if user is not None and check(user, password):
                return func(*a, **ka)
            err = HTTPError(401, text)
            err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
            return err
        return wrapper
    return decorator
# Shortcuts for common Bottle methods.
# They all refer to the current default application.
def make_default_app_wrapper(name):
    ''' Return a callable that relays calls to the same-named method of the
        current default application. '''
    @functools.wraps(getattr(Bottle, name))
    def wrapper(*a, **ka):
        target = getattr(app(), name)
        return target(*a, **ka)
    return wrapper
# Module-level shortcuts bound to the *current* default application. Each one
# simply forwards to the same-named Bottle method via make_default_app_wrapper.
route = make_default_app_wrapper('route')
get = make_default_app_wrapper('get')
post = make_default_app_wrapper('post')
put = make_default_app_wrapper('put')
delete = make_default_app_wrapper('delete')
error = make_default_app_wrapper('error')
mount = make_default_app_wrapper('mount')
hook = make_default_app_wrapper('hook')
install = make_default_app_wrapper('install')
uninstall = make_default_app_wrapper('uninstall')
url = make_default_app_wrapper('get_url')  # note: proxies Bottle.get_url
###############################################################################
# Server Adapter ###############################################################
###############################################################################
class ServerAdapter(object):
    """ Base class for server adapters. Subclasses override :meth:`run` to
        start a specific WSGI server backend. """
    quiet = False

    def __init__(self, host='127.0.0.1', port=8080, **options):
        self.options = options
        self.host = host
        self.port = int(port)

    def run(self, handler):  # pragma: no cover
        """ Start the server with *handler* as WSGI app. Blocks forever. """
        pass

    def __repr__(self):
        kwargs = ', '.join('%s=%s' % (k, repr(v))
                           for k, v in self.options.items())
        return "%s(%s)" % (self.__class__.__name__, kwargs)
class CGIServer(ServerAdapter):
    """ Run as a CGI script via the stdlib wsgiref CGI handler. """
    quiet = True
    def run(self, handler):  # pragma: no cover
        from wsgiref.handlers import CGIHandler
        def patched_environ(environ, start_response):
            # CGI may omit PATH_INFO entirely; WSGI requires the key.
            environ.setdefault('PATH_INFO', '')
            return handler(environ, start_response)
        CGIHandler().run(patched_environ)
class FlupFCGIServer(ServerAdapter):
    """ FastCGI server based on flup. """
    def run(self, handler):  # pragma: no cover
        import flup.server.fcgi
        self.options.setdefault('bindAddress', (self.host, self.port))
        server = flup.server.fcgi.WSGIServer(handler, **self.options)
        server.run()
class WSGIRefServer(ServerAdapter):
    # Default adapter: the stdlib wsgiref reference server.
    def run(self, app): # pragma: no cover
        from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
        from wsgiref.simple_server import make_server
        import socket

        class FixedHandler(WSGIRequestHandler):
            def address_string(self): # Prevent reverse DNS lookups please.
                return self.client_address[0]
            def log_request(*args, **kw):
                # NOTE: 'self' below is the enclosing ServerAdapter captured
                # by closure, NOT the handler instance -- the handler arrives
                # in args[0]. This lets the adapter's quiet flag mute logging.
                if not self.quiet:
                    return WSGIRequestHandler.log_request(*args, **kw)

        handler_cls = self.options.get('handler_class', FixedHandler)
        server_cls = self.options.get('server_class', WSGIServer)

        if ':' in self.host: # Fix wsgiref for IPv6 addresses.
            if getattr(server_cls, 'address_family') == socket.AF_INET:
                # Rebind server_cls to a subclass that switches the socket
                # family to IPv6 while keeping all other behavior.
                class server_cls(server_cls):
                    address_family = socket.AF_INET6

        srv = make_server(self.host, self.port, app, server_cls, handler_cls)
        srv.serve_forever()
class CherryPyServer(ServerAdapter):
    """ Adapter for the CherryPy WSGI server, with optional SSL support via
        the ``certfile``, ``keyfile`` and ``ssl_module`` options. """
    def run(self, handler):  # pragma: no cover
        from cherrypy import wsgiserver
        self.options['bind_addr'] = (self.host, self.port)
        self.options['wsgi_app'] = handler

        # Remove the bottle-specific options unconditionally before handing
        # self.options to CherryPyWSGIServer: the previous get-then-del dance
        # left falsy-but-present values (e.g. certfile='') in the dict, which
        # the server constructor rejects as unexpected keyword arguments.
        certfile = self.options.pop('certfile', None)
        keyfile = self.options.pop('keyfile', None)
        ssl_module = self.options.pop('ssl_module', None)

        server = wsgiserver.CherryPyWSGIServer(**self.options)
        if certfile:
            server.ssl_certificate = certfile
        if keyfile:
            server.ssl_private_key = keyfile
        if ssl_module:
            print("Setting SSL module = %s" % ssl_module)
            adapter_cls = wsgiserver.get_ssl_adapter_class(ssl_module)
            server.ssl_adapter = adapter_cls(certfile, keyfile)

        try:
            server.start()
        finally:
            server.stop()
class WaitressServer(ServerAdapter):
    """ Adapter for the waitress pure-python WSGI server. """
    def run(self, handler):
        from waitress import serve
        serve(handler, host=self.host, port=self.port)
class PasteServer(ServerAdapter):
    """ Adapter for the paste httpserver with TransLogger request logging. """
    def run(self, handler):  # pragma: no cover
        from paste import httpserver
        from paste.translogger import TransLogger
        logged_app = TransLogger(handler, setup_console_handler=(not self.quiet))
        httpserver.serve(logged_app, host=self.host, port=str(self.port),
                         **self.options)
class MeinheldServer(ServerAdapter):
    """ Adapter for the meinheld server. """
    def run(self, handler):
        from meinheld import server
        server.listen((self.host, self.port))
        server.run(handler)
class FapwsServer(ServerAdapter):
    """ Extremely fast webserver using libev. See http://www.fapws.org/ """
    def run(self, handler): # pragma: no cover
        import fapws._evwsgi as evwsgi
        from fapws import base, config
        port = self.port
        if float(config.SERVER_IDENT[-2:]) > 0.4:
            # fapws3 silently changed its API in 0.5 (expects port as string)
            port = str(port)
        evwsgi.start(self.host, port)
        # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
            _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
            _stderr("         (Fapws3 breaks python thread support)\n")
        evwsgi.set_base_module(base)
        def app(environ, start_response):
            # fapws runs single-process; advertise that to the WSGI app.
            environ['wsgi.multiprocess'] = False
            return handler(environ, start_response)
        evwsgi.wsgi_cb(('', app))
        evwsgi.run()
class TornadoServer(ServerAdapter):
    """ The super hyped asynchronous server by facebook. Untested. """
    def run(self, handler):  # pragma: no cover
        import tornado.httpserver
        import tornado.ioloop
        import tornado.wsgi
        wsgi_container = tornado.wsgi.WSGIContainer(handler)
        http_server = tornado.httpserver.HTTPServer(wsgi_container)
        http_server.listen(port=self.port, address=self.host)
        tornado.ioloop.IOLoop.instance().start()
class AppEngineServer(ServerAdapter):
    """ Adapter for Google App Engine. """
    quiet = True
    def run(self, handler):
        from google.appengine.ext.webapp import util
        # A main() function in the handler script enables 'App Caching',
        # which _really_ improves performance. Install one if it is missing.
        main_module = sys.modules.get('__main__')
        if main_module and not hasattr(main_module, 'main'):
            main_module.main = lambda: util.run_wsgi_app(handler)
        util.run_wsgi_app(handler)
class TwistedServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from twisted.internet import reactor
        from twisted.python.threadpool import ThreadPool
        from twisted.web import server, wsgi
        pool = ThreadPool()
        pool.start()
        reactor.addSystemEventTrigger('after', 'shutdown', pool.stop)
        resource = wsgi.WSGIResource(reactor, pool, handler)
        reactor.listenTCP(self.port, server.Site(resource),
                          interface=self.host)
        reactor.run()
class DieselServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from diesel.protocols.wsgi import WSGIApplication
        WSGIApplication(handler, port=self.port).run()
class GeventServer(ServerAdapter):
    """ Untested. Options:

        * `fast` (default: False) uses libevent's http server, but has some
          issues: No streaming, no pipelining, no SSL.
        * See gevent.wsgi.WSGIServer() documentation for more options.
    """
    def run(self, handler):
        from gevent import wsgi, pywsgi, local
        # If threading.local is not gevent's patched version, then
        # gevent.monkey.patch_all() was not called before bottle was imported.
        if not isinstance(threading.local(), local.local):
            msg = "Bottle requires gevent.monkey.patch_all() (before import)"
            raise RuntimeError(msg)
        # The 'fast' libevent server lacks streaming/pipelining/SSL; default
        # to pywsgi unless explicitly requested.
        if not self.options.pop('fast', None): wsgi = pywsgi
        self.options['log'] = None if self.quiet else 'default'
        address = (self.host, self.port)
        server = wsgi.WSGIServer(address, handler, **self.options)
        if 'BOTTLE_CHILD' in os.environ:
            # Cooperate with the auto-reloader: shut down cleanly on SIGINT.
            import signal
            signal.signal(signal.SIGINT, lambda s, f: server.stop())
        server.serve_forever()
class GeventSocketIOServer(ServerAdapter):
    """ Adapter for the gevent-socketio server. """
    def run(self, handler):
        from socketio import server
        bind = (self.host, self.port)
        srv = server.SocketIOServer(bind, handler, **self.options)
        srv.serve_forever()
class GunicornServer(ServerAdapter):
    """ Untested. See http://gunicorn.org/configure.html for options. """
    def run(self, handler):
        from gunicorn.app.base import Application

        # Adapter options become gunicorn settings; bind address is derived
        # from host/port.
        config = {'bind': "%s:%d" % (self.host, int(self.port))}
        config.update(self.options)

        class GunicornApplication(Application):
            def init(self, parser, opts, args):
                return config

            def load(self):
                return handler

        GunicornApplication().run()
class EventletServer(ServerAdapter):
    """ Untested """
    def run(self, handler):
        from eventlet import wsgi, listen
        # Bind the listening socket once and reuse it: the previous code
        # called listen() again in the fallback branch while the first
        # socket was still bound, which can fail with 'address in use'.
        sock = listen((self.host, self.port))
        try:
            wsgi.server(sock, handler, log_output=(not self.quiet))
        except TypeError:
            # Fallback for old eventlet versions that do not accept the
            # log_output keyword argument.
            wsgi.server(sock, handler)
class RocketServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from rocket import Rocket
        interfaces = (self.host, self.port)
        server = Rocket(interfaces, 'wsgi', {'wsgi_app': handler})
        server.start()
class BjoernServer(ServerAdapter):
    """ Fast server written in C: https://github.com/jonashaag/bjoern """
    def run(self, handler):
        from bjoern import run
        # bjoern blocks here until terminated.
        run(handler, self.host, self.port)
class AutoServer(ServerAdapter):
    """ Try a list of server adapters in order and run the first one whose
        backend library is importable. """
    adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer,
                WSGIRefServer]
    def run(self, handler):
        for adapter_cls in self.adapters:
            try:
                instance = adapter_cls(self.host, self.port, **self.options)
                return instance.run(handler)
            except ImportError:
                pass
#: Map of server names (accepted by run(server=...)) to adapter classes.
server_names = {
    'cgi': CGIServer,
    'flup': FlupFCGIServer,
    'wsgiref': WSGIRefServer,
    'waitress': WaitressServer,
    'cherrypy': CherryPyServer,
    'paste': PasteServer,
    'fapws3': FapwsServer,
    'tornado': TornadoServer,
    'gae': AppEngineServer,
    'twisted': TwistedServer,
    'diesel': DieselServer,
    'meinheld': MeinheldServer,
    'gunicorn': GunicornServer,
    'eventlet': EventletServer,
    'gevent': GeventServer,
    'geventSocketIO':GeventSocketIOServer,
    'rocket': RocketServer,
    'bjoern' : BjoernServer,
    'auto': AutoServer,
}
###############################################################################
# Application Control ##########################################################
###############################################################################
def load(target, **namespace):
    """ Import a module or fetch an object from a module.

        * ``package.module`` returns `module` as a module object.
        * ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
        * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.

        The last form accepts not only function calls, but any type of
        expression. Keyword arguments passed to this function are available as
        local variables. Example: ``import_string('re:compile(x)', x='[a-z]')``
    """
    if ':' in target:
        module, target = target.split(":", 1)
    else:
        module, target = target, None
    if module not in sys.modules:
        __import__(module)
    if not target:
        return sys.modules[module]
    if target.isalnum():
        # Plain attribute name: a simple getattr is enough.
        return getattr(sys.modules[module], target)
    # Arbitrary expression: evaluate with the package in the namespace.
    package_name = module.split('.')[0]
    namespace[package_name] = sys.modules[package_name]
    return eval('%s.%s' % (module, target), namespace)
def load_app(target):
    """ Load a bottle application from a module and make sure that the import
        does not affect the current default application, but returns a separate
        application object. See :func:`load` for the target parameter. """
    global NORUN
    NORUN, nr_old = True, NORUN
    try:
        # Push a fresh default application so decorators in the imported
        # module register against it instead of the current one.
        tmp = default_app.push()
        rv = load(target)
        return rv if callable(rv) else tmp
    finally:
        default_app.remove(tmp)  # Drop the temporary default application
        NORUN = nr_old
_debug = debug  # Preserved reference: run() shadows 'debug' with a parameter.
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
        interval=1, reloader=False, quiet=False, plugins=None,
        debug=None, **kargs):
    """ Start a server instance. This method blocks until the server terminates.

        :param app: WSGI application or target string supported by
               :func:`load_app`. (default: :func:`default_app`)
        :param server: Server adapter to use. See :data:`server_names` keys
               for valid names or pass a :class:`ServerAdapter` subclass.
               (default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on
               all interfaces including the external one. (default: 127.0.0.1)
        :param port: Server port to bind to. Values below 1024 require root
               privileges. (default: 8080)
        :param reloader: Start auto-reloading server? (default: False)
        :param interval: Auto-reloader interval in seconds (default: 1)
        :param quiet: Suppress output to stdout and stderr? (default: False)
        :param plugins: Plugins to install on the application. (default: None)
        :param debug: Change the debug level before starting. (default: None)
        :param kargs: Options passed to the server adapter.
    """
    if NORUN: return
    if reloader and not os.environ.get('BOTTLE_CHILD'):
        # Reloader parent process: spawn a child interpreter running this
        # same script and restart it whenever it exits with status 3.
        try:
            lockfile = None
            fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
            os.close(fd) # We only need this file to exist. We never write to it
            while os.path.exists(lockfile):
                args = [sys.executable] + sys.argv
                environ = os.environ.copy()
                environ['BOTTLE_CHILD'] = 'true'
                environ['BOTTLE_LOCKFILE'] = lockfile
                p = subprocess.Popen(args, env=environ)
                while p.poll() is None: # Busy wait...
                    # Touch the lockfile so the child sees the parent alive.
                    os.utime(lockfile, None) # I am alive!
                    time.sleep(interval)
                if p.poll() != 3:
                    # Any status other than 3 means 'do not restart'.
                    if os.path.exists(lockfile): os.unlink(lockfile)
                    sys.exit(p.poll())
        except KeyboardInterrupt:
            pass
        finally:
            if os.path.exists(lockfile):
                os.unlink(lockfile)
        return

    try:
        if debug is not None: _debug(debug)
        app = app or default_app()
        # Resolve 'module:app' target strings into application objects.
        if isinstance(app, basestring):
            app = load_app(app)
        if not callable(app):
            raise ValueError("Application is not callable: %r" % app)

        for plugin in plugins or []:
            app.install(plugin)

        # Resolve the server parameter: known name -> class -> instance.
        if server in server_names:
            server = server_names.get(server)
        if isinstance(server, basestring):
            server = load(server)
        if isinstance(server, type):
            server = server(host=host, port=port, **kargs)
        if not isinstance(server, ServerAdapter):
            raise ValueError("Unknown or unsupported server: %r" % server)

        server.quiet = server.quiet or quiet
        if not server.quiet:
            _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
            _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
            _stderr("Hit Ctrl-C to quit.\n\n")

        if reloader:
            # Reloader child process: watch module files in the background
            # and exit with status 3 to request a restart from the parent.
            lockfile = os.environ.get('BOTTLE_LOCKFILE')
            bgcheck = FileCheckerThread(lockfile, interval)
            with bgcheck:
                server.run(app)
            if bgcheck.status == 'reload':
                sys.exit(3)
        else:
            server.run(app)
    except KeyboardInterrupt:
        pass
    except (SystemExit, MemoryError):
        raise
    except:
        if not reloader: raise
        if not getattr(server, 'quiet', quiet):
            print_exc()
        # Give the user a moment to read the traceback, then ask the
        # reloader parent for a restart.
        time.sleep(interval)
        sys.exit(3)
class FileCheckerThread(threading.Thread):
    ''' Interrupt the main thread as soon as a changed module file is
        detected, the lockfile gets deleted or gets too old. '''

    def __init__(self, lockfile, interval):
        threading.Thread.__init__(self)
        self.lockfile, self.interval = lockfile, interval
        #: Is one of 'reload', 'error' or 'exit'
        self.status = None

    def run(self):
        exists = os.path.exists
        mtime = lambda path: os.stat(path).st_mtime
        files = dict()

        # Snapshot the mtime of every currently loaded module file.
        for module in list(sys.modules.values()):
            path = getattr(module, '__file__', '')
            # Watch the .py source, not the compiled .pyc/.pyo artifact.
            if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
            if path and exists(path): files[path] = mtime(path)

        while not self.status:
            # A missing or stale lockfile means the parent process is gone.
            if not exists(self.lockfile)\
            or mtime(self.lockfile) < time.time() - self.interval - 5:
                self.status = 'error'
                thread.interrupt_main()
            # Any changed or deleted module file triggers a reload.
            for path, lmtime in list(files.items()):
                if not exists(path) or mtime(path) > lmtime:
                    self.status = 'reload'
                    thread.interrupt_main()
                    break
            time.sleep(self.interval)

    def __enter__(self):
        self.start()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.status: self.status = 'exit' # silent exit
        self.join()
        # Swallow the KeyboardInterrupt raised by interrupt_main().
        return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
###############################################################################
# Template Adapters ############################################################
###############################################################################
class TemplateError(HTTPError):
    # Raised on template lookup or parsing failures; rendered as HTTP 500.
    def __init__(self, message):
        HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
    """ Base class and minimal API for template adapters """
    extensions = ['tpl','html','thtml','stpl']  #: extensions tried by search()
    settings = {} #used in prepare()
    defaults = {} #used in render()

    def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
        """ Create a new template.
        If the source parameter (str or buffer) is missing, the name argument
        is used to guess a template filename. Subclasses can assume that
        self.source and/or self.filename are set. Both are strings.
        The lookup, encoding and settings parameters are stored as instance
        variables.
        The lookup parameter stores a list containing directory paths.
        The encoding parameter should be used to decode byte strings or files.
        The settings parameter contains a dict for engine-specific settings.
        """
        self.name = name
        self.source = source.read() if hasattr(source, 'read') else source
        self.filename = source.filename if hasattr(source, 'filename') else None
        self.lookup = [os.path.abspath(x) for x in lookup]
        self.encoding = encoding
        self.settings = self.settings.copy() # Copy from class variable
        self.settings.update(settings) # Apply
        if not self.source and self.name:
            # No inline source given: resolve the name against the lookup path.
            self.filename = self.search(self.name, self.lookup)
            if not self.filename:
                raise TemplateError('Template %s not found.' % repr(name))
        if not self.source and not self.filename:
            raise TemplateError('No template specified.')
        self.prepare(**self.settings)

    @classmethod
    def search(cls, name, lookup=[]):
        """ Search name in all directories specified in lookup.
        First without, then with common extensions. Return first hit. """
        if not lookup:
            depr('The template lookup path list should not be empty.', True) #0.12
            lookup = ['.']

        if os.path.isabs(name) and os.path.isfile(name):
            depr('Absolute template path names are deprecated.', True) #0.12
            return os.path.abspath(name)

        for spath in lookup:
            spath = os.path.abspath(spath) + os.sep
            fname = os.path.abspath(os.path.join(spath, name))
            # Traversal guard: the joined path must stay below spath.
            if not fname.startswith(spath): continue
            if os.path.isfile(fname): return fname
            for ext in cls.extensions:
                if os.path.isfile('%s.%s' % (fname, ext)):
                    return '%s.%s' % (fname, ext)

    @classmethod
    def global_config(cls, key, *args):
        ''' This reads or sets the global settings stored in class.settings. '''
        if args:
            cls.settings = cls.settings.copy() # Make settings local to class
            cls.settings[key] = args[0]
        else:
            return cls.settings[key]

    def prepare(self, **options):
        """ Run preparations (parsing, caching, ...).
        It should be possible to call this again to refresh a template or to
        update settings.
        """
        raise NotImplementedError

    def render(self, *args, **kwargs):
        """ Render the template with the specified local variables and return
        a single byte or unicode string. If it is a byte string, the encoding
        must match self.encoding. This method must be thread-safe!
        Local variables may be provided in dictionaries (args)
        or directly, as keywords (kwargs).
        """
        raise NotImplementedError
class MakoTemplate(BaseTemplate):
    """ Adapter for the Mako template engine. """
    def prepare(self, **options):
        from mako.lookup import TemplateLookup
        from mako.template import Template
        options.update({'input_encoding': self.encoding})
        options.setdefault('format_exceptions', bool(DEBUG))
        lookup = TemplateLookup(directories=self.lookup, **options)
        if self.source:
            self.tpl = Template(self.source, lookup=lookup, **options)
        else:
            self.tpl = Template(uri=self.name, filename=self.filename,
                                lookup=lookup, **options)

    def render(self, *args, **kwargs):
        for dictarg in args:
            kwargs.update(dictarg)
        variables = self.defaults.copy()
        variables.update(kwargs)
        return self.tpl.render(**variables)
class CheetahTemplate(BaseTemplate):
    """ Adapter for the Cheetah template engine. """
    def prepare(self, **options):
        from Cheetah.Template import Template
        # Thread-local variable store shared with the compiled template
        # through its searchList.
        self.context = threading.local()
        self.context.vars = {}
        options['searchList'] = [self.context.vars]
        if self.source:
            self.tpl = Template(source=self.source, **options)
        else:
            self.tpl = Template(file=self.filename, **options)

    def render(self, *args, **kwargs):
        for dictarg in args:
            kwargs.update(dictarg)
        self.context.vars.update(self.defaults)
        self.context.vars.update(kwargs)
        rendered = str(self.tpl)
        self.context.vars.clear()
        return rendered
class Jinja2Template(BaseTemplate):
    """ Adapter for the Jinja2 template engine. """
    def prepare(self, filters=None, tests=None, globals={}, **kwargs):
        from jinja2 import Environment, FunctionLoader
        self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
        if filters:
            self.env.filters.update(filters)
        if tests:
            self.env.tests.update(tests)
        if globals:
            self.env.globals.update(globals)
        if self.source:
            self.tpl = self.env.from_string(self.source)
        else:
            self.tpl = self.env.get_template(self.filename)

    def render(self, *args, **kwargs):
        for dictarg in args:
            kwargs.update(dictarg)
        variables = self.defaults.copy()
        variables.update(kwargs)
        return self.tpl.render(**variables)

    def loader(self, name):
        # Jinja2 FunctionLoader callback: resolve and read a template file.
        fname = self.search(name, self.lookup)
        if not fname:
            return
        with open(fname, "rb") as f:
            return f.read().decode(self.encoding)
class SimpleTemplate(BaseTemplate):
    # TODO: Figure out how to pass the arguments for this correctly

    def prepare(self, escape_func=html_escape, noescape=True, syntax=None, **ka):
        # Cache for sub-templates pulled in via include/rebase.
        self.cache = {}
        enc = self.encoding
        self._str = lambda x: touni(x, enc)
        self._escape = lambda x: escape_func(touni(x, enc))
        self.syntax = syntax
        if noescape:
            # Swap the two helpers so {{x}} prints raw and {{!x}} escapes.
            self._str, self._escape = self._escape, self._str

    @cached_property
    def co(self):
        # Code object compiled from the translated template source.
        return compile(self.code, self.filename or '<string>', 'exec')

    @cached_property
    def code(self):
        source = self.source or open(self.filename, 'rb').read()
        try:
            source, encoding = touni(source), 'utf8'
        except UnicodeError:
            depr('Template encodings other than utf8 are no longer supported.') #0.11
            source, encoding = touni(source, 'latin1'), 'latin1'
        parser = StplParser(source, encoding=encoding, syntax=self.syntax)
        code = parser.translate()
        self.encoding = parser.encoding
        return code

    def _rebase(self, _env, _name=None, **kwargs):
        # Record the rebase target; it is applied after execute() finishes.
        _env['_rebase'] = (_name, kwargs)

    def _include(self, _env, _name=None, **kwargs):
        env = _env.copy()
        env.update(kwargs)
        if _name not in self.cache:
            self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
        return self.cache[_name].execute(env['_stdout'], env)

    def execute(self, _stdout, kwargs):
        env = self.defaults.copy()
        env.update(kwargs)
        # Names made available inside the compiled template code.
        env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
            'include': functools.partial(self._include, env),
            'rebase': functools.partial(self._rebase, env), '_rebase': None,
            '_str': self._str, '_escape': self._escape, 'get': env.get,
            'setdefault': env.setdefault, 'defined': env.__contains__ })
        eval(self.co, env)
        if env.get('_rebase'):
            subtpl, rargs = env.pop('_rebase')
            rargs['base'] = ''.join(_stdout) #copy stdout
            del _stdout[:] # clear stdout
            # Render the base template with the captured output as 'base'.
            return self._include(env, subtpl, **rargs)
        return env

    def render(self, *args, **kwargs):
        """ Render the template using keyword arguments as local variables.
        Local variables may be provided in dictionaries (args)
        or directly, as keywords (kwargs). Returns the rendered string. """
        env = {}; stdout = []
        for dictarg in args: env.update(dictarg)
        env.update(kwargs)
        self.execute(stdout, env)
        return ''.join(stdout)
#: Raised by StplParser for malformed template syntax.
class StplSyntaxError(TemplateError): pass
class StplParser(object):
    ''' Parser for stpl templates. '''
    _re_cache = {} #: Cache for compiled re patterns
    # This huge pile of voodoo magic splits python code into 8 different tokens.
    # 1: All kinds of python strings (trust me, it works)
    _re_tok = '((?m)[urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
              '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
              '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
              '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
    _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
    # 2: Comments (until end of line, but not the newline itself)
    _re_tok += '|(#.*)'
    # 3,4: Keywords that start or continue a python block (only start of line)
    _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
               '|^([ \\t]*(?:elif|else|except|finally)\\b)'
    # 5: Our special 'end' keyword (but only if it stands alone)
    _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
    # 6: A customizable end-of-code-block template token (only end of line)
    _re_tok += '|(%(block_close)s[ \\t]*(?=$))'
    # 7: And finally, a single newline. The 8th token is 'everything else'
    _re_tok += '|(\\r?\\n)'
    # Match the start tokens of code areas in a template
    _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))'
    # Match inline statements (may contain python strings)
    _re_inl = '%%(inline_start)s((?:%s|[^\'"\n]*?)+)%%(inline_end)s' % _re_inl
    #: Space-separated default tokens:
    #: block_start block_close line_start inline_start inline_end
    default_syntax = '<% %> % {{ }}'
def __init__(self, source, syntax=None, encoding='utf8'):
self.source, self.encoding = touni(source, encoding), encoding
self.set_syntax(syntax or self.default_syntax)
self.code_buffer, self.text_buffer = [], []
self.lineno, self.offset = 1, 0
self.indent, self.indent_mod = 0, 0
def get_syntax(self):
''' Tokens as a space separated string (default: <% %> % {{ }}) '''
return self._syntax
def set_syntax(self, syntax):
self._syntax = syntax
self._tokens = syntax.split()
if not syntax in self._re_cache:
names = 'block_start block_close line_start inline_start inline_end'
etokens = map(re.escape, self._tokens)
pattern_vars = dict(zip(names.split(), etokens))
patterns = (self._re_split, self._re_tok, self._re_inl)
patterns = [re.compile(p%pattern_vars) for p in patterns]
self._re_cache[syntax] = patterns
self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]
syntax = property(get_syntax, set_syntax)
def translate(self):
if self.offset: raise RuntimeError('Parser is a one time instance.')
while True:
m = self.re_split.search(self.source[self.offset:])
if m:
text = self.source[self.offset:self.offset+m.start()]
self.text_buffer.append(text)
self.offset += m.end()
if m.group(1): # Escape syntax
line, sep, _ = self.source[self.offset:].partition('\n')
self.text_buffer.append(m.group(2)+line+sep)
self.offset += len(line+sep)+1
continue
self.flush_text()
self.read_code(multiline=bool(m.group(4)))
else: break
self.text_buffer.append(self.source[self.offset:])
self.flush_text()
return ''.join(self.code_buffer)
def read_code(self, multiline):
code_line, comment = '', ''
while True:
m = self.re_tok.search(self.source[self.offset:])
if not m:
code_line += self.source[self.offset:]
self.offset = len(self.source)
self.write_code(code_line.strip(), comment)
return
code_line += self.source[self.offset:self.offset+m.start()]
self.offset += m.end()
_str, _com, _blk1, _blk2, _end, _cend, _nl = m.groups()
if _str: # Python string
code_line += _str
elif _com: # Python comment (up to EOL)
comment = _com
if multiline and _com.strip().endswith(self._tokens[1]):
multiline = False # Allow end-of-block in comments
elif _blk1: # Start-block keyword (if/for/while/def/try/...)
code_line, self.indent_mod = _blk1, -1
self.indent += 1
elif _blk2: # Continue-block keyword (else/elif/except/...)
code_line, self.indent_mod = _blk2, -1
elif _end: # The non-standard 'end'-keyword (ends a block)
self.indent -= 1
elif _cend: # The end-code-block template token (usually '%>')
if multiline: multiline = False
else: code_line += _cend
else: # \n
self.write_code(code_line.strip(), comment)
self.lineno += 1
code_line, comment, self.indent_mod = '', '', 0
if not multiline:
break
def flush_text(self):
text = ''.join(self.text_buffer)
del self.text_buffer[:]
if not text: return
parts, pos, nl = [], 0, '\\\n'+' '*self.indent
for m in self.re_inl.finditer(text):
prefix, pos = text[pos:m.start()], m.end()
if prefix:
parts.append(nl.join(map(repr, prefix.splitlines(True))))
if prefix.endswith('\n'): parts[-1] += nl
parts.append(self.process_inline(m.group(1).strip()))
if pos < len(text):
prefix = text[pos:]
lines = prefix.splitlines(True)
if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
parts.append(nl.join(map(repr, lines)))
code = '_printlist((%s,))' % ', '.join(parts)
self.lineno += code.count('\n')+1
self.write_code(code)
def process_inline(self, chunk):
if chunk[0] == '!': return '_str(%s)' % chunk[1:]
return '_escape(%s)' % chunk
def write_code(self, line, comment=''):
code = ' ' * (self.indent+self.indent_mod)
code += line.lstrip() + comment + '\n'
self.code_buffer.append(code)
def template(*args, **kwargs):
    '''
    Get a rendered template as a string iterator.
    You can use a name, a filename or a template string as first parameter.
    Template rendering arguments can be passed as dictionaries
    or directly (as keyword arguments).
    '''
    tpl = args[0] if args else None
    adapter = kwargs.pop('template_adapter', SimpleTemplate)
    lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
    cache_key = (id(lookup), tpl)
    # Compile on first use; in DEBUG mode always recompile.
    if DEBUG or cache_key not in TEMPLATES:
        settings = kwargs.pop('template_settings', {})
        if isinstance(tpl, adapter):
            # Caller handed us a ready-made template instance.
            TEMPLATES[cache_key] = tpl
            if settings:
                TEMPLATES[cache_key].prepare(**settings)
        elif any(marker in tpl for marker in ('\n', '{', '%', '$')):
            # Looks like template source, not a template name.
            TEMPLATES[cache_key] = adapter(source=tpl, lookup=lookup, **settings)
        else:
            TEMPLATES[cache_key] = adapter(name=tpl, lookup=lookup, **settings)
    if not TEMPLATES[cache_key]:
        abort(500, 'Template (%s) not found' % tpl)
    for extra_vars in args[1:]:
        kwargs.update(extra_vars)
    return TEMPLATES[cache_key].render(kwargs)
# Convenience wrappers: template() pre-bound to a specific engine adapter.
mako_template = functools.partial(template, template_adapter=MakoTemplate)
cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
def view(tpl_name, **defaults):
    ''' Decorator: renders a template for a handler.
        The handler can control its behavior like that:

          - return a dict of template vars to fill out the template
          - return something other than a dict and the view decorator will not
            process the template, but return the handler result as is.
            This includes returning a HTTPResponse(dict) to get,
            for instance, JSON with autojson or other castfilters.
    '''
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            # A dict-like result fills the template (defaults first,
            # handler values override them).
            if isinstance(result, (dict, DictMixin)):
                merged = defaults.copy()
                merged.update(result)
                return template(tpl_name, **merged)
            # None means: render with the decorator defaults only.
            if result is None:
                return template(tpl_name, defaults)
            # Anything else is passed through untouched.
            return result
        return wrapper
    return decorator
mako_view = functools.partial(view, template_adapter=MakoTemplate)
cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
###############################################################################
# Constants and Globals ########################################################
###############################################################################
TEMPLATE_PATH = ['./', './views/']
TEMPLATES = {}
DEBUG = False
NORUN = False # If set, run() does nothing. Used by load_app()
#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
HTTP_CODES = httplib.responses
HTTP_CODES[418] = "I'm a teapot" # RFC 2324
HTTP_CODES[428] = "Precondition Required"
HTTP_CODES[429] = "Too Many Requests"
HTTP_CODES[431] = "Request Header Fields Too Large"
HTTP_CODES[511] = "Network Authentication Required"
_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
#: The default template used for error pages. Override with @error()
ERROR_PAGE_TEMPLATE = """
%%try:
%%from %s import DEBUG, HTTP_CODES, request, touni
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error: {{e.status}}</title>
<style type="text/css">
html {background-color: #eee; font-family: sans;}
body {background-color: #fff; border: 1px solid #ddd;
padding: 15px; margin: 15px;}
pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
</style>
</head>
<body>
<h1>Error: {{e.status}}</h1>
<p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
caused an error:</p>
<pre>{{e.body}}</pre>
%%if DEBUG and e.exception:
<h2>Exception:</h2>
<pre>{{repr(e.exception)}}</pre>
%%end
%%if DEBUG and e.traceback:
<h2>Traceback:</h2>
<pre>{{e.traceback}}</pre>
%%end
</body>
</html>
%%except ImportError:
<b>ImportError:</b> Could not generate the error page. Please add bottle to
the import path.
%%end
""" % __name__
#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
#: request callback, this instance always refers to the *current* request
#: (even on a multithreaded server).
request = LocalRequest()
#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
#: HTTP response for the *current* request.
response = LocalResponse()
#: A thread-safe namespace. Not used by Bottle.
local = threading.local()
# Initialize app stack (create first empty Bottle app)
# BC: 0.6.4 and needed for run()
app = default_app = AppStack()
app.push()
#: A virtual package that redirects import statements.
#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
if __name__ == '__main__':
    # Command-line entry point: parsed options come from the module prelude.
    opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
    if opt.version:
        _stdout('Bottle %s\n'%__version__)
        sys.exit(0)
    if not args:
        parser.print_help()
        _stderr('\nError: No application specified.\n')
        sys.exit(1)
    # Make the current directory importable and alias this module as
    # 'bottle' so app code importing bottle gets this running instance.
    sys.path.insert(0, '.')
    sys.modules.setdefault('bottle', sys.modules['__main__'])
    host, port = (opt.bind or 'localhost'), 8080
    # IPv6-aware host:port split: only split when the last ':' lies
    # outside any closing ']' bracket.
    if ':' in host and host.rfind(']') < host.rfind(':'):
        host, port = host.rsplit(':', 1)
    host = host.strip('[]')

    run(args[0], host=host, port=int(port), server=opt.server,
        reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)

# THE END
path = search.pop()
if not os.path.isdir(path): continue |
<|file_name|>mehi_local.py<|end_file_name|><|fim▁begin|>###############################
# Author : septicmk
# Date : 2015/07/25 16:14:09
# FileName : main.py
################################
from lambdaimage import preprocess as prep
from lambdaimage import registration as reg
from lambdaimage import fusion as fus
from pyspark import SparkContext, SparkConf
from lambdaimage import lambdaimageContext
from lambdaimage.utils.tool import exeTime, log, showsize
from parseXML import load_xml_file, get_function
import numpy as np
# Spark configuration: single local worker, shared-memory scratch dir, and
# generous driver memory for large image collections.
conf = SparkConf().setAppName('test').setMaster('local[1]').set('spark.executor.memory','2g').set('spark.driver.maxResultSize','6g').set('spark.driver.memory','8g').set('spark.local.dir','/dev/shm').set('spark.storage.memoryFraction','0.2').set('spark.default.parallelism','10')
tsc=lambdaimageContext.start(conf=conf)
# XML config selects which registration/fusion functions to run (see
# get_function() calls below).
result = load_xml_file("./lambdaimage.xml")

log('info')('tiff load start...')
rddA = tsc.loadImages('/home/wb/data/1-L/*.tif', inputFormat='tif-stack')
rddB = tsc.loadImages('/home/wb/data/1-R/*.tif', inputFormat='tif-stack')
log('info')('tiff load over...')

log('info')('intensity normalization start ...')
rddA = prep.intensity_normalization(rddA)
rddB = prep.intensity_normalization(rddB)
# Right-hand stack is mirrored before registration against the left stack.
rddB = prep.flip(rddB)
# 8-bit copies are used only to estimate the registration vector.
_rddA = prep.intensity_normalization(rddA,8)
_rddB = prep.intensity_normalization(rddB,8)
log('info')('intensity normalization over ...')

log('info')('registration start ...')
vec0 = [0,0,0,1,1,0,0]
#vec = reg.c_powell(_rddA.get(4), _rddB.get(4), vec0)
# NOTE(review): eval() on a function name taken from the XML file — the
# config is fully trusted here; confirm that is acceptable.
vec = eval(get_function("reg",result))(_rddA.get(4), _rddB.get(4), vec0)
# NOTE(review): ``vec`` is computed but never applied to rddB in this
# listing — confirm whether a reg.execute(rddB, vec) step is missing.
log('info')('registration over ...')

log('info')('fusion start ...')
L_img_stack = rddA.collectValuesAsArray()
R_img_stack = rddB.collectValuesAsArray()
img_stack = zip(L_img_stack, R_img_stack)
rdd = tsc.loadImagesFromArray(img_stack)
#fused_img = fus.wavelet_fusion(rdd)
fused_img = eval(get_function("fus", result))(rdd)
fused_img = tsc.loadImagesFromArray(fused_img)
log('info')('fusion over ...')

log('info')('saving ...')
fused_img.exportAsTiffs('/home/wb/data/lambdaimage/fusion',overwrite = True)
#fused_img = np.squeeze(np.array(fused_img.values().collect()))

log('info')('subtract background start ...')
sb_img = prep.subtract_Background(fused_img)
log('info')('sbutract background over ... ')
log('info')('saving ...')
sb_img.exportAsTiffs('/home/wb/data/lambdaimage/subtract',overwrite = True)
<|file_name|>sipaiSampleServer.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2012-2-5
@author: zepheir
'''
import sys
sys.path.append('/app/srv/src')
from binascii import b2a_hex<|fim▁hole|> from twisted.internet import epollreactor
# Prefer the faster epoll reactor when available (Linux); fall back to the
# default reactor on any failure. The visible source had the except clause
# without its try and used ``epollreactor`` without importing it.
try:
    from twisted.internet import epollreactor
    epollreactor.install()
except:
    pass
from twisted.internet import reactor
from twisted.python import log
from twisted.application import service
from zhyDB import ZhyDB
import Zoro
from ussop import sipai as Sipai
import time
import config
from config import *
def ReceiveData(*data):
    # Module-level debug hook: dumps whatever the modbus layer delivers.
    if DEBUG: print 'print data----------------', data
# Constants / module-wide singletons
# ZDB = SipaiDB()
zhy = ZhyDB()
# (ip, port) -> list of module descriptors, loaded once at import time.
SipaiModsDict = zhy.listSipaiMods(allSDS=None)
# factoryDict = {}
# modules = {}
class SampleServer(object):
    """Polls every Sipai module behind one SDS gateway over modbus/TCP.

    One instance per (host, port) gateway. ``update`` pops the next module
    descriptor, issues its read command through the twisted reactor and
    reschedules itself until the work list is drained.
    """
    def __init__(self, *sds):
        super(SampleServer, self).__init__()
        self.sds = sds
        self.host,self.port = self.sds[0], int(self.sds[1])
        self.modules = []   # descriptors still to poll in this round
        self.mod = object   # module object currently being read
        self.nowtype=''     # type string of the module currently being read
        # reConnectMode=False: reconnection is driven manually in update().
        self.factory = Zoro.SetupModbusConnect(self.host, self.port, self.ReceiveData, reConnectMode=False)
        self.factory.spendtime = 0.3
        self.setup()
    def setup(self):
        # Refill the work list from the global descriptor table and reset
        # the per-round countdown timer.
        self.modules += SipaiModsDict[self.sds]
        self.sampletimer = SipaiSampleTimer
        if ECHO: print "*********** Time pass from start: %s"%(time.ctime()), self.factory.connection.getDestination(),self.factory.getState()
    def ReceiveData(self, *data):
        # Callback from the modbus factory: data = (addr, (ip, port), payload).
        if DEBUG: print ' ===> Received Data:', data, b2a_hex(data[2])
        # global zhy
        _result = self.mod.dealdata(data[2])
        print '----------result---------',_result
        print data[0],data[1],zhy.updateSipaiResults(
            ip=data[1][0],
            port=data[1][1],
            addr=data[0],
            type=self.nowtype,
            # value=b2a_hex(data[2])
            value=_result
            )
    def update(self):
        if DEBUG: print "[",self.sds,"] starting in the SampleServer Class!"
        if len(self.modules)>0:
            # Take the next module, mark it 'reading' and send its read command.
            modinfo=self.modules.pop(0)
            self.nowtype = modinfo['type']
            self.mod = Sipai.createspm(type=modinfo['type'], address=modinfo['addr'])
            _cmd = self.mod.cmd(self.mod.CMD_READDATA)
            zhy.setSipaiModState(
                ip=self.host,
                port=str(self.port),
                addr=modinfo['addr'],
                type=self.nowtype,
                state='reading'
                )
            if DEBUG: print "===> Output command:",b2a_hex(_cmd)
            reactor.callLater(0.1, self.factory.protocol.SendCmd, _cmd)
            reactor.callLater(self.factory.spendtime, self.update)
            self.sampletimer-=self.factory.spendtime
        else:
            # Round finished: optionally cycle the connection, then wait out
            # the remaining sample interval before the next round.
            if SERVERRECONNECT:
                reactor.callLater(self.factory.spendtime, self.factory.connection.disconnect)
                reactor.callLater(SdsConnectTimer,self.factory.connection.connect)
                reactor.callLater(SdsConnectTimer,self.setup)
            # NOTE(review): original indentation is ambiguous here — this
            # reschedule is assumed to run regardless of SERVERRECONNECT;
            # confirm against the upstream source.
            reactor.callLater(self.sampletimer-SdsConnectTimer, self.update)
            # reactor.callLater(SdsConnectTimer+self.factory.spendtime, self.update)
# Registry of running servers, keyed by their (ip, port) descriptor tuple.
servs ={}

def main():
    # One SampleServer (and its polling loop) per SDS gateway.
    for sds in SipaiModsDict:
        servs[sds]=SampleServer(sds[0],sds[1])
        servs[sds].update()
        # time.sleep(0.2)

# if DEBUG:
#     # servs1=SampleServer('130.139.200.50','6020')
#     servs2=SampleServer('130.139.200.51','10001')
#     # servs3=SampleServer('130.139.200.56','10001')
#     # servs1.update()
#     servs2.update()
#     # servs3.update()
# else:
#     for sds in SipaiModsDict:
#         servs[sds]=SampleServer(sds[0],sds[1])
#         servs[sds].update()
#         time.sleep(0.2)
if __name__ == '__main__':
    # Standalone run: drive the reactor ourselves.
    import sys
    main()
    reactor.run()
    print 'reactor stopped!'
    sys.exit(1)
elif __name__ =="__builtin__":
    # Run under twistd: expose a twisted Application object instead of
    # starting the reactor directly.
    import sys
    main()
    application = service.Application("SIPAI")
try: |
<|file_name|>sample_template.py<|end_file_name|><|fim▁begin|># -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from json import loads
from collections import defaultdict
from qiita_core.util import execute_as_transaction
from qiita_core.qiita_settings import r_client
from qiita_db.util import generate_analyses_list_per_study
from qiita_db.metadata_template.sample_template import SampleTemplate
from qiita_db.exceptions import QiitaDBUnknownIDError
from qiita_db.exceptions import QiitaDBColumnError
from qiita_db.processing_job import ProcessingJob
from qiita_pet.handlers.api_proxy.util import check_access
SAMPLE_TEMPLATE_KEY_FORMAT = 'sample_template_%s'
def _check_sample_template_exists(samp_id):
    """Make sure a sample template exists in the system

    Parameters
    ----------
    samp_id : int or str castable to int
        SampleTemplate id to check

    Returns
    -------
    dict
        {'status': status,
         'message': msg}
    """
    samp_id = int(samp_id)
    if SampleTemplate.exists(samp_id):
        return {'status': 'success', 'message': ''}
    return {'status': 'error',
            'message': 'Sample template %d does not exist' % samp_id}
def sample_template_get_req(samp_id, user_id):
    """Gets the json of the full sample template

    Parameters
    ----------
    samp_id : int or int castable string
        SampleTemplate id to get info for
    user_id : str
        User requesting the sample template info

    Returns
    -------
    dict of objects
        {'status': status,
         'message': msg,
         'template': dict of {str: {str: object, ...}, ...}

        template is a dictionary where the keys are the metadata samples
        and the values are a dictionary of column and value.
        Format {sample: {column: value, ...}, ...}
    """
    exists = _check_sample_template_exists(int(samp_id))
    if exists['status'] != 'success':
        return exists
    access_error = check_access(int(samp_id), user_id)
    if access_error:
        return access_error

    template = SampleTemplate(int(samp_id))
    # NOTE(review): access is checked twice — once with samp_id, once with
    # the template's own study id; presumably these coincide, confirm.
    access_error = check_access(template.study_id, user_id)
    if access_error:
        return access_error
    df = template.to_dataframe()
    return {'status': 'success',
            'message': '',
            'template': df.to_dict(orient='index')}
def sample_template_samples_get_req(samp_id, user_id):
    """Return the sorted list of sample names in a sample template.

    Parameters
    ----------
    samp_id : int or str typecastable to int
        SampleTemplate id to get info for
    user_id : str
        User requesting the sample template info

    Returns
    -------
    dict
        {'status': str, 'message': str, 'samples': list of str}
    """
    exists = _check_sample_template_exists(int(samp_id))
    if exists['status'] != 'success':
        return exists
    denied = check_access(samp_id, user_id)
    if denied:
        return denied
    sample_names = sorted(SampleTemplate(int(samp_id)))
    return {'status': 'success',
            'message': '',
            'samples': sample_names}
def sample_template_meta_cats_get_req(samp_id, user_id):
    """Return the sorted metadata categories of a sample template.

    Parameters
    ----------
    samp_id : int or str typecastable to int
        SampleTemplate id to get info for
    user_id : str
        User requesting the sample template info

    Returns
    -------
    dict
        {'status': str, 'message': str, 'categories': list of str}
    """
    exists = _check_sample_template_exists(int(samp_id))
    if exists['status'] != 'success':
        return exists
    denied = check_access(samp_id, user_id)
    if denied:
        return denied
    categories = sorted(SampleTemplate(int(samp_id)).categories)
    return {'status': 'success',
            'message': '',
            'categories': categories}
def sample_template_category_get_req(category, samp_id, user_id):
    """Return the per-sample values of one metadata category.

    Parameters
    ----------
    category : str
        Metadata category to get values for
    samp_id : int or str typecastable to int
        SampleTemplate id to get info for
    user_id : str
        User requesting the sample template info

    Returns
    -------
    dict
        {'status': str, 'message': str, 'values': dict of {str: object}}
    """
    exists = _check_sample_template_exists(int(samp_id))
    if exists['status'] != 'success':
        return exists
    denied = check_access(samp_id, user_id)
    if denied:
        return denied

    template = SampleTemplate(int(samp_id))
    try:
        values = template.get_category(category)
    except QiitaDBColumnError:
        # Unknown category: report it instead of raising.
        return {'status': 'error',
                'message': 'Category %s does not exist in sample template' %
                category}
    return {'status': 'success',
            'message': '',
            'values': values}
def analyses_associated_with_study(study_id, user_id):
    """Return all available analyses for a study.

    Parameters
    ----------
    study_id : int or str typecastable to int
        Study id to get info for
    user_id : str
        User requesting the info

    Returns
    -------
    dict
        {'status': str, 'message': str,
         'values': list of [qiita_db.analysis.Analysis,
                            prep_ids for this study]}
    """
    denied = check_access(study_id, user_id)
    if denied:
        return denied
    return {'status': 'success',
            'message': '',
            'values': generate_analyses_list_per_study(study_id)}
def get_sample_template_processing_status(st_id):
    """Return the processing state of the sample template's current job.

    Parameters
    ----------
    st_id : int or str
        Sample template id whose redis-stored job info is inspected.

    Returns
    -------
    tuple of (bool, str, str)
        (processing, alert_type, alert_msg): whether a job is still
        running, plus the alert class and message to display.
    """
    # Initialize variables here
    processing = False
    alert_type = ''
    alert_msg = ''
    job_info = r_client.get(SAMPLE_TEMPLATE_KEY_FORMAT % st_id)
    if job_info:
        # defaultdict avoids KeyErrors for optional fields in the payload.
        job_info = defaultdict(lambda: '', loads(job_info))
        job_id = job_info['job_id']
        job = ProcessingJob(job_id)
        job_status = job.status
        processing = job_status not in ('success', 'error')
        if processing:
            alert_type = 'info'
            alert_msg = 'This sample template is currently being processed'
        elif job_status == 'error':
            alert_type = 'danger'
            alert_msg = job.log.msg.replace('\n', '</br>')
        else:
            # Finished successfully: surface whatever alert the job stored.
            alert_type = job_info['alert_type']
            alert_msg = job_info['alert_msg'].replace('\n', '</br>')

    return processing, alert_type, alert_msg
@execute_as_transaction
def sample_template_filepaths_get_req(study_id, user_id):
    """Return all filepaths attached to a sample template.

    Parameters
    ----------
    study_id : int
        The current study object id
    user_id : str
        The current user object id

    Returns
    -------
    dict
        {'status': str, 'message': str,
         'filepaths': list of (int, str) as [(id, URL), ...]}
    """
    exists = _check_sample_template_exists(int(study_id))
    if exists['status'] != 'success':
        return exists
    denied = check_access(study_id, user_id)
    if denied:
        return denied

    try:
        template = SampleTemplate(int(study_id))
    except QiitaDBUnknownIDError as e:
        return {'status': 'error', 'message': str(e)}

    return {'status': 'success',
            'message': '',
            'filepaths': template.get_filepaths()}
if exists['status'] != 'success': |
<|file_name|>nodejs_test.go<|end_file_name|><|fim▁begin|>package nodejs_test
import (
"log"
"net"
"net/http"
"net/http/httptest"
"os"
"os/exec"
"path/filepath"
"syscall"
"testing"
"time"
"github.com/yookoala/gofast/example/nodejs"
)
func examplePath() string {
basePath, err := os.Getwd()
if err != nil {
panic(err)
}
return filepath.Join(basePath, "src", "index.js")
}
func exampleAssetPath() string {
basePath, err := os.Getwd()
if err != nil {
panic(err)
}
return filepath.Join(basePath, "assets")
}
// waitConn polls the given unix socket until a connection succeeds and
// delivers the established net.Conn on the returned channel.
//
// NOTE(review): the goroutine retries forever; if the server never comes
// up it leaks. Callers rely on an outer timeout.
func waitConn(socket string) <-chan net.Conn {
	chanConn := make(chan net.Conn)
	go func() {
		log.Printf("wait for socket: %s", socket)
		for {
			if conn, err := net.Dial("unix", socket); err != nil {
				// Not ready yet; back off briefly and retry.
				time.Sleep(time.Millisecond * 2)
			} else {
				chanConn <- conn
				break
			}
		}
	}()
	return chanConn
}
// TestHandler starts the example node.js FastCGI responder on a unix
// socket, proxies one GET through nodejs.NewHandler and verifies the body.
func TestHandler(t *testing.T) {
	webapp := examplePath()
	socket := filepath.Join(filepath.Dir(webapp), "test.sock")

	// define the node command (comment said webapp.py upstream; it runs node)
	cmd := exec.Command("node", webapp)
	cmd.Env = append(os.Environ(), "TEST_FCGI_SOCK="+socket)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	// start the command and wait for its exit
	done := make(chan error, 1)
	go func() {
		if err := cmd.Start(); err != nil {
			done <- err
			return
		}
		// wait if the command started successfully
		log.Printf("started successfully")
		log.Printf("process=%#v", cmd.Process)
		done <- cmd.Wait()
		log.Printf("wait ended")
	}()

	// wait until socket ready
	conn := <-waitConn(socket)
	conn.Close()
	log.Printf("socket ready")

	// start the proxy handler
	h := nodejs.NewHandler(webapp, "unix", socket)

	// get performs a GET against the handler and records the response.
	get := func(path string) (w *httptest.ResponseRecorder, err error) {
		r, err := http.NewRequest("GET", path, nil)
		if err != nil {
			return
		}
		w = httptest.NewRecorder()
		h.ServeHTTP(w, r)
		return
	}

	testDone := make(chan bool)
	go func() {
		w, err := get("/")
		if err != nil {
			t.Errorf("unexpected error %v", err)
			testDone <- false
			return
		}
		if want, have := "hello index", w.Body.String(); want != have {
			t.Errorf("expected %#v, got %#v", want, have)
			testDone <- false
			return
		}
		testDone <- true
	}()

	// Whichever happens first: test result, 3s timeout, or process exit.
	select {
	case testSuccess := <-testDone:
		if !testSuccess {
			log.Printf("test failed")
		}
	case <-time.After(3 * time.Second):
		log.Printf("test timeout")
	case err := <-done:
		if err != nil {
			log.Printf("process done with error = %v", err)
		} else {
			log.Print("process done gracefully without error")
		}
	}

	// Shut the node process down and remove the socket file.
	log.Printf("send SIGTERM")
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		log.Fatal("failed to kill: ", err)
	}
	log.Println("process killed")
	os.Remove(socket)
}
// TestMuxHandler exercises nodejs.NewMuxHandler: the /responder/ route is
// proxied to the node app and /filter/ routes filter static asset content.
func TestMuxHandler(t *testing.T) {
	root := exampleAssetPath() // the "assets" folder
	webapp := examplePath()    // the "src/index.js" file path
	socket := filepath.Join(filepath.Dir(webapp), "test2.sock")

	// define the node command
	cmd := exec.Command("node", webapp)
	cmd.Env = append(os.Environ(), "TEST_FCGI_SOCK="+socket)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	// start the command and wait for its exit
	done := make(chan error, 1)
	go func() {
		if err := cmd.Start(); err != nil {
			done <- err
			return
		}
		// wait if the command started successfully
		log.Printf("started successfully")
		log.Printf("process=%#v", cmd.Process)
		done <- cmd.Wait()
		log.Printf("wait ended")
	}()

	// wait until socket ready
	conn := <-waitConn(socket)
	conn.Close()
	log.Printf("socket ready")

	// start the proxy handler
	h := nodejs.NewMuxHandler(
		root,
		webapp,
		"unix", socket,
	)

	// get performs a GET against the handler and records the response.
	get := func(path string) (w *httptest.ResponseRecorder, err error) {
		r, err := http.NewRequest("GET", path, nil)
		if err != nil {
			return
		}
		w = httptest.NewRecorder()
		h.ServeHTTP(w, r)
		return
	}

	testDone := make(chan bool)
	go func() {
		w, err := get("/responder/")
		if err != nil {
			t.Errorf("unexpected error %v", err)
			testDone <- false
			return
		}
		if want, have := "hello index", w.Body.String(); want != have {
			t.Errorf("expected %#v, got %#v", want, have)
			testDone <- false
			return
		}
		// The filter route reverses the asset file's content.
		w, err = get("/filter/content.txt")
		if err != nil {
			t.Errorf("unexpected error %v", err)
			testDone <- false
			return
		}
		if want, have := "ereh dlrow olleh", w.Body.String(); want != have {
			t.Errorf("expected %#v, got %#v", want, have)
			testDone <- false
			return
		}
		testDone <- true
	}()

	// Whichever happens first: test result, 3s timeout, or process exit.
	select {
	case testSuccess := <-testDone:
		if !testSuccess {
			log.Printf("test failed")
		}
	case <-time.After(3 * time.Second):
		log.Printf("test timeout")
	case err := <-done:
		if err != nil {
			log.Printf("process done with error = %v", err)
		} else {
			log.Print("process done gracefully without error")
		}
	}

	log.Printf("send SIGTERM")
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		log.Fatal("failed to kill: ", err)
	}
	log.Println("process killed")
	os.Remove(socket)
}
// TestMuxHandler_authorizer verifies the FastCGI authorizer flow: requests
// to /authorized/ are rejected (403) without an Authorization header and
// passed through, with authorizer-added headers, when it is present.
func TestMuxHandler_authorizer(t *testing.T) {
	root := exampleAssetPath() // the "assets" folder
	webapp := examplePath()    // the "src/index.js" file path
	socket := filepath.Join(filepath.Dir(webapp), "test3.sock")

	// define the node command
	cmd := exec.Command("node", webapp)
	cmd.Env = append(os.Environ(), "TEST_FCGI_SOCK="+socket)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	// start the command and wait for its exit
	done := make(chan error, 1)
	go func() {
		if err := cmd.Start(); err != nil {
			done <- err
			return
		}
		// wait if the command started successfully
		log.Printf("started successfully")
		log.Printf("process=%#v", cmd.Process)
		done <- cmd.Wait()
		log.Printf("wait ended")
	}()

	// wait until socket ready
	conn := <-waitConn(socket)
	conn.Close()
	log.Printf("socket ready")

	// start the proxy handler
	h := nodejs.NewMuxHandler(
		root,
		webapp,
		"unix", socket,
	)

	testDone := make(chan bool)
	go func() {
		path := "/authorized/responder/"
		r, err := http.NewRequest("GET", path, nil)
		if err != nil {
			// NOTE(review): returning here without writing to testDone
			// leaves the select below waiting for the 3s timeout.
			return
		}
		w := httptest.NewRecorder()

		// try to access without proper authorization
		h.ServeHTTP(w, r)
		// NOTE(review): err here is still the (already-checked) value from
		// NewRequest; ServeHTTP does not return an error.
		if err != nil {
			t.Errorf("unexpected error %v", err)
			testDone <- false
			return
		}
		if want, have := "authorizer app: permission denied", w.Body.String(); want != have {
			t.Errorf("expected %#v, got %#v", want, have)
			testDone <- false
			return
		}
		if want, have := http.StatusForbidden, w.Code; want != have {
			t.Errorf("expected %#v, got %#v", want, have)
			testDone <- false
			return
		}

		// try to access with proper authorization
		r, err = http.NewRequest("GET", path, nil)
		if err != nil {
			return
		}
		r.Header.Add("Authorization", "hello-auth")
		w = httptest.NewRecorder()
		h.ServeHTTP(w, r)
		if err != nil {
			t.Errorf("unexpected error %v", err)
			testDone <- false
			return
		}
		// Header order from the authorizer is not deterministic; accept both.
		if want1, want2, have := "foo: bar!\nhello: howdy!\nhello index", "hello: howdy!\nfoo: bar!\nhello index", w.Body.String(); want1 != have && want2 != have {
			t.Errorf("expected %#v or %#v, got %#v", want1, want2, have)
			testDone <- false
			return
		}
		testDone <- true
	}()

	// Whichever happens first: test result, 3s timeout, or process exit.
	select {
	case testSuccess := <-testDone:
		if !testSuccess {
			log.Printf("test failed")
		}
	case <-time.After(3 * time.Second):
		log.Printf("test timeout")
	case err := <-done:
		if err != nil {
			log.Printf("process done with error = %v", err)
		} else {
			log.Print("process done gracefully without error")
		}
	}

	log.Printf("send SIGTERM")
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		log.Fatal("failed to kill: ", err)
	}
	log.Println("process killed")
	os.Remove(socket)
}
<|file_name|>dialog.module.js<|end_file_name|><|fim▁begin|>(function (module) {
'use strict';
module.service("Dialog", dialogService);
module.constant("dialogMap", dialogMap());
function dialogMap() {
return {
'product-details': {
controller: 'ProductDetailsController',
templateUrl: '/views/widgets/product-details.html'
},
'category-details': {
controller: 'CategoryDetailsController',
templateUrl: '/views/widgets/category-details.html'
}
};
}
dialogService.$inject = ['$rootScope', '$modal', 'dialogMap'];
function dialogService($rootScope, $modal, dialogMap) {
return {
open: function (id, data, options) {
var dialogContext = dialogMap[id];
var scope = ng.extend($rootScope.$new(), {options: options});
return $modal.open({
templateUrl: dialogContext.templateUrl,
controller: dialogContext.controller,
scope: scope,
resolve: {
data: function () {
return ng.copy(data);<|fim▁hole|> }
}
}).result;
}
};
}
})(ng.module('korann.modal', []));<|fim▁end|> | |
<|file_name|>tennisserver.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "tennisserver.h"
#include "tennis.h"
#include <qbluetoothserver.h>
#include <qbluetoothsocket.h>
#include <QDebug>
// Constructs the tennis server.
//
// All members that own Bluetooth resources (l2capServer, clientSocket,
// stream) start out null; they are created lazily in startServer() and
// clientConnected().  lagReplyTimeout counts lag-probe ticks while an
// echo reply is outstanding (see sendEcho()).
TennisServer::TennisServer(QObject *parent)
    : QObject(parent), l2capServer(0), clientSocket(0), stream(0), lagReplyTimeout(0)
{
    // Throttle timers: `elapsed` rate-limits moveLeftPaddle(),
    // `ballElapsed` rate-limits moveBall().
    elapsed.start();
    ballElapsed.start();
    // Probe the client once per second to measure round-trip lag.
    lagTimer.setInterval(1000);
    connect(&lagTimer, SIGNAL(timeout()), this, SLOT(sendEcho()));
}
// Destroys the server.
//
// If a client is still attached, sends a disconnect notification ("D")
// before tearing everything down via stopServer().
//
// Fix: clientSocket must be checked as well as stream.  readSocket()'s
// "D" handler clears clientSocket but leaves `stream` non-null, so the
// original `if (stream)` guard could lead to a null-pointer call on
// clientSocket->write().
TennisServer::~TennisServer()
{
    if (stream && clientSocket) {
        QByteArray b;
        QDataStream s(&b, QIODevice::WriteOnly);
        s << QString("D");
        clientSocket->write(b);
    }
    stopServer();
}
// Starts listening for incoming L2CAP connections and publishes the SDP
// service record that lets remote devices discover the game.
// Safe to call more than once: returns immediately if the server is
// already running.
void TennisServer::startServer()
{
    if (l2capServer)
        return;

    //! [Create the server]
    l2capServer = new QBluetoothServer(QBluetoothServiceInfo::L2capProtocol, this);
    connect(l2capServer, SIGNAL(newConnection()), this, SLOT(clientConnected()));
    l2capServer->listen();
    //! [Create the server]

    serviceInfo.setAttribute(QBluetoothServiceInfo::ServiceRecordHandle, (uint)0x00010010);

    //! [Class ServiceClass must contain at least 1 entry]
    // SerialPort is advertised as the service class; the real service
    // identity is the custom serviceUuid set below.
    QBluetoothServiceInfo::Sequence classId;
    // classId << QVariant::fromValue(QBluetoothUuid(serviceUuid));
    classId << QVariant::fromValue(QBluetoothUuid(QBluetoothUuid::SerialPort));
    serviceInfo.setAttribute(QBluetoothServiceInfo::ServiceClassIds, classId);
    //! [Class ServiceClass must contain at least 1 entry]

    //! [Service name, description and provider]
    serviceInfo.setAttribute(QBluetoothServiceInfo::ServiceName, tr("Example Tennis Server"));
    serviceInfo.setAttribute(QBluetoothServiceInfo::ServiceDescription,
                             tr("Example bluetooth tennis server"));
    serviceInfo.setAttribute(QBluetoothServiceInfo::ServiceProvider, tr("Nokia, QtDF"));
    //! [Service name, description and provider]

    //! [Service UUID set]
    serviceInfo.setServiceUuid(QBluetoothUuid(serviceUuid));
    //! [Service UUID set]

    //! [Service Discoverability]
    // Advertise in the public browse group so generic SDP scans find it.
    serviceInfo.setAttribute(QBluetoothServiceInfo::BrowseGroupList,
                             QBluetoothUuid(QBluetoothUuid::PublicBrowseGroup));
    //! [Service Discoverability]

    //! [Protocol descriptor list]
    // L2CAP protocol descriptor carrying the PSM the server listens on.
    QBluetoothServiceInfo::Sequence protocolDescriptorList;
    QBluetoothServiceInfo::Sequence protocol;
    protocol << QVariant::fromValue(QBluetoothUuid(QBluetoothUuid::L2cap))
             << QVariant::fromValue(quint16(l2capServer->serverPort()));
    protocolDescriptorList.append(QVariant::fromValue(protocol));
    serviceInfo.setAttribute(QBluetoothServiceInfo::ProtocolDescriptorList,
                             protocolDescriptorList);
    //! [Protocol descriptor list]

    //! [Register service]
    serviceInfo.registerService();
    //! [Register service]
}
//! [stopServer]
// Stops advertising and releases all connection state.
//
// The stream is deleted before the socket because the QDataStream wraps
// the socket.  All pointers are reset to null so the server can be
// started again and so other methods can detect the stopped state.
void TennisServer::stopServer()
{
    qDebug() <<Q_FUNC_INFO;

    // Unregister service
    serviceInfo.unregisterService();

    delete stream;
    stream = 0;

    // Close sockets
    delete clientSocket;
    clientSocket = 0;

    // Close server
    delete l2capServer;
    l2capServer = 0;
}
//! [stopServer]
// Returns the L2CAP port (PSM) the server is listening on, or 0 when
// the server is not running.
//
// Fix: stopServer() deletes l2capServer and resets it to 0, so the
// original unconditional dereference would crash if this getter were
// called after the server was stopped (or before startServer()).
quint16 TennisServer::serverPort() const
{
    return l2capServer ? l2capServer->serverPort() : 0;
}
//! [moveBall]
// Sends the current ball position to the connected client.
//
// Updates are rate-limited to one every 30 ms (tracked by ballElapsed)
// to avoid flooding the Bluetooth link; intermediate positions are
// simply dropped.
void TennisServer::moveBall(int x, int y)
{
    int msec = ballElapsed.elapsed();

    if (stream && msec > 30){
        QByteArray b;
        QDataStream s(&b, QIODevice::WriteOnly);
        // Wire format: "m <x> <y>" serialized as a single QString.
        s << QString("m %1 %2").arg(x).arg(y);
        // s << QLatin1String("m") << x << y;
        clientSocket->write(b);
        ballElapsed.restart();
    }
}
//! [moveBall]
void TennisServer::score(int left, int right)
{
if (stream){
QByteArray b;
QDataStream s(&b, QIODevice::WriteOnly);
s << QString("s %1 %2").arg(left).arg(right);
// s << QChar('s') << left << right;
clientSocket->write(b);
}
}
void TennisServer::moveLeftPaddle(int y)
{
int msec = elapsed.elapsed();
if (stream && msec > 50) {<|fim▁hole|> clientSocket->write(b);
elapsed.restart();
}
}
void TennisServer::readSocket()
{
if (!clientSocket)
return;
while (clientSocket->bytesAvailable()) {
QString str;
*stream >> str;
QStringList args = str.split(QChar(' '));
QString s = args.takeFirst();
if (s == "r" && args.count() == 1){
emit moveRightPaddle(args.at(0).toInt());
}
else if (s == "e" && args.count() == 1){
lagReplyTimeout = 0;
QTime then = QTime::fromString(args.at(0), "hh:mm:ss.zzz");
if (then.isValid()) {
emit lag(then.msecsTo(QTime::currentTime()));
// qDebug() << "RTT: " << then.msecsTo(QTime::currentTime()) << "ms";
}
}
else if (s == "E"){
QByteArray b;
QDataStream st(&b, QIODevice::WriteOnly);
st << str;
clientSocket->write(b);
}
else if (s == "D"){
qDebug() << Q_FUNC_INFO << "closing!";
clientSocket->deleteLater();
clientSocket = 0;
}
else {
qDebug() << Q_FUNC_INFO << "Unknown command" << str;
}
}
}
//! [clientConnected]
// Accepts a pending connection from the listening server.
//
// Only a single client is supported: if one is already attached, the
// new socket is deleted immediately.  On success a QDataStream is
// wrapped around the socket, clientConnected(name) is emitted and the
// periodic lag probe is started.
void TennisServer::clientConnected()
{
    qDebug() << Q_FUNC_INFO << "connect";

    QBluetoothSocket *socket = l2capServer->nextPendingConnection();
    if (!socket)
        return;

    if (clientSocket){
        // A client is already connected; reject the newcomer.
        qDebug() << Q_FUNC_INFO << "Closing socket!";
        delete socket;
        return;
    }

    connect(socket, SIGNAL(readyRead()), this, SLOT(readSocket()));
    connect(socket, SIGNAL(disconnected()), this, SLOT(clientDisconnected()));
    connect(socket, SIGNAL(error(QBluetoothSocket::SocketError)), this, SLOT(socketError(QBluetoothSocket::SocketError)));

    stream = new QDataStream(socket);
    clientSocket = socket;

    qDebug() << Q_FUNC_INFO << "started";

    emit clientConnected(clientSocket->peerName());
    lagTimer.start();
}
//! [clientConnected]
// Logs socket errors; no recovery is attempted here (disconnects are
// handled separately in clientDisconnected()).
void TennisServer::socketError(QBluetoothSocket::SocketError err)
{
    qDebug() << Q_FUNC_INFO << err;
}
//! [sendEcho]
// Periodic lag probe, driven by lagTimer (every second).
//
// Sends "e <hh:mm:ss.zzz>"; the client echoes it back and readSocket()
// computes the round trip from the embedded timestamp.  While a reply
// is outstanding, lagReplyTimeout is counted down instead of sending a
// new probe, so at most one probe is in flight for up to 10 ticks.
void TennisServer::sendEcho()
{
    if (lagReplyTimeout) {
        lagReplyTimeout--;
        return;
    }

    if (stream) {
        QByteArray b;
        QDataStream s(&b, QIODevice::WriteOnly);
        s << QString("e %1").arg(QTime::currentTime().toString("hh:mm:ss.zzz"));
        clientSocket->write(b);
        lagReplyTimeout = 10;
    }
}
//! [sendEcho]
//! [clientDisconnected]
// Reacts to the client socket reporting a disconnect.
//
// Stops the lag probe, emits clientDisconnected(name) and releases the
// socket and its QDataStream wrapper.
void TennisServer::clientDisconnected()
{
    qDebug() << Q_FUNC_INFO << "client closing!";

    lagTimer.stop();
    lagReplyTimeout = 0;

    QBluetoothSocket *socket = qobject_cast<QBluetoothSocket *>(sender());
    if (!socket)
        return;

    emit clientDisconnected(socket->peerName());

    // NOTE(review): assumes sender() is always the tracked clientSocket.
    // deleteLater() is used because this runs inside one of the socket's
    // own signal handlers.
    clientSocket->deleteLater();
    clientSocket = 0;
    delete stream;
    stream = 0;
}
//! [clientDisconnected]
<|file_name|>0003_link_users_to_account.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.auth.models import User
from account.models import Account
class Migration(DataMigration):
def forwards(self, orm):
# we need to associate each user to an account object
for user in User.objects.all():
a = Account()
a.user = user
a.language = 'en' # default language
a.save()
    def backwards(self, orm):
        # Reverse migration: remove the Account rows that forwards()
        # created.  Note this deletes *all* accounts, not only the ones
        # added by this migration.
        Account.objects.all().delete()
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 14, 4, 17, 6, 973224)'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 14, 4, 17, 6, 974570)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 14, 4, 17, 6, 974509)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'relationships': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_to'", 'symmetrical': 'False', 'through': "orm['relationships.Relationship']", 'to': "orm['auth.User']"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'people.profile': {
'Meta': {'object_name': 'Profile'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'people.role': {
'Meta': {'object_name': 'Role'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'relationships.relationship': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('from_user', 'to_user', 'status', 'site'),)", 'object_name': 'Relationship'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_users'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'relationships'", 'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['relationships.RelationshipStatus']"}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_users'", 'to': "orm['auth.User']"}),
'weight': ('django.db.models.fields.FloatField', [], {'default': '1.0', 'null': 'True', 'blank': 'True'})
},
'relationships.relationshipstatus': {
'Meta': {'ordering': "('name',)", 'object_name': 'RelationshipStatus'},
'from_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'symmetrical_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'to_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
    complete_apps = ['people']
<|file_name|>structs.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-gogo.
// source: structs.proto
// DO NOT EDIT!
/*
Package db is a generated protocol buffer package.
It is generated from these files:
structs.proto
It has these top-level messages:
FileVersion
VersionList
FileInfoTruncated
*/
package db
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gogo/protobuf/gogoproto"
import protocol "github.com/syncthing/syncthing/lib/protocol"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
const _ = proto.GoGoProtoPackageIsVersion1
type FileVersion struct {
Version protocol.Vector `protobuf:"bytes,1,opt,name=version" json:"version"`
Device []byte `protobuf:"bytes,2,opt,name=device,proto3" json:"device,omitempty"`
}
func (m *FileVersion) Reset() { *m = FileVersion{} }
func (m *FileVersion) String() string { return proto.CompactTextString(m) }
func (*FileVersion) ProtoMessage() {}
func (*FileVersion) Descriptor() ([]byte, []int) { return fileDescriptorStructs, []int{0} }
type VersionList struct {
Versions []FileVersion `protobuf:"bytes,1,rep,name=versions" json:"versions"`
}
func (m *VersionList) Reset() { *m = VersionList{} }
func (*VersionList) ProtoMessage() {}
func (*VersionList) Descriptor() ([]byte, []int) { return fileDescriptorStructs, []int{1} }
// Must be the same as FileInfo but without the blocks field
type FileInfoTruncated struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Type protocol.FileInfoType `protobuf:"varint,2,opt,name=type,proto3,enum=protocol.FileInfoType" json:"type,omitempty"`
Size int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
Permissions uint32 `protobuf:"varint,4,opt,name=permissions,proto3" json:"permissions,omitempty"`
ModifiedS int64 `protobuf:"varint,5,opt,name=modified_s,json=modifiedS,proto3" json:"modified_s,omitempty"`
ModifiedNs int32 `protobuf:"varint,11,opt,name=modified_ns,json=modifiedNs,proto3" json:"modified_ns,omitempty"`
Deleted bool `protobuf:"varint,6,opt,name=deleted,proto3" json:"deleted,omitempty"`
Invalid bool `protobuf:"varint,7,opt,name=invalid,proto3" json:"invalid,omitempty"`
NoPermissions bool `protobuf:"varint,8,opt,name=no_permissions,json=noPermissions,proto3" json:"no_permissions,omitempty"`
Version protocol.Vector `protobuf:"bytes,9,opt,name=version" json:"version"`
Sequence int64 `protobuf:"varint,10,opt,name=sequence,proto3" json:"sequence,omitempty"`
}
func (m *FileInfoTruncated) Reset() { *m = FileInfoTruncated{} }
func (*FileInfoTruncated) ProtoMessage() {}
func (*FileInfoTruncated) Descriptor() ([]byte, []int) { return fileDescriptorStructs, []int{2} }
// init registers the generated message types with the proto runtime so
// they can be resolved by their fully qualified names ("db.FileVersion"
// etc.).
func init() {
	proto.RegisterType((*FileVersion)(nil), "db.FileVersion")
	proto.RegisterType((*VersionList)(nil), "db.VersionList")
	proto.RegisterType((*FileInfoTruncated)(nil), "db.FileInfoTruncated")
}
// Marshal encodes the message into a freshly allocated, exactly sized
// byte slice using the gogo/protobuf wire format.
func (m *FileVersion) Marshal() (data []byte, err error) {
	size := m.ProtoSize()
	data = make([]byte, size)
	n, err := m.MarshalTo(data)
	if err != nil {
		return nil, err
	}
	return data[:n], nil
}
func (m *FileVersion) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
data[i] = 0xa
i++
i = encodeVarintStructs(data, i, uint64(m.Version.ProtoSize()))
n1, err := m.Version.MarshalTo(data[i:])
if err != nil {
return 0, err
}
i += n1
if len(m.Device) > 0 {
data[i] = 0x12
i++
i = encodeVarintStructs(data, i, uint64(len(m.Device)))
i += copy(data[i:], m.Device)
}
return i, nil
}
func (m *VersionList) Marshal() (data []byte, err error) {
size := m.ProtoSize()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *VersionList) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Versions) > 0 {
for _, msg := range m.Versions {
data[i] = 0xa
i++
i = encodeVarintStructs(data, i, uint64(msg.ProtoSize()))
n, err := msg.MarshalTo(data[i:])
if err != nil {
return 0, err
}
i += n
}
}
return i, nil
}
func (m *FileInfoTruncated) Marshal() (data []byte, err error) {
size := m.ProtoSize()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *FileInfoTruncated) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Name) > 0 {
data[i] = 0xa
i++
i = encodeVarintStructs(data, i, uint64(len(m.Name)))
i += copy(data[i:], m.Name)
}
if m.Type != 0 {
data[i] = 0x10
i++
i = encodeVarintStructs(data, i, uint64(m.Type))
}
if m.Size != 0 {
data[i] = 0x18
i++
i = encodeVarintStructs(data, i, uint64(m.Size))
}
if m.Permissions != 0 {
data[i] = 0x20
i++
i = encodeVarintStructs(data, i, uint64(m.Permissions))
}
if m.ModifiedS != 0 {
data[i] = 0x28
i++
i = encodeVarintStructs(data, i, uint64(m.ModifiedS))
}
if m.Deleted {
data[i] = 0x30
i++
if m.Deleted {
data[i] = 1
} else {
data[i] = 0
}
i++
}
if m.Invalid {
data[i] = 0x38
i++
if m.Invalid {
data[i] = 1
} else {
data[i] = 0
}
i++
}
if m.NoPermissions {
data[i] = 0x40
i++
if m.NoPermissions {
data[i] = 1
} else {
data[i] = 0
}
i++
}
data[i] = 0x4a
i++
i = encodeVarintStructs(data, i, uint64(m.Version.ProtoSize()))
n2, err := m.Version.MarshalTo(data[i:])
if err != nil {
return 0, err
}
i += n2
if m.Sequence != 0 {
data[i] = 0x50
i++
i = encodeVarintStructs(data, i, uint64(m.Sequence))
}
if m.ModifiedNs != 0 {
data[i] = 0x58
i++
i = encodeVarintStructs(data, i, uint64(m.ModifiedNs))
}
return i, nil
}
// encodeFixed64Structs writes v little-endian into data[offset:offset+8]
// and returns the offset just past the written bytes.  (Generator
// helper for fixed64 fields; not referenced in the visible portion of
// this file.)
func encodeFixed64Structs(data []byte, offset int, v uint64) int {
	data[offset] = uint8(v)
	data[offset+1] = uint8(v >> 8)
	data[offset+2] = uint8(v >> 16)
	data[offset+3] = uint8(v >> 24)
	data[offset+4] = uint8(v >> 32)
	data[offset+5] = uint8(v >> 40)
	data[offset+6] = uint8(v >> 48)
	data[offset+7] = uint8(v >> 56)
	return offset + 8
}
// encodeFixed32Structs writes v little-endian into data[offset:offset+4]
// and returns the offset just past the written bytes.  (Generator
// helper for fixed32 fields; not referenced in the visible portion of
// this file.)
func encodeFixed32Structs(data []byte, offset int, v uint32) int {
	data[offset] = uint8(v)
	data[offset+1] = uint8(v >> 8)
	data[offset+2] = uint8(v >> 16)
	data[offset+3] = uint8(v >> 24)
	return offset + 4
}
// encodeVarintStructs writes v into data starting at offset as a
// protobuf varint (base-128, low groups first, high bit marks a
// continuation byte) and returns the offset just past the last byte
// written.  The caller must have reserved sovStructs(v) bytes.
func encodeVarintStructs(data []byte, offset int, v uint64) int {
	for v >= 1<<7 {
		data[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	data[offset] = uint8(v)
	return offset + 1
}
func (m *FileVersion) ProtoSize() (n int) {
var l int
_ = l
l = m.Version.ProtoSize()
n += 1 + l + sovStructs(uint64(l))
l = len(m.Device)
if l > 0 {
n += 1 + l + sovStructs(uint64(l))
}
return n
}
func (m *VersionList) ProtoSize() (n int) {
var l int
_ = l
if len(m.Versions) > 0 {
for _, e := range m.Versions {
l = e.ProtoSize()
n += 1 + l + sovStructs(uint64(l))
}
}
return n
}
func (m *FileInfoTruncated) ProtoSize() (n int) {
var l int
_ = l
l = len(m.Name)
if l > 0 {
n += 1 + l + sovStructs(uint64(l))
}
if m.Type != 0 {
n += 1 + sovStructs(uint64(m.Type))
}
if m.Size != 0 {
n += 1 + sovStructs(uint64(m.Size))
}
if m.Permissions != 0 {
n += 1 + sovStructs(uint64(m.Permissions))
}
if m.ModifiedS != 0 {
n += 1 + sovStructs(uint64(m.ModifiedS))
}
if m.Deleted {
n += 2
}
if m.Invalid {
n += 2
}
if m.NoPermissions {
n += 2
}
l = m.Version.ProtoSize()
n += 1 + l + sovStructs(uint64(l))
if m.Sequence != 0 {
n += 1 + sovStructs(uint64(m.Sequence))
}
if m.ModifiedNs != 0 {
n += 1 + sovStructs(uint64(m.ModifiedNs))
}
return n
}
// sovStructs returns the number of bytes needed to encode x as a
// protobuf varint: one byte per 7 bits of payload, so 1..10 bytes for
// a uint64 (and 1 for zero).
func sovStructs(x uint64) (n int) {
	for {
		n++
		x >>= 7
		if x == 0 {
			break
		}
	}
	return n
}
// sozStructs returns the varint-encoded size of x after zig-zag
// transformation ((x << 1) ^ (x >> 63)), as used for sint64 fields so
// small negative values stay small on the wire.
func sozStructs(x uint64) (n int) {
	return sovStructs(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *FileVersion) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: FileVersion: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: FileVersion: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthStructs
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Version.Unmarshal(data[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Device", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthStructs
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Device = append(m.Device[:0], data[iNdEx:postIndex]...)
if m.Device == nil {
m.Device = []byte{}
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipStructs(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthStructs
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *VersionList) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: VersionList: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: VersionList: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Versions", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthStructs
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Versions = append(m.Versions, FileVersion{})
if err := m.Versions[len(m.Versions)-1].Unmarshal(data[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipStructs(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthStructs
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *FileInfoTruncated) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: FileInfoTruncated: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: FileInfoTruncated: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthStructs
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Name = string(data[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType)
}
m.Type = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.Type |= (protocol.FileInfoType(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Size", wireType)
}
m.Size = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.Size |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 4:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Permissions", wireType)
}
m.Permissions = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.Permissions |= (uint32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 5:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field ModifiedS", wireType)
}
m.ModifiedS = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.ModifiedS |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 6:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Deleted", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.Deleted = bool(v != 0)
case 7:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Invalid", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.Invalid = bool(v != 0)
case 8:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field NoPermissions", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.NoPermissions = bool(v != 0)
case 9:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthStructs
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Version.Unmarshal(data[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 10:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Sequence", wireType)
}
m.Sequence = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.Sequence |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 11:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field ModifiedNs", wireType)
}
m.ModifiedNs = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowStructs
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.ModifiedNs |= (int32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipStructs(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthStructs
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipStructs(data []byte) (n int, err error) {
l := len(data)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowStructs
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowStructs
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if data[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8<|fim▁hole|> case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowStructs
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthStructs
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowStructs
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipStructs(data[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthStructs = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowStructs = fmt.Errorf("proto: integer overflow")
)
var fileDescriptorStructs = []byte{
// 419 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x8c, 0x51, 0xcd, 0xaa, 0xd3, 0x40,
0x18, 0x4d, 0xda, 0xdc, 0x36, 0xfd, 0x62, 0xaf, 0x3a, 0xc8, 0x25, 0x14, 0x4c, 0x2f, 0x05, 0x41,
0x04, 0x53, 0xbd, 0xe2, 0xc6, 0x65, 0x17, 0x05, 0x41, 0x44, 0x46, 0xa9, 0xcb, 0xd2, 0x64, 0xa6,
0xe9, 0x40, 0x32, 0x13, 0x33, 0x93, 0x42, 0x7d, 0x12, 0x97, 0x7d, 0x9c, 0x2e, 0x7d, 0x02, 0xd1,
0xfa, 0x12, 0x2e, 0x9d, 0x4e, 0x7e, 0xcc, 0xd2, 0x45, 0xe0, 0x3b, 0x73, 0xce, 0xf9, 0xce, 0x99,
0x0c, 0x8c, 0xa5, 0x2a, 0xca, 0x58, 0xc9, 0x30, 0x2f, 0x84, 0x12, 0xa8, 0x47, 0xa2, 0xc9, 0xf3,
0x84, 0xa9, 0x5d, 0x19, 0x85, 0xb1, 0xc8, 0xe6, 0x89, 0x48, 0xc4, 0xdc, 0x50, 0x51, 0xb9, 0x35,
0xc8, 0x00, 0x33, 0x55, 0x96, 0xc9, 0xeb, 0x8e, 0x5c, 0x1e, 0x78, 0xac, 0x76, 0x8c, 0x27, 0x9d,
0x29, 0x65, 0x51, 0xb5, 0x21, 0x16, 0xe9, 0x3c, 0xa2, 0x79, 0x65, 0x9b, 0x7d, 0x06, 0x6f, 0xc9,
0x52, 0xba, 0xa2, 0x85, 0x64, 0x82, 0xa3, 0x17, 0x30, 0xdc, 0x57, 0xa3, 0x6f, 0xdf, 0xda, 0x4f,
0xbd, 0xbb, 0x07, 0x61, 0x63, 0x0a, 0x57, 0x34, 0x56, 0xa2, 0x58, 0x38, 0xa7, 0x1f, 0x53, 0x0b,
0x37, 0x32, 0x74, 0x03, 0x03, 0x42, 0xf7, 0x2c, 0xa6, 0x7e, 0x4f, 0x1b, 0xee, 0xe1, 0x1a, 0xcd,
0x96, 0xe0, 0xd5, 0x4b, 0xdf, 0x31, 0xa9, 0xd0, 0x4b, 0x70, 0x6b, 0x87, 0xd4, 0x9b, 0xfb, 0x7a,
0xf3, 0xfd, 0x90, 0x44, 0x61, 0x27, 0xbb, 0x5e, 0xdc, 0xca, 0xde, 0x38, 0xdf, 0x8e, 0x53, 0x6b,
0xf6, 0xa7, 0x07, 0x0f, 0x2f, 0xaa, 0xb7, 0x7c, 0x2b, 0x3e, 0x15, 0x25, 0x8f, 0x37, 0x8a, 0x12,
0x84, 0xc0, 0xe1, 0x9b, 0x8c, 0x9a, 0x92, 0x23, 0x6c, 0x66, 0xf4, 0x0c, 0x1c, 0x75, 0xc8, 0xab,
0x1e, 0xd7, 0x77, 0x37, 0xff, 0x8a, 0xb7, 0x76, 0xcd, 0x62, 0xa3, 0xb9, 0xf8, 0x25, 0xfb, 0x4a,
0xfd, 0xbe, 0xd6, 0xf6, 0xb1, 0x99, 0xd1, 0x2d, 0x78, 0x39, 0x2d, 0x32, 0x26, 0xab, 0x96, 0x8e,
0xa6, 0xc6, 0xb8, 0x7b, 0x84, 0x1e, 0x03, 0x64, 0x82, 0xb0, 0x2d, 0xa3, 0x64, 0x2d, 0xfd, 0x2b,
0xe3, 0x1d, 0x35, 0x27, 0x1f, 0x91, 0x0f, 0x43, 0x42, 0x53, 0xaa, 0xfb, 0xf9, 0x03, 0xcd, 0xb9,
0xb8, 0x81, 0x17, 0x86, 0xf1, 0xfd, 0x26, 0x65, 0xc4, 0x1f, 0x56, 0x4c, 0x0d, 0xd1, 0x13, 0xb8,
0xe6, 0x62, 0xdd, 0xcd, 0x75, 0x8d, 0x60, 0xcc, 0xc5, 0x87, 0x4e, 0x72, 0xe7, 0x5d, 0x46, 0xff,
0xf7, 0x2e, 0x13, 0x70, 0x25, 0xfd, 0x52, 0x52, 0xae, 0x5f, 0x06, 0x4c, 0xd3, 0x16, 0xa3, 0x29,
0x78, 0xed, 0x3d, 0x74, 0xa2, 0xa7, 0xe9, 0x2b, 0xdc, 0x5e, 0xed, 0x7d, 0xfd, 0xeb, 0x17, 0x8f,
0x4e, 0xbf, 0x02, 0xeb, 0x74, 0x0e, 0xec, 0xef, 0xfa, 0xfb, 0x79, 0x0e, 0xac, 0xe3, 0xef, 0xc0,
0x8e, 0x06, 0x26, 0xf8, 0xd5, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2a, 0xae, 0x24, 0x77, 0xb3,
0x02, 0x00, 0x00,
}<|fim▁end|> | return iNdEx, nil |
<|file_name|>intro.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { SlidesModule, SlidesRoutes } from '@ng360/slides';
import { FeedbackModule } from '@codelab/feedback';
import { CommonModule } from '@angular/common';
import { BrowserWindowModule } from '@codelab/browser';
import { CodeDemoModule } from '@codelab/code-demos';<|fim▁hole|>import { IntroComponent } from './intro.component';
@NgModule({
imports: [
RouterModule.forChild([...SlidesRoutes.get(IntroComponent)]),
FeedbackModule,
CommonModule,
CodeDemoModule,
BrowserWindowModule,
CodeDemoModule,
SlidesModule,
FormsModule
],
declarations: [IntroComponent],
exports: [IntroComponent]
})
export class IntroModule {}<|fim▁end|> | import { FormsModule } from '@angular/forms'; |
<|file_name|>SystemOutLogger.java<|end_file_name|><|fim▁begin|>package org.thoughtcrime.securesms.testutil;
import org.signal.core.util.logging.Log;
public final class SystemOutLogger extends Log.Logger {
@Override
public void v(String tag, String message, Throwable t, boolean keepLonger) {
printlnFormatted('v', tag, message, t);
}
@Override
public void d(String tag, String message, Throwable t, boolean keepLonger) {
printlnFormatted('d', tag, message, t);
}
@Override
public void i(String tag, String message, Throwable t, boolean keepLonger) {
printlnFormatted('i', tag, message, t);
}
@Override
public void w(String tag, String message, Throwable t, boolean keepLonger) {
printlnFormatted('w', tag, message, t);
}
@Override
public void e(String tag, String message, Throwable t, boolean keepLonger) {
printlnFormatted('e', tag, message, t);
}
@Override
public void flush() { }
private void printlnFormatted(char level, String tag, String message, Throwable t) {
System.out.println(format(level, tag, message, t));
}
private String format(char level, String tag, String message, Throwable t) {
if (t != null) {<|fim▁hole|> }
}<|fim▁end|> | return String.format("%c[%s] %s %s:%s", level, tag, message, t.getClass().getSimpleName(), t.getMessage());
} else {
return String.format("%c[%s] %s", level, tag, message);
} |
<|file_name|>hr_timesheet.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from osv import fields
from osv import osv
from tools.translate import _
class hr_employee(osv.osv):
_name = "hr.employee"
_inherit = "hr.employee"
_columns = {
'product_id': fields.many2one('product.product', 'Product', help="Specifies employee's designation as a product with type 'service'."),
'journal_id': fields.many2one('account.analytic.journal', 'Analytic Journal'),
'uom_id': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='UoM', store=True, readonly=True)
}
def _getAnalyticJournal(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
result = md.get_object_reference(cr, uid, 'hr_timesheet', 'analytic_journal')
return result[1]
except ValueError:
pass
return False
def _getEmployeeProduct(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
result = md.get_object_reference(cr, uid, 'product', 'product_consultant')<|fim▁hole|> return False
_defaults = {
'journal_id': _getAnalyticJournal,
'product_id': _getEmployeeProduct
}
hr_employee()
class hr_analytic_timesheet(osv.osv):
_name = "hr.analytic.timesheet"
_table = 'hr_analytic_timesheet'
_description = "Timesheet Line"
_inherits = {'account.analytic.line': 'line_id'}
_order = "id desc"
_columns = {
'line_id': fields.many2one('account.analytic.line', 'Analytic Line', ondelete='cascade', required=True),
'partner_id': fields.related('account_id', 'partner_id', type='many2one', string='Partner', relation='res.partner', store=True),
}
def unlink(self, cr, uid, ids, context=None):
toremove = {}
for obj in self.browse(cr, uid, ids, context=context):
toremove[obj.line_id.id] = True
self.pool.get('account.analytic.line').unlink(cr, uid, toremove.keys(), context=context)
return super(hr_analytic_timesheet, self).unlink(cr, uid, ids, context=context)
def on_change_unit_amount(self, cr, uid, id, prod_id, unit_amount, company_id, unit=False, journal_id=False, context=None):
res = {'value':{}}
if prod_id and unit_amount:
# find company
company_id = self.pool.get('res.company')._company_default_get(cr, uid, 'account.analytic.line', context=context)
r = self.pool.get('account.analytic.line').on_change_unit_amount(cr, uid, id, prod_id, unit_amount, company_id, unit, journal_id, context=context)
if r:
res.update(r)
# update unit of measurement
if prod_id:
uom = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
if uom.uom_id:
res['value'].update({'product_uom_id': uom.uom_id.id})
else:
res['value'].update({'product_uom_id': False})
return res
def _getEmployeeProduct(self, cr, uid, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.id
return False
def _getEmployeeUnit(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.uom_id.id
return False
def _getGeneralAccount(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if bool(emp.product_id):
a = emp.product_id.product_tmpl_id.property_account_expense.id
if not a:
a = emp.product_id.categ_id.property_account_expense_categ.id
if a:
return a
return False
def _getAnalyticJournal(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.journal_id:
return emp.journal_id.id
return False
_defaults = {
'product_uom_id': _getEmployeeUnit,
'product_id': _getEmployeeProduct,
'general_account_id': _getGeneralAccount,
'journal_id': _getAnalyticJournal,
'date': lambda self, cr, uid, ctx: ctx.get('date', fields.date.context_today(self,cr,uid,context=ctx)),
'user_id': lambda obj, cr, uid, ctx: ctx.get('user_id', uid),
}
def on_change_account_id(self, cr, uid, ids, account_id):
return {'value':{}}
def on_change_date(self, cr, uid, ids, date):
if ids:
new_date = self.read(cr, uid, ids[0], ['date'])['date']
if date != new_date:
warning = {'title':'User Alert!','message':'Changing the date will let this entry appear in the timesheet of the new date.'}
return {'value':{},'warning':warning}
return {'value':{}}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
ename = ''
if emp_id:
ename = emp_obj.browse(cr, uid, emp_id[0], context=context).name
if not vals.get('journal_id',False):
raise osv.except_osv(_('Warning !'), _('Analytic journal is not defined for employee %s \nDefine an employee for the selected user and assign an analytic journal!')%(ename,))
if not vals.get('account_id',False):
raise osv.except_osv(_('Warning !'), _('No analytic account defined on the project.\nPlease set one or we can not automatically fill the timesheet.'))
return super(hr_analytic_timesheet, self).create(cr, uid, vals, context=context)
def on_change_user_id(self, cr, uid, ids, user_id):
if not user_id:
return {}
context = {'user_id': user_id}
return {'value': {
'product_id': self. _getEmployeeProduct(cr, uid, context),
'product_uom_id': self._getEmployeeUnit(cr, uid, context),
'general_account_id': self._getGeneralAccount(cr, uid, context),
'journal_id': self._getAnalyticJournal(cr, uid, context),
}}
hr_analytic_timesheet()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | return result[1]
except ValueError:
pass |
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package httpserver
import (
"fmt"
"github.com/devfeel/dotweb"
"github.com/devfeel/tokenserver/config"
"github.com/devfeel/tokenserver/framework/log"
"strconv"
)
func StartServer() error {
//初始化DotServer
app := dotweb.New()
//设置dotserver日志目录
app.SetLogPath(config.CurrentConfig.Log.FilePath)
//设置路由
InitRoute(app)
innerLogger := logger.GetInnerLogger()<|fim▁hole|>
// 开始服务
port := config.CurrentConfig.HttpServer.HttpPort
innerLogger.Debug("dotweb.StartServer => " + strconv.Itoa(port))
err := app.StartServer(port)
return err
}
func ReSetServer() {
//初始化应用信息
fmt.Println("ReSetServer")
}<|fim▁end|> |
//启动监控服务
pprofport := config.CurrentConfig.HttpServer.PProfPort
app.SetPProfConfig(true, pprofport) |
<|file_name|>issue-36023.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>use std::ops::Deref;
fn main() {
if env_var("FOOBAR").as_ref().map(Deref::deref).ok() == Some("yes") {
panic!()
}
let env_home: Result<String, ()> = Ok("foo-bar-baz".to_string());
let env_home = env_home.as_ref().map(Deref::deref).ok();
if env_home == Some("") { panic!() }
}
#[inline(never)]
fn env_var(s: &str) -> Result<String, VarError> {
Err(VarError::NotPresent)
}
pub enum VarError {
NotPresent,
NotUnicode(String),
}<|fim▁end|> | // except according to those terms.
// run-pass
#![allow(unused_variables)] |
<|file_name|>Part.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
* Copyright (c) Juergen Riegel ([email protected]) 2014 *
* *
* This file is part of the FreeCAD CAx development system. *
* *
* This library is free software; you can redistribute it and/or *
* modify it under the terms of the GNU Library General Public *
* License as published by the Free Software Foundation; either *
* version 2 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU Library General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this library; see the file COPYING.LIB. If not, *
* write to the Free Software Foundation, Inc., 59 Temple Place, *
* Suite 330, Boston, MA 02111-1307, USA *
* *
***************************************************************************/
#include "PreCompiled.h"
#ifndef _PreComp_
#endif
#include <App/Document.h>
#include <App/Plane.h>
#include "Part.h"
#include "Origin.h"
#include "PartPy.h"
#include <boost/bind.hpp>
using namespace App;
PROPERTY_SOURCE(App::Part, App::GeoFeatureGroup)
//===========================================================================
// Feature
//===========================================================================
const char* Part::BaseplaneTypes[3] = {"XY_Plane", "XZ_Plane", "YZ_Plane"};
const char* Part::BaselineTypes[3] = {"X_Axis", "Y_Axis", "Z_Axis"};
Part::Part(void)
{
ADD_PROPERTY(Type,(""));
ADD_PROPERTY_TYPE(Material, (), 0, App::Prop_None, "Map with material properties");
ADD_PROPERTY_TYPE(Meta, (), 0, App::Prop_None, "Map with additional meta information");
// create the uuid for the document
Base::Uuid id;
ADD_PROPERTY_TYPE(Id, (""), 0, App::Prop_None, "ID (Part-Number) of the Item");
ADD_PROPERTY_TYPE(Uid, (id), 0, App::Prop_None, "UUID of the Item");
// license stuff
ADD_PROPERTY_TYPE(License, ("CC BY 3.0"), 0, App::Prop_None, "License string of the Item");
ADD_PROPERTY_TYPE(LicenseURL, ("http://creativecommons.org/licenses/by/3.0/"), 0, App::Prop_None, "URL to the license text/contract");
// color and apperance
ADD_PROPERTY(Color, (1.0, 1.0, 1.0, 1.0)); // set transparent -> not used
}
Part::~Part(void)
{
}
PyObject *Part::getPyObject()
{
if (PythonObject.is(Py::_None())){
// ref counter is set to 1
PythonObject = Py::Object(new PartPy(this),true);
}
return Py::new_reference_to(PythonObject);
}
void Part::onSettingDocument() {
if(connection.connected())
connection.disconnect();
getDocument()->signalDeletedObject.connect(boost::bind(&Part::onDelete, this, _1));
App::DocumentObject::onSettingDocument();
}
<|fim▁hole|> if(&obj == this) {
//delete all child objects if needed
this->removeObjectsFromDocument();
}
}
// Python feature ---------------------------------------------------------
// Not quit sure yet makeing Part derivable in Python is good Idea!
// JR 2014
//namespace App {
///// @cond DOXERR
//PROPERTY_SOURCE_TEMPLATE(App::PartPython, App::Part)
//template<> const char* App::PartPython::getViewProviderName(void) const {
// return "Gui::ViewProviderPartPython";
//}
//template<> PyObject* App::PartPython::getPyObject(void) {
// if (PythonObject.is(Py::_None())) {
// // ref counter is set to 1
// PythonObject = Py::Object(new FeaturePythonPyT<App::PartPy>(this),true);
// }
// return Py::new_reference_to(PythonObject);
//}
///// @endcond
//
//// explicit template instantiation
//template class AppExport FeaturePythonT<App::Part>;
//}<|fim▁end|> | void Part::onDelete(const App::DocumentObject& obj) {
|
<|file_name|>mainscr.cpp<|end_file_name|><|fim▁begin|>// _________ __ __
// / _____// |_____________ _/ |______ ____ __ __ ______
// \_____ \\ __\_ __ \__ \\ __\__ \ / ___\| | \/ ___/
// / \| | | | \// __ \| | / __ \_/ /_/ > | /\___ |
// /_______ /|__| |__| (____ /__| (____ /\___ /|____//____ >
// \/ \/ \//_____/ \/
// ______________________ ______________________
// T H E W A R B E G I N S
// Stratagus - A free fantasy real time strategy game engine
//
/**@name mainscr.cpp - The main screen. */
//
// (c) Copyright 1998-2007 by Lutz Sammer, Valery Shchedrin, and
// Jimmy Salmon
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; only version 2 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
// 02111-1307, USA.
//
//@{
/*----------------------------------------------------------------------------
-- Includes
----------------------------------------------------------------------------*/
#include "stratagus.h"
#include "action/action_built.h"
#include "action/action_research.h"
#include "action/action_train.h"
#include "action/action_upgradeto.h"
#include "font.h"
#include "icons.h"
#include "interface.h"
#include "map.h"
#include "menus.h"
#include "network.h"
#include "player.h"
#include "settings.h"
#include "sound.h"
#include "spells.h"
#include "translate.h"
#include "trigger.h"
#include "ui/contenttype.h"
#include "ui.h"
#include "unit.h"
#include "unitsound.h"
#include "unittype.h"
#include "upgrade.h"
#include "video.h"
#ifdef DEBUG
#include "../ai/ai_local.h"
#endif
#include <sstream>
/*----------------------------------------------------------------------------
-- UI BUTTONS
----------------------------------------------------------------------------*/
static void DrawMenuButtonArea_noNetwork()
{
if (UI.MenuButton.X != -1) {
DrawUIButton(UI.MenuButton.Style,
(ButtonAreaUnderCursor == ButtonAreaMenu
&& ButtonUnderCursor == ButtonUnderMenu ? MI_FLAGS_ACTIVE : 0) |
(GameMenuButtonClicked ? MI_FLAGS_CLICKED : 0),
UI.MenuButton.X, UI.MenuButton.Y,
UI.MenuButton.Text);
}
}
static void DrawMenuButtonArea_Network()
{
if (UI.NetworkMenuButton.X != -1) {
DrawUIButton(UI.NetworkMenuButton.Style,
(ButtonAreaUnderCursor == ButtonAreaMenu
&& ButtonUnderCursor == ButtonUnderNetworkMenu ? MI_FLAGS_ACTIVE : 0) |
(GameMenuButtonClicked ? MI_FLAGS_CLICKED : 0),
UI.NetworkMenuButton.X, UI.NetworkMenuButton.Y,
UI.NetworkMenuButton.Text);
}
if (UI.NetworkDiplomacyButton.X != -1) {
DrawUIButton(UI.NetworkDiplomacyButton.Style,
(ButtonAreaUnderCursor == ButtonAreaMenu
&& ButtonUnderCursor == ButtonUnderNetworkDiplomacy ? MI_FLAGS_ACTIVE : 0) |
(GameDiplomacyButtonClicked ? MI_FLAGS_CLICKED : 0),
UI.NetworkDiplomacyButton.X, UI.NetworkDiplomacyButton.Y,
UI.NetworkDiplomacyButton.Text);
}
}
/**
** Draw menu button area.
*/
void DrawMenuButtonArea()
{
if (!IsNetworkGame()) {
DrawMenuButtonArea_noNetwork();
} else {
DrawMenuButtonArea_Network();
}
}
void DrawUserDefinedButtons()
{
for (size_t i = 0; i < UI.UserButtons.size(); ++i) {
const CUIUserButton &button = UI.UserButtons[i];
if (button.Button.X != -1) {
DrawUIButton(button.Button.Style,
(ButtonAreaUnderCursor == ButtonAreaUser
&& size_t(ButtonUnderCursor) == i ? MI_FLAGS_ACTIVE : 0) |
(button.Clicked ? MI_FLAGS_CLICKED : 0),
button.Button.X, button.Button.Y,
button.Button.Text);
}
}
}
/*----------------------------------------------------------------------------
-- Icons
----------------------------------------------------------------------------*/
/**
** Draw life bar of a unit at x,y.
** Placed under icons on top-panel.
**
** @param unit Pointer to unit.
** @param x Screen X position of icon
** @param y Screen Y position of icon
*/
static void UiDrawLifeBar(const CUnit &unit, int x, int y)
{
// FIXME: add icon borders
int hBar, hAll;
if (Preference.IconsShift) {
hBar = 6;
hAll = 10;
} else {
hBar = 5;
hAll = 7;
}
y += unit.Type->Icon.Icon->G->Height;
Video.FillRectangleClip(ColorBlack, x - 4, y + 2,
unit.Type->Icon.Icon->G->Width + 8, hAll);
if (unit.Variable[HP_INDEX].Value) {
Uint32 color;
int f = (100 * unit.Variable[HP_INDEX].Value) / unit.Variable[HP_INDEX].Max;
if (f > 75) {
color = ColorDarkGreen;
} else if (f > 50) {
color = ColorYellow;
} else if (f > 25) {
color = ColorOrange;
} else {
color = ColorRed;
}
f = (f * (unit.Type->Icon.Icon->G->Width + 6)) / 100;
Video.FillRectangleClip(color, x - 2, y + 4,
f > 1 ? f - 2 : 0, hBar);
}
}
/**
** Draw mana bar of a unit at x,y.
** Placed under icons on top-panel.
**
** @param unit Pointer to unit.
** @param x Screen X position of icon
** @param y Screen Y position of icon
*/
static void UiDrawManaBar(const CUnit &unit, int x, int y)
{
// FIXME: add icon borders
y += unit.Type->Icon.Icon->G->Height;
Video.FillRectangleClip(ColorBlack, x, y + 3, unit.Type->Icon.Icon->G->Width, 4);
if (unit.Stats->Variables[MANA_INDEX].Max) {
int f = (100 * unit.Variable[MANA_INDEX].Value) / unit.Variable[MANA_INDEX].Max;
f = (f * (unit.Type->Icon.Icon->G->Width)) / 100;
Video.FillRectangleClip(ColorBlue, x + 1, y + 3 + 1, f, 2);
}
}
/**
** Tell if we can show the content.
** verify each sub condition for that.
**
** @param condition condition to verify.
** @param unit unit that certain condition can refer.
**
** @return 0 if we can't show the content, else 1.
*/
static bool CanShowContent(const ConditionPanel *condition, const CUnit &unit)
{
if (!condition) {
return true;
}
if ((condition->ShowOnlySelected && !unit.Selected)
|| (unit.Player->Type == PlayerNeutral && condition->HideNeutral)
|| (ThisPlayer->IsEnemy(unit) && !condition->ShowOpponent)
|| (ThisPlayer->IsAllied(unit) && (unit.Player != ThisPlayer) && condition->HideAllied)) {
return false;
}
if (condition->BoolFlags && !unit.Type->CheckUserBoolFlags(condition->BoolFlags)) {
return false;
}
if (condition->Variables) {
for (unsigned int i = 0; i < UnitTypeVar.GetNumberVariable(); ++i) {
if (condition->Variables[i] != CONDITION_TRUE) {
if ((condition->Variables[i] == CONDITION_ONLY) ^ unit.Variable[i].Enable) {
return false;
}
}
}
}
return true;
}
enum UStrIntType {
USTRINT_STR, USTRINT_INT
};
struct UStrInt {
union {const char *s; int i;};
UStrIntType type;
};
/**
** Return the value corresponding.
**
** @param unit Unit.
** @param index Index of the variable.
** @param e Component of the variable.
** @param t Which var use (0:unit, 1:Type, 2:Stats)
**
** @return Value corresponding
*/
UStrInt GetComponent(const CUnit &unit, int index, EnumVariable e, int t)
{
UStrInt val;
CVariable *var;
Assert((unsigned int) index < UnitTypeVar.GetNumberVariable());
switch (t) {
case 0: // Unit:
var = &unit.Variable[index];
break;
case 1: // Type:
var = &unit.Type->DefaultStat.Variables[index];
break;
case 2: // Stats:
var = &unit.Stats->Variables[index];
break;
default:
DebugPrint("Bad value for GetComponent: t = %d" _C_ t);
var = &unit.Variable[index];
break;
}
switch (e) {
case VariableValue:
val.type = USTRINT_INT;
val.i = var->Value;
break;
case VariableMax:
val.type = USTRINT_INT;
val.i = var->Max;
break;
case VariableIncrease:
val.type = USTRINT_INT;
val.i = var->Increase;
break;
case VariableDiff:
val.type = USTRINT_INT;
val.i = var->Max - var->Value;
break;
case VariablePercent:
Assert(unit.Variable[index].Max != 0);
val.type = USTRINT_INT;
val.i = 100 * var->Value / var->Max;
break;
case VariableName:
if (index == GIVERESOURCE_INDEX) {
val.type = USTRINT_STR;
val.i = unit.Type->GivesResource;
val.s = DefaultResourceNames[unit.Type->GivesResource].c_str();
} else if (index == CARRYRESOURCE_INDEX) {
val.type = USTRINT_STR;
val.i = unit.CurrentResource;
val.s = DefaultResourceNames[unit.CurrentResource].c_str();
} else {
val.type = USTRINT_STR;
val.i = index;
val.s = UnitTypeVar.VariableNameLookup[index];
}
break;
}
return val;
}
UStrInt GetComponent(const CUnitType &type, int index, EnumVariable e, int t)
{
UStrInt val;
CVariable *var;
Assert((unsigned int) index < UnitTypeVar.GetNumberVariable());
switch (t) {
case 0: // Unit:
var = &type.Stats[ThisPlayer->Index].Variables[index];;
break;
case 1: // Type:
var = &type.DefaultStat.Variables[index];
break;
case 2: // Stats:
var = &type.Stats[ThisPlayer->Index].Variables[index];
break;
default:
DebugPrint("Bad value for GetComponent: t = %d" _C_ t);
var = &type.Stats[ThisPlayer->Index].Variables[index];
break;
}
switch (e) {
case VariableValue:
val.type = USTRINT_INT;
val.i = var->Value;
break;
case VariableMax:
val.type = USTRINT_INT;
val.i = var->Max;
break;
case VariableIncrease:
val.type = USTRINT_INT;
val.i = var->Increase;
break;
case VariableDiff:
val.type = USTRINT_INT;
val.i = var->Max - var->Value;
break;
case VariablePercent:
Assert(type.Stats[ThisPlayer->Index].Variables[index].Max != 0);
val.type = USTRINT_INT;
val.i = 100 * var->Value / var->Max;
break;
case VariableName:
if (index == GIVERESOURCE_INDEX) {
val.type = USTRINT_STR;
val.i = type.GivesResource;
val.s = DefaultResourceNames[type.GivesResource].c_str();
} else {
val.type = USTRINT_STR;
val.i = index;
val.s = UnitTypeVar.VariableNameLookup[index];
}
break;
}
return val;
}
static void DrawUnitInfo_Training(const CUnit &unit)
{
if (unit.Orders.size() == 1 || unit.Orders[1]->Action != UnitActionTrain) {
if (!UI.SingleTrainingText.empty()) {
CLabel label(*UI.SingleTrainingFont);
label.Draw(UI.SingleTrainingTextX, UI.SingleTrainingTextY, UI.SingleTrainingText);
}
if (UI.SingleTrainingButton) {
const COrder_Train &order = *static_cast<COrder_Train *>(unit.CurrentOrder());
CIcon &icon = *order.GetUnitType().Icon.Icon;
const unsigned int flags = (ButtonAreaUnderCursor == ButtonAreaTraining && ButtonUnderCursor == 0) ?
(IconActive | (MouseButtons & LeftButton)) : 0;
const PixelPos pos(UI.SingleTrainingButton->X, UI.SingleTrainingButton->Y);
icon.DrawUnitIcon(*UI.SingleTrainingButton->Style, flags, pos, "", unit.RescuedFrom
? GameSettings.Presets[unit.RescuedFrom->Index].PlayerColor
: GameSettings.Presets[unit.Player->Index].PlayerColor);
}
} else {
if (!UI.TrainingText.empty()) {
CLabel label(*UI.TrainingFont);
label.Draw(UI.TrainingTextX, UI.TrainingTextY, UI.TrainingText);
}
if (!UI.TrainingButtons.empty()) {
for (size_t i = 0; i < unit.Orders.size()
&& i < UI.TrainingButtons.size(); ++i) {
if (unit.Orders[i]->Action == UnitActionTrain) {
const COrder_Train &order = *static_cast<COrder_Train *>(unit.Orders[i]);
CIcon &icon = *order.GetUnitType().Icon.Icon;
const int flag = (ButtonAreaUnderCursor == ButtonAreaTraining
&& static_cast<size_t>(ButtonUnderCursor) == i) ?
(IconActive | (MouseButtons & LeftButton)) : 0;
const PixelPos pos(UI.TrainingButtons[i].X, UI.TrainingButtons[i].Y);
icon.DrawUnitIcon(*UI.TrainingButtons[i].Style, flag, pos, "", unit.RescuedFrom
? GameSettings.Presets[unit.RescuedFrom->Index].PlayerColor
: GameSettings.Presets[unit.Player->Index].PlayerColor);
}
}
}
}
}
static void DrawUnitInfo_portrait(const CUnit &unit)
{
const CUnitType &type = *unit.Type;
#ifdef USE_MNG
if (type.Portrait.Num) {
type.Portrait.Mngs[type.Portrait.CurrMng]->Draw(
UI.SingleSelectedButton->X, UI.SingleSelectedButton->Y);
if (type.Portrait.Mngs[type.Portrait.CurrMng]->iteration == type.Portrait.NumIterations) {
type.Portrait.Mngs[type.Portrait.CurrMng]->Reset();
// FIXME: should be configurable
if (type.Portrait.CurrMng == 0) {
type.Portrait.CurrMng = (SyncRand() % (type.Portrait.Num - 1)) + 1;
type.Portrait.NumIterations = 1;
} else {
type.Portrait.CurrMng = 0;
type.Portrait.NumIterations = SyncRand() % 16 + 1;
}
}
return;
}
#endif
if (UI.SingleSelectedButton) {
const PixelPos pos(UI.SingleSelectedButton->X, UI.SingleSelectedButton->Y);
const int flag = (ButtonAreaUnderCursor == ButtonAreaSelected && ButtonUnderCursor == 0) ?
(IconActive | (MouseButtons & LeftButton)) : 0;
type.Icon.Icon->DrawUnitIcon(*UI.SingleSelectedButton->Style, flag, pos, "", unit.RescuedFrom
? GameSettings.Presets[unit.RescuedFrom->Index].PlayerColor
: GameSettings.Presets[unit.Player->Index].PlayerColor);
}
}
/**
**  Draw the info-panel content for the single selected unit when its
**  current action has a dedicated display (training / upgrading / researching).
**
**  @param unit  The selected unit.
**
**  @return true if the action was handled here (caller should stop drawing),
**          false for any other action.
*/
static bool DrawUnitInfo_single_selection(const CUnit &unit)
{
    switch (unit.CurrentAction()) {
        case UnitActionTrain: { //  Building training units.
            DrawUnitInfo_Training(unit);
            return true;
        }
        case UnitActionUpgradeTo: { //  Building upgrading to better type.
            if (UI.UpgradingButton) {
                const COrder_UpgradeTo &order = *static_cast<COrder_UpgradeTo *>(unit.CurrentOrder());
                CIcon &icon = *order.GetUnitType().Icon.Icon;
                // Highlight (and press) the icon while the cursor is on the upgrade button.
                unsigned int flag = (ButtonAreaUnderCursor == ButtonAreaUpgrading
                                     && ButtonUnderCursor == 0) ?
                                    (IconActive | (MouseButtons & LeftButton)) : 0;
                const PixelPos pos(UI.UpgradingButton->X, UI.UpgradingButton->Y);
                // Rescued units are drawn in the color of the player they were rescued from.
                icon.DrawUnitIcon(*UI.UpgradingButton->Style, flag, pos, "", unit.RescuedFrom
                                  ? GameSettings.Presets[unit.RescuedFrom->Index].PlayerColor
                                  : GameSettings.Presets[unit.Player->Index].PlayerColor);
            }
            return true;
        }
        case UnitActionResearch: { //  Building research new technology.
            if (UI.ResearchingButton) {
                COrder_Research &order = *static_cast<COrder_Research *>(unit.CurrentOrder());
                CIcon &icon = *order.GetUpgrade().Icon;
                // Highlight (and press) the icon while the cursor is on the research button.
                int flag = (ButtonAreaUnderCursor == ButtonAreaResearching
                            && ButtonUnderCursor == 0) ?
                           (IconActive | (MouseButtons & LeftButton)) : 0;
                PixelPos pos(UI.ResearchingButton->X, UI.ResearchingButton->Y);
                icon.DrawUnitIcon(*UI.ResearchingButton->Style, flag, pos, "", unit.RescuedFrom
                                  ? GameSettings.Presets[unit.RescuedFrom->Index].PlayerColor
                                  : GameSettings.Presets[unit.Player->Index].PlayerColor);
            }
            return true;
        }
        default:
            return false;
    }
}
/**
**  Draw the icons (plus life/mana bars) of the units boarded in a transporter
**  into the transporting buttons of the info panel.
**
**  @param unit  The transporter unit.
*/
static void DrawUnitInfo_transporter(CUnit &unit)
{
    CUnit *uins = unit.UnitInside;
    size_t j = 0;

    // Walk the intrusive list of units inside; only boarded units occupy a button.
    for (int i = 0; i < unit.InsideCount; ++i, uins = uins->NextContained) {
        if (!uins->Boarded || j >= UI.TransportingButtons.size()) {
            continue;
        }
        CIcon &icon = *uins->Type->Icon.Icon;
        // Highlight (and press) the icon under the cursor.
        int flag = (ButtonAreaUnderCursor == ButtonAreaTransporting && static_cast<size_t>(ButtonUnderCursor) == j) ?
                   (IconActive | (MouseButtons & LeftButton)) : 0;
        const PixelPos pos(UI.TransportingButtons[j].X, UI.TransportingButtons[j].Y);
        // Rescued units are drawn in the color of the player they were rescued from.
        icon.DrawUnitIcon(*UI.TransportingButtons[j].Style, flag, pos, "", unit.RescuedFrom
                          ? GameSettings.Presets[unit.RescuedFrom->Index].PlayerColor
                          : GameSettings.Presets[unit.Player->Index].PlayerColor);
        UiDrawLifeBar(*uins, pos.x, pos.y);
        // Mana bar only for casters that actually have a mana pool.
        if (uins->Type->CanCastSpell && uins->Variable[MANA_INDEX].Max) {
            UiDrawManaBar(*uins, pos.x, pos.y);
        }
        // Show the hovered passenger's name in the status line.
        if (ButtonAreaUnderCursor == ButtonAreaTransporting
            && static_cast<size_t>(ButtonUnderCursor) == j) {
            UI.StatusLine.Set(uins->Type->Name);
        }
        ++j;
    }
}
/**
** Draw the unit info into top-panel.
**
** @param unit Pointer to unit.
*/
static void DrawUnitInfo(CUnit &unit)
{
    UpdateUnitVariables(unit);
    // Draw every configured info-panel content whose condition matches this unit.
    for (size_t i = 0; i != UI.InfoPanelContents.size(); ++i) {
        if (CanShowContent(UI.InfoPanelContents[i]->Condition, unit)) {
            for (std::vector<CContentType *>::const_iterator content = UI.InfoPanelContents[i]->Contents.begin();
                 content != UI.InfoPanelContents[i]->Contents.end(); ++content) {
                if (CanShowContent((*content)->Condition, unit)) {
                    (*content)->Draw(unit, UI.InfoPanelContents[i]->DefaultFont);
                }
            }
        }
    }
    const CUnitType &type = *unit.Type;
    Assert(&type);

    // Draw IconUnit
    DrawUnitInfo_portrait(unit);

    // Detailed progress/transport info is only shown for own or allied units.
    if (unit.Player != ThisPlayer && !ThisPlayer->IsAllied(*unit.Player)) {
        return;
    }
    // Show progress if they are selected.
    if (IsOnlySelected(unit)) {
        if (DrawUnitInfo_single_selection(unit)) {
            return;
        }
    }
    // Transporting units.
    if (type.CanTransport() && unit.BoardCount && CurrentButtonLevel == unit.Type->ButtonLevelForTransporter) {
        DrawUnitInfo_transporter(unit);
        return;
    }
}
/*----------------------------------------------------------------------------
-- RESOURCES
----------------------------------------------------------------------------*/
/**
** Draw the player resource in top line.
**
** @todo FIXME : make DrawResources more configurable (format, font).<|fim▁hole|>*/
void DrawResources()
{
    CLabel label(GetGameFont());

    // Draw all icons of resource (<= intentionally includes the FreeWorkersCount slot).
    for (int i = 0; i <= FreeWorkersCount; ++i) {
        if (UI.Resources[i].G) {
            UI.Resources[i].G->DrawFrameClip(UI.Resources[i].IconFrame,
                                             UI.Resources[i].IconX, UI.Resources[i].IconY);
        }
    }
    // Numeric amounts for the regular cost resources.
    for (int i = 0; i < MaxCosts; ++i) {
        if (UI.Resources[i].TextX != -1) {
            const int resourceAmount = ThisPlayer->Resources[i];

            if (ThisPlayer->MaxResources[i] != -1) {
                // Capped resource: show "total (remaining storage)".
                const int resAmount = ThisPlayer->StoredResources[i] + ThisPlayer->Resources[i];
                char tmp[256];
                snprintf(tmp, sizeof(tmp), "%d (%d)", resAmount, ThisPlayer->MaxResources[i] - ThisPlayer->StoredResources[i]);
                label.SetFont(GetSmallFont());
                label.Draw(UI.Resources[i].TextX, UI.Resources[i].TextY + 3, tmp);
            } else {
                // Shrink font (and nudge down) for amounts wider than 5 digits.
                label.SetFont(resourceAmount > 99999 ? GetSmallFont() : GetGameFont());
                label.Draw(UI.Resources[i].TextX, UI.Resources[i].TextY + (resourceAmount > 99999) * 3, resourceAmount);
            }
        }
    }
    // Food/supply: drawn inverted when demand exceeds supply.
    if (UI.Resources[FoodCost].TextX != -1) {
        char tmp[256];
        snprintf(tmp, sizeof(tmp), "%d/%d", ThisPlayer->Demand, ThisPlayer->Supply);
        label.SetFont(GetGameFont());
        if (ThisPlayer->Supply < ThisPlayer->Demand) {
            label.DrawReverse(UI.Resources[FoodCost].TextX, UI.Resources[FoodCost].TextY, tmp);
        } else {
            label.Draw(UI.Resources[FoodCost].TextX, UI.Resources[FoodCost].TextY, tmp);
        }
    }
    // Score display.
    if (UI.Resources[ScoreCost].TextX != -1) {
        const int score = ThisPlayer->Score;
        label.SetFont(score > 99999 ? GetSmallFont() : GetGameFont());
        label.Draw(UI.Resources[ScoreCost].TextX, UI.Resources[ScoreCost].TextY + (score > 99999) * 3, score);
    }
    // Number of idle workers.
    if (UI.Resources[FreeWorkersCount].TextX != -1) {
        const int workers = ThisPlayer->FreeWorkers.size();
        label.SetFont(GetGameFont());
        label.Draw(UI.Resources[FreeWorkersCount].TextX, UI.Resources[FreeWorkersCount].TextY, workers);
    }
}
/*----------------------------------------------------------------------------
-- MESSAGE
----------------------------------------------------------------------------*/
#define MESSAGES_MAX 10                        /// How many messages can be displayed at once

static char  MessagesEvent[MESSAGES_MAX][256]; /// Array of event message texts
static Vec2i MessagesEventPos[MESSAGES_MAX];   /// Map coordinate of each event
static int   MessagesEventCount;               /// Number of stored event messages
static int   MessagesEventIndex;               /// Next event shown/centered by CenterOnMessage()
/**
**  Scrolling on-map message display: keeps up to MESSAGES_MAX lines,
**  collapses repeats, and scrolls old lines out over time.
*/
class MessagesDisplay
{
public:
    MessagesDisplay() : show(true)
    {
#ifdef DEBUG
        showBuilList = false;
#endif
        CleanMessages();
    }

    void UpdateMessages();
    void AddUniqueMessage(const char *s);
    void DrawMessages();
    void CleanMessages();
    void ToggleShowMessages() { show = !show; }
#ifdef DEBUG
    void ToggleShowBuilListMessages() { showBuilList = !showBuilList; }
#endif

protected:
    void ShiftMessages();
    void AddMessage(const char *msg);
    bool CheckRepeatMessage(const char *msg);

private:
    char Messages[MESSAGES_MAX][256];    /// Array of message texts
    int MessagesCount;                   /// Number of stored messages
    int MessagesSameCount;               /// Counts consecutive repeats of the last message
    int MessagesScrollY;                 /// Pixel offset while the top line scrolls out
    unsigned long MessagesFrameTimeout;  /// Tick at which the top message expires
    bool show;                           /// Whether messages are drawn at all
#ifdef DEBUG
    bool showBuilList;                   /// Debug: show the AI build queue instead
#endif
};
/**
**  Drop the oldest message and move the remaining ones up one slot.
*/
void MessagesDisplay::ShiftMessages()
{
    if (!MessagesCount) {
        return;
    }
    --MessagesCount;
    for (int slot = 0; slot < MessagesCount; ++slot) {
        strcpy_s(Messages[slot], sizeof(Messages[slot]), Messages[slot + 1]);
    }
}
/**
**  Update messages: advance the scroll-out animation of the top line and
**  remove it once it has fully scrolled past one font height.
**
**  @todo FIXME: make scroll speed configurable.
*/
void MessagesDisplay::UpdateMessages()
{
    if (!MessagesCount) {
        return;
    }

    // Scroll/remove old message line once its timeout has elapsed.
    const unsigned long ticks = GetTicks();
    if (MessagesFrameTimeout < ticks) {
        ++MessagesScrollY;
        // Fully scrolled out (font height + 1 px spacing): drop it and re-arm the timer.
        if (MessagesScrollY == UI.MessageFont->Height() + 1) {
            MessagesFrameTimeout = ticks + UI.MessageScrollSpeed * 1000;
            MessagesScrollY = 0;
            ShiftMessages();
        }
    }
}
/**
**  Draw message(s): a translucent background box plus one clipped text line
**  per stored message. In DEBUG builds can instead show the AI build queue.
**
**  @todo FIXME: make message font configurable.
*/
void MessagesDisplay::DrawMessages()
{
    if (show && Preference.ShowMessages) {
        CLabel label(*UI.MessageFont);
#ifdef DEBUG
        if (showBuilList && ThisPlayer->Ai) {
            // Debug mode: list the AI's build queue instead of the messages.
            char buffer[256];
            int count = ThisPlayer->Ai->UnitTypeBuilt.size();
            // Draw message line(s)
            for (int z = 0; z < count; ++z) {
                if (z == 0) {
                    PushClipping();
                    SetClipping(UI.MapArea.X + 8, UI.MapArea.Y + 8,
                                Video.Width - 1, Video.Height - 1);
                }
                snprintf(buffer, 256, "%s (%d/%d) Wait %lu [%d,%d]",
                         ThisPlayer->Ai->UnitTypeBuilt[z].Type->Name.c_str(),
                         ThisPlayer->Ai->UnitTypeBuilt[z].Made,
                         ThisPlayer->Ai->UnitTypeBuilt[z].Want,
                         ThisPlayer->Ai->UnitTypeBuilt[z].Wait,
                         ThisPlayer->Ai->UnitTypeBuilt[z].Pos.x,
                         ThisPlayer->Ai->UnitTypeBuilt[z].Pos.y);
                label.DrawClip(UI.MapArea.X + 8,
                               UI.MapArea.Y + 8 + z * (UI.MessageFont->Height() + 1),
                               buffer);
                if (z == 0) {
                    PopClipping();
                }
            }
        } else {
#endif
            // Translucent background so the text is easier to read.
            if (MessagesCount) {
                int textHeight = MessagesCount * (UI.MessageFont->Height() + 1);
                Uint32 color = Video.MapRGB(TheScreen->format, 38, 38, 78);

                Video.FillTransRectangleClip(color, UI.MapArea.X + 7, UI.MapArea.Y + 7,
                                             UI.MapArea.EndX - UI.MapArea.X - 16,
                                             textHeight - MessagesScrollY + 1, 0x80);

                Video.DrawRectangle(color, UI.MapArea.X + 6, UI.MapArea.Y + 6,
                                    UI.MapArea.EndX - UI.MapArea.X - 15,
                                    textHeight - MessagesScrollY + 2);
            }

            // Draw message line(s); clipping is set up once around the batch.
            for (int z = 0; z < MessagesCount; ++z) {
                if (z == 0) {
                    PushClipping();
                    SetClipping(UI.MapArea.X + 8, UI.MapArea.Y + 8, Video.Width - 1,
                                Video.Height - 1);
                }
                /*
                 * Due parallel drawing we have to force message copy due temp
                 * std::string(Messages[z]) creation because
                 * char * pointer may change during text drawing.
                 */
                label.DrawClip(UI.MapArea.X + 8,
                               UI.MapArea.Y + 8 +
                               z * (UI.MessageFont->Height() + 1) - MessagesScrollY,
                               std::string(Messages[z]));
                if (z == 0) {
                    PopClipping();
                }
            }
            // All messages gone: reset the repeat counter.
            if (MessagesCount < 1) {
                MessagesSameCount = 0;
            }
#ifdef DEBUG
        }
#endif
    }
}
/**
**  Adds a message to the stack, word-wrapping it to fit the map area width
**  and the 256-byte line buffer. Overflow text is re-added recursively as
**  additional message lines.
**
**  @param msg  Message to add.
*/
void MessagesDisplay::AddMessage(const char *msg)
{
    unsigned long ticks = GetTicks();

    // First message: arm the scroll-out timer.
    if (!MessagesCount) {
        MessagesFrameTimeout = ticks + UI.MessageScrollSpeed * 1000;
    }

    if (MessagesCount == MESSAGES_MAX) {
        // Out of space to store messages, can't scroll smoothly
        ShiftMessages();
        MessagesFrameTimeout = ticks + UI.MessageScrollSpeed * 1000;
        MessagesScrollY = 0;
    }

    char *ptr;       // end of the part kept on this line
    char *next;      // start of the overflow appended as the next line
    char *message = Messages[MessagesCount];
    // Split long message into lines
    if (strlen(msg) >= sizeof(Messages[0])) {
        // Message longer than the line buffer: truncate, then back up to the
        // last space so the cut happens on a word boundary if possible.
        strncpy(message, msg, sizeof(Messages[0]) - 1);
        ptr = message + sizeof(Messages[0]) - 1;
        *ptr-- = '\0';
        next = ptr + 1;
        while (ptr >= message) {
            if (*ptr == ' ') {
                *ptr = '\0';
                next = ptr + 1;
                break;
            }
            --ptr;
        }
        // No space at all: keep the hard cut.
        if (ptr < message) {
            ptr = next - 1;
        }
    } else {
        strcpy_s(message, sizeof(Messages[MessagesCount]), msg);
        next = ptr = message + strlen(message);
    }

    // Shorten the line until it fits into the map area width, preferring to
    // break at the last space.
    while (UI.MessageFont->Width(message) + 8 >= UI.MapArea.EndX - UI.MapArea.X) {
        while (1) {
            --ptr;
            if (*ptr == ' ') {
                *ptr = '\0';
                next = ptr + 1;
                break;
            } else if (ptr == message) {
                break;
            }
        }
        // No space found, wrap in the middle of a word
        if (ptr == message) {
            ptr = next - 1;
            while (UI.MessageFont->Width(message) + 8 >= UI.MapArea.EndX - UI.MapArea.X) {
                *--ptr = '\0';
            }
            next = ptr + 1;
            break;
        }
    }

    ++MessagesCount;

    // Anything left over becomes (recursively) the next message line.
    if (strlen(msg) != (size_t)(ptr - message)) {
        AddMessage(msg + (next - message));
    }
}
/**
**  Check if this message repeats the previous one. Repeats are not stored;
**  instead a "Last message repeated N times" summary is emitted once a
**  different message arrives.
**
**  @param msg  Message to check.
**
**  @return true to skip this message
*/
bool MessagesDisplay::CheckRepeatMessage(const char *msg)
{
    if (MessagesCount < 1) {
        return false;
    }
    // Same as the newest stored message: just count it.
    if (!strcmp(msg, Messages[MessagesCount - 1])) {
        ++MessagesSameCount;
        return true;
    }
    // A different message ends a repeat run: summarize the run first.
    if (MessagesSameCount > 0) {
        char temp[256];
        int n = MessagesSameCount;
        MessagesSameCount = 0;
        // NOTE: vladi: yep it's a tricky one, but should work fine probably :)
        snprintf(temp, sizeof(temp), _("Last message repeated ~<%d~> times"), n + 1);
        AddMessage(temp);
    }
    return false;
}
/**
**  Add a new message to display, unless it merely repeats the previous one
**  (repeats are counted and summarized instead).
*/
void MessagesDisplay::AddUniqueMessage(const char *s)
{
    if (CheckRepeatMessage(s)) {
        return;
    }
    AddMessage(s);
}
/**
**  Clean up messages: reset all message and event-message state to empty.
*/
void MessagesDisplay::CleanMessages()
{
    MessagesCount = 0;
    MessagesSameCount = 0;
    MessagesScrollY = 0;
    MessagesFrameTimeout = 0;

    // Also reset the file-level event-message bookkeeping.
    MessagesEventCount = 0;
    MessagesEventIndex = 0;
}
/// The single global message display instance.
static MessagesDisplay allmessages;

/**
**  Update messages (advance scroll-out of the oldest line).
*/
void UpdateMessages()
{
    allmessages.UpdateMessages();
}
/**
**  Clean messages (drop all stored message and event state).
*/
void CleanMessages()
{
    allmessages.CleanMessages();
}

/**
**  Draw messages onto the map area.
*/
void DrawMessages()
{
    allmessages.DrawMessages();
}
/**
**  Set message to display.
**
**  @param fmt  printf-style format; the expansion is truncated to 511 chars.
*/
void SetMessage(const char *fmt, ...)
{
    char temp[512];
    va_list va;

    va_start(va, fmt);
    vsnprintf(temp, sizeof(temp) - 1, fmt, va);
    temp[sizeof(temp) - 1] = '\0'; // guarantee termination regardless of vsnprintf variant
    va_end(va);
    allmessages.AddUniqueMessage(temp);
}
/**
**  Drop the oldest event message (text and position) and shift the rest up.
*/
void ShiftMessagesEvent()
{
    if (!MessagesEventCount) {
        return;
    }
    --MessagesEventCount;
    for (int slot = 0; slot < MessagesEventCount; ++slot) {
        MessagesEventPos[slot] = MessagesEventPos[slot + 1];
        strcpy_s(MessagesEvent[slot], sizeof(MessagesEvent[slot]), MessagesEvent[slot + 1]);
    }
}
/**
**  Set message to display and remember the map position it refers to,
**  so CenterOnMessage() can later jump to it.
**
**  @param pos  Message pos map origin.
**  @param fmt  To be displayed in text overlay (printf-style, truncated to 255 chars).
**
**  @note FIXME: vladi: I know this can be just separated func w/o msg but
**               it is handy to stick all in one call, someone?
*/
void SetMessageEvent(const Vec2i &pos, const char *fmt, ...)
{
    Assert(Map.Info.IsPointOnMap(pos));

    char temp[256];
    va_list va;

    va_start(va, fmt);
    vsnprintf(temp, sizeof(temp) - 1, fmt, va);
    temp[sizeof(temp) - 1] = '\0'; // guarantee termination
    va_end(va);
    allmessages.AddUniqueMessage(temp);

    // Ring-like storage: drop the oldest event when full.
    if (MessagesEventCount == MESSAGES_MAX) {
        ShiftMessagesEvent();
    }

    strcpy_s(MessagesEvent[MessagesEventCount], sizeof(MessagesEvent[MessagesEventCount]), temp);
    MessagesEventPos[MessagesEventCount] = pos;
    MessagesEventIndex = MessagesEventCount;
    ++MessagesEventCount;
}
/**
**  Goto message origin: center the selected viewport on the stored event
**  position and re-display its text. Repeated calls cycle through events.
*/
void CenterOnMessage()
{
    // Wrap around once past the newest event.
    if (MessagesEventIndex >= MessagesEventCount) {
        MessagesEventIndex = 0;
    }
    if (MessagesEventCount == 0) {
        return;
    }
    const Vec2i &pos(MessagesEventPos[MessagesEventIndex]);
    UI.SelectedViewport->Center(Map.TilePosToMapPixelPos_Center(pos));
    SetMessage(_("~<Event: %s~>"), MessagesEvent[MessagesEventIndex]);
    ++MessagesEventIndex;
}
/// Toggle whether on-map messages are drawn at all.
void ToggleShowMessages()
{
    allmessages.ToggleShowMessages();
}

#ifdef DEBUG
/// Debug builds only: toggle showing the AI build queue instead of messages.
void ToggleShowBuilListMessages()
{
    allmessages.ToggleShowBuilListMessages();
}
#endif
/*----------------------------------------------------------------------------
-- INFO PANEL
----------------------------------------------------------------------------*/
/**
**  Draw info panel background.
**
**  @param frame  frame nr. of the info panel background graphic to draw.
*/
static void DrawInfoPanelBackground(unsigned frame)
{
    // No-op when no background graphic is configured.
    if (UI.InfoPanel.G) {
        UI.InfoPanel.G->DrawFrame(frame, UI.InfoPanel.X, UI.InfoPanel.Y);
    }
}
/**
**  Draw the info panel when nothing is selected: shows the unit under the
**  cursor if any, otherwise a small game/player overview table.
*/
static void InfoPanel_draw_no_selection()
{
    DrawInfoPanelBackground(0);
    if (UnitUnderCursor && UnitUnderCursor->IsVisible(*ThisPlayer)
        && !UnitUnderCursor->Type->BoolFlag[ISNOTSELECTABLE_INDEX].value) {
        // FIXME: not correct for enemies units
        DrawUnitInfo(*UnitUnderCursor);
    } else {
        // FIXME: need some cool ideas for this.
        int x = UI.InfoPanel.X + 16;
        int y = UI.InfoPanel.Y + 8;

        CLabel label(GetGameFont());
        label.Draw(x, y, "Stratagus");
        y += 16;
        // Current game cycle and nominal cycles-per-second.
        label.Draw(x, y, _("Cycle:"));
        label.Draw(x + 48, y, GameCycle);
        label.Draw(x + 110, y, CYCLES_PER_SECOND * VideoSyncSpeed / 100);
        y += 20;

        std::string nc;
        std::string rc;

        GetDefaultTextColors(nc, rc);
        // One row per active player: index, color swatch, name, score.
        for (int i = 0; i < PlayerMax - 1; ++i) {
            if (Players[i].Type != PlayerNobody) {
                // Row text colored by diplomacy: green allied, red enemy, default neutral.
                if (ThisPlayer->IsAllied(Players[i])) {
                    label.SetNormalColor(FontGreen);
                } else if (ThisPlayer->IsEnemy(Players[i])) {
                    label.SetNormalColor(FontRed);
                } else {
                    label.SetNormalColor(nc);
                }
                label.Draw(x + 15, y, i);
                Video.DrawRectangleClip(ColorWhite, x, y, 12, 12);
                Video.FillRectangleClip(PlayerColors[GameSettings.Presets[i].PlayerColor][0], x + 1, y + 1, 10, 10);
                label.Draw(x + 27, y, Players[i].Name);
                label.Draw(x + 117, y, Players[i].Score);
                y += 14;
            }
        }
    }
}
/**
**  Draw the info panel for exactly one unit.
**
**  @param selUnit  The unit to show, or NULL to use the first selected unit.
*/
static void InfoPanel_draw_single_selection(CUnit *selUnit)
{
    CUnit &unit = (selUnit ? *selUnit : *Selected[0]);
    int panelIndex;

    // Select the background panel frame by what the unit is doing.
    // FIXME: not correct for enemy's units
    if (unit.Player == ThisPlayer
        || ThisPlayer->IsTeamed(unit)
        || ThisPlayer->IsAllied(unit)
        || ReplayRevealMap) {
        if (unit.Orders[0]->Action == UnitActionBuilt
            || unit.Orders[0]->Action == UnitActionResearch
            || unit.Orders[0]->Action == UnitActionUpgradeTo
            || unit.Orders[0]->Action == UnitActionTrain) {
            panelIndex = 3;  // under construction / producing
        } else if (unit.Stats->Variables[MANA_INDEX].Max) {
            panelIndex = 2;  // magic unit (has a mana pool)
        } else {
            panelIndex = 1;  // normal unit
        }
    } else {
        panelIndex = 0;  // neutral or opponent
    }
    DrawInfoPanelBackground(panelIndex);
    DrawUnitInfo(unit);
    // Show the unit's name in the status line while its button is hovered.
    if (ButtonAreaUnderCursor == ButtonAreaSelected && ButtonUnderCursor == 0) {
        UI.StatusLine.Set(unit.Type->Name);
    }
}
static void InfoPanel_draw_multiple_selection()
{
// If there are more units selected draw their pictures and a health bar
DrawInfoPanelBackground(0);
for (size_t i = 0; i != std::min(Selected.size(), UI.SelectedButtons.size()); ++i) {
const CIcon &icon = *Selected[i]->Type->Icon.Icon;
const PixelPos pos(UI.SelectedButtons[i].X, UI.SelectedButtons[i].Y);
icon.DrawUnitIcon(*UI.SelectedButtons[i].Style,
(ButtonAreaUnderCursor == ButtonAreaSelected && ButtonUnderCursor == (int)i) ?
(IconActive | (MouseButtons & LeftButton)) : 0,
pos, "", Selected[i]->RescuedFrom
? GameSettings.Presets[Selected[i]->RescuedFrom->Index].PlayerColor
: GameSettings.Presets[Selected[i]->Player->Index].PlayerColor);
UiDrawLifeBar(*Selected[i], UI.SelectedButtons[i].X, UI.SelectedButtons[i].Y);
if (ButtonAreaUnderCursor == ButtonAreaSelected && ButtonUnderCursor == (int) i) {
UI.StatusLine.Set(Selected[i]->Type->Name);
}
}
if (Selected.size() > UI.SelectedButtons.size()) {
char buf[5];
sprintf(buf, "+%lu", (long unsigned int)(Selected.size() - UI.SelectedButtons.size()));
CLabel(*UI.MaxSelectedFont).Draw(UI.MaxSelectedTextX, UI.MaxSelectedTextY, buf);
}
}
/**
**  Draw info panel.
**
**  Background panel frames (see InfoPanel_draw_single_selection):
**    neutral       - neutral or opponent
**    normal        - not 1,3,4
**    magic unit    - magic units
**    construction  - under construction
*/
void CInfoPanel::Draw()
{
    // A selectable unit under the cursor takes precedence over the selection.
    if (UnitUnderCursor && Selected.empty() && !UnitUnderCursor->Type->IsNotSelectable
        && (ReplayRevealMap || UnitUnderCursor->IsVisible(*ThisPlayer))) {
        InfoPanel_draw_single_selection(UnitUnderCursor);
    } else {
        switch (Selected.size()) {
            case 0: { InfoPanel_draw_no_selection(); break; }
            case 1: { InfoPanel_draw_single_selection(NULL); break; }
            default: { InfoPanel_draw_multiple_selection(); break; }
        }
    }
}
/*----------------------------------------------------------------------------
-- TIMER
----------------------------------------------------------------------------*/
/**
**  Draw the timer (elapsed/remaining game time in seconds).
**
**  @todo FIXME : make DrawTimer more configurable (Pos, format).
*/
void DrawTimer()
{
    // Nothing to draw until the timer has been initialized.
    if (!GameTimer.Init) {
        return;
    }

    int sec = GameTimer.Cycles / CYCLES_PER_SECOND;
    UI.Timer.Draw(sec);
}
/**
**  Update the timer: accumulate (or subtract, for countdown timers) the
**  game cycles elapsed since the last update.
*/
void UpdateTimer()
{
    if (GameTimer.Running) {
        if (GameTimer.Increasing) {
            GameTimer.Cycles += GameCycle - GameTimer.LastUpdate;
        } else {
            GameTimer.Cycles -= GameCycle - GameTimer.LastUpdate;
            // Countdown timers never go below zero.
            GameTimer.Cycles = std::max(GameTimer.Cycles, 0l);
        }
        GameTimer.LastUpdate = GameCycle;
    }
}
//@}<|fim▁end|> | |
<|file_name|>selectedUserSelector.js<|end_file_name|><|fim▁begin|>import {createSelector} from 'reselect'
import usersSelector from 'usersSelector'
const selectedUserIdSelector = (state) => state.selectedUserId
export default createSelector(<|fim▁hole|><|fim▁end|> | [usersSelector, selectedUserIdSelector],
(users, selectedUserId) => users.get(selectedUserId)
) |
<|file_name|>solve0043.js<|end_file_name|><|fim▁begin|>var library = require('./library.js');
// Returns true when the 3-digit substring of `num` that starts at the
// 1-based digit position `start` is divisible by `div`.
// Improvements over the original: builds the substring with slice() instead
// of a char-concatenation loop, and passes an explicit radix 10 to parseInt
// so substrings with a leading zero (e.g. "063") can never be parsed as
// octal on legacy engines.
var check_cond = function(num, div, start)
{
    var digits = num.toString().slice(start - 1, start + 2);
    return parseInt(digits, 10) % div === 0;
}
// True when `num` satisfies every substring-divisibility condition of
// Project Euler 43: digits 2-4 divisible by 2, 3-5 by 3, 4-6 by 5,
// 5-7 by 7, 6-8 by 11, 7-9 by 13 and 8-10 by 17.
var check_all = function(num)
{
    var primes = [2, 3, 5, 7, 11, 13, 17];
    for (var idx = 0; idx < primes.length; idx += 1) {
        if (!check_cond(num, primes[idx], idx + 2)) {
            return false;
        }
    }
    return true;
}
var solve = function ()
{
var sum = 0;
var start = 1234567890;
var end = 9876543210;
for(var i = start, count = 0; i <= end; i += 1, count += 1)
{
if(count % 1000000 == 0)
{
console.log("\$i : " + i);
}
if(!library.is_pandigital(i, 0))
{
continue;
}
if(!check_all(i))
{
continue;
}
console.log("OK : " + i);
<|fim▁hole|>
// Fast variant of the Project-Euler-43 substring test: instead of parsing
// each 3-digit substring, apply an arithmetic divisibility shortcut per
// prime to the individual digits. n[k] is the k-th (1-based) digit of num.
var check_all_2 = function(num)
{
    var str = num.toString();
    var n = [0];
    for (var k = 0; k < str.length; k += 1) {
        n.push(parseInt(str[k]));
    }
    // Each entry: [prime, digit expression that is ≡ 0 (mod prime) exactly
    // when the corresponding 3-digit substring is divisible by the prime].
    var conditions = [
        [2, n[4]],
        [3, n[3] + n[4] + n[5]],
        [5, n[6]],
        [7, n[5] * 10 + n[6] - 2 * n[7]],
        [11, n[6] * 10 + n[7] - n[8]],
        [13, n[7] * 10 + n[8] + 4 * n[9]],
        [17, n[8] * 10 + n[9] - 5 * n[10]]
    ];
    for (var j = 0; j < conditions.length; j += 1) {
        if (conditions[j][1] % conditions[j][0] !== 0) {
            return false;
        }
    }
    return true;
}
// Sums all 0-9 pandigital 10-digit numbers that satisfy the Project Euler 43
// substring divisibility property. Brute-forces the whole 10-digit range, so
// the cheap arithmetic filter (check_all_2) runs before the more expensive
// pandigital check. NOTE(review): this scans ~8.6e9 candidates — extremely
// slow; permutation-based enumeration would be far faster.
var solve_2 = function ()
{
    var sum = 0;
    var start = 1234567890; // smallest possible 0-9 pandigital
    var end = 9876543210;   // largest possible 0-9 pandigital
    for(var i = start, count = 0; i <= end; i += 1, count += 1)
    {
        // Progress output every million candidates.
        if(count % 1000000 == 0)
        {
            console.log("\$i : " + i);
        }
        // Cheap divisibility filter first ...
        if(!check_all_2(i))
        {
            continue;
        }
        // ... then the pandigital test (from ./library.js).
        if(!library.is_pandigital_v2(i, 0))
        {
            continue;
        }
        console.log("OK : " + i);
        sum += i;
    }
};
var sum = solve_2();
console.log(sum);
//var num = process.argv[2];
//console.log(check_all_2(num));<|fim▁end|> | sum += i;
}
}; |
<|file_name|>electrolyte.hh<|end_file_name|><|fim▁begin|>#ifndef DUNE_AX1_ELECTROLYTE_HH
#define DUNE_AX1_ELECTROLYTE_HH
#include <valarray>
#include <vector>
#include <dune/ax1/common/constants.hh>
// Ion: one ionic species, described by its valence (signed charge number),
// a human-readable name, and a relative concentration used in the
// stationary case.
// NOTE: the previous never-read, never-initialized private member
// `diffConst` was removed; diffusion constants are stored per species in
// Electrolyte::con_diffWater instead.
template<class T>
class Ion
{
  public:
    //! \param valence_ signed charge number of the species
    //! \param name_    species name (e.g. "Na", "Cl")
    //! \param relCon_  relative concentration for the stationary case (default 1.0)
    Ion (T valence_, std::string name_, T relCon_=1.0)
    : valence(valence_), relCon(relCon_), name(name_)
    {}

    T getValence () const
    {
      return valence;
    }

    T getRelCon () const
    {
      return relCon;
    }

    std::string getName() const
    {
      return name;
    }

  private:
    T valence;        // signed charge number
    T relCon;         // relative concentration for stationary case
    std::string name; // species name
};
// Solvent: the carrier medium, characterized solely by its (relative)
// permittivity.
template<class T>
class Solvent
{
  public:
    Solvent (T permittivity_)
    : permittivity(permittivity_)
    {}

    T getPermittivity () const
    {
      return permittivity;
    }

  private:
    T permittivity; // dielectric permittivity of the medium
};
// Electrolyte
template<class T>
class Electrolyte
{
public:
Electrolyte (const T permittivity_, const T temperature_, const T stdCon_, const T lengthScale)
: permittivity(permittivity_), temperature(temperature_), stdCon(stdCon_)
{
debyeLength = std::sqrt( 0.5 * con_eps0 * con_k * temperature / ( con_e * con_e * stdCon ) );
//lengthConstantSqr = con_eps0 * con_k * temperature / ( con_e * con_e * stdCon );
poissonConstant = con_e * con_e * stdCon * lengthScale * lengthScale / ( con_eps0 * con_k * temperature );
}
T getDebyeLength () const
{
return debyeLength;
}
T getPoissonConstant () const
{
return poissonConstant;
}
T getPermittivity () const
{
return permittivity;
}
void setPermittivity(T perm)
{
permittivity = perm;
}
T getTemperature () const
{
return temperature;
}
T getStdCon () const
{
return stdCon;
}
// add ion to electrolyte
void addIon (Ion<T> ion)
{
ions.push_back(ion);
con_diffWater.resize(ions.size());
}
// number of ion species
int numOfSpecies () const
{
return ions.size();
}
// right hand side for the Poisson Boltzmann equation
T rhsPoissonBoltzmann (const T phi) const
{
T sum = 0.0;
for (int i=0; i<ions.size(); ++i)
{
sum = sum + ions[i].getValence() * ions[i].getRelCon() * exp(-ions[i].getValence() * phi);
}
return - 0.5 * sum / ( debyeLength * debyeLength );
}
// concentration of ion species for stationary case
T getConcentration (const int& i, const T& phi) const
{
return stdCon * ions[i].getRelCon() * exp(-ions[i].getValence() * phi);
}
// get diffusion constant
T getDiffConst ( const unsigned int ionSpecies ) const
{
assert(ionSpecies <= con_diffWater.size());
return con_diffWater[ionSpecies];
}
void setDiffConst ( const unsigned int ionSpecies, T diffCoeff )
{
assert(ionSpecies <= con_diffWater.size());
con_diffWater[ionSpecies] = diffCoeff;<|fim▁hole|> // valence of ion species
T getValence ( const unsigned int ionSpecies ) const
{
return ions[ionSpecies].getValence();
}
// name of ion species
std::string getIonName ( const unsigned int ionSpecies ) const
{
return ions[ionSpecies].getName();
}
// charge density
void addToChargeDensity(std::valarray<T>& chargeDensity,
const std::valarray<T>& concentrations,
const unsigned int ionSpecies)
{
chargeDensity += ions[ionSpecies].getValence() * concentrations;
}
private:
T permittivity;
std::vector<Ion<T> > ions; // collection of ion species
std::vector<T> con_diffWater; // corresponding diff coeffs for ions
T temperature;
T stdCon; // scaling concentration
T debyeLength;
T poissonConstant;
};
#endif // DUNE_AX1_ELECTROLYTE_HH<|fim▁end|> | }
|
<|file_name|>End.js<|end_file_name|><|fim▁begin|>// module export
if (typeof define === "function" && define.amd) {
// AMD
define("bridge", [], function () { return Bridge; });
} else if (typeof module !== "undefined" && module.exports) {
// Node
module.exports = Bridge;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>template-expected.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
namespace not_blink {
void function(int x) {}
class Class {
public:
void method() {}
template <typename T>
void methodTemplate(T) {}
template <typename T>
static void staticMethodTemplate(T) {}
};
template <typename T>
void functionTemplate(T x) {}
} // not_blink
namespace blink {
template <typename T, int number>
void F() {
// We don't assert on this, and we don't end up considering it a const for
// now.
const int maybe_a_const = sizeof(T);
const int is_a_const = number;
}
template <int number, typename... T>
void F() {
// We don't assert on this, and we don't end up considering it a const for
// now.
const int maybe_a_const = sizeof...(T);
const int is_a_const = number;
}
namespace test_template_arg_is_function {
void F(int x) {}
template <typename T, void g(T)>
void H(T x) {
g(x);
}
void Test() {
// f should be rewritten.
H<int, F>(0);
// Non-Blink should stay the same.
H<int, not_blink::function>(1);
}
} // namespace test_template_arg_is_function
namespace test_template_arg_is_method {
class Class {
public:<|fim▁hole|>template <typename T, void (T::*g)()>
void H(T&& x) {
(x.*g)();
}
void Test() {
// method should be rewritten.
H<Class, &Class::Method>(Class());
// Non-Blink should stay the same.
H<not_blink::Class, ¬_blink::Class::method>(not_blink::Class());
}
} // namespace test_template_arg_is_method
namespace test_template_arg_is_function_template {
namespace nested {
template <typename T>
void F(T) {}
}
template <typename T, void g(T)>
void H(T x) {
g(x);
}
void Test() {
// f should be rewritten.
H<int, nested::F>(0);
// Non-Blink should stay the same.
H<int, not_blink::functionTemplate>(1);
}
} // namespace test_template_arg_is_function_template
namespace test_template_arg_is_method_template_in_non_member_context {
struct Class {
template <typename T>
static void F(T) {}
};
template <typename T, void g(T)>
void H(T x) {
g(x);
}
void Test() {
// f should be rewritten.
H<int, Class::F>(0);
// Non-Blink should stay the same.
H<int, not_blink::Class::staticMethodTemplate>(1);
}
} // test_template_arg_is_method_template_in_non_member_context
namespace test_template_arg_is_method_template_in_member_context {
struct Class {
template <typename T>
static void F(T) {}
};
struct Class2 {
template <typename T>
void F(T x) {
// f should be rewritten.
Class c;
c.F(x);
// Non-Blink should stay the same.
not_blink::Class c2;
c2.method(x);
}
};
} // namespace test_template_arg_is_method_template_in_member_context
} // namespace blink<|fim▁end|> | void Method() {}
};
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from . import tnt_config
from . import delivery
from . import stock<|fim▁hole|><|fim▁end|> | from . import carrier_file |
<|file_name|>contributor_orcid.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ContributorOrcid(object):
    """ORCID identifier record (uri/path/host) of a work contributor.

    NOTE: This class is auto generated by the swagger code generator program.
    FIXES over the extracted original: the ``path`` getter's docstring was
    missing its closing quotes (lost in extraction) and has been restored,
    and ``to_dict`` now uses ``dict.items()`` instead of ``six.iteritems``
    (equivalent on both Python 2 and 3, drops the third-party dependency
    for this class).
    """

    def __init__(self, uri=None, path=None, host=None):
        """
        ContributorOrcid - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # Maps attribute name -> declared Swagger type.
        self.swagger_types = {
            'uri': 'str',
            'path': 'str',
            'host': 'str'
        }

        # Maps attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'uri': 'uri',
            'path': 'path',
            'host': 'host'
        }

        self._uri = uri
        self._path = path
        self._host = host

    @property
    def uri(self):
        """
        Gets the uri of this ContributorOrcid.

        :return: The uri of this ContributorOrcid.
        :rtype: str
        """
        return self._uri

    @uri.setter
    def uri(self, uri):
        """
        Sets the uri of this ContributorOrcid.

        :param uri: The uri of this ContributorOrcid.
        :type: str
        """
        self._uri = uri

    @property
    def path(self):
        """
        Gets the path of this ContributorOrcid.

        :return: The path of this ContributorOrcid.
        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """
        Sets the path of this ContributorOrcid.

        :param path: The path of this ContributorOrcid.
        :type: str
        """
        self._path = path

    @property
    def host(self):
        """
        Gets the host of this ContributorOrcid.

        :return: The host of this ContributorOrcid.
        :rtype: str
        """
        return self._host

    @host.setter
    def host(self, host):
        """
        Sets the host of this ContributorOrcid.

        :param host: The host of this ContributorOrcid.
        :type: str
        """
        self._host = host

    def to_dict(self):
        """
        Returns the model properties as a dict.
        """
        result = {}

        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model.
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`.
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal.
        """
        if not isinstance(other, ContributorOrcid):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal.
        """
        return not self == other
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A cache that holds a limited number of key-value pairs. When the
//! capacity of the cache is exceeded, the least-recently-used
//! (where "used" means a look-up or putting the pair into the cache)
//! pair is automatically removed.
//!
//! # Examples
//!
//! ```
//! use lru_cache::LruCache;
//!
//! let mut cache = LruCache::new(2);
//!
//! cache.insert(1, 10);
//! cache.insert(2, 20);
//! cache.insert(3, 30);
//! assert!(cache.get_mut(&1).is_none());
//! assert_eq!(*cache.get_mut(&2).unwrap(), 20);
//! assert_eq!(*cache.get_mut(&3).unwrap(), 30);
//!
//! cache.insert(2, 22);
//! assert_eq!(*cache.get_mut(&2).unwrap(), 22);
//!
//! cache.insert(6, 60);
//! assert!(cache.get_mut(&3).is_none());
//!
//! cache.set_capacity(1);
//! assert!(cache.get_mut(&2).is_none());
//! ```
#![feature(hashmap_hasher)]
extern crate linked_hash_map;
use std::collections::hash_map::RandomState;
use std::collections::hash_state::HashState;
use std::fmt;
use std::hash::Hash;
use std::borrow::Borrow;
use linked_hash_map::LinkedHashMap;
// FIXME(conventions): implement indexing?
/// An LRU cache.
pub struct LruCache<K, V, S = RandomState> where K: Eq + Hash, S: HashState {
map: LinkedHashMap<K, V, S>,
max_size: usize,
}
impl<K: Hash + Eq, V> LruCache<K, V> {
    /// Creates an empty cache that can hold at most `capacity` items,
    /// using the default hash state.
    ///
    /// # Examples
    ///
    /// ```
    /// use lru_cache::LruCache;
    /// let mut cache: LruCache<i32, &str> = LruCache::new(10);
    /// ```
    pub fn new(capacity: usize) -> LruCache<K, V> {
        LruCache { map: LinkedHashMap::new(), max_size: capacity }
    }
}
impl<K, V, S> LruCache<K, V, S> where K: Eq + Hash, S: HashState {
    /// Creates an empty cache that can hold at most `capacity` items with the given hash state.
    /// Use this to supply a custom hasher; otherwise prefer [`LruCache::new`].
    pub fn with_hash_state(capacity: usize, hash_state: S) -> LruCache<K, V, S> {
        LruCache { map: LinkedHashMap::with_hash_state(hash_state), max_size: capacity }
    }
    /// Checks if the map contains the given key.
    ///
    /// Note: this is implemented via `get_mut`, so a successful check also
    /// refreshes the key's recency (it counts as a use).
    ///
    /// # Examples
    ///
    /// ```
    /// use lru_cache::LruCache;
    ///
    /// let mut cache = LruCache::new(1);
    ///
    /// cache.insert(1, "a");
    /// assert_eq!(cache.contains_key(&1), true);
    /// ```
    pub fn contains_key<Q: ?Sized>(&mut self, key: &Q) -> bool
        where K: Borrow<Q>,
              Q: Hash + Eq
    {
        self.get_mut(key).is_some()
    }
/// Inserts a key-value pair into the cache. If the key already existed, the old value is
/// returned.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
///
/// let mut cache = LruCache::new(2);
///
/// cache.insert(1, "a");
/// cache.insert(2, "b");
/// assert_eq!(cache.get_mut(&1), Some(&mut "a"));
/// assert_eq!(cache.get_mut(&2), Some(&mut "b"));
/// ```
pub fn insert(&mut self, k: K, v: V) -> Option<V> {
let old_val = self.map.insert(k, v);
if self.len() > self.capacity() {
self.remove_lru();
}
old_val
}
/// Returns a mutable reference to the value corresponding to the given key in the cache, if
/// any.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
///
/// let mut cache = LruCache::new(2);
///
/// cache.insert(1, "a");
/// cache.insert(2, "b");
/// cache.insert(2, "c");
/// cache.insert(3, "d");
///
/// assert_eq!(cache.get_mut(&1), None);
/// assert_eq!(cache.get_mut(&2), Some(&mut "c"));
/// ```
pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
where K: Borrow<Q>,
Q: Hash + Eq
{
self.map.get_refresh(k)
}
/// Removes the given key from the cache and returns its corresponding value.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
///
/// let mut cache = LruCache::new(2);
///
/// cache.insert(2, "a");
///
/// assert_eq!(cache.remove(&1), None);
/// assert_eq!(cache.remove(&2), Some("a"));
/// assert_eq!(cache.remove(&2), None);
/// assert_eq!(cache.len(), 0);
/// ```
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where K: Borrow<Q>,
Q: Hash + Eq
{
self.map.remove(k)
}
/// Returns the maximum number of key-value pairs the cache can hold.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
/// let mut cache: LruCache<i32, &str> = LruCache::new(2);
/// assert_eq!(cache.capacity(), 2);
/// ```
pub fn capacity(&self) -> usize {
self.max_size
}
/// Sets the number of key-value pairs the cache can hold. Removes
/// least-recently-used key-value pairs if necessary.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
///
/// let mut cache = LruCache::new(2);
///
/// cache.insert(1, "a");
/// cache.insert(2, "b");
/// cache.insert(3, "c");
///
/// assert_eq!(cache.get_mut(&1), None);
/// assert_eq!(cache.get_mut(&2), Some(&mut "b"));
/// assert_eq!(cache.get_mut(&3), Some(&mut "c"));
///
/// cache.set_capacity(3);
/// cache.insert(1, "a");
/// cache.insert(2, "b");
///
/// assert_eq!(cache.get_mut(&1), Some(&mut "a"));
/// assert_eq!(cache.get_mut(&2), Some(&mut "b"));
/// assert_eq!(cache.get_mut(&3), Some(&mut "c"));
///
/// cache.set_capacity(1);
///
/// assert_eq!(cache.get_mut(&1), None);
/// assert_eq!(cache.get_mut(&2), None);
/// assert_eq!(cache.get_mut(&3), Some(&mut "c"));
/// ```
pub fn set_capacity(&mut self, capacity: usize) {
for _ in capacity..self.len() {
self.remove_lru();
}
self.max_size = capacity;
}
#[inline]
fn remove_lru(&mut self) -> Option<(K, V)> {
self.map.pop_front()
}
/// Returns the number of key-value pairs in the cache.
pub fn len(&self) -> usize { self.map.len() }
/// Returns `true` if the cache contains no key-value pairs.
pub fn is_empty(&self) -> bool { self.map.is_empty() }
/// Removes all key-value pairs from the cache.
pub fn clear(&mut self) { self.map.clear(); }
/// Returns an iterator over the cache's key-value pairs in least- to most-recently-used order.
///
/// Accessing the cache through the iterator does _not_ affect the cache's LRU state.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
///
/// let mut cache = LruCache::new(2);
///
/// cache.insert(1, 10);
/// cache.insert(2, 20);
/// cache.insert(3, 30);
///
/// let kvs: Vec<_> = cache.iter().collect();
/// assert_eq!(kvs, [(&2, &20), (&3, &30)]);
/// ```
pub fn iter(&self) -> Iter<K, V> { Iter(self.map.iter()) }
/// Returns an iterator over the cache's key-value pairs in least- to most-recently-used order,
/// with mutable references to the values.
///
/// Accessing the cache through the iterator does _not_ affect the cache's LRU state.
///
/// # Examples
///
/// ```
/// use lru_cache::LruCache;
///
/// let mut cache = LruCache::new(2);
///
/// cache.insert(1, 10);
/// cache.insert(2, 20);
/// cache.insert(3, 30);
///
/// let mut n = 2;
///
/// for (k, v) in cache.iter_mut() {
/// assert_eq!(*k, n);
/// assert_eq!(*v, n * 10);
/// *v *= 10;
/// n += 1;
/// }
///
/// assert_eq!(n, 4);
/// assert_eq!(cache.get_mut(&2), Some(&mut 200));
/// assert_eq!(cache.get_mut(&3), Some(&mut 300));
/// ```
pub fn iter_mut(&mut self) -> IterMut<K, V> { IterMut(self.map.iter_mut()) }
}
impl<K: Hash + Eq, V, S: HashState> Extend<(K, V)> for LruCache<K, V, S> {
    /// Inserts every pair from `iter`; each insertion goes through
    /// `insert`, so LRU eviction is applied as the cache fills.
    fn extend<T: IntoIterator<Item=(K, V)>>(&mut self, iter: T) {
        for (key, value) in iter.into_iter() {
            self.insert(key, value);
        }
    }
}
impl<A: fmt::Debug + Hash + Eq, B: fmt::Debug, S: HashState> fmt::Debug for LruCache<A, B, S> {
    // Debug output lists entries most-recently-used first; `iter()` yields
    // least-recent first, hence the `rev()`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter().rev()).finish()
    }
}
// Enables `for (k, v) in &cache` without touching LRU state.
impl<'a, K, V, S> IntoIterator for &'a LruCache<K, V, S> where K: Eq + Hash, S: HashState {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    fn into_iter(self) -> Iter<'a, K, V> { self.iter() }
}
// Enables `for (k, v) in &mut cache`, yielding mutable value references.
impl<'a, K, V, S> IntoIterator for &'a mut LruCache<K, V, S> where K: Eq + Hash, S: HashState {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;
    fn into_iter(self) -> IterMut<'a, K, V> { self.iter_mut() }
}
impl<K, V> Clone for LruCache<K, V> where K: Clone + Eq + Hash, V: Clone {
    /// Deep-copies the cache; capacity is carried over unchanged.
    fn clone(&self) -> LruCache<K, V> {
        LruCache {
            map: self.map.clone(),
            max_size: self.max_size,
        }
    }
}
/// An iterator over a cache's key-value pairs in least- to most-recently-used order.
///
/// Accessing a cache through the iterator does _not_ affect the cache's LRU state.
pub struct Iter<'a, K: 'a, V: 'a>(linked_hash_map::Iter<'a, K, V>);
impl<'a, K, V> Clone for Iter<'a, K, V> {
    // Manual impl: a derived Clone would needlessly require K: Clone + V: Clone.
    fn clone(&self) -> Iter<'a, K, V> { Iter(self.0.clone()) }
}
// All iterator traits simply delegate to the underlying linked map iterator.
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<(&'a K, &'a V)> { self.0.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a V)> { self.0.next_back() }
}
impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {
    fn len(&self) -> usize { self.0.len() }
}
/// An iterator over a cache's key-value pairs in least- to most-recently-used order with mutable
/// references to the values.
///
/// Accessing a cache through the iterator does _not_ affect the cache's LRU state.
pub struct IterMut<'a, K: 'a, V: 'a>(linked_hash_map::IterMut<'a, K, V>);
// No Clone impl here: mutable iterators cannot be duplicated soundly.
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.0.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { self.0.next_back() }
}
impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {
    fn len(&self) -> usize { self.0.len() }
}
#[cfg(test)]
mod tests {
    use super::LruCache;
    #[test]
    fn test_put_and_get() {
        let mut cache = LruCache::new(2);
        cache.insert(1, 10);
        cache.insert(2, 20);
        assert_eq!(cache.get_mut(&1), Some(&mut 10));
        assert_eq!(cache.get_mut(&2), Some(&mut 20));
        assert_eq!(cache.len(), 2);
    }
    #[test]
    fn test_put_update() {
        // Re-inserting an existing key replaces the value without growing.
        let mut cache = LruCache::new(1);
        cache.insert("1", 10);
        cache.insert("1", 19);
        assert_eq!(cache.get_mut("1"), Some(&mut 19));
        assert_eq!(cache.len(), 1);
    }
    #[test]
    fn test_contains_key() {
        let mut cache = LruCache::new(1);
        cache.insert("1", 10);
        assert_eq!(cache.contains_key("1"), true);
    }
    #[test]
    fn test_expire_lru() {
        // Inserting past capacity evicts the least-recently-used entry.
        let mut cache = LruCache::new(2);
        cache.insert("foo1", "bar1");
        cache.insert("foo2", "bar2");
        cache.insert("foo3", "bar3");
        assert!(cache.get_mut("foo1").is_none());
        cache.insert("foo2", "bar2update");
        cache.insert("foo4", "bar4");
        assert!(cache.get_mut("foo3").is_none());
    }
    #[test]
    fn test_pop() {
        let mut cache = LruCache::new(2);
        cache.insert(1, 10);
        cache.insert(2, 20);
        assert_eq!(cache.len(), 2);
        let opt1 = cache.remove(&1);
        assert!(opt1.is_some());
        assert_eq!(opt1.unwrap(), 10);
        assert!(cache.get_mut(&1).is_none());
        assert_eq!(cache.len(), 1);
    }
    #[test]
    fn test_change_capacity() {
        // Shrinking capacity evicts LRU entries immediately.
        let mut cache = LruCache::new(2);
        assert_eq!(cache.capacity(), 2);
        cache.insert(1, 10);
        cache.insert(2, 20);
        cache.set_capacity(1);
        assert!(cache.get_mut(&1).is_none());
        assert_eq!(cache.capacity(), 1);
    }
    #[test]
    fn test_debug() {
        // Debug output is most-recently-used first.
        let mut cache = LruCache::new(3);
        cache.insert(1, 10);
        cache.insert(2, 20);
        cache.insert(3, 30);
        assert_eq!(format!("{:?}", cache), "{3: 30, 2: 20, 1: 10}");
        cache.insert(2, 22);
        assert_eq!(format!("{:?}", cache), "{2: 22, 3: 30, 1: 10}");
        cache.insert(6, 60);
        assert_eq!(format!("{:?}", cache), "{6: 60, 2: 22, 3: 30}");
        cache.get_mut(&3);
        assert_eq!(format!("{:?}", cache), "{3: 30, 6: 60, 2: 22}");
        cache.set_capacity(2);
        assert_eq!(format!("{:?}", cache), "{3: 30, 6: 60}");
    }
    #[test]
    fn test_remove() {
        let mut cache = LruCache::new(3);
        cache.insert(1, 10);
        cache.insert(2, 20);
        cache.insert(3, 30);
        cache.insert(4, 40);
        cache.insert(5, 50);
        cache.remove(&3);
        cache.remove(&4);
        assert!(cache.get_mut(&3).is_none());
        assert!(cache.get_mut(&4).is_none());
        cache.insert(6, 60);
        cache.insert(7, 70);
        cache.insert(8, 80);
        assert!(cache.get_mut(&5).is_none());
        assert_eq!(cache.get_mut(&6), Some(&mut 60));
        assert_eq!(cache.get_mut(&7), Some(&mut 70));
        assert_eq!(cache.get_mut(&8), Some(&mut 80));
    }
    #[test]
    fn test_clear() {
        let mut cache = LruCache::new(2);
        cache.insert(1, 10);
        cache.insert(2, 20);
        cache.clear();
        assert!(cache.get_mut(&1).is_none());
        assert!(cache.get_mut(&2).is_none());
        assert_eq!(format!("{:?}", cache), "{}");
    }
    #[test]
    fn test_iter() {
        // Iteration order is least- to most-recently-used.
        let mut cache = LruCache::new(3);
        cache.insert(1, 10);
        cache.insert(2, 20);
        cache.insert(3, 30);
        cache.insert(4, 40);
        cache.insert(5, 50);
        assert_eq!(cache.iter().collect::<Vec<_>>(),
                   [(&3, &30), (&4, &40), (&5, &50)]);
        assert_eq!(cache.iter_mut().collect::<Vec<_>>(),
                   [(&3, &mut 30), (&4, &mut 40), (&5, &mut 50)]);
        assert_eq!(cache.iter().rev().collect::<Vec<_>>(),
                   [(&5, &50), (&4, &40), (&3, &30)]);
        assert_eq!(cache.iter_mut().rev().collect::<Vec<_>>(),
                   [(&5, &mut 50), (&4, &mut 40), (&3, &mut 30)]);
    }
}
#[test]
fn test_clear() {
let mut cache = LruCache::new(2); |
<|file_name|>timer-spec.ts<|end_file_name|><|fim▁begin|>import * as Rx from '../../dist/cjs/Rx';
import marbleTestingSignature = require('../helpers/marble-testing'); // tslint:disable-line:no-require-imports
declare const { asDiagram, time };
declare const expectObservable: typeof marbleTestingSignature.expectObservable;
declare const rxTestScheduler: Rx.TestScheduler;
const Observable = Rx.Observable;
/** @test {timer} */
describe('Observable.timer', () => {
asDiagram('timer(3000, 1000)')('should create an observable emitting periodically', () => {
const e1 = Observable.timer(60, 20, rxTestScheduler)
.take(4) // make it actually finite, so it can be rendered
.concat(Observable.never()); // but pretend it's infinite by not completing
const expected = '------a-b-c-d-';
const values = {
a: 0,
b: 1,
c: 2,
d: 3,
};
expectObservable(e1).toBe(expected, values);
});
it('should schedule a value of 0 then complete', () => {
const dueTime = time('-----|');
const expected = '-----(x|)';
<|fim▁hole|>
it('should emit a single value immediately', () => {
const dueTime = time('|');
const expected = '(x|)';
const source = Observable.timer(dueTime, rxTestScheduler);
expectObservable(source).toBe(expected, {x: 0});
});
it('should start after delay and periodically emit values', () => {
const dueTime = time('----|');
const period = time( '--|');
const expected = '----a-b-c-d-(e|)';
const source = Observable.timer(dueTime, period, rxTestScheduler).take(5);
const values = { a: 0, b: 1, c: 2, d: 3, e: 4};
expectObservable(source).toBe(expected, values);
});
it('should start immediately and periodically emit values', () => {
const dueTime = time('|');
const period = time('---|');
const expected = 'a--b--c--d--(e|)';
const source = Observable.timer(dueTime, period, rxTestScheduler).take(5);
const values = { a: 0, b: 1, c: 2, d: 3, e: 4};
expectObservable(source).toBe(expected, values);
});
it('should stop emiting values when subscription is done', () => {
const dueTime = time('|');
const period = time('---|');
const expected = 'a--b--c--d--e';
const unsub = '^ !';
const source = Observable.timer(dueTime, period, rxTestScheduler);
const values = { a: 0, b: 1, c: 2, d: 3, e: 4};
expectObservable(source, unsub).toBe(expected, values);
});
it('should schedule a value at a specified Date', () => {
const offset = time('----|');
const expected = '----(a|)';
const dueTime = new Date(rxTestScheduler.now() + offset);
const source = Observable.timer(dueTime, null, rxTestScheduler);
expectObservable(source).toBe(expected, {a: 0});
});
it('should start after delay and periodically emit values', () => {
const offset = time('----|');
const period = time( '--|');
const expected = '----a-b-c-d-(e|)';
const dueTime = new Date(rxTestScheduler.now() + offset);
const source = Observable.timer(dueTime, period, rxTestScheduler).take(5);
const values = { a: 0, b: 1, c: 2, d: 3, e: 4};
expectObservable(source).toBe(expected, values);
});
});<|fim▁end|> | const source = Observable.timer(dueTime, undefined, rxTestScheduler);
expectObservable(source).toBe(expected, {x: 0});
}); |
<|file_name|>script_info_dummy.cpp<|end_file_name|><|fim▁begin|>/* $Id$ */
/*
* This file is part of OpenTTD.
* OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
* OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
*/
/** @file script_info_dummy.cpp Implementation of a dummy Script. */
#include "../stdafx.h"
#include <squirrel.h>
#include "../string_func.h"
#include "../strings_func.h"
#include "../safeguards.h"
/* The reason this exists in C++, is that a user can trash his ai/ or game/ dir,
* leaving no Scripts available. The complexity to solve this is insane, and
* therefore the alternative is used, and make sure there is always a Script<|fim▁hole|> * available, no matter what the situation is. By defining it in C++, there
* is simply no way a user can delete it, and therefore safe to use. It has
* to be noted that this Script is complete invisible for the user, and impossible
* to select manual. It is a fail-over in case no Scripts are available.
*/
/**
 * Run the dummy info.nut.
 * Builds a minimal <type>Info registration script in memory and executes it
 * in the given Squirrel VM, so a "Dummy" script is always registered even
 * when the user's script directory is empty.
 * @param vm   Squirrel VM to compile and run the generated script in.
 * @param type Script API type name, e.g. "AI" or "GS".
 * @param dir  Directory name mentioned in the generated description text.
 */
void Script_CreateDummyInfo(HSQUIRRELVM vm, const char *type, const char *dir)
{
	/* Assemble the Squirrel source line by line into a fixed buffer. */
	char dummy_script[4096];
	char *dp = dummy_script;
	dp += seprintf(dp, lastof(dummy_script), "class Dummy%s extends %sInfo {\n", type, type);
	dp += seprintf(dp, lastof(dummy_script), "function GetAuthor() { return \"OpenTTD Developers Team\"; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function GetName() { return \"Dummy%s\"; }\n", type);
	dp += seprintf(dp, lastof(dummy_script), "function GetShortName() { return \"DUMM\"; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function GetDescription() { return \"A Dummy %s that is loaded when your %s/ dir is empty\"; }\n", type, dir);
	dp += seprintf(dp, lastof(dummy_script), "function GetVersion() { return 1; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function GetDate() { return \"2008-07-26\"; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function CreateInstance() { return \"Dummy%s\"; }\n", type);
	dp += seprintf(dp, lastof(dummy_script), "} RegisterDummy%s(Dummy%s());\n", type, type);
	const SQChar *sq_dummy_script = dummy_script;
	sq_pushroottable(vm);
	/* Load and run the script */
	if (SQ_SUCCEEDED(sq_compilebuffer(vm, sq_dummy_script, strlen(sq_dummy_script), "dummy", SQTrue))) {
		sq_push(vm, -2);
		if (SQ_SUCCEEDED(sq_call(vm, 1, SQFalse, SQTrue))) {
			sq_pop(vm, 1);
			return;
		}
	}
	/* The generated script is fixed text, so failure means a programming error. */
	NOT_REACHED();
}
/**
 * Run the dummy AI and let it generate an error message.
 * Generates and runs a <type>Controller whose Start() logs the translated
 * error text line by line.
 * @param vm     Squirrel VM to compile and run the generated script in.
 * @param string Translatable string id with the error message to show.
 * @param type   Script API type name, e.g. "AI" or "GS".
 */
void Script_CreateDummy(HSQUIRRELVM vm, StringID string, const char *type)
{
	/* We want to translate the error message.
	 * We do this in three steps:
	 * 1) We get the error message
	 */
	char error_message[1024];
	GetString(error_message, string, lastof(error_message));

	/* Make escapes for all quotes and slashes, so the text can be embedded
	 * inside a Squirrel string literal. */
	char safe_error_message[1024];
	char *q = safe_error_message;
	for (const char *p = error_message; *p != '\0' && q < lastof(safe_error_message) - 2; p++, q++) {
		if (*p == '"' || *p == '\\') *q++ = '\\';
		*q = *p;
	}
	*q = '\0';

	/* 2) We construct the AI's code. This is done by merging a header, body and footer */
	char dummy_script[4096];
	char *dp = dummy_script;
	dp += seprintf(dp, lastof(dummy_script), "class Dummy%s extends %sController {\n  function Start()\n  {\n", type, type);

	/* As special trick we need to split the error message on newlines and
	 * emit each newline as a separate error printing string.
	 * Fix: the old loop computed `newline + 1` even when `newline` was NULL
	 * (on the last line), which is undefined behaviour (arithmetic on a null
	 * pointer). Advance the cursor only while another line follows. */
	char *p = safe_error_message;
	while (p != NULL) {
		char *newline = strchr(p, '\n');
		if (newline != NULL) *newline = '\0';
		dp += seprintf(dp, lastof(dummy_script), "    %sLog.Error(\"%s\");\n", type, p);
		p = (newline != NULL) ? newline + 1 : NULL;
	}
	dp = strecpy(dp, "  }\n}\n", lastof(dummy_script));

	/* 3) We translate the error message in the character format that Squirrel wants.
	 * We can use the fact that the wchar string printing also uses %s to print
	 * old style char strings, which is what was generated during the script generation. */
	const SQChar *sq_dummy_script = dummy_script;

	/* And finally we load and run the script */
	sq_pushroottable(vm);
	if (SQ_SUCCEEDED(sq_compilebuffer(vm, sq_dummy_script, strlen(sq_dummy_script), "dummy", SQTrue))) {
		sq_push(vm, -2);
		if (SQ_SUCCEEDED(sq_call(vm, 1, SQFalse, SQTrue))) {
			sq_pop(vm, 1);
			return;
		}
	}
	/* The generated script is built from escaped, bounded text, so failure
	 * indicates a programming error rather than bad user input. */
	NOT_REACHED();
}
<|file_name|>test_nrfjprog.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 Foundries.io
#
# SPDX-License-Identifier: Apache-2.0
import argparse
from unittest.mock import patch, call
import pytest
from runners.nrfjprog import NrfJprogBinaryRunner
from conftest import RC_KERNEL_HEX
#
# Test values
#
TEST_DEF_SNR = 'test-default-serial-number' # for mocking user input
TEST_OVR_SNR = 'test-override-serial-number'
#
# Expected results.
#
# This dictionary maps different configurations to the commands we expect to be
# executed for them. Verification is done by mocking the check_call() method,
# which is used to run the commands.
#
# The key naming scheme is <F><SR><SN><E>, where:
#
# - F: family, 1 for 'NRF51' or 2 for 'NRF52'
# - SR: soft reset, Y for yes, N for pin reset
# - SNR: serial number override, Y for yes, N for 'use default'
# - E: full chip erase, Y for yes, N for sector / sector and UICR only
#
EXPECTED_COMMANDS = {
# NRF51:
'1NNN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_DEF_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
'1NNY':
(['nrfjprog', '--eraseall', '-f', 'NRF51', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_DEF_SNR], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
'1NYN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_OVR_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_OVR_SNR]),
'1NYY':
(['nrfjprog', '--eraseall', '-f', 'NRF51', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_OVR_SNR], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_OVR_SNR]),
'1YNN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_DEF_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
'1YNY':
(['nrfjprog', '--eraseall', '-f', 'NRF51', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_DEF_SNR], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
'1YYN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_OVR_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_OVR_SNR]),
'1YYY':
(['nrfjprog', '--eraseall', '-f', 'NRF51', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF51', '--snr', TEST_OVR_SNR], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_OVR_SNR]),
# NRF52:
'2NNN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_DEF_SNR, '--sectoranduicrerase'], # noqa: E501
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
'2NNY':
(['nrfjprog', '--eraseall', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_DEF_SNR], # noqa: E501<|fim▁hole|> ['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
'2NYN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_OVR_SNR, '--sectoranduicrerase'], # noqa: E501
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_OVR_SNR]),
'2NYY':
(['nrfjprog', '--eraseall', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_OVR_SNR], # noqa: E501
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_OVR_SNR]),
'2YNN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_DEF_SNR, '--sectoranduicrerase'], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
'2YNY':
(['nrfjprog', '--eraseall', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_DEF_SNR], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
'2YYN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_OVR_SNR, '--sectoranduicrerase'], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_OVR_SNR]),
'2YYY':
(['nrfjprog', '--eraseall', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF52', '--snr', TEST_OVR_SNR], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_OVR_SNR]),
# NRF91:
'9NNN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_DEF_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
'9NNY':
(['nrfjprog', '--eraseall', '-f', 'NRF91', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_DEF_SNR], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
'9NYN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_OVR_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_OVR_SNR]),
'9NYY':
(['nrfjprog', '--eraseall', '-f', 'NRF91', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_OVR_SNR], # noqa: E501
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_OVR_SNR]),
'9YNN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_DEF_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
'9YNY':
(['nrfjprog', '--eraseall', '-f', 'NRF91', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_DEF_SNR], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
'9YYN':
(['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_OVR_SNR, '--sectorerase'], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_OVR_SNR]),
'9YYY':
(['nrfjprog', '--eraseall', '-f', 'NRF91', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '-f', 'NRF91', '--snr', TEST_OVR_SNR], # noqa: E501
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_OVR_SNR]),
}
def expected_commands(family, softreset, snr, erase):
    '''Return the expected nrfjprog command sequence for the given parameters.

    (Docstring fix: this is a plain lookup into EXPECTED_COMMANDS, not a
    factory returning a function as previously stated.)

    Arguments:
    - family: string, 'NRF51', 'NRF52' or 'NRF91'
    - softreset: boolean, controls whether soft reset is performed
    - snr: string serial number of board, or None
    - erase: boolean, whether to do a full chip erase or not
    '''
    # Build the 4-character key described at the top of EXPECTED_COMMANDS.
    expected_key = '{}{}{}{}'.format(
        '1' if family == 'NRF51' else '2' if family == 'NRF52' else '9',
        'Y' if softreset else 'N',
        'Y' if snr else 'N',
        'Y' if erase else 'N')

    return EXPECTED_COMMANDS[expected_key]
#
# Test cases
#
TEST_CASES = [(f, sr, snr, e)
for f in ('NRF51', 'NRF52', 'NRF91')
for sr in (False, True)
for snr in (TEST_OVR_SNR, None)
for e in (False, True)]
def get_board_snr_patch():
    """Stand-in for interactive serial-number input; returns the default SNR."""
    return TEST_DEF_SNR
def require_patch(program):
    """Stand-in for ZephyrBinaryRunner.require; checks the right tool is requested."""
    assert program == 'nrfjprog'
def id_fn(test_case):
    """Build a readable pytest ID from a (family, softreset, snr, erase) tuple.

    Families map to the same digit used in the EXPECTED_COMMANDS keys
    ('1' for NRF51, '2' for NRF52, '9' for NRF91); the remaining elements
    map to 'Y'/'N' by truthiness.
    """
    family_ids = {'NRF51': '1', 'NRF52': '2', 'NRF91': '9'}
    ret = ''
    for x in test_case:
        if x in family_ids:
            # Bug fix: 'NRF91' previously fell through to the truthiness
            # branch and produced 'Y' instead of a family digit.
            ret += family_ids[x]
        else:
            ret += 'Y' if x else 'N'
    return ret
@pytest.mark.parametrize('test_case', TEST_CASES, ids=id_fn)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.nrfjprog.NrfJprogBinaryRunner.get_board_snr_from_user',
       side_effect=get_board_snr_patch)
@patch('runners.nrfjprog.NrfJprogBinaryRunner.check_call')
def test_nrfjprog_init(cc, get_snr, req, test_case, runner_config):
    """Flashing with a directly constructed runner issues the expected commands."""
    family, softreset, snr, erase = test_case
    runner = NrfJprogBinaryRunner(runner_config, family, softreset, snr,
                                  erase=erase)
    runner.run('flash')
    assert req.called
    # check_call must receive exactly the command sequence for this
    # parameter combination, in order.
    assert cc.call_args_list == [call(x) for x in
                                 expected_commands(*test_case)]
    # The user is prompted for a serial number only when none was supplied.
    if snr is None:
        get_snr.assert_called_once_with()
    else:
        get_snr.assert_not_called()
@pytest.mark.parametrize('test_case', TEST_CASES, ids=id_fn)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.nrfjprog.NrfJprogBinaryRunner.get_board_snr_from_user',
       side_effect=get_board_snr_patch)
@patch('runners.nrfjprog.NrfJprogBinaryRunner.check_call')
def test_nrfjprog_create(cc, get_snr, req, test_case, runner_config):
    """Same as test_nrfjprog_init, but going through CLI argument parsing."""
    family, softreset, snr, erase = test_case

    # Translate the test-case tuple into the equivalent command line.
    args = ['--nrf-family', family]
    if softreset:
        args.append('--softreset')
    if snr is not None:
        args.extend(['--snr', snr])
    if erase:
        args.append('--erase')

    parser = argparse.ArgumentParser()
    NrfJprogBinaryRunner.add_parser(parser)
    arg_namespace = parser.parse_args(args)
    runner = NrfJprogBinaryRunner.create(runner_config, arg_namespace)
    runner.run('flash')

    assert req.called
    assert cc.call_args_list == [call(x) for x in
                                 expected_commands(*test_case)]
    if snr is None:
        get_snr.assert_called_once_with()
    else:
        get_snr.assert_not_called()
<|file_name|>newlower.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
import ast
import inspect
class NameLower(ast.NodeVisitor):
    """AST visitor that prepends, to each visited function definition,
    assignments copying selected global names into local variables."""

    def __init__(self, lowered_names):
        self.lowered_names = lowered_names

    def visit_FunctionDef(self, node):
        # Build the prologue: capture globals() once, then alias each
        # requested name into the local scope.
        lines = ['__globals = globals()']
        lines.extend("{0} = __globals['{0}']".format(name)
                     for name in self.lowered_names)
        prologue = ast.parse('\n'.join(lines) + '\n', mode='exec')
        # Splice the prologue statements before the original body.
        node.body[:0] = prologue.body
        # Remember the last function we rewrote.
        self.func = node
def lower_names(*namelist):
    """Decorator factory that rewrites the decorated function so the given
    global names are copied into local variables at function entry.

    Works by re-parsing the function's source (without the decorator line),
    running NameLower over the AST, recompiling, and swapping in the new
    code object. The function's identity and signature are unchanged.
    """
    def lower(func):
        srclines = inspect.getsource(func).splitlines()
        # Skip everything up to and including the @lower_names decorator
        # line so we re-parse only the plain function definition.
        for n, line in enumerate(srclines):
            if '@lower_names' in line:
                break
        src = '\n'.join(srclines[n + 1:])
        # If the source is indented (e.g. a method), wrap it so that it
        # parses as top-level code.
        # Bug fix: str.startswith takes a tuple of prefixes; the original
        # call passed '\t' as the integer start-index argument, which
        # raised TypeError unconditionally.
        if src.startswith((' ', '\t')):
            src = 'if 1:\n' + src
        top = ast.parse(src, mode='exec')
        cl = NameLower(namelist)
        cl.visit(top)

        # Compile the rewritten AST and steal the new code object.
        temp = {}
        exec(compile(top, '', 'exec'), temp, temp)
        func.__code__ = temp[func.__name__].__code__
        return func
    return lower
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for WeMo switches."""
import asyncio
import logging
from datetime import datetime, timedelta
import requests
import async_timeout
from homeassistant.components.switch import SwitchDevice
from homeassistant.exceptions import PlatformNotReady
from homeassistant.util import convert
from homeassistant.const import (
STATE_OFF, STATE_ON, STATE_STANDBY, STATE_UNKNOWN)
from . import SUBSCRIPTION_REGISTRY
SCAN_INTERVAL = timedelta(seconds=10)
_LOGGER = logging.getLogger(__name__)
ATTR_SENSOR_STATE = 'sensor_state'
ATTR_SWITCH_MODE = 'switch_mode'
ATTR_CURRENT_STATE_DETAIL = 'state_detail'
ATTR_COFFEMAKER_MODE = 'coffeemaker_mode'
MAKER_SWITCH_MOMENTARY = 'momentary'
MAKER_SWITCH_TOGGLE = 'toggle'
WEMO_ON = 1
WEMO_OFF = 0
WEMO_STANDBY = 8
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up discovered WeMo switches."""
    # Imported lazily so the dependency is only loaded when the platform
    # is actually set up.
    from pywemo import discovery

    if discovery_info is not None:
        location = discovery_info['ssdp_description']
        mac = discovery_info['mac_address']

        try:
            device = discovery.device_from_description(location, mac)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout) as err:
            # Device unreachable right now; ask Home Assistant to retry
            # platform setup later instead of failing permanently.
            _LOGGER.error("Unable to access %s (%s)", location, err)
            raise PlatformNotReady

        if device:
            add_entities([WemoSwitch(device)])
class WemoSwitch(SwitchDevice):
"""Representation of a WeMo switch."""
def __init__(self, device):
"""Initialize the WeMo switch."""
self.wemo = device<|fim▁hole|> self._state = None
self._mode_string = None
self._available = True
self._update_lock = None
self._model_name = self.wemo.model_name
self._name = self.wemo.name
self._serialnumber = self.wemo.serialnumber
    def _subscription_callback(self, _device, _type, _params):
        """Update the state by the Wemo device."""
        _LOGGER.info("Subscription update for %s", self.name)
        updated = self.wemo.subscription_update(_type, _params)
        # Hand off to the event loop; request a full poll (force_update)
        # if the push payload could not be applied directly.
        self.hass.add_job(
            self._async_locked_subscription_callback(not updated))

    async def _async_locked_subscription_callback(self, force_update):
        """Handle an update from a subscription."""
        # If an update is in progress, we don't do anything
        if self._update_lock.locked():
            return

        await self._async_locked_update(force_update)
        self.async_schedule_update_ha_state()
    @property
    def unique_id(self):
        """Return the ID of this WeMo switch."""
        # The device serial number serves as the stable unique ID.
        return self._serialnumber

    @property
    def name(self):
        """Return the name of the switch if any."""
        return self._name
    @property
    def device_state_attributes(self):
        """Return the state attributes of the device.

        Only the attribute groups for which device data is present
        (maker_params, insight_params, coffeemaker_mode) are included.
        """
        attr = {}
        if self.maker_params:
            # Is the maker sensor on or off.
            if self.maker_params['hassensor']:
                # Note a state of 1 matches the WeMo app 'not triggered'!
                if self.maker_params['sensorstate']:
                    attr[ATTR_SENSOR_STATE] = STATE_OFF
                else:
                    attr[ATTR_SENSOR_STATE] = STATE_ON

            # Is the maker switch configured as toggle(0) or momentary (1).
            if self.maker_params['switchmode']:
                attr[ATTR_SWITCH_MODE] = MAKER_SWITCH_MOMENTARY
            else:
                attr[ATTR_SWITCH_MODE] = MAKER_SWITCH_TOGGLE

        if self.insight_params or (self.coffeemaker_mode is not None):
            attr[ATTR_CURRENT_STATE_DETAIL] = self.detail_state

        if self.insight_params:
            # On-time counters are passed to as_uptime as seconds.
            attr['on_latest_time'] = \
                WemoSwitch.as_uptime(self.insight_params['onfor'])
            attr['on_today_time'] = \
                WemoSwitch.as_uptime(self.insight_params['ontoday'])
            attr['on_total_time'] = \
                WemoSwitch.as_uptime(self.insight_params['ontotal'])
            # Divided by 1000, so the raw value appears to be milliwatts
            # -- TODO confirm against pywemo.
            attr['power_threshold_w'] = \
                convert(
                    self.insight_params['powerthreshold'], float, 0.0
                ) / 1000.0

        if self.coffeemaker_mode is not None:
            attr[ATTR_COFFEMAKER_MODE] = self.coffeemaker_mode

        return attr
@staticmethod
def as_uptime(_seconds):
"""Format seconds into uptime string in the format: 00d 00h 00m 00s."""
uptime = datetime(1, 1, 1) + timedelta(seconds=_seconds)
return "{:0>2d}d {:0>2d}h {:0>2d}m {:0>2d}s".format(
uptime.day-1, uptime.hour, uptime.minute, uptime.second)
@property
def current_power_w(self):
    """Return the current power usage in W.

    Implicitly returns None when no Insight parameters are available.
    """
    if self.insight_params:
        # 'currentpower' is reported in mW; convert to watts.
        return convert(
            self.insight_params['currentpower'], float, 0.0
        ) / 1000.0
@property
def today_energy_kwh(self):
    """Return the today total energy usage in kWh."""
    if not self.insight_params:
        return None
    # 'todaymw' appears to be milliwatt-minutes; the divisor converts
    # to kWh (mW -> kW is 10^6, minutes -> hours is 60).
    milliwatts = convert(self.insight_params['todaymw'], float, 0.0)
    return round(milliwatts / (1000.0 * 1000.0 * 60), 2)
@property
def detail_state(self):
    """Return the state of the device."""
    # CoffeeMaker exposes a textual mode string instead of on/off.
    if self.coffeemaker_mode is not None:
        return self._mode_string
    if self.insight_params:
        standby_state = int(self.insight_params['state'])
        state_names = {
            WEMO_ON: STATE_ON,
            WEMO_OFF: STATE_OFF,
            WEMO_STANDBY: STATE_STANDBY,
        }
        return state_names.get(standby_state, STATE_UNKNOWN)
@property
def is_on(self):
    """Return true if switch is on. Standby is on."""
    return self._state
@property
def available(self):
    """Return true if switch is available."""
    # Cleared by _update()/async_update() when the device stops responding.
    return self._available
@property
def icon(self):
    """Return the icon of device based on its type."""
    # Only the CoffeeMaker model gets a custom icon; every other model
    # falls back to the default (None).
    if self._model_name != 'CoffeeMaker':
        return None
    return 'mdi:coffee'
def turn_on(self, **kwargs):
    """Turn the switch on."""
    # Delegate directly to the pywemo device object.
    self.wemo.on()
def turn_off(self, **kwargs):
    """Turn the switch off."""
    # Delegate directly to the pywemo device object.
    self.wemo.off()
async def async_added_to_hass(self):
    """Wemo switch added to HASS."""
    # Define inside async context so we know our event loop
    self._update_lock = asyncio.Lock()

    registry = SUBSCRIPTION_REGISTRY
    # Registration is offloaded via hass.async_add_job so it does not
    # run on the event loop directly.
    await self.hass.async_add_job(registry.register, self.wemo)
    registry.on(self.wemo, None, self._subscription_callback)
async def async_update(self):
    """Update WeMo state.

    Wemo has an aggressive retry logic that sometimes can take over a
    minute to return. If we don't get a state after 5 seconds, assume the
    Wemo switch is unreachable. If update goes through, it will be made
    available again.
    """
    # If an update is in progress, we don't do anything
    if self._update_lock.locked():
        return

    try:
        with async_timeout.timeout(5):
            # shield() lets the underlying update keep running even if
            # this wrapper times out, so a slow-but-successful poll still
            # refreshes the cached state for the next cycle.
            await asyncio.shield(self._async_locked_update(True))
    except asyncio.TimeoutError:
        _LOGGER.warning('Lost connection to %s', self.name)
        self._available = False
async def _async_locked_update(self, force_update):
    """Try updating within an async lock."""
    async with self._update_lock:
        # _update is a plain synchronous method; hand it to
        # hass.async_add_job rather than calling it on the event loop.
        await self.hass.async_add_job(self._update, force_update)
def _update(self, force_update):
    """Update the device state.

    Polls the pywemo device and caches model-specific extras
    (Insight / Maker / CoffeeMaker). On failure the entity is marked
    unavailable and all cached parameters are cleared.
    """
    try:
        self._state = self.wemo.get_state(force_update)
        if self._model_name == 'Insight':
            self.insight_params = self.wemo.insight_params
            # NOTE(review): get_standby_state is not called here — if it
            # is a method on the pywemo object this stores the bound
            # method rather than a value; confirm it is a property.
            self.insight_params['standby_state'] = (
                self.wemo.get_standby_state)
        elif self._model_name == 'Maker':
            self.maker_params = self.wemo.maker_params
        elif self._model_name == 'CoffeeMaker':
            self.coffeemaker_mode = self.wemo.mode
            self._mode_string = self.wemo.mode_string

        if not self._available:
            _LOGGER.info('Reconnected to %s', self.name)
            self._available = True
    except AttributeError as err:
        _LOGGER.warning("Could not update status for %s (%s)",
                        self.name, err)
        self._available = False
        # Clear cached parameters so stale data is not reported while
        # the device is unreachable.
        self.insight_params = None
        self.maker_params = None
        self.coffeemaker_mode = None
<|file_name|>gallery-footer.component.ts<|end_file_name|><|fim▁begin|>import {Component, forwardRef, Inject, Input, Optional} from '@angular/core';
import {GalleryComponent} from './gallery.component';
@Component({
    selector: 'vcl-gallery-footer',
    templateUrl: 'gallery-footer.component.html',
})
export class GalleryFooterComponent {
    // Gallery this footer controls; may be bound explicitly via [target]
    // or defaulted to the enclosing gallery obtained through DI.
    @Input()
    target: GalleryComponent;

    constructor(@Optional() parent: GalleryComponent) {
        // NOTE(review): @Input bindings are not applied yet at
        // construction time, so this check effectively always falls back
        // to the optional DI parent unless target was set another way —
        // confirm the intended precedence.
        if (this.target == null) {
            this.target = parent;
        }
    }
}
<|file_name|>no-extern-crate-in-type.rs<|end_file_name|><|fim▁begin|>// aux-build:foo.rs
extern crate foo;<|fim▁hole|>type Output = Option<Foo>; //~ ERROR cannot find type `Foo`
fn main() {}<|fim▁end|> | |
<|file_name|>aggregates_client.py<|end_file_name|><|fim▁begin|># Copyright 2013 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
from tempest.common import rest_client
from tempest.common import xml_utils
from tempest import config
from tempest import exceptions
CONF = config.CONF
class AggregatesClientXML(rest_client.RestClient):
TYPE = "xml"
def __init__(self, auth_provider):<|fim▁hole|> self.service = CONF.compute.catalog_type
def _format_aggregate(self, g):
    """Convert an aggregate XML node into a plain dict."""
    agg = xml_utils.xml_to_json(g)
    aggregate = {}
    for key, value in agg.items():
        if key == 'hosts':
            # Flatten the nested host mapping into a list of host names.
            aggregate['hosts'] = list(value.values())
        elif key == 'availability_zone':
            # XML serializes a missing AZ as the literal string 'None'.
            aggregate[key] = None if value == 'None' else value
        else:
            aggregate[key] = value
    return aggregate
def _parse_array(self, node):
    """Format each child aggregate element of ``node`` into a dict."""
    return [self._format_aggregate(x) for x in node]
def list_aggregates(self):
    """Get aggregate list."""
    resp, body = self.get("os-aggregates")
    # Response body is XML; convert each aggregate node to a dict.
    aggregates = self._parse_array(etree.fromstring(body))
    return resp, aggregates
def get_aggregate(self, aggregate_id):
    """Get details of the given aggregate."""
    resp, body = self.get("os-aggregates/%s" % str(aggregate_id))
    aggregate = self._format_aggregate(etree.fromstring(body))
    return resp, aggregate
def create_aggregate(self, name, availability_zone=None):
    """Creates a new aggregate."""
    # Only include availability_zone in the XML when one was supplied.
    attrs = {"name": name}
    if availability_zone is not None:
        attrs["availability_zone"] = availability_zone
    post_body = xml_utils.Element("aggregate", **attrs)
    resp, body = self.post('os-aggregates',
                           str(xml_utils.Document(post_body)))
    aggregate = self._format_aggregate(etree.fromstring(body))
    return resp, aggregate
def update_aggregate(self, aggregate_id, name, availability_zone=None):
    """Update a aggregate."""
    # Only include availability_zone in the XML when one was supplied.
    attrs = {"name": name}
    if availability_zone is not None:
        attrs["availability_zone"] = availability_zone
    put_body = xml_utils.Element("aggregate", **attrs)
    resp, body = self.put('os-aggregates/%s' % str(aggregate_id),
                          str(xml_utils.Document(put_body)))
    aggregate = self._format_aggregate(etree.fromstring(body))
    return resp, aggregate
def delete_aggregate(self, aggregate_id):
    """Deletes the given aggregate."""
    # Returns the raw result of the REST client's delete call.
    return self.delete("os-aggregates/%s" % str(aggregate_id))
def is_resource_deleted(self, id):
    """Report whether the aggregate with the given id no longer exists."""
    try:
        self.get_aggregate(id)
        return False
    except exceptions.NotFound:
        # A 404 means the delete has completed.
        return True
def add_host(self, aggregate_id, host):
    """Adds a host to the given aggregate."""
    post_body = xml_utils.Element("add_host", host=host)
    # Host membership changes go through the aggregate 'action' endpoint.
    resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
                           str(xml_utils.Document(post_body)))
    aggregate = self._format_aggregate(etree.fromstring(body))
    return resp, aggregate
def remove_host(self, aggregate_id, host):
    """Removes a host from the given aggregate."""
    post_body = xml_utils.Element("remove_host", host=host)
    # Host membership changes go through the aggregate 'action' endpoint.
    resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
                           str(xml_utils.Document(post_body)))
    aggregate = self._format_aggregate(etree.fromstring(body))
    return resp, aggregate
def set_metadata(self, aggregate_id, meta):
    """Replaces the aggregate's existing metadata with new metadata.

    :param aggregate_id: id of the aggregate to update
    :param meta: dict mapping metadata keys to their new values
    :returns: (resp, aggregate dict) tuple
    """
    post_body = xml_utils.Element("set_metadata")
    metadata = xml_utils.Element("metadata")
    post_body.append(metadata)
    # Use distinct loop names: the original code rebound ``meta`` (the
    # input dict) to an Element inside the loop, shadowing the argument.
    for key, value in meta.items():
        entry = xml_utils.Element(key)
        entry.append(xml_utils.Text(value))
        metadata.append(entry)
    resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
                           str(xml_utils.Document(post_body)))
    aggregate = self._format_aggregate(etree.fromstring(body))
    return resp, aggregate
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# pylint: disable=no-self-use, line-too-long
from __future__ import absolute_import, print_function, with_statement
import sys
import unittest
from mock import Mock, patch
from nose.tools import * # pylint: disable=wildcard-import, unused-wildcard-import
import six
from six.moves import range # pylint: disable=redefined-builtin
from six.moves import zip # pylint: disable=redefined-builtin
from behave.model_core import FileLocation
from behave.model import Feature, Scenario, ScenarioOutline, Step
from behave.model import Table, Row
from behave.matchers import NoMatch
from behave.configuration import Configuration
from behave.compat.collections import OrderedDict
from behave import step_registry
# -- CONVENIENCE-ALIAS:
_text = six.text_type
class TestFeatureRun(unittest.TestCase):
# pylint: disable=invalid-name
def setUp(self):
self.runner = Mock()
self.runner.feature.tags = []
self.config = self.runner.config = Mock()
self.context = self.runner.context = Mock()
self.formatters = self.runner.formatters = [Mock()]
self.run_hook = self.runner.run_hook = Mock()
def test_formatter_feature_called(self):
feature = Feature('foo.feature', 1, u'Feature', u'foo',
background=Mock())
feature.run(self.runner)
self.formatters[0].feature.assert_called_with(feature)
def test_formatter_background_called_when_feature_has_background(self):
feature = Feature('foo.feature', 1, u'Feature', u'foo',
background=Mock())
feature.run(self.runner)
self.formatters[0].background.assert_called_with(feature.background)
def test_formatter_background_not_called_when_feature_has_no_background(self):
feature = Feature('foo.feature', 1, u'Feature', u'foo')
feature.run(self.runner)
assert not self.formatters[0].background.called
def test_run_runs_scenarios(self):
scenarios = [Mock(), Mock()]
for scenario in scenarios:
scenario.tags = []
scenario.run.return_value = False
self.config.tags.check.return_value = True # pylint: disable=no-member
self.config.name = []
feature = Feature('foo.feature', 1, u'Feature', u'foo',
scenarios=scenarios)
feature.run(self.runner)
for scenario in scenarios:
scenario.run.assert_called_with(self.runner)
def test_run_runs_named_scenarios(self):
scenarios = [Mock(Scenario), Mock(Scenario)]
scenarios[0].name = 'first scenario'
scenarios[1].name = 'second scenario'
scenarios[0].tags = []
scenarios[1].tags = []
# -- FAKE-CHECK:
scenarios[0].should_run_with_name_select.return_value = True
scenarios[1].should_run_with_name_select.return_value = False
for scenario in scenarios:
scenario.run.return_value = False
self.config.tags.check.return_value = True # pylint: disable=no-member
self.config.name = ['first', 'third']
self.config.name_re = Configuration.build_name_re(self.config.name)
feature = Feature('foo.feature', 1, u'Feature', u'foo',
scenarios=scenarios)
feature.run(self.runner)
scenarios[0].run.assert_called_with(self.runner)
assert not scenarios[1].run.called
scenarios[0].should_run_with_name_select.assert_called_with(self.config)
scenarios[1].should_run_with_name_select.assert_called_with(self.config)
def test_run_runs_named_scenarios_with_regexp(self):
scenarios = [Mock(), Mock()]
scenarios[0].name = 'first scenario'
scenarios[1].name = 'second scenario'
scenarios[0].tags = []
scenarios[1].tags = []
# -- FAKE-CHECK:
scenarios[0].should_run_with_name_select.return_value = False
scenarios[1].should_run_with_name_select.return_value = True
for scenario in scenarios:
scenario.run.return_value = False
self.config.tags.check.return_value = True # pylint: disable=no-member
self.config.name = ['third .*', 'second .*']
self.config.name_re = Configuration.build_name_re(self.config.name)
feature = Feature('foo.feature', 1, u'Feature', u'foo',
scenarios=scenarios)
feature.run(self.runner)
assert not scenarios[0].run.called
scenarios[1].run.assert_called_with(self.runner)
scenarios[0].should_run_with_name_select.assert_called_with(self.config)
scenarios[1].should_run_with_name_select.assert_called_with(self.config)
def test_feature_hooks_not_run_if_feature_not_being_run(self):
self.config.tags.check.return_value = False # pylint: disable=no-member
feature = Feature('foo.feature', 1, u'Feature', u'foo')
feature.run(self.runner)
assert not self.run_hook.called
class TestScenarioRun(unittest.TestCase):
# pylint: disable=invalid-name
def setUp(self):
self.runner = Mock()
self.runner.feature.tags = []
self.config = self.runner.config = Mock()
self.config.dry_run = False
self.context = self.runner.context = Mock()
self.formatters = self.runner.formatters = [Mock()]
self.run_hook = self.runner.run_hook = Mock()
def test_run_invokes_formatter_scenario_and_steps_correctly(self):
self.config.stdout_capture = False
self.config.log_capture = False
self.config.tags.check.return_value = True # pylint: disable=no-member
steps = [Mock(), Mock()]
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo',
steps=steps)
scenario.run(self.runner)
self.formatters[0].scenario.assert_called_with(scenario)
for step in steps:
step.run.assert_called_with(self.runner)
if sys.version_info[0] == 3:
stringio_target = 'io.StringIO'
else:
stringio_target = 'StringIO.StringIO'
def test_handles_stdout_and_log_capture(self):
self.config.stdout_capture = True
self.config.log_capture = True
self.config.tags.check.return_value = True # pylint: disable=no-member
steps = [Mock(), Mock()]
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo',
steps=steps)
scenario.run(self.runner)
self.runner.setup_capture.assert_called_with()
self.runner.teardown_capture.assert_called_with()
def test_failed_step_causes_remaining_steps_to_be_skipped(self):
self.config.stdout_capture = False
self.config.log_capture = False
self.config.tags.check.return_value = True # pylint: disable=no-member
steps = [Mock(), Mock()]
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo',
steps=steps)
steps[0].run.return_value = False
steps[1].step_type = "when"
steps[1].name = "step1"
def step1_function(context): # pylint: disable=unused-argument
pass
my_step_registry = step_registry.StepRegistry()
my_step_registry.add_step_definition("when", "step1", step1_function)
with patch("behave.step_registry.registry", my_step_registry):
assert scenario.run(self.runner)
eq_(steps[1].status, 'skipped')
def test_failed_step_causes_context_failure_to_be_set(self):
self.config.stdout_capture = False
self.config.log_capture = False
self.config.tags.check.return_value = True # pylint: disable=no-member
steps = [
Mock(step_type="given", name="step0"),
Mock(step_type="then", name="step1"),
]
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo',
steps=steps)
steps[0].run.return_value = False
assert scenario.run(self.runner)
# pylint: disable=protected-access
self.context._set_root_attribute.assert_called_with('failed', True)
def test_undefined_step_causes_failed_scenario_status(self):
self.config.stdout_capture = False
self.config.log_capture = False
self.config.tags.check.return_value = True # pylint: disable=no-member
passed_step = Mock()
undefined_step = Mock()
steps = [passed_step, undefined_step]
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo',
steps=steps)
passed_step.run.return_value = True
passed_step.status = 'passed'
undefined_step.run.return_value = False
undefined_step.status = 'undefined'
assert scenario.run(self.runner)
eq_(undefined_step.status, 'undefined')
eq_(scenario.status, 'failed')
# pylint: disable=protected-access
self.context._set_root_attribute.assert_called_with('failed', True)
def test_skipped_steps_set_step_status_and_scenario_status_if_not_set(self):
self.config.stdout_capture = False
self.config.log_capture = False
self.config.tags.check.return_value = False # pylint: disable=no-member
steps = [Mock(), Mock()]
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo',
steps=steps)
scenario.run(self.runner)
assert False not in [s.status == 'skipped' for s in steps]
eq_(scenario.status, 'skipped')
def test_scenario_hooks_not_run_if_scenario_not_being_run(self):
self.config.tags.check.return_value = False # pylint: disable=no-member
scenario = Scenario('foo.feature', 17, u'Scenario', u'foo')
scenario.run(self.runner)
assert not self.run_hook.called
def test_should_run_with_name_select(self):
scenario_name = u"first scenario"
scenario = Scenario("foo.feature", 17, u"Scenario", scenario_name)
self.config.name = ['first .*', 'second .*']
self.config.name_re = Configuration.build_name_re(self.config.name)
assert scenario.should_run_with_name_select(self.config)
class TestScenarioOutline(unittest.TestCase):
# pylint: disable=invalid-name
def test_run_calls_run_on_each_generated_scenario(self):
# pylint: disable=protected-access
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
outline._scenarios = [Mock(), Mock()]
for scenario in outline._scenarios:
scenario.run.return_value = False
runner = Mock()
runner.context = Mock()
outline.run(runner)
for s in outline._scenarios:
s.run.assert_called_with(runner)
def test_run_stops_on_first_failure_if_requested(self):
# pylint: disable=protected-access
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
outline._scenarios = [Mock(), Mock()]
outline._scenarios[0].run.return_value = True
runner = Mock()
runner.context = Mock()
config = runner.config = Mock()
config.stop = True
outline.run(runner)
outline._scenarios[0].run.assert_called_with(runner)
assert not outline._scenarios[1].run.called
def test_run_sets_context_variable_for_outline(self):
# pylint: disable=protected-access
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
outline._scenarios = [Mock(), Mock(), Mock()]
for scenario in outline._scenarios:
scenario.run.return_value = False
runner = Mock()
context = runner.context = Mock()
config = runner.config = Mock()
config.stop = True
outline.run(runner)
eq_(context._set_root_attribute.call_args_list, [
(('active_outline', outline._scenarios[0]._row), {}),
(('active_outline', outline._scenarios[1]._row), {}),
(('active_outline', outline._scenarios[2]._row), {}),
(('active_outline', None), {}),
])
def test_run_should_pass_when_all_examples_pass(self):
# pylint: disable=protected-access
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
outline._scenarios = [Mock(), Mock(), Mock()]
for scenario in outline._scenarios:
scenario.run.return_value = False
runner = Mock()
context = runner.context = Mock()
config = runner.config = Mock()
config.stop = True
resultFailed = outline.run(runner)
eq_(resultFailed, False)
def test_run_should_fail_when_first_examples_fails(self):
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
failed = True
# pylint: disable=protected-access
outline._scenarios = [Mock(), Mock()]
outline._scenarios[0].run.return_value = failed
outline._scenarios[1].run.return_value = not failed
runner = Mock()
context = runner.context = Mock()
config = runner.config = Mock()
config.stop = True
resultFailed = outline.run(runner)
eq_(resultFailed, True)
def test_run_should_fail_when_last_examples_fails(self):
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
failed = True
# pylint: disable=protected-access
outline._scenarios = [Mock(), Mock()]
outline._scenarios[0].run.return_value = not failed
outline._scenarios[1].run.return_value = failed
runner = Mock()
context = runner.context = Mock()
config = runner.config = Mock()
config.stop = True
resultFailed = outline.run(runner)
eq_(resultFailed, True)
def test_run_should_fail_when_middle_examples_fails(self):
outline = ScenarioOutline('foo.feature', 17, u'Scenario Outline',
u'foo')
failed = True
# pylint: disable=protected-access
outline._scenarios = [Mock(), Mock(), Mock()]
outline._scenarios[0].run.return_value = not failed
outline._scenarios[1].run.return_value = failed
outline._scenarios[2].run.return_value = not failed
runner = Mock()
context = runner.context = Mock()
config = runner.config = Mock()
config.stop = True
resultFailed = outline.run(runner)
eq_(resultFailed, True)
def raiser(exception):
    """Build a callable that ignores all arguments and raises ``exception``."""
    def _raise(*_args, **_kwargs):
        raise exception
    return _raise
class TestStepRun(unittest.TestCase):
# pylint: disable=invalid-name
def setUp(self):
self.step_registry = Mock()
self.runner = Mock()
self.runner.step_registry = self.step_registry
self.config = self.runner.config = Mock()
self.config.outputs = [None]
self.context = self.runner.context = Mock()
print('context is %s' % self.context)
self.formatters = self.runner.formatters = [Mock()]
self.stdout_capture = self.runner.stdout_capture = Mock()
self.stdout_capture.getvalue.return_value = ''
self.stderr_capture = self.runner.stderr_capture = Mock()
self.stderr_capture.getvalue.return_value = ''
self.log_capture = self.runner.log_capture = Mock()
self.log_capture.getvalue.return_value = ''
self.run_hook = self.runner.run_hook = Mock()
def test_run_appends_step_to_undefined_when_no_match_found(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
self.runner.step_registry.find_match.return_value = None
self.runner.undefined_steps = []
assert not step.run(self.runner)
assert step in self.runner.undefined_steps
eq_(step.status, 'undefined')
def test_run_reports_undefined_step_via_formatter_when_not_quiet(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
self.runner.step_registry.find_match.return_value = None
assert not step.run(self.runner)
self.formatters[0].match.assert_called_with(NoMatch())
self.formatters[0].result.assert_called_with(step)
def test_run_with_no_match_does_not_touch_formatter_when_quiet(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
self.runner.step_registry.find_match.return_value = None
assert not step.run(self.runner, quiet=True)
assert not self.formatters[0].match.called
assert not self.formatters[0].result.called
def test_run_when_not_quiet_reports_match_and_result(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
side_effects = (None, raiser(AssertionError('whee')),
raiser(Exception('whee')))
for side_effect in side_effects:
match.run.side_effect = side_effect
step.run(self.runner)
self.formatters[0].match.assert_called_with(match)
self.formatters[0].result.assert_called_with(step)
def test_run_when_quiet_reports_nothing(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
side_effects = (None, raiser(AssertionError('whee')),
raiser(Exception('whee')))
for side_effect in side_effects:
match.run.side_effect = side_effect
step.run(self.runner, quiet=True)
assert not self.formatters[0].match.called
assert not self.formatters[0].result.called
def test_run_runs_before_hook_then_match_then_after_hook(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
side_effects = (None, AssertionError('whee'), Exception('whee'))
for side_effect in side_effects:
# Make match.run() and runner.run_hook() the same mock so
# we can make sure things happen in the right order.
self.runner.run_hook = match.run = Mock()
def effect(thing):
# pylint: disable=unused-argument
def raiser_(*args, **kwargs):
match.run.side_effect = None
if thing:
raise thing
def nonraiser(*args, **kwargs):
match.run.side_effect = raiser_
return nonraiser
match.run.side_effect = effect(side_effect)
step.run(self.runner)
eq_(match.run.call_args_list, [
(('before_step', self.context, step), {}),
((self.context,), {}),
(('after_step', self.context, step), {}),
])
def test_run_sets_table_if_present(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo',
table=Mock())
self.runner.step_registry.find_match.return_value = Mock()
step.run(self.runner)
eq_(self.context.table, step.table)
def test_run_sets_text_if_present(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo',
text=Mock(name='text'))
self.runner.step_registry.find_match.return_value = Mock()
step.run(self.runner)
eq_(self.context.text, step.text)
def test_run_sets_status_to_passed_if_nothing_goes_wrong(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')<|fim▁hole|> step.run(self.runner)
eq_(step.status, 'passed')
eq_(step.error_message, None)
def test_run_sets_status_to_failed_on_assertion_error(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
step.error_message = None
match = Mock()
match.run.side_effect = raiser(AssertionError('whee'))
self.runner.step_registry.find_match.return_value = match
step.run(self.runner)
eq_(step.status, 'failed')
assert step.error_message.startswith('Assertion Failed')
@patch('traceback.format_exc')
def test_run_sets_status_to_failed_on_exception(self, format_exc):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
step.error_message = None
match = Mock()
match.run.side_effect = raiser(Exception('whee'))
self.runner.step_registry.find_match.return_value = match
format_exc.return_value = 'something to do with an exception'
step.run(self.runner)
eq_(step.status, 'failed')
eq_(step.error_message, format_exc.return_value)
@patch('time.time')
def test_run_calculates_duration(self, time_time):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
def time_time_1():
def time_time_2():
return 23
time_time.side_effect = time_time_2
return 17
side_effects = (None, raiser(AssertionError('whee')),
raiser(Exception('whee')))
for side_effect in side_effects:
match.run.side_effect = side_effect
time_time.side_effect = time_time_1
step.run(self.runner)
eq_(step.duration, 23 - 17)
def test_run_captures_stdout_and_logging(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
assert step.run(self.runner)
self.runner.start_capture.assert_called_with()
self.runner.stop_capture.assert_called_with()
def test_run_appends_any_captured_stdout_on_failure(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
self.stdout_capture.getvalue.return_value = 'frogs'
match.run.side_effect = raiser(Exception('halibut'))
assert not step.run(self.runner)
assert 'Captured stdout:' in step.error_message
assert 'frogs' in step.error_message
def test_run_appends_any_captured_logging_on_failure(self):
step = Step('foo.feature', 17, u'Given', 'given', u'foo')
match = Mock()
self.runner.step_registry.find_match.return_value = match
self.log_capture.getvalue.return_value = 'toads'
match.run.side_effect = raiser(AssertionError('kipper'))
assert not step.run(self.runner)
assert 'Captured logging:' in step.error_message
assert 'toads' in step.error_message
class TestTableModel(unittest.TestCase):
    """Tests for behave.model.Table: iteration, indexing, and row access
    by column name and by position."""
    # pylint: disable=invalid-name
    HEAD = [u'type of stuff', u'awesomeness', u'ridiculousness']
    DATA = [
        [u'fluffy', u'large', u'frequent'],
        [u'lint', u'low', u'high'],
        [u'green', u'variable', u'awkward'],
    ]

    def setUp(self):
        self.table = Table(self.HEAD, 0, self.DATA)

    def test_equivalence(self):
        # Two tables built from the same HEAD/DATA must compare equal.
        t1 = self.table
        self.setUp()
        eq_(t1, self.table)

    def test_table_iteration(self):
        # Iterating a table yields rows whose cells match DATA in order.
        for i, row in enumerate(self.table):
            for j, cell in enumerate(row):
                eq_(cell, self.DATA[i][j])

    def test_table_row_by_index(self):
        for i in range(3):
            eq_(self.table[i], Row(self.HEAD, self.DATA[i], 0))

    def test_table_row_name(self):
        # Rows support dict-style access keyed by column heading.
        eq_(self.table[0]['type of stuff'], 'fluffy')
        eq_(self.table[1]['awesomeness'], 'low')
        eq_(self.table[2]['ridiculousness'], 'awkward')

    def test_table_row_index(self):
        # Rows also support positional access.
        eq_(self.table[0][0], 'fluffy')
        eq_(self.table[1][1], 'low')
        eq_(self.table[2][2], 'awkward')

    @raises(KeyError)
    def test_table_row_keyerror(self):
        # An unknown column name must raise KeyError.
        self.table[0]['spam']  # pylint: disable=pointless-statement

    def test_table_row_items(self):
        eq_(list(self.table[0].items()), list(zip(self.HEAD, self.DATA[0])))
class TestModelRow(unittest.TestCase):
    """Tests for behave.model.Row: length, item access (by name and
    index), get() with default, and as_dict() conversion."""
    # pylint: disable=invalid-name, bad-whitespace
    HEAD = [u'name', u'sex', u'age']
    DATA = [u'Alice', u'female', u'12']

    def setUp(self):
        self.row = Row(self.HEAD, self.DATA, 0)

    def test_len(self):
        eq_(len(self.row), 3)

    def test_getitem_with_valid_colname(self):
        # pylint: disable=bad-whitespace
        eq_(self.row['name'], u'Alice')
        eq_(self.row['sex'], u'female')
        eq_(self.row['age'], u'12')

    @raises(KeyError)
    def test_getitem_with_unknown_colname(self):
        self.row['__UNKNOWN_COLUMN__']  # pylint: disable=pointless-statement

    def test_getitem_with_valid_index(self):
        eq_(self.row[0], u'Alice')
        eq_(self.row[1], u'female')
        eq_(self.row[2], u'12')

    @raises(IndexError)
    def test_getitem_with_invalid_index(self):
        # Index equal to the column count is one past the end.
        colsize = len(self.row)
        eq_(colsize, 3)
        self.row[colsize]  # pylint: disable=pointless-statement

    def test_get_with_valid_colname(self):
        # pylint: disable=bad-whitespace
        eq_(self.row.get('name'), u'Alice')
        eq_(self.row.get('sex'), u'female')
        eq_(self.row.get('age'), u'12')

    def test_getitem_with_unknown_colname_should_return_default(self):
        eq_(self.row.get('__UNKNOWN_COLUMN__', 'XXX'), u'XXX')

    def test_as_dict(self):
        # as_dict() returns an OrderedDict preserving column order; a
        # plain dict() copy of it must compare equal.
        data1 = self.row.as_dict()
        data2 = dict(self.row.as_dict())
        assert isinstance(data1, dict)
        assert isinstance(data2, dict)
        assert isinstance(data1, OrderedDict)
        # -- REQUIRES: Python2.7 or ordereddict installed.
        # assert not isinstance(data2, OrderedDict)
        eq_(data1, data2)
        # pylint: disable=bad-whitespace
        eq_(data1['name'], u'Alice')
        eq_(data1['sex'], u'female')
        eq_(data1['age'], u'12')
class TestFileLocation(unittest.TestCase):
# pylint: disable=invalid-name
ordered_locations1 = [
FileLocation("features/alice.feature", 1),
FileLocation("features/alice.feature", 5),
FileLocation("features/alice.feature", 10),
FileLocation("features/alice.feature", 11),
FileLocation("features/alice.feature", 100),
]
ordered_locations2 = [
FileLocation("features/alice.feature", 1),
FileLocation("features/alice.feature", 10),
FileLocation("features/bob.feature", 5),
FileLocation("features/charly.feature", None),
FileLocation("features/charly.feature", 0),
FileLocation("features/charly.feature", 100),
]
same_locations = [
(FileLocation("alice.feature"),
FileLocation("alice.feature", None),
),
(FileLocation("alice.feature", 10),
FileLocation("alice.feature", 10),
),
(FileLocation("features/bob.feature", 11),
FileLocation("features/bob.feature", 11),
),
]
def test_compare_equal(self):
for value1, value2 in self.same_locations:
eq_(value1, value2)
def test_compare_equal_with_string(self):
for location in self.ordered_locations2:
eq_(location, location.filename)
eq_(location.filename, location)
def test_compare_not_equal(self):
for value1, value2 in self.same_locations:
assert not(value1 != value2) # pylint: disable=unneeded-not, superfluous-parens
for locations in [self.ordered_locations1, self.ordered_locations2]:
for value1, value2 in zip(locations, locations[1:]):
assert value1 != value2
def test_compare_less_than(self):
for locations in [self.ordered_locations1, self.ordered_locations2]:
for value1, value2 in zip(locations, locations[1:]):
assert value1 < value2, "FAILED: %s < %s" % (_text(value1), _text(value2))
assert value1 != value2
def test_compare_less_than_with_string(self):
locations = self.ordered_locations2
for value1, value2 in zip(locations, locations[1:]):
if value1.filename == value2.filename:
continue
assert value1 < value2.filename, \
"FAILED: %s < %s" % (_text(value1), _text(value2.filename))
assert value1.filename < value2, \
"FAILED: %s < %s" % (_text(value1.filename), _text(value2))
def test_compare_greater_than(self):
for locations in [self.ordered_locations1, self.ordered_locations2]:
for value1, value2 in zip(locations, locations[1:]):
assert value2 > value1, "FAILED: %s > %s" % (_text(value2), _text(value1))
assert value2 != value1
def test_compare_less_or_equal(self):
for value1, value2 in self.same_locations:
assert value1 <= value2, "FAILED: %s <= %s" % (_text(value1), _text(value2))
assert value1 == value2
for locations in [self.ordered_locations1, self.ordered_locations2]:
for value1, value2 in zip(locations, locations[1:]):
assert value1 <= value2, "FAILED: %s <= %s" % (_text(value1), _text(value2))
assert value1 != value2
def test_compare_greater_or_equal(self):
    # Equal pairs satisfy both >= and ==.
    for left, right in self.same_locations:
        assert right >= left, "FAILED: %s >= %s" % (_text(right), _text(left))
        assert right == left
    # Strictly ordered pairs satisfy >= while being unequal.
    for locs in (self.ordered_locations1, self.ordered_locations2):
        for left, right in zip(locs, locs[1:]):
            assert right >= left, "FAILED: %s >= %s" % (_text(right), _text(left))
            assert right != left
def test_filename_should_be_same_as_self(self):
    # A location and its filename string are interchangeable for equality.
    for loc in self.ordered_locations2:
        assert loc == loc.filename
        assert loc.filename == loc
def test_string_conversion(self):
    # text(location) renders "filename:line", or the bare filename when
    # the line number is None.
    for loc in self.ordered_locations2:
        if loc.line is None:
            expected = loc.filename
        else:
            expected = u"%s:%s" % (loc.filename, loc.line)
        assert six.text_type(loc) == expected
def test_repr_conversion(self):
    # repr() must follow the documented '<FileLocation: ...>' format.
    for loc in self.ordered_locations2:
        expected = u'<FileLocation: filename="%s", line=%s>' % \
            (loc.filename, loc.line)
        actual = repr(loc)
        assert actual == expected, "FAILED: %s == %s" % (actual, expected)
self.runner.step_registry.find_match.return_value = Mock() |
<|file_name|>vitality.js<|end_file_name|><|fim▁begin|>/*!
* Vitality v2.0.0 (http://themes.startbootstrap.com/vitality-v2.0.0)
* Copyright 2013-2017 Start Bootstrap
* Purchase a license to use this theme at (https://wrapbootstrap.com)
*/
/*!
* Vitality v2.0.0 (http://themes.startbootstrap.com/vitality-v2.0.0)
* Copyright 2013-2017 Start Bootstrap
* Purchase a license to use this theme at (https://wrapbootstrap.com)
*/
// Load WOW.js scroll-reveal animations on non-touch devices only.
var isPhoneDevice = "ontouchstart" in document.documentElement;
$(document).ready(function() {
    if (isPhoneDevice) {
        // Mobile: skip WOW.js — scroll-reveal animations are not used on touch.
    } else {
        // Desktop: initialize WOW.js with a 50px trigger offset.
        // Bug fix: `wow` was assigned without `var`, leaking an implicit
        // global variable (and throwing a ReferenceError in strict mode).
        var wow = new WOW({
            offset: 50
        });
        wow.init();
    }
});
(function($) {
    "use strict"; // Start of use strict

    // Shared prev/next arrow markup for every Owl Carousel instance.
    var carouselArrows = [
        "<i class='fa fa-angle-left'></i>",
        "<i class='fa fa-angle-right'></i>"
    ];

    // Shrink the fixed navbar once the page has scrolled past 100px.
    $(window).scroll(function() {
        var shrink = $("#mainNav").offset().top > 100;
        $("#mainNav").toggleClass("navbar-shrink", shrink);
    });

    // Scrollspy: highlight the nav item matching the section in view.
    $('body').scrollspy({
        offset: 68,
        target: '#mainNav'
    });

    // Smooth Scrolling: links with the .page-scroll class animate to the
    // element referenced by their href (offset by the navbar height).
    $('a.page-scroll').bind('click', function(event) {
        var targetSelector = $(this).attr('href');
        $('html, body').stop().animate({
            scrollTop: ($(targetSelector).offset().top - 68)
        }, 1250, 'easeInOutExpo');
        event.preventDefault();
    });

    // Collapse the responsive menu after a nav link or the brand is clicked.
    $('.navbar-collapse>ul>li>a, .navbar-brand').click(function() {
        $('.navbar-collapse').collapse('hide');
    });

    // Floating-label behaviour for the contact form inputs.
    $("body").on("input propertychange", ".floating-label-form-group", function(e) {
        $(this).toggleClass("floating-label-form-group-with-value", !!$(e.target).val());
    }).on("focus", ".floating-label-form-group", function() {
        $(this).addClass("floating-label-form-group-with-focus");
    }).on("blur", ".floating-label-form-group", function() {
        $(this).removeClass("floating-label-form-group-with-focus");
    });

    // Owl Carousel: team members — three visible items, arrows only.
    $(".team-carousel").owlCarousel({
        items: 3,
        navigation: true,
        pagination: false,
        navigationText: carouselArrows
    });

    // Owl Carousel: portfolio detail slider — single item, fade-up transition,
    // drag disabled so clicks are not swallowed.
    $(".portfolio-carousel").owlCarousel({
        singleItem: true,
        navigation: true,
        pagination: false,
        navigationText: carouselArrows,
        autoHeight: true,
        mouseDrag: false,
        touchDrag: false,
        transitionStyle: "fadeUp"
    });

    // Owl Carousel: testimonials and device mockups — single item with
    // both dots and arrows.
    $(".testimonials-carousel, .mockup-carousel").owlCarousel({
        singleItem: true,
        navigation: true,
        pagination: true,
        autoHeight: true,
        navigationText: carouselArrows,
        transitionStyle: "backSlide"
    });

    // Owl Carousel: portfolio gallery strip.
    $(".portfolio-gallery").owlCarousel({
        items: 3
    });

    // Magnific Popup: lightbox gallery for .gallery-link anchors.
    $('.gallery-link').magnificPopup({
        type: 'image',
        gallery: {
            enabled: true
        },
        image: {
            titleSrc: 'title'
        }
    });

    // Magnific Popup: standalone image popups for .mix items.
    $('.mix').magnificPopup({
        type: 'image',
        image: {
            titleSrc: 'title'
        }
    });

    // Vide: full-width video background in the header, with a JPG poster
    // fallback for devices that do not autoplay video.
    $('header.video').vide({
        mp4: "mp4/camera.mp4",
        poster: "img/agency/backgrounds/bg-mobile-fallback.jpg"
    }, {
        posterType: 'jpg'
    });
})(jQuery); // End of use strict
<|file_name|>EventGateway.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.bpmn.model;
public class EventGateway extends Gateway {
public EventGateway clone() {
EventGateway clone = new EventGateway();
clone.setValues(this);
return clone;
}
public void setValues(EventGateway otherElement) {
super.setValues(otherElement);<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>SvgRenderer.js<|end_file_name|><|fim▁begin|>/* *
*
* (c) 2010-2018 Torstein Honsi
*
* License: www.highcharts.com/license
*
* */
/**
* Options to align the element relative to the chart or another box.
*
* @interface Highcharts.AlignObject
*//**
* Horizontal alignment. Can be one of `left`, `center` and `right`.
*
* @name Highcharts.AlignObject#align
* @type {string|undefined}
*
* @default left
*//**
* Vertical alignment. Can be one of `top`, `middle` and `bottom`.
*
* @name Highcharts.AlignObject#verticalAlign
* @type {string|undefined}
*
* @default top
*//**
* Horizontal pixel offset from alignment.
*
* @name Highcharts.AlignObject#x
* @type {number|undefined}
*
* @default 0
*//**
* Vertical pixel offset from alignment.
*
* @name Highcharts.AlignObject#y
* @type {number|undefined}
*
* @default 0
*//**
* Use the `transform` attribute with translateX and translateY custom
* attributes to align this elements rather than `x` and `y` attributes.
*
* @name Highcharts.AlignObject#alignByTranslate
* @type {boolean|undefined}
*
* @default false
*/
/**
* Bounding box of an element.
*
* @interface Highcharts.BBoxObject
*//**
* Height of the bounding box.
*
* @name Highcharts.BBoxObject#height
* @type {number}
*//**
* Width of the bounding box.
*
* @name Highcharts.BBoxObject#width
* @type {number}
*//**
* Horizontal position of the bounding box.
*
* @name Highcharts.BBoxObject#x
* @type {number}
*//**
* Vertical position of the bounding box.
*
* @name Highcharts.BBoxObject#y
* @type {number}
*/
/**
* A clipping rectangle that can be applied to one or more {@link SVGElement}
* instances. It is instanciated with the {@link SVGRenderer#clipRect} function
* and applied with the {@link SVGElement#clip} function.
*
* @example
* var circle = renderer.circle(100, 100, 100)
* .attr({ fill: 'red' })
* .add();
* var clipRect = renderer.clipRect(100, 100, 100, 100);
*
* // Leave only the lower right quarter visible
* circle.clip(clipRect);
*
* @typedef {Highcharts.SVGElement} Highcharts.ClipRectElement
*/
/**
* The font metrics.
*
* @interface Highcharts.FontMetricsObject
*//**
* The baseline relative to the top of the box.
*
* @name Highcharts.FontMetricsObject#b
* @type {number}
*//**
* The line height.
*
* @name Highcharts.FontMetricsObject#h
* @type {number}
*//**
* The font size.
*
* @name Highcharts.FontMetricsObject#f
* @type {number}
*/
/**
* Gradient options instead of a solid color.
*
* @example
* // Linear gradient used as a color option
* color: {
* linearGradient: { x1: 0, x2: 0, y1: 0, y2: 1 },
* stops: [
* [0, '#003399'], // start
* [0.5, '#ffffff'], // middle
* [1, '#3366AA'] // end
* ]
* }
* }
*
* @interface Highcharts.GradientColorObject
*//**
* Holds an object that defines the start position and the end position relative
* to the shape.
*
* @name Highcharts.GradientColorObject#linearGradient
* @type {Highcharts.LinearGradientColorObject|undefined}
*//**
* Holds an object that defines the center position and the radius.
*
* @name Highcharts.GradientColorObject#radialGradient
* @type {Highcharts.RadialGradientColorObject|undefined}
*//**
* The first item in each tuple is the position in the gradient, where 0 is the
* start of the gradient and 1 is the end of the gradient. Multiple stops can be
* applied. The second item is the color for each stop. This color can also be
* given in the rgba format.
*
* @name Highcharts.GradientColorObject#stops
* @type {Array<Array<number,Highcharts.ColorString>>|undefined}
*/
/**
* Defines the start position and the end position for a gradient relative
* to the shape. Start position (x1, y1) and end position (x2, y2) are relative
* to the shape, where 0 means top/left and 1 is bottom/right.
*
* @interface Highcharts.LinearGradientColorObject
*//**
* Start horizontal position of the gradient. Float ranges 0-1.
*
* @name Highcharts.LinearGradientColorObject#x1
* @type {number}
*//**
* End horizontal position of the gradient. Float ranges 0-1.
*
* @name Highcharts.LinearGradientColorObject#x2
* @type {number}
*//**
* Start vertical position of the gradient. Float ranges 0-1.
*
* @name Highcharts.LinearGradientColorObject#y1
* @type {number}
*//**
* End vertical position of the gradient. Float ranges 0-1.
*
* @name Highcharts.LinearGradientColorObject#y2
* @type {number}
*/
/**
* Defines the center position and the radius for a gradient.
*
* @interface Highcharts.RadialGradientColorObject
*//**
* Center horizontal position relative to the shape. Float ranges 0-1.
*
* @name Highcharts.RadialGradientColorObject#cx
* @type {number}
*//**
* Center vertical position relative to the shape. Float ranges 0-1.
*
* @name Highcharts.RadialGradientColorObject#cy
* @type {number}
*//**
* Radius relative to the shape. Float ranges 0-1.
*
* @name Highcharts.RadialGradientColorObject#r
* @type {number}
*/
/**
* A rectangle.
*
* @interface Highcharts.RectangleObject
*//**
* Height of the rectangle.
*
* @name Highcharts.RectangleObject#height
* @type {number}
*//**
* Width of the rectangle.
*
* @name Highcharts.RectangleObject#width
* @type {number}
*//**
* Horizontal position of the rectangle.
*
* @name Highcharts.RectangleObject#x
* @type {number}
*//**
* Vertical position of the rectangle.
*
* @name Highcharts.RectangleObject#y
* @type {number}
*/
/**
* The shadow options.
*
* @interface Highcharts.ShadowOptionsObject
*//**
* The shadow color.
*
* @name Highcharts.ShadowOptionsObject#color
* @type {string|undefined}
*
* @default #000000
*//**
* The horizontal offset from the element.
*
* @name Highcharts.ShadowOptionsObject#offsetX
* @type {number|undefined}
*
* @default 1
*//**
* The vertical offset from the element.
*
* @name Highcharts.ShadowOptionsObject#offsetY
* @type {number|undefined}
*
* @default 1
*//**
* The shadow opacity.
*
* @name Highcharts.ShadowOptionsObject#opacity
* @type {number|undefined}
*
* @default 0.15
*//**
* The shadow width or distance from the element.
*
* @name Highcharts.ShadowOptionsObject#width
* @type {number|undefined}
*
* @default 3
*/
/**
* Serialized form of an SVG definition, including children. Some key
* property names are reserved: tagName, textContent, and children.
*
* @interface Highcharts.SVGDefinitionObject
*//**
* @name Highcharts.SVGDefinitionObject#[key:string]
* @type {number|string|Array<Highcharts.SVGDefinitionObject>|undefined}
*//**
* @name Highcharts.SVGDefinitionObject#children
* @type {Array<Highcharts.SVGDefinitionObject>|undefined}
*//**
* @name Highcharts.SVGDefinitionObject#tagName
* @type {string|undefined}
*//**
* @name Highcharts.SVGDefinitionObject#textContent
* @type {string|undefined}
*/
/**
* An extendable collection of functions for defining symbol paths.
*
* @typedef Highcharts.SymbolDictionary
*
* @property {Function|undefined} [key:Highcharts.SymbolKey]
*/
/**
* Can be one of `arc`, `callout`, `circle`, `diamond`, `square`,
* `triangle`, `triangle-down`. Symbols are used internally for point
* markers, button and label borders and backgrounds, or custom shapes.
* Extendable by adding to {@link SVGRenderer#symbols}.
*
* @typedef {string} Highcharts.SymbolKey
* @validvalue ["arc", "callout", "circle", "diamond", "square", "triangle",
* "triangle-down"]
*/
/**
* Additional options, depending on the actual symbol drawn.
*
* @interface Highcharts.SymbolOptionsObject
*//**
* The anchor X position for the `callout` symbol. This is where the chevron
* points to.
*
* @name Highcharts.SymbolOptionsObject#anchorX
* @type {number}
*//**
* The anchor Y position for the `callout` symbol. This is where the chevron
* points to.
*
* @name Highcharts.SymbolOptionsObject#anchorY
* @type {number}
*//**
* The end angle of an `arc` symbol.
*
* @name Highcharts.SymbolOptionsObject#end
* @type {number}
*//**
* Whether to draw `arc` symbol open or closed.
*
* @name Highcharts.SymbolOptionsObject#open
* @type {boolean}
*//**
* The radius of an `arc` symbol, or the border radius for the `callout` symbol.
*
* @name Highcharts.SymbolOptionsObject#r
* @type {number}
*//**
* The start angle of an `arc` symbol.
*
* @name Highcharts.SymbolOptionsObject#start
* @type {number}
*/
'use strict';
import H from './Globals.js';
import './Utilities.js';
import './Color.js';
var SVGElement,
SVGRenderer,
addEvent = H.addEvent,
animate = H.animate,
attr = H.attr,
charts = H.charts,
color = H.color,
css = H.css,
createElement = H.createElement,
defined = H.defined,
deg2rad = H.deg2rad,
destroyObjectProperties = H.destroyObjectProperties,
doc = H.doc,
extend = H.extend,
erase = H.erase,
hasTouch = H.hasTouch,
isArray = H.isArray,
isFirefox = H.isFirefox,
isMS = H.isMS,
isObject = H.isObject,
isString = H.isString,
isWebKit = H.isWebKit,
merge = H.merge,
noop = H.noop,
objectEach = H.objectEach,
pick = H.pick,
pInt = H.pInt,
removeEvent = H.removeEvent,
splat = H.splat,
stop = H.stop,
svg = H.svg,
SVG_NS = H.SVG_NS,
symbolSizes = H.symbolSizes,
win = H.win;
/**
* The SVGElement prototype is a JavaScript wrapper for SVG elements used in the
* rendering layer of Highcharts. Combined with the {@link
* Highcharts.SVGRenderer} object, these prototypes allow freeform annotation
 * in the charts or even in HTML pages without instantiating a chart. The
* SVGElement can also wrap HTML labels, when `text` or `label` elements are
* created with the `useHTML` parameter.
*
* The SVGElement instances are created through factory functions on the {@link
* Highcharts.SVGRenderer} object, like {@link Highcharts.SVGRenderer#rect|
* rect}, {@link Highcharts.SVGRenderer#path|path}, {@link
* Highcharts.SVGRenderer#text|text}, {@link Highcharts.SVGRenderer#label|
* label}, {@link Highcharts.SVGRenderer#g|g} and more.
*
* @class
* @name Highcharts.SVGElement
*/
// Bare constructor: all real setup happens in SVGElement#init, which the
// SVGRenderer factory methods (rect, path, text, label, g, ...) invoke.
SVGElement = H.SVGElement = function () {
    return this;
};
extend(SVGElement.prototype, /** @lends Highcharts.SVGElement.prototype */ {
// Default base for animation
opacity: 1,
// XML namespace used when creating SVG DOM nodes (see init).
SVG_NS: SVG_NS,
/**
 * For labels, these CSS properties are applied to the `text` node directly.
 *
 * @private
 * @name Highcharts.SVGElement#textProps
 * @type {Array<string>}
 */
textProps: ['direction', 'fontSize', 'fontWeight', 'fontFamily',
    'fontStyle', 'color', 'lineHeight', 'width', 'textAlign',
    'textDecoration', 'textOverflow', 'textOutline', 'cursor'],
/**
* Initialize the SVG element. This function only exists to make the
* initiation process overridable. It should not be called directly.
*
* @function Highcharts.SVGElement#init
*
* @param {Highcharts.SVGRenderer} renderer
* The SVGRenderer instance to initialize to.
*
* @param {string} nodeName
* The SVG node name.
*/
init: function (renderer, nodeName) {
/**
* The primary DOM node. Each `SVGElement` instance wraps a main DOM
* node, but may also represent more nodes.
*
* @name Highcharts.SVGElement#element
* @type {Highcharts.SVGDOMElement|Highcharts.HTMLDOMElement}
*/
this.element = nodeName === 'span' ?
createElement(nodeName) :
doc.createElementNS(this.SVG_NS, nodeName);
/**
* The renderer that the SVGElement belongs to.
*
* @name Highcharts.SVGElement#renderer
* @type {Highcharts.SVGRenderer}
*/
this.renderer = renderer;
},
/**
* Animate to given attributes or CSS properties.
*
* @sample highcharts/members/element-on/
* Setting some attributes by animation
*
* @function Highcharts.SVGElement#animate
*
* @param {Highcharts.SVGAttributes} params
* SVG attributes or CSS to animate.
*
* @param {Highcharts.AnimationOptionsObject} [options]
* Animation options.
*
* @param {Function} [complete]
* Function to perform at the end of animation.
*
* @return {Highcharts.SVGElement}
* Returns the SVGElement for chaining.
*/
animate: function (params, options, complete) {
var animOptions = H.animObject(
pick(options, this.renderer.globalAnimation, true)
);
if (animOptions.duration !== 0) {
// allows using a callback with the global animation without
// overwriting it
if (complete) {
animOptions.complete = complete;
}
animate(this, params, animOptions);
} else {
this.attr(params, null, complete);
if (animOptions.step) {
animOptions.step.call(this);
}
}
return this;
},
/**
* Build and apply an SVG gradient out of a common JavaScript configuration
* object. This function is called from the attribute setters. An event
* hook is added for supporting other complex color types.
*
* @private
* @function Highcharts.SVGElement#complexColor
*
* @param {Highcharts.GradientColorObject} color
* The gradient options structure.
*
* @param {string} prop
* The property to apply, can either be `fill` or `stroke`.
*
* @param {Highcharts.SVGDOMElement} elem
* SVG DOM element to apply the gradient on.
*/
complexColor: function (color, prop, elem) {
    var renderer = this.renderer,
        colorObject,
        gradName,
        gradAttr,
        radAttr,
        gradients,
        gradientObject,
        stops,
        stopColor,
        stopOpacity,
        radialReference,
        id,
        key = [],
        value;

    // Fired as an event so other complex color types (e.g. pattern fills)
    // can hook in and take over instead of the gradient handling below.
    H.fireEvent(this.renderer, 'complexColor', {
        args: arguments
    }, function () {
        // Apply linear or radial gradients
        if (color.radialGradient) {
            gradName = 'radialGradient';
        } else if (color.linearGradient) {
            gradName = 'linearGradient';
        }
        if (gradName) {
            gradAttr = color[gradName];
            gradients = renderer.gradients;
            stops = color.stops;
            radialReference = elem.radialReference;
            // Keep < 2.2 compatibility: gradient position given as a
            // plain [x1, y1, x2, y2] array rather than an object.
            if (isArray(gradAttr)) {
                color[gradName] = gradAttr = {
                    x1: gradAttr[0],
                    y1: gradAttr[1],
                    x2: gradAttr[2],
                    y2: gradAttr[3],
                    gradientUnits: 'userSpaceOnUse'
                };
            }
            // Correct the radial gradient for the radial reference system
            if (
                gradName === 'radialGradient' &&
                radialReference &&
                !defined(gradAttr.gradientUnits)
            ) {
                // Save the radial attributes for updating
                radAttr = gradAttr;
                gradAttr = merge(
                    gradAttr,
                    renderer.getRadialAttr(radialReference, radAttr),
                    { gradientUnits: 'userSpaceOnUse' }
                );
            }
            // Build the unique key to detect whether we need to create a
            // new element (#1282)
            objectEach(gradAttr, function (val, n) {
                if (n !== 'id') {
                    key.push(n, val);
                }
            });
            objectEach(stops, function (val) {
                key.push(val);
            });
            key = key.join(',');
            // Check if a gradient object with the same config object is
            // created within this renderer — gradients are cached and
            // shared between elements with identical definitions.
            if (gradients[key]) {
                id = gradients[key].attr('id');
            } else {
                // Set the id and create the element
                gradAttr.id = id = H.uniqueKey();
                gradients[key] = gradientObject =
                    renderer.createElement(gradName)
                        .attr(gradAttr)
                        .add(renderer.defs);
                gradientObject.radAttr = radAttr;
                // The gradient needs to keep a list of stops to be able to
                // destroy them
                gradientObject.stops = [];
                stops.forEach(function (stop) {
                    var stopObject;
                    // rgba stop colors are split into a solid stop-color
                    // plus a separate stop-opacity attribute.
                    if (stop[1].indexOf('rgba') === 0) {
                        colorObject = H.color(stop[1]);
                        stopColor = colorObject.get('rgb');
                        stopOpacity = colorObject.get('a');
                    } else {
                        stopColor = stop[1];
                        stopOpacity = 1;
                    }
                    stopObject = renderer.createElement('stop').attr({
                        offset: stop[0],
                        'stop-color': stopColor,
                        'stop-opacity': stopOpacity
                    }).add(gradientObject);
                    // Add the stop element to the gradient
                    gradientObject.stops.push(stopObject);
                });
            }
            // Set the reference to the gradient object
            value = 'url(' + renderer.url + '#' + id + ')';
            elem.setAttribute(prop, value);
            elem.gradient = key;
            // Allow the color to be concatenated into tooltips formatters
            // etc. (#2995)
            color.toString = function () {
                return value;
            };
        }
    });
},
/**
* Apply a text outline through a custom CSS property, by copying the text
* element and apply stroke to the copy. Used internally. Contrast checks at
* [example](https://jsfiddle.net/highcharts/43soe9m1/2/).
*
* @example
* // Specific color
* text.css({
* textOutline: '1px black'
* });
* // Automatic contrast
* text.css({
* color: '#000000', // black text
* textOutline: '1px contrast' // => white outline
* });
*
* @private
* @function Highcharts.SVGElement#applyTextOutline
*
* @param {string} textOutline
* A custom CSS `text-outline` setting, defined by `width color`.
*/
applyTextOutline: function (textOutline) {
    var elem = this.element,
        tspans,
        tspan,
        hasContrast = textOutline.indexOf('contrast') !== -1,
        styles = {},
        color,
        strokeWidth,
        firstRealChild,
        i;

    // When the text shadow is set to contrast, use dark stroke for light
    // text and vice versa.
    if (hasContrast) {
        styles.textOutline = textOutline = textOutline.replace(
            /contrast/g,
            this.renderer.getContrast(elem.style.fill)
        );
    }

    // Extract the stroke width and color from the "width color" setting.
    textOutline = textOutline.split(' ');
    color = textOutline[textOutline.length - 1];
    strokeWidth = textOutline[0];
    if (strokeWidth && strokeWidth !== 'none' && H.svg) {
        this.fakeTS = true; // Fake text shadow

        tspans = [].slice.call(elem.getElementsByTagName('tspan'));

        // In order to get the right y position of the clone,
        // copy over the y setter
        this.ySetter = this.xSetter;

        // Since the stroke is applied on center of the actual outline, we
        // need to double it to get the correct stroke-width outside the
        // glyphs.
        strokeWidth = strokeWidth.replace(
            /(^[\d\.]+)(.*?)$/g,
            function (match, digit, unit) {
                return (2 * digit) + unit;
            }
        );

        // Remove shadows from previous runs. Iterate from the end to
        // support removing items inside the cycle (#6472).
        i = tspans.length;
        while (i--) {
            tspan = tspans[i];
            if (tspan.getAttribute('class') === 'highcharts-text-outline') {
                // Remove then erase
                erase(tspans, elem.removeChild(tspan));
            }
        }

        // For each of the tspans, create a stroked copy behind it.
        firstRealChild = elem.firstChild;
        tspans.forEach(function (tspan, y) {
            var clone;

            // Let the first line start at the correct X position
            if (y === 0) {
                tspan.setAttribute('x', elem.getAttribute('x'));
                y = elem.getAttribute('y');
                tspan.setAttribute('y', y || 0);
                if (y === null) {
                    elem.setAttribute('y', 0);
                }
            }

            // Create the clone and apply outline properties
            clone = tspan.cloneNode(1);
            attr(clone, {
                'class': 'highcharts-text-outline',
                'fill': color,
                'stroke': color,
                'stroke-width': strokeWidth,
                'stroke-linejoin': 'round'
            });
            // Inserting before the first content node puts the outline
            // copy behind the original glyphs in paint order.
            elem.insertBefore(clone, firstRealChild);
        });
    }
},
// Custom attributes used for symbols, these should be filtered out when
// setting SVGElement attributes (#9375). They are consumed by symbolAttr
// to recompute the symbol path instead of being written to the DOM.
symbolCustomAttribs: [
    'x',
    'y',
    'width',
    'height',
    'r',
    'start',
    'end',
    'innerR',
    'anchorX',
    'anchorY',
    'rounded'
],
/**
* Apply native and custom attributes to the SVG elements.
*
* In order to set the rotation center for rotation, set x and y to 0 and
* use `translateX` and `translateY` attributes to position the element
* instead.
*
* Attributes frequently used in Highcharts are `fill`, `stroke`,
* `stroke-width`.
*
* @sample highcharts/members/renderer-rect/
* Setting some attributes
*
* @example
* // Set multiple attributes
* element.attr({
* stroke: 'red',
* fill: 'blue',
* x: 10,
* y: 10
* });
*
* // Set a single attribute
* element.attr('stroke', 'red');
*
* // Get an attribute
* element.attr('stroke'); // => 'red'
*
* @function Highcharts.SVGElement#attr
*
* @param {string|Highcharts.SVGAttributes} [hash]
* The native and custom SVG attributes.
*
* @param {string} [val]
* If the type of the first argument is `string`, the second can be a
* value, which will serve as a single attribute setter. If the first
* argument is a string and the second is undefined, the function
* serves as a getter and the current value of the property is
* returned.
*
* @param {Function} [complete]
* A callback function to execute after setting the attributes. This
* makes the function compliant and interchangeable with the
* {@link SVGElement#animate} function.
*
* @param {boolean} [continueAnimation=true]
* Used internally when `.attr` is called as part of an animation
* step. Otherwise, calling `.attr` for an attribute will stop
* animation for that attribute.
*
* @return {number|string|Highcharts.SVGElement}
* If used as a setter, it returns the current
* {@link Highcharts.SVGElement} so the calls can be chained. If
* used as a getter, the current value of the attribute is returned.
*/
attr: function (hash, val, complete, continueAnimation) {
    var key,
        element = this.element,
        hasSetSymbolSize,
        ret = this,
        skipAttr,
        setter,
        symbolCustomAttribs = this.symbolCustomAttribs;

    // single key-value pair: normalize to a hash
    if (typeof hash === 'string' && val !== undefined) {
        key = hash;
        hash = {};
        hash[key] = val;
    }

    // used as a getter: first argument is a string, second is undefined
    if (typeof hash === 'string') {
        ret = (this[hash + 'Getter'] || this._defaultGetter).call(
            this,
            hash,
            element
        );

    // setter
    } else {
        objectEach(hash, function eachAttribute(val, key) {
            skipAttr = false;

            // Unless .attr is from the animator update, stop current
            // running animation of this property
            if (!continueAnimation) {
                stop(this, key);
            }

            // Special handling of symbol attributes: size-affecting keys
            // trigger a single path recomputation for the whole hash.
            if (
                this.symbolName &&
                H.inArray(key, symbolCustomAttribs) !== -1
            ) {
                if (!hasSetSymbolSize) {
                    this.symbolAttr(hash);
                    hasSetSymbolSize = true;
                }
                skipAttr = true;
            }

            // Rotated elements position via transform, so flag the
            // transform for recomputation in afterSetters.
            if (this.rotation && (key === 'x' || key === 'y')) {
                this.doTransform = true;
            }

            if (!skipAttr) {
                // Dispatch to a per-attribute setter when one exists.
                setter = this[key + 'Setter'] || this._defaultSetter;
                setter.call(this, val, key, element);

                // Let the shadow follow the main element
                if (
                    !this.styledMode &&
                    this.shadows &&
                    /^(width|height|visibility|x|y|d|transform|cx|cy|r)$/
                        .test(key)
                ) {
                    this.updateShadows(key, val, setter);
                }
            }
        }, this);

        this.afterSetters();
    }

    // In accordance with animate, run a complete callback
    if (complete) {
        complete.call(this);
    }

    return ret;
},
/**
* This method is executed in the end of `attr()`, after setting all
 * attributes in the hash. It can be used to efficiently consolidate
* multiple attributes in one SVG property -- e.g., translate, rotate and
* scale are merged in one "transform" attribute in the SVG node.
*
* @private
* @function Highcharts.SVGElement#afterSetters
*/
afterSetters: function () {
// Update transform. Do this outside the loop to prevent redundant
// updating for batch setting of attributes.
if (this.doTransform) {
this.updateTransform();
this.doTransform = false;
}
},
/**
* Update the shadow elements with new attributes.
*
* @private
* @function Highcharts.SVGElement#updateShadows
*
* @param {string} key
* The attribute name.
*
* @param {string|number} value
* The value of the attribute.
*
* @param {Function} setter
* The setter function, inherited from the parent wrapper.
*/
updateShadows: function (key, value, setter) {
var shadows = this.shadows,
i = shadows.length;
while (i--) {
setter.call(
shadows[i],
key === 'height' ?
Math.max(value - (shadows[i].cutHeight || 0), 0) :
key === 'd' ? this.d : value,
key,
shadows[i]
);
}
},
/**
* Add a class name to an element.
*
* @function Highcharts.SVGElement#addClass
*
* @param {string} className
* The new class name to add.
*
* @param {boolean} [replace=false]
* When true, the existing class name(s) will be overwritten with
* the new one. When false, the new one is added.
*
* @return {Highcharts.SVGElement}
* Return the SVG element for chainability.
*/
addClass: function (className, replace) {
var currentClassName = this.attr('class') || '';
if (currentClassName.indexOf(className) === -1) {
if (!replace) {
className =
(currentClassName + (currentClassName ? ' ' : '') +
className).replace(' ', ' ');
}
this.attr('class', className);
}
return this;
},
/**
* Check if an element has the given class name.
*
* @function Highcharts.SVGElement#hasClass
*
* @param {string} className
* The class name to check for.
*
* @return {boolean}
* Whether the class name is found.
*/
hasClass: function (className) {
return (this.attr('class') || '').split(' ').indexOf(className) !== -1;
},
/**
* Remove a class name from the element.
*
* @function Highcharts.SVGElement#removeClass
*
* @param {string|RegExp} className
* The class name to remove.
*
* @return {Highcharts.SVGElement} Returns the SVG element for chainability.
*/
removeClass: function (className) {
return this.attr(
'class',
(this.attr('class') || '').replace(className, '')
);
},
/**
* If one of the symbol size affecting parameters are changed,
* check all the others only once for each call to an element's
* .attr() method
*
* @private
* @function Highcharts.SVGElement#symbolAttr
*
* @param {Highcharts.Dictionary<number|string>} hash
* The attributes to set.
*/
symbolAttr: function (hash) {
var wrapper = this;
[
'x',
'y',
'r',
'start',
'end',
'width',
'height',
'innerR',
'anchorX',
'anchorY'
].forEach(function (key) {
wrapper[key] = pick(hash[key], wrapper[key]);
});
wrapper.attr({
d: wrapper.renderer.symbols[wrapper.symbolName](
wrapper.x,
wrapper.y,
wrapper.width,
wrapper.height,
wrapper
)
});
},
/**
* Apply a clipping rectangle to this element.
*
* @function Highcharts.SVGElement#clip
*
* @param {Highcharts.ClipRectElement} [clipRect]
* The clipping rectangle. If skipped, the current clip is removed.
*
* @return {Highcharts.SVGElement}
* Returns the SVG element to allow chaining.
*/
clip: function (clipRect) {
return this.attr(
'clip-path',
clipRect ?
'url(' + this.renderer.url + '#' + clipRect.id + ')' :
'none'
);
},
/**
* Calculate the coordinates needed for drawing a rectangle crisply and
* return the calculated attributes.
*
* @function Highcharts.SVGElement#crisp
*
* @param {Highcharts.RectangleObject} rect
* Rectangle to crisp.
*
* @param {number} [strokeWidth]
* The stroke width to consider when computing crisp positioning. It
* can also be set directly on the rect parameter.
*
* @return {Highcharts.RectangleObject}
* The modified rectangle arguments.
*/
crisp: function (rect, strokeWidth) {
    var wrapper = this,
        normalizer;

    strokeWidth = strokeWidth || rect.strokeWidth || 0;
    // Math.round because strokeWidth can sometimes have roundoff errors.
    // Odd stroke widths get a half-pixel offset so the stroke lands on
    // whole pixels; even widths get none.
    normalizer = Math.round(strokeWidth) % 2 / 2;

    // normalize for crisp edges: snap the position onto the pixel grid
    // (plus offset) and shrink the size so the far edge stays on it too.
    rect.x = Math.floor(rect.x || wrapper.x || 0) + normalizer;
    rect.y = Math.floor(rect.y || wrapper.y || 0) + normalizer;
    rect.width = Math.floor(
        (rect.width || wrapper.width || 0) - 2 * normalizer
    );
    rect.height = Math.floor(
        (rect.height || wrapper.height || 0) - 2 * normalizer
    );
    if (defined(rect.strokeWidth)) {
        rect.strokeWidth = strokeWidth;
    }
    return rect;
},
/**
* Set styles for the element. In addition to CSS styles supported by
* native SVG and HTML elements, there are also some custom made for
* Highcharts, like `width`, `ellipsis` and `textOverflow` for SVG text
* elements.
*
* @sample highcharts/members/renderer-text-on-chart/
* Styled text
*
* @function Highcharts.SVGElement#css
*
* @param {Highcharts.CSSObject} styles
* The new CSS styles.
*
* @return {Highcharts.SVGElement}
* Return the SVG element for chaining.
*/
css: function (styles) {
    var oldStyles = this.styles,
        newStyles = {},
        elem = this.element,
        textWidth,
        serializedCss = '',
        hyphenate,
        hasNew = !oldStyles,
        // These CSS properties are interpreted internally by the SVG
        // renderer, but are not supported by SVG and should not be added to
        // the DOM. In styled mode, no CSS should find its way to the DOM
        // whatsoever (#6173, #6474).
        svgPseudoProps = ['textOutline', 'textOverflow', 'width'];

    // convert legacy `color` option to SVG `fill`
    if (styles && styles.color) {
        styles.fill = styles.color;
    }

    // Filter out existing styles to increase performance (#2640)
    if (oldStyles) {
        objectEach(styles, function (style, n) {
            if (style !== oldStyles[n]) {
                newStyles[n] = style;
                hasNew = true;
            }
        });
    }
    if (hasNew) {

        // Merge the new styles with the old ones
        if (oldStyles) {
            styles = extend(
                oldStyles,
                newStyles
            );
        }

        // Get the text width from style
        if (styles) {
            // Previously set, unset it (#8234)
            if (styles.width === null || styles.width === 'auto') {
                delete this.textWidth;

            // Apply new
            } else if (
                elem.nodeName.toLowerCase() === 'text' &&
                styles.width
            ) {
                textWidth = this.textWidth = pInt(styles.width);
            }
        }

        // store object for the next diff against incoming styles
        this.styles = styles;

        if (textWidth && (!svg && this.renderer.forExport)) {
            delete styles.width;
        }

        // Serialize and set style attribute
        if (elem.namespaceURI === this.SVG_NS) { // #7633
            // camelCase -> hyphenated CSS property names
            hyphenate = function (a, b) {
                return '-' + b.toLowerCase();
            };
            objectEach(styles, function (style, n) {
                if (svgPseudoProps.indexOf(n) === -1) {
                    serializedCss +=
                        n.replace(/([A-Z])/g, hyphenate) + ':' +
                        style + ';';
                }
            });
            if (serializedCss) {
                attr(elem, 'style', serializedCss); // #1881
            }
        } else {
            css(elem, styles);
        }

        if (this.added) {

            // Rebuild text after added. Cache mechanisms in the buildText
            // will prevent building if there are no significant changes.
            if (this.element.nodeName === 'text') {
                this.renderer.buildText(this);
            }

            // Apply text outline after added
            if (styles && styles.textOutline) {
                this.applyTextOutline(styles.textOutline);
            }
        }
    }
    return this;
},
/**
* Get the computed style. Only in styled mode.
*
* @example
* chart.series[0].points[0].graphic.getStyle('stroke-width'); // => '1px'
*
* @function Highcharts.SVGElement#getStyle
*
* @param {string} prop
* The property name to check for.
*
* @return {string}
* The current computed value.
*/
getStyle: function (prop) {
return win.getComputedStyle(this.element || this, '')
.getPropertyValue(prop);
},
/**
* Get the computed stroke width in pixel values. This is used extensively
* when drawing shapes to ensure the shapes are rendered crisp and
* positioned correctly relative to each other. Using
* `shape-rendering: crispEdges` leaves us less control over positioning,
* for example when we want to stack columns next to each other, or position
* things pixel-perfectly within the plot box.
*
* The common pattern when placing a shape is:
* - Create the SVGElement and add it to the DOM. In styled mode, it will
* now receive a stroke width from the style sheet. In classic mode we
* will add the `stroke-width` attribute.
* - Read the computed `elem.strokeWidth()`.
* - Place it based on the stroke width.
*
* @function Highcharts.SVGElement#strokeWidth
*
* @return {number}
* The stroke width in pixels. Even if the given stroke widtch (in
* CSS or by attributes) is based on `em` or other units, the pixel
* size is returned.
*/
strokeWidth: function () {
// In non-styled mode, read the stroke width as set by .attr
if (!this.renderer.styledMode) {
return this['stroke-width'] || 0;
}
// In styled mode, read computed stroke width
var val = this.getStyle('stroke-width'),
ret,
dummy;
// Read pixel values directly
if (val.indexOf('px') === val.length - 2) {
ret = pInt(val);
// Other values like em, pt etc need to be measured
} else {
dummy = doc.createElementNS(SVG_NS, 'rect');
attr(dummy, {
'width': val,
'stroke-width': 0
});
this.element.parentNode.appendChild(dummy);
ret = dummy.getBBox().width;
dummy.parentNode.removeChild(dummy);
}
return ret;
},
/**
* Add an event listener. This is a simple setter that replaces all other
* events of the same type, opposed to the {@link Highcharts#addEvent}
* function.
*
* @sample highcharts/members/element-on/
* A clickable rectangle
*
* @function Highcharts.SVGElement#on
*
* @param {string} eventType
* The event type. If the type is `click`, Highcharts will internally
* translate it to a `touchstart` event on touch devices, to prevent
* the browser from waiting for a click event from firing.
*
* @param {Function} handler
* The handler callback.
*
* @return {Highcharts.SVGElement}
* The SVGElement for chaining.
*/
on: function (eventType, handler) {
var svgElement = this,
element = svgElement.element;
// touch
if (hasTouch && eventType === 'click') {
element.ontouchstart = function (e) {
svgElement.touchEventFired = Date.now(); // #2269
e.preventDefault();
handler.call(element, e);
};
element.onclick = function (e) {
if (win.navigator.userAgent.indexOf('Android') === -1 ||
Date.now() - (svgElement.touchEventFired || 0) > 1100) {
handler.call(element, e);
}
};
} else {
// simplest possible event model for internal use
element['on' + eventType] = handler;
}
return this;
},
/**
* Set the coordinates needed to draw a consistent radial gradient across
* a shape regardless of positioning inside the chart. Used on pie slices
* to make all the slices have the same radial reference point.
*
* @function Highcharts.SVGElement#setRadialReference
*
* @param {Array<number>} coordinates
* The center reference. The format is `[centerX, centerY, diameter]`
* in pixels.
*
* @return {Highcharts.SVGElement}
* Returns the SVGElement for chaining.
*/
setRadialReference: function (coordinates) {
var existingGradient = this.renderer.gradients[this.element.gradient];
this.element.radialReference = coordinates;
// On redrawing objects with an existing gradient, the gradient needs
// to be repositioned (#3801)
if (existingGradient && existingGradient.radAttr) {
existingGradient.animate(
this.renderer.getRadialAttr(
coordinates,
existingGradient.radAttr
)
);
}
return this;
},
/**
* Move an object and its children by x and y values.
*
* @function Highcharts.SVGElement#translate
*
* @param {number} x
* The x value.
*
* @param {number} y
* The y value.
*/
translate: function (x, y) {
return this.attr({
translateX: x,
translateY: y
});
},
/**
* Invert a group, rotate and flip. This is used internally on inverted
* charts, where the points and graphs are drawn as if not inverted, then
* the series group elements are inverted.
*
* @function Highcharts.SVGElement#invert
*
* @param {boolean} inverted
* Whether to invert or not. An inverted shape can be un-inverted by
* setting it to false.
*
* @return {Highcharts.SVGElement}
* Return the SVGElement for chaining.
*/
invert: function (inverted) {
var wrapper = this;
wrapper.inverted = inverted;
wrapper.updateTransform();
return wrapper;
},
    /**
     * Update the transform attribute based on internal properties. Deals with
     * the custom `translateX`, `translateY`, `rotation`, `scaleX` and `scaleY`
     * attributes and updates the SVG `transform` attribute. The resulting
     * attribute is built in a fixed order: translate, matrix, rotate/invert,
     * scale.
     *
     * @private
     * @function Highcharts.SVGElement#updateTransform
     */
    updateTransform: function () {
        var wrapper = this,
            translateX = wrapper.translateX || 0,
            translateY = wrapper.translateY || 0,
            scaleX = wrapper.scaleX,
            scaleY = wrapper.scaleY,
            inverted = wrapper.inverted,
            rotation = wrapper.rotation,
            matrix = wrapper.matrix,
            element = wrapper.element,
            transform;
        // Flipping affects translate as adjustment for flipping around the
        // group's axis
        if (inverted) {
            translateX += wrapper.width;
            translateY += wrapper.height;
        }
        // Apply translate. Nearly all transformed elements have translation,
        // so instead of checking for translate = 0, do it always (#1767,
        // #1846).
        transform = ['translate(' + translateX + ',' + translateY + ')'];
        // apply matrix
        if (defined(matrix)) {
            transform.push(
                'matrix(' + matrix.join(',') + ')'
            );
        }
        // apply rotation; inversion wins over a plain rotation value
        if (inverted) {
            transform.push('rotate(90) scale(-1,1)');
        } else if (rotation) { // text rotation
            // NOTE(review): the x origin falls back via pick(..., attr, 0)
            // while the y origin uses `attr || 0`, so falsy 'y' attribute
            // values fall through to 0 but falsy 'x' values do not —
            // presumably intentional; confirm before normalizing.
            transform.push(
                'rotate(' + rotation + ' ' +
                pick(this.rotationOriginX, element.getAttribute('x'), 0) +
                ' ' +
                pick(this.rotationOriginY, element.getAttribute('y') || 0) + ')'
            );
        }
        // apply scale
        if (defined(scaleX) || defined(scaleY)) {
            transform.push(
                'scale(' + pick(scaleX, 1) + ' ' + pick(scaleY, 1) + ')'
            );
        }
        if (transform.length) {
            element.setAttribute('transform', transform.join(' '));
        }
    },
/**
* Bring the element to the front. Alternatively, a new zIndex can be set.
*
* @sample highcharts/members/element-tofront/
* Click an element to bring it to front
*
* @function Highcharts.SVGElement#toFront
*
* @return {Highcharts.SVGElement}
* Returns the SVGElement for chaining.
*/
toFront: function () {
var element = this.element;
element.parentNode.appendChild(element);
return this;
},
    /**
     * Align the element relative to the chart or another box.
     *
     * @function Highcharts.SVGElement#align
     *
     * @param {Highcharts.AlignObject} [alignOptions]
     *        The alignment options. The function can be called without this
     *        parameter in order to re-align an element after the box has been
     *        updated.
     *
     * @param {boolean} [alignByTranslate]
     *        Align element by translation.
     *
     * @param {string|Highcharts.BBoxObject} [box]
     *        The box to align to, needs a width and height. When the box is a
     *        string, it refers to an object in the Renderer. For example, when
     *        box is `spacingBox`, it refers to `Renderer.spacingBox` which
     *        holds `width`, `height`, `x` and `y` properties.
     *
     * @return {Highcharts.SVGElement} Returns the SVGElement for chaining.
     */
    align: function (alignOptions, alignByTranslate, box) {
        var align,
            vAlign,
            x,
            y,
            attribs = {},
            alignTo,
            renderer = this.renderer,
            alignedObjects = renderer.alignedObjects,
            alignFactor,
            vAlignFactor;
        // First call on instantiate: store the options so later calls
        // without arguments can re-align (e.g. on resize)
        if (alignOptions) {
            this.alignOptions = alignOptions;
            this.alignByTranslate = alignByTranslate;
            // A named box (or none): register for automatic re-alignment
            if (!box || isString(box)) {
                this.alignTo = alignTo = box || 'renderer';
                // prevent duplicates, like legendGroup after resize
                erase(alignedObjects, this);
                alignedObjects.push(this);
                box = null; // reassign it below
            }
        // When called on resize, no arguments are supplied
        } else {
            alignOptions = this.alignOptions;
            alignByTranslate = this.alignByTranslate;
            alignTo = this.alignTo;
        }
        // Resolve a named box against the renderer, falling back to the
        // renderer itself
        box = pick(box, renderer[alignTo], renderer);
        // Assign variables
        align = alignOptions.align;
        vAlign = alignOptions.verticalAlign;
        x = (box.x || 0) + (alignOptions.x || 0); // default: left align
        y = (box.y || 0) + (alignOptions.y || 0); // default: top align
        // Align: divide the free horizontal space by 1 (right) or 2 (center)
        if (align === 'right') {
            alignFactor = 1;
        } else if (align === 'center') {
            alignFactor = 2;
        }
        if (alignFactor) {
            x += (box.width - (alignOptions.width || 0)) / alignFactor;
        }
        attribs[alignByTranslate ? 'translateX' : 'x'] = Math.round(x);
        // Vertical align, same scheme for the vertical free space
        if (vAlign === 'bottom') {
            vAlignFactor = 1;
        } else if (vAlign === 'middle') {
            vAlignFactor = 2;
        }
        if (vAlignFactor) {
            y += (box.height - (alignOptions.height || 0)) / vAlignFactor;
        }
        attribs[alignByTranslate ? 'translateY' : 'y'] = Math.round(y);
        // Animate only if already placed
        this[this.placed ? 'animate' : 'attr'](attribs);
        this.placed = true;
        this.alignAttr = attribs;
        return this;
    },
    /**
     * Get the bounding box (width, height, x and y) for the element. Generally
     * used to get rendered text size. Since this is called a lot in charts,
     * the results are cached based on text properties, in order to save DOM
     * traffic. The returned bounding box includes the rotation, so for example
     * a single text line of rotation 90 will report a greater height, and a
     * width corresponding to the line-height.
     *
     * @sample highcharts/members/renderer-on-chart/
     *         Draw a rectangle based on a text's bounding box
     *
     * @function Highcharts.SVGElement#getBBox
     *
     * @param {boolean} [reload]
     *        Skip the cache and get the updated DOM bounding box.
     *
     * @param {number} [rot]
     *        Override the element's rotation. This is internally used on axis
     *        labels with a value of 0 to find out what the bounding box would
     *        have been if it were not rotated.
     *
     * @return {Highcharts.BBoxObject}
     *         The bounding box with `x`, `y`, `width` and `height` properties.
     */
    getBBox: function (reload, rot) {
        var wrapper = this,
            bBox, // = wrapper.bBox,
            renderer = wrapper.renderer,
            width,
            height,
            rotation,
            rad,
            element = wrapper.element,
            styles = wrapper.styles,
            fontSize,
            textStr = wrapper.textStr,
            toggleTextShadowShim,
            cache = renderer.cache,
            cacheKeys = renderer.cacheKeys,
            isSVG = element.namespaceURI === wrapper.SVG_NS,
            cacheKey;
        rotation = pick(rot, wrapper.rotation);
        rad = rotation * deg2rad;
        // Font size feeds the cache key and the IE/Edge height workaround
        fontSize = renderer.styledMode ? (
            element &&
            SVGElement.prototype.getStyle.call(element, 'font-size')
        ) : (
            styles && styles.fontSize
        );
        // Avoid undefined and null (#7316)
        if (defined(textStr)) {
            cacheKey = textStr.toString();
            // Since numbers are monospaced, and numerical labels appear a lot
            // in a chart, we assume that a label of n characters has the same
            // bounding box as others of the same length. Unless there is inner
            // HTML in the label. In that case, leave the numbers as is (#5899).
            if (cacheKey.indexOf('<') === -1) {
                cacheKey = cacheKey.replace(/[0-9]/g, '0');
            }
            // Properties that affect bounding box
            cacheKey += [
                '',
                rotation || 0,
                fontSize,
                wrapper.textWidth, // #7874, also useHTML
                styles && styles.textOverflow // #5968
            ]
            .join(',');
        }
        if (cacheKey && !reload) {
            bBox = cache[cacheKey];
        }
        // No cache found
        if (!bBox) {
            // SVG elements
            if (isSVG || renderer.forExport) {
                try { // Fails in Firefox if the container has display: none.
                    // When the text shadow shim is used, we need to hide the
                    // fake shadows to get the correct bounding box (#3872)
                    toggleTextShadowShim = this.fakeTS && function (display) {
                        [].forEach.call(
                            element.querySelectorAll(
                                '.highcharts-text-outline'
                            ),
                            function (tspan) {
                                tspan.style.display = display;
                            }
                        );
                    };
                    // Workaround for #3842, Firefox reporting wrong bounding
                    // box for shadows
                    if (toggleTextShadowShim) {
                        toggleTextShadowShim('none');
                    }
                    bBox = element.getBBox ?
                        // SVG: use extend because IE9 is not allowed to change
                        // width and height in case of rotation (below)
                        extend({}, element.getBBox()) : {
                            // Legacy IE in export mode
                            width: element.offsetWidth,
                            height: element.offsetHeight
                        };
                    // #3842 - restore the shadow tspans
                    if (toggleTextShadowShim) {
                        toggleTextShadowShim('');
                    }
                } catch (e) {}
                // If the bBox is not set, the try-catch block above failed. The
                // other condition is for Opera that returns a width of
                // -Infinity on hidden elements.
                if (!bBox || bBox.width < 0) {
                    bBox = { width: 0, height: 0 };
                }
            // VML Renderer or useHTML within SVG
            } else {
                bBox = wrapper.htmlGetBBox();
            }
            // True SVG elements as well as HTML elements in modern browsers
            // using the .useHTML option need to compensated for rotation
            if (renderer.isSVG) {
                width = bBox.width;
                height = bBox.height;
                // Workaround for wrong bounding box in IE, Edge and Chrome on
                // Windows. With Highcharts' default font, IE and Edge report
                // a box height of 16.899 and Chrome rounds it to 17. If this
                // stands uncorrected, it results in more padding added below
                // the text than above when adding a label border or background.
                // Also vertical positioning is affected.
                // https://jsfiddle.net/highcharts/em37nvuj/
                // (#1101, #1505, #1669, #2568, #6213).
                if (isSVG) {
                    bBox.height = height = (
                        {
                            '11px,17': 14,
                            '13px,20': 16
                        }[
                            styles && styles.fontSize + ',' + Math.round(height)
                        ] ||
                        height
                    );
                }
                // Adjust for rotated text: project both axes onto the
                // rotated frame
                if (rotation) {
                    bBox.width = Math.abs(height * Math.sin(rad)) +
                        Math.abs(width * Math.cos(rad));
                    bBox.height = Math.abs(height * Math.cos(rad)) +
                        Math.abs(width * Math.sin(rad));
                }
            }
            // Cache it. When loading a chart in a hidden iframe in Firefox and
            // IE/Edge, the bounding box height is 0, so don't cache it (#5620).
            if (cacheKey && bBox.height > 0) {
                // Rotate (#4681) - evict oldest entries beyond 250
                while (cacheKeys.length > 250) {
                    delete cache[cacheKeys.shift()];
                }
                if (!cache[cacheKey]) {
                    cacheKeys.push(cacheKey);
                }
                cache[cacheKey] = bBox;
            }
        }
        return bBox;
    },
/**
* Show the element after it has been hidden.
*
* @function Highcharts.SVGElement#show
*
* @param {boolean} [inherit=false]
* Set the visibility attribute to `inherit` rather than `visible`.
* The difference is that an element with `visibility="visible"`
* will be visible even if the parent is hidden.
*
* @return {Highcharts.SVGElement}
* Returns the SVGElement for chaining.
*/
show: function (inherit) {
return this.attr({ visibility: inherit ? 'inherit' : 'visible' });
},
/**
* Hide the element, equivalent to setting the `visibility` attribute to
* `hidden`.
*
* @function Highcharts.SVGElement#hide
*
* @return {Highcharts.SVGElement}
* Returns the SVGElement for chaining.
*/
hide: function () {
return this.attr({ visibility: 'hidden' });
},
/**
* Fade out an element by animating its opacity down to 0, and hide it on
* complete. Used internally for the tooltip.
*
* @function Highcharts.SVGElement#fadeOut
*
* @param {number} [duration=150]
* The fade duration in milliseconds.
*/
fadeOut: function (duration) {
var elemWrapper = this;
elemWrapper.animate({
opacity: 0
}, {
duration: duration || 150,
complete: function () {
// #3088, assuming we're only using this for tooltips
elemWrapper.attr({ y: -9999 });
}
});
},
/**
* Add the element to the DOM. All elements must be added this way.
*
* @sample highcharts/members/renderer-g
* Elements added to a group
*
* @function Highcharts.SVGElement#add
*
* @param {Highcharts.SVGElement|Highcharts.SVGDOMElement} [parent]
* The parent item to add it to. If undefined, the element is added
* to the {@link Highcharts.SVGRenderer.box}.
*
* @return {Highcharts.SVGElement}
* Returns the SVGElement for chaining.
*/
add: function (parent) {
var renderer = this.renderer,
element = this.element,
inserted;
if (parent) {
this.parentGroup = parent;
}
// mark as inverted
this.parentInverted = parent && parent.inverted;
// build formatted text
if (this.textStr !== undefined) {
renderer.buildText(this);
}
// Mark as added
this.added = true;
// If we're adding to renderer root, or other elements in the group
// have a z index, we need to handle it
if (!parent || parent.handleZ || this.zIndex) {
inserted = this.zIndexSetter();
}
// If zIndex is not handled, append at the end
if (!inserted) {
(parent ? parent.element : renderer.box).appendChild(element);
}
// fire an event for internal hooks
if (this.onAdd) {
this.onAdd();
}
return this;
},
/**
* Removes an element from the DOM.
*
* @private
* @function Highcharts.SVGElement#safeRemoveChild
*
* @param {Highcharts.SVGDOMElement|Highcharts.HTMLDOMElement} element
* The DOM node to remove.
*/
safeRemoveChild: function (element) {
var parentNode = element.parentNode;
if (parentNode) {
parentNode.removeChild(element);
}
},
    /**
     * Destroy the element and element wrapper and clear up the DOM and event
     * hooks. Returns null so callers can clear their reference in one
     * statement.
     *
     * @function Highcharts.SVGElement#destroy
     */
    destroy: function () {
        var wrapper = this,
            element = wrapper.element || {},
            renderer = wrapper.renderer,
            // useHTML spans inside an SVG renderer live in emulated group
            // <div>s that may need cleaning up below
            parentToClean =
                renderer.isSVG &&
                element.nodeName === 'SPAN' &&
                wrapper.parentGroup,
            grandParent,
            ownerSVGElement = element.ownerSVGElement,
            i,
            clipPath = wrapper.clipPath;
        // remove events
        element.onclick = element.onmouseout = element.onmouseover =
            element.onmousemove = element.point = null;
        stop(wrapper); // stop running animations
        if (clipPath && ownerSVGElement) {
            // Look for existing references to this clipPath and remove them
            // before destroying the element (#6196).
            // The upper case version is for Edge
            [].forEach.call(
                ownerSVGElement.querySelectorAll('[clip-path],[CLIP-PATH]'),
                function (el) {
                    var clipPathAttr = el.getAttribute('clip-path'),
                        clipPathId = clipPath.element.id;
                    // Include the closing parenthesis in the test to rule out
                    // id's from 10 and above (#6550). Edge puts quotes inside
                    // the url, others not.
                    if (
                        clipPathAttr.indexOf('(#' + clipPathId + ')') > -1 ||
                        clipPathAttr.indexOf('("#' + clipPathId + '")') > -1
                    ) {
                        el.removeAttribute('clip-path');
                    }
                }
            );
            wrapper.clipPath = clipPath.destroy();
        }
        // Destroy stops in case this is a gradient object
        if (wrapper.stops) {
            for (i = 0; i < wrapper.stops.length; i++) {
                wrapper.stops[i] = wrapper.stops[i].destroy();
            }
            wrapper.stops = null;
        }
        // remove element
        wrapper.safeRemoveChild(element);
        if (!renderer.styledMode) {
            wrapper.destroyShadows();
        }
        // In case of useHTML, clean up empty containers emulating SVG groups
        // (#1960, #2393, #2697). Walk up and remove each emptied <div>.
        while (
            parentToClean &&
            parentToClean.div &&
            parentToClean.div.childNodes.length === 0
        ) {
            grandParent = parentToClean.parentGroup;
            wrapper.safeRemoveChild(parentToClean.div);
            delete parentToClean.div;
            parentToClean = grandParent;
        }
        // remove from alignObjects so resize no longer re-aligns it
        if (wrapper.alignTo) {
            erase(renderer.alignedObjects, wrapper);
        }
        // Break all remaining references on the wrapper itself
        objectEach(wrapper, function (val, key) {
            delete wrapper[key];
        });
        return null;
    },
    /**
     * Add a shadow to the element. Must be called after the element is added to
     * the DOM. In styled mode, this method is not used, instead use `defs` and
     * filters.
     *
     * @example
     * renderer.rect(10, 100, 100, 100)
     *     .attr({ fill: 'red' })
     *     .shadow(true);
     *
     * @function Highcharts.SVGElement#shadow
     *
     * @param {boolean|Highcharts.ShadowOptionsObject} shadowOptions
     *        The shadow options. If `true`, the default options are applied. If
     *        `false`, the current shadow will be removed.
     *
     * @param {Highcharts.SVGElement} [group]
     *        The SVG group element where the shadows will be applied. The
     *        default is to add it to the same parent as the current element.
     *        Internally, this is used for pie slices, where all the shadows are
     *        added to an element behind all the slices.
     *
     * @param {boolean} [cutOff]
     *        Used internally for column shadows.
     *
     * @return {Highcharts.SVGElement}
     *         Returns the SVGElement for chaining.
     */
    shadow: function (shadowOptions, group, cutOff) {
        var shadows = [],
            i,
            shadow,
            element = this.element,
            strokeWidth,
            shadowWidth,
            shadowElementOpacity,
            // compensate for inverted plot area
            transform;
        if (!shadowOptions) {
            this.destroyShadows();
        } else if (!this.shadows) {
            shadowWidth = pick(shadowOptions.width, 3);
            // Spread the total opacity across the stacked clones
            shadowElementOpacity = (shadowOptions.opacity || 0.15) /
                shadowWidth;
            transform = this.parentInverted ?
                '(-1,-1)' :
                '(' + pick(shadowOptions.offsetX, 1) + ', ' +
                pick(shadowOptions.offsetY, 1) + ')';
            // The shadow is emulated by stacking clones of the element with
            // decreasing stroke width and additive opacity
            for (i = 1; i <= shadowWidth; i++) {
                shadow = element.cloneNode(0);
                strokeWidth = (shadowWidth * 2) + 1 - (2 * i);
                attr(shadow, {
                    'stroke':
                        shadowOptions.color || '#000000',
                    'stroke-opacity': shadowElementOpacity * i,
                    'stroke-width': strokeWidth,
                    'transform': 'translate' + transform,
                    'fill': 'none'
                });
                shadow.setAttribute(
                    'class',
                    (shadow.getAttribute('class') || '') + ' highcharts-shadow'
                );
                if (cutOff) {
                    // Shrink the clone so the shadow does not bleed past the
                    // column edge
                    attr(
                        shadow,
                        'height',
                        Math.max(attr(shadow, 'height') - strokeWidth, 0)
                    );
                    shadow.cutHeight = strokeWidth;
                }
                if (group) {
                    group.element.appendChild(shadow);
                } else if (element.parentNode) {
                    // Insert behind the element itself
                    element.parentNode.insertBefore(shadow, element);
                }
                shadows.push(shadow);
            }
            this.shadows = shadows;
        }
        return this;
    },
/**
* Destroy shadows on the element.
*
* @private
* @function Highcharts.SVGElement#destroyShadows
*/
destroyShadows: function () {
(this.shadows || []).forEach(function (shadow) {
this.safeRemoveChild(shadow);
}, this);
this.shadows = undefined;
},
/**
* @private
* @function Highcharts.SVGElement#xGetter
*
* @param {string} key
*
* @return {number|string|null}
*/
xGetter: function (key) {
if (this.element.nodeName === 'circle') {
if (key === 'x') {
key = 'cx';
} else if (key === 'y') {
key = 'cy';
}
}
return this._defaultGetter(key);
},
/**
* Get the current value of an attribute or pseudo attribute,
* used mainly for animation. Called internally from
* the {@link Highcharts.SVGRenderer#attr} function.
*
* @private
* @function Highcharts.SVGElement#_defaultGetter
*
* @param {string} key
* Property key.
*
* @return {number|string|null}
* Property value.
*/
_defaultGetter: function (key) {
var ret = pick(
this[key + 'Value'], // align getter
this[key],
this.element ? this.element.getAttribute(key) : null,
0
);
if (/^[\-0-9\.]+$/.test(ret)) { // is numerical
ret = parseFloat(ret);
}
return ret;
},
/**
* @private
* @function Highcharts.SVGElement#dSettter
*
* @param {number|string|Highcharts.SVGPathArray} value
*
* @param {string} key
*
* @param {Highcharts.SVGDOMElement} element
*/
dSetter: function (value, key, element) {
if (value && value.join) { // join path
value = value.join(' ');
}
if (/(NaN| {2}|^$)/.test(value)) {
value = 'M 0 0';
}
// Check for cache before resetting. Resetting causes disturbance in the
// DOM, causing flickering in some cases in Edge/IE (#6747). Also
// possible performance gain.
if (this[key] !== value) {
element.setAttribute(key, value);
this[key] = value;
}
},
    /**
     * Setter for the `dashstyle` pseudo attribute. Translates named dash
     * styles (e.g. "ShortDash", "LongDashDot") into a `stroke-dasharray`
     * value scaled by the current stroke width.
     *
     * @private
     * @function Highcharts.SVGElement#dashstyleSetter
     *
     * @param {string} value
     */
    dashstyleSetter: function (value) {
        var i,
            strokeWidth = this['stroke-width'];
        // If "inherit", like maps in IE, assume 1 (#4981). With HC5 and the new
        // strokeWidth function, we should be able to use that instead.
        if (strokeWidth === 'inherit') {
            strokeWidth = 1;
        }
        value = value && value.toLowerCase();
        if (value) {
            // Note: the replace order matters - longer names must be
            // rewritten before their substrings (e.g. 'shortdashdotdot'
            // before 'shortdashdot' before 'shortdash'/'dot')
            value = value
                .replace('shortdashdotdot', '3,1,1,1,1,1,')
                .replace('shortdashdot', '3,1,1,1')
                .replace('shortdot', '1,1,')
                .replace('shortdash', '3,1,')
                .replace('longdash', '8,3,')
                .replace(/dot/g, '1,3,')
                .replace('dash', '4,3,')
                .replace(/,$/, '')
                .split(','); // ending comma
            // Scale every dash/gap length by the stroke width
            i = value.length;
            while (i--) {
                value[i] = pInt(value[i]) * strokeWidth;
            }
            value = value.join(',')
                .replace(/NaN/g, 'none'); // #3226
            this.element.setAttribute('stroke-dasharray', value);
        }
    },
/**
* @private
* @function Highcharts.SVGElement#alignSetter
*
* @param {"start"|"middle"|"end"} value
*/
alignSetter: function (value) {
var convert = { left: 'start', center: 'middle', right: 'end' };
this.alignValue = value;
this.element.setAttribute('text-anchor', convert[value]);
},
/**
* @private
* @function Highcharts.SVGElement#opacitySetter
*
* @param {string} value
*
* @param {string} key
*
* @param {Highcharts.SVGDOMElement} element
*/
opacitySetter: function (value, key, element) {
this[key] = value;
element.setAttribute(key, value);
},
/**
* @private
* @function Highcharts.SVGElement#titleSetter
*
* @param {string} value
*/
titleSetter: function (value) {
var titleNode = this.element.getElementsByTagName('title')[0];
if (!titleNode) {
titleNode = doc.createElementNS(this.SVG_NS, 'title');
this.element.appendChild(titleNode);
}
// Remove text content if it exists
if (titleNode.firstChild) {
titleNode.removeChild(titleNode.firstChild);
}
titleNode.appendChild(
doc.createTextNode(
// #3276, #3895
(String(pick(value), ''))
.replace(/<[^>]*>/g, '')
.replace(/</g, '<')
.replace(/>/g, '>')
)
);
},
/**
* @private
* @function Highcharts.SVGElement#textSetter
*
* @param {string} value
*/
textSetter: function (value) {
if (value !== this.textStr) {
// Delete bBox memo when the text changes
delete this.bBox;
this.textStr = value;
if (this.added) {
this.renderer.buildText(this);
}
}
},
/**
* @private
* @function Highcharts.SVGElement#fillSetter
*
* @param {Highcharts.Color|Highcharts.ColorString} value
*
* @param {string} key
*
* @param {Highcharts.SVGDOMElement} element
*/
fillSetter: function (value, key, element) {
if (typeof value === 'string') {
element.setAttribute(key, value);
} else if (value) {
this.complexColor(value, key, element);
}
},
/**
* @private
* @function Highcharts.SVGElement#visibilitySetter
*
* @param {string} value
*
* @param {string} key
*
* @param {Highcharts.SVGDOMElement} element
*/
visibilitySetter: function (value, key, element) {
// IE9-11 doesn't handle visibilty:inherit well, so we remove the
// attribute instead (#2881, #3909)
if (value === 'inherit') {
element.removeAttribute(key);
} else if (this[key] !== value) { // #6747
element.setAttribute(key, value);
}
this[key] = value;
},
    /**
     * Setter for the `zIndex` pseudo attribute. Stores the value in a
     * `data-z-index` DOM attribute and repositions the element among its
     * siblings accordingly.
     *
     * @private
     * @function Highcharts.SVGElement#zIndexSetter
     *
     * @param {string} value
     *
     * @param {string} key
     *
     * @return {boolean}
     *         Whether the element was inserted into the DOM by this call.
     */
    zIndexSetter: function (value, key) {
        var renderer = this.renderer,
            parentGroup = this.parentGroup,
            parentWrapper = parentGroup || renderer,
            parentNode = parentWrapper.element || renderer.box,
            childNodes,
            otherElement,
            otherZIndex,
            element = this.element,
            inserted,
            undefinedOtherZIndex,
            // Whether we are inserting directly into the root <svg>
            svgParent = parentNode === renderer.box,
            run = this.added,
            i;
        if (defined(value)) {
            // So we can read it for other elements in the group
            element.setAttribute('data-z-index', value);
            value = +value;
            if (this[key] === value) { // Only update when needed (#3865)
                run = false;
            }
        } else if (defined(this[key])) {
            element.removeAttribute('data-z-index');
        }
        this[key] = value;
        // Insert according to this and other elements' zIndex. Before .add() is
        // called, nothing is done. Then on add, or by later calls to
        // zIndexSetter, the node is placed on the right place in the DOM.
        if (run) {
            value = this.zIndex;
            if (value && parentGroup) {
                parentGroup.handleZ = true;
            }
            // Scan siblings back-to-front for the insertion point
            childNodes = parentNode.childNodes;
            for (i = childNodes.length - 1; i >= 0 && !inserted; i--) {
                otherElement = childNodes[i];
                otherZIndex = otherElement.getAttribute('data-z-index');
                undefinedOtherZIndex = !defined(otherZIndex);
                if (otherElement !== element) {
                    if (
                        // Negative zIndex versus no zIndex:
                        // On all levels except the highest. If the parent is
                        // <svg>, then we don't want to put items before <desc>
                        // or <defs>
                        (value < 0 && undefinedOtherZIndex && !svgParent && !i)
                    ) {
                        parentNode.insertBefore(element, childNodes[i]);
                        inserted = true;
                    } else if (
                        // Insert after the first element with a lower zIndex
                        pInt(otherZIndex) <= value ||
                        // If negative zIndex, add this before first undefined
                        // zIndex element
                        (
                            undefinedOtherZIndex &&
                            (!defined(value) || value >= 0)
                        )
                    ) {
                        parentNode.insertBefore(
                            element,
                            childNodes[i + 1] || null // null for oldIE export
                        );
                        inserted = true;
                    }
                }
            }
            // No suitable sibling found: insert at the front (skipping
            // <desc>/<defs> under the root <svg>)
            if (!inserted) {
                parentNode.insertBefore(
                    element,
                    childNodes[svgParent ? 3 : 0] || null // null for oldIE
                );
                inserted = true;
            }
        }
        return inserted;
    },
/**
* @private
* @function Highcharts.SVGElement#_defaultSetter
*
* @param {string} value
*
* @param {string} key
*
* @param {Highcharts.SVGDOMElement} element
*/
_defaultSetter: function (value, key, element) {
element.setAttribute(key, value);
}
});
// Some shared setters and getters
SVGElement.prototype.yGetter =
SVGElement.prototype.xGetter;
SVGElement.prototype.translateXSetter =
SVGElement.prototype.translateYSetter =
SVGElement.prototype.rotationSetter =
SVGElement.prototype.verticalAlignSetter =
SVGElement.prototype.rotationOriginXSetter =
SVGElement.prototype.rotationOriginYSetter =
SVGElement.prototype.scaleXSetter =
SVGElement.prototype.scaleYSetter =
SVGElement.prototype.matrixSetter = function (value, key) {
this[key] = value;
this.doTransform = true;
};
// WebKit and Batik have problems with a stroke-width of zero, so in this case
// we remove the stroke attribute altogether. #1270, #1369, #3065, #3072.
SVGElement.prototype['stroke-widthSetter'] =
/**
 * Apply stroke and stroke-width together. The stroke attribute is only
 * written once a positive stroke-width is known, and removed again when the
 * width is set back to 0.
 *
 * @private
 * @function Highcharts.SVGElement#strokeSetter
 *
 * @param {number|string} value
 *
 * @param {string} key
 *
 * @param {Highcharts.SVGDOMElement} element
 */
SVGElement.prototype.strokeSetter = function (value, key, element) {
    this[key] = value;
    // Only apply the stroke attribute if the stroke width is defined and larger
    // than 0
    if (this.stroke && this['stroke-width']) {
        // Use prototype as instance may be overridden
        SVGElement.prototype.fillSetter.call(
            this,
            this.stroke,
            'stroke',
            element
        );
        element.setAttribute('stroke-width', this['stroke-width']);
        this.hasStroke = true;
    } else if (key === 'stroke-width' && value === 0 && this.hasStroke) {
        element.removeAttribute('stroke');
        this.hasStroke = false;
    }
};
/**
 * Allows direct access to the Highcharts rendering layer in order to draw
 * primitive shapes like circles, rectangles, paths or text directly on a chart,
 * or independent from any chart. The SVGRenderer represents a wrapper object
 * for SVG in modern browsers. Through the VMLRenderer, part of the `oldie.js`
 * module, it also brings vector graphics to IE <= 8.
 *
 * An existing chart's renderer can be accessed through {@link Chart.renderer}.
 * The renderer can also be used completely decoupled from a chart.
 *
 * @sample highcharts/members/renderer-on-chart
 *         Annotating a chart programmatically.
 * @sample highcharts/members/renderer-basic
 *         Independent SVG drawing.
 *
 * @example
 * // Use directly without a chart object.
 * var renderer = new Highcharts.Renderer(parentNode, 600, 400);
 *
 * @class
 * @name Highcharts.SVGRenderer
 *
 * @param {Highcharts.HTMLDOMElement} container
 *        Where to put the SVG in the web page.
 *
 * @param {number} width
 *        The width of the SVG.
 *
 * @param {number} height
 *        The height of the SVG.
 *
 * @param {Highcharts.CSSObject} [style]
 *        The box style, applied when not in styled mode (see `init`).
 *
 * @param {boolean} [forExport=false]
 *        Whether the rendered content is intended for export.
 *
 * @param {boolean} [allowHTML=true]
 *        Whether the renderer is allowed to include HTML text, which will be
 *        projected on top of the SVG.
 */
SVGRenderer = H.SVGRenderer = function () {
    // All constructor arguments are forwarded unchanged to init
    this.init.apply(this, arguments);
};
extend(SVGRenderer.prototype, /** @lends Highcharts.SVGRenderer.prototype */ {
/**
* A pointer to the renderer's associated Element class. The VMLRenderer
* will have a pointer to VMLElement here.
*
* @name Highcharts.SVGRenderer#Element
* @type {Highcharts.SVGElement}
*/
Element: SVGElement,
SVG_NS: SVG_NS,
    /**
     * Initialize the SVGRenderer. Overridable initiator function that takes
     * the same parameters as the constructor.
     *
     * @function Highcharts.SVGRenderer#init
     *
     * @param {Highcharts.HTMLDOMElement} container
     *        Where to put the SVG in the web page.
     *
     * @param {number} width
     *        The width of the SVG.
     *
     * @param {number} height
     *        The height of the SVG.
     *
     * @param {Highcharts.CSSObject} [style]
     *        The box style, applied to the root node when not in styled
     *        mode.
     *
     * @param {boolean} [forExport=false]
     *        Whether the rendered content is intended for export.
     *
     * @param {boolean} [allowHTML=true]
     *        Whether the renderer is allowed to include HTML text, which will
     *        be projected on top of the SVG.
     *
     * @param {boolean} [styledMode=false]
     *        Whether the renderer belongs to a chart that is in styled mode.
     *        If it does, it will avoid setting presentational attributes in
     *        some cases, but not when set explicitly through `.attr` and `.css`
     *        etc.
     *
     * @return {void}
     */
    init: function (
        container,
        width,
        height,
        style,
        forExport,
        allowHTML,
        styledMode
    ) {
        var renderer = this,
            boxWrapper,
            element,
            desc;
        // Create the root <svg> node
        boxWrapper = renderer.createElement('svg')
            .attr({
                'version': '1.1',
                'class': 'highcharts-root'
            });
        if (!styledMode) {
            boxWrapper.css(this.getStyle(style));
        }
        element = boxWrapper.element;
        container.appendChild(element);
        // Always use ltr on the container, otherwise text-anchor will be
        // flipped and text appear outside labels, buttons, tooltip etc (#3482)
        attr(container, 'dir', 'ltr');
        // For browsers other than IE, add the namespace attribute (#1978)
        if (container.innerHTML.indexOf('xmlns') === -1) {
            attr(element, 'xmlns', this.SVG_NS);
        }
        // object properties
        renderer.isSVG = true;
        /**
         * The root `svg` node of the renderer.
         *
         * @name Highcharts.SVGRenderer#box
         * @type {Highcharts.SVGDOMElement}
         */
        this.box = element;
        /**
         * The wrapper for the root `svg` node of the renderer.
         *
         * @name Highcharts.SVGRenderer#boxWrapper
         * @type {Highcharts.SVGElement}
         */
        this.boxWrapper = boxWrapper;
        renderer.alignedObjects = [];
        /**
         * Page url used for internal references.
         *
         * @private
         * @name Highcharts.SVGRenderer#url
         * @type {string}
         */
        // #24, #672, #1070
        this.url = (
            (isFirefox || isWebKit) &&
            doc.getElementsByTagName('base').length
        ) ?
            win.location.href
                .split('#')[0] // remove the hash
                .replace(/<[^>]*>/g, '') // wing cut HTML
                // escape parentheses and quotes
                .replace(/([\('\)])/g, '\\$1')
                // replace spaces (needed for Safari only)
                .replace(/ /g, '%20') :
            '';
        // Add description
        desc = this.createElement('desc').add();
        desc.element.appendChild(
            doc.createTextNode('Created with @product.name@ @product.version@')
        );
        /**
         * A pointer to the `defs` node of the root SVG.
         *
         * @name Highcharts.SVGRenderer#defs
         * @type {Highcharts.SVGElement}
         */
        renderer.defs = this.createElement('defs').add();
        renderer.allowHTML = allowHTML;
        renderer.forExport = forExport;
        renderer.styledMode = styledMode;
        renderer.gradients = {}; // Object where gradient SvgElements are stored
        renderer.cache = {}; // Cache for numerical bounding boxes
        renderer.cacheKeys = [];
        renderer.imgCount = 0;
        renderer.setSize(width, height, false);
        // Issue 110 workaround:
        // In Firefox, if a div is positioned by percentage, its pixel position
        // may land between pixels. The container itself doesn't display this,
        // but an SVG element inside this container will be drawn at subpixel
        // precision. In order to draw sharp lines, this must be compensated
        // for. This doesn't seem to work inside iframes though (like in
        // jsFiddle).
        var subPixelFix, rect;
        if (isFirefox && container.getBoundingClientRect) {
            subPixelFix = function () {
                css(container, { left: 0, top: 0 });
                rect = container.getBoundingClientRect();
                css(container, {
                    left: (Math.ceil(rect.left) - rect.left) + 'px',
                    top: (Math.ceil(rect.top) - rect.top) + 'px'
                });
            };
            // run the fix now
            subPixelFix();
            // run it on resize
            renderer.unSubPixelFix = addEvent(win, 'resize', subPixelFix);
        }
    },
/**
* General method for adding a definition to the SVG `defs` tag. Can be used
* for gradients, fills, filters etc. Styled mode only. A hook for adding
* general definitions to the SVG's defs tag. Definitions can be referenced
* from the CSS by its `id`. Read more in
* [gradients, shadows and patterns](https://www.highcharts.com/docs/chart-design-and-style/gradients-shadows-and-patterns).
* Styled mode only.
*
* @function Highcharts.SVGRenderer#definition
*
* @param {Highcharts.SVGDefinitionObject} def
* A serialized form of an SVG definition, including children.
*
* @return {Highcharts.SVGElement}
* The inserted node.
*/
definition: function (def) {
var ren = this;
function recurse(config, parent) {
var ret;
splat(config).forEach(function (item) {
var node = ren.createElement(item.tagName),
attr = {};
// Set attributes
objectEach(item, function (val, key) {
if (
key !== 'tagName' &&
key !== 'children' &&
key !== 'textContent'
) {
attr[key] = val;
}
});
node.attr(attr);
// Add to the tree
node.add(parent || ren.defs);
// Add text content
if (item.textContent) {
node.element.appendChild(
doc.createTextNode(item.textContent)
);
}
// Recurse
recurse(item.children || [], node);
ret = node;
});
// Return last node added (on top level it's the only one)
return ret;
}
return recurse(def);
},
/**
* Get the global style setting for the renderer.
*
* @private
* @function Highcharts.SVGRenderer#getStyle
*
* @param {Highcharts.CSSObject} style
* Style settings.
*
* @return {Highcharts.CSSObject}
* The style settings mixed with defaults.
*/
getStyle: function (style) {
this.style = extend({
fontFamily: '"Lucida Grande", "Lucida Sans Unicode", ' +
'Arial, Helvetica, sans-serif',
fontSize: '12px'
}, style);
return this.style;
},
    /**
     * Apply the global style on the renderer, mixed with the default styles.
     *
     * @function Highcharts.SVGRenderer#setStyle
     *
     * @param {Highcharts.CSSObject} style
     *        CSS to apply.
     */
    setStyle: function (style) {
        // Merge with the defaults via getStyle, then apply to the root box
        this.boxWrapper.css(this.getStyle(style));
    },
    /**
     * Detect whether the renderer is hidden. This happens when one of the
     * parent elements has `display: none`. Used internally to detect when we
     * need to render preliminarily in another div to get the text bounding
     * boxes right.
     *
     * @function Highcharts.SVGRenderer#isHidden
     *
     * @return {boolean}
     *         True if it is hidden.
     */
    isHidden: function () { // #608
        // A hidden element reports a zero-width bounding box
        return !this.boxWrapper.getBBox().width;
    },
/**
* Destroys the renderer and its allocated members.
*
* @function Highcharts.SVGRenderer#destroy
*/
destroy: function () {
var renderer = this,
rendererDefs = renderer.defs;
renderer.box = null;
renderer.boxWrapper = renderer.boxWrapper.destroy();
// Call destroy on all gradient elements
destroyObjectProperties(renderer.gradients || {});
renderer.gradients = null;
// Defs are null in VMLRenderer
// Otherwise, destroy them here.
if (rendererDefs) {
renderer.defs = rendererDefs.destroy();
}
// Remove sub pixel fix handler (#982)
if (renderer.unSubPixelFix) {
renderer.unSubPixelFix();
}
renderer.alignedObjects = null;
return null;
},
/**
* Create a wrapper for an SVG element. Serves as a factory for
* {@link SVGElement}, but this function is itself mostly called from
* primitive factories like {@link SVGRenderer#path}, {@link
* SVGRenderer#rect} or {@link SVGRenderer#text}.
*
* @function Highcharts.SVGRenderer#createElement
*
* @param {string} nodeName
* The node name, for example `rect`, `g` etc.
*
* @return {Highcharts.SVGElement}
* The generated SVGElement.
*/
createElement: function (nodeName) {
var wrapper = new this.Element();
wrapper.init(this, nodeName);
return wrapper;
},
    /**
     * Dummy function for plugins, called every time the renderer is updated.
     * Prior to Highcharts 5, this was used for the canvg renderer. Kept as a
     * no-op hook for backward compatibility.
     *
     * @deprecated
     * @function Highcharts.SVGRenderer#draw
     */
    draw: noop,
/**
* Get converted radial gradient attributes according to the radial
* reference. Used internally from the {@link SVGElement#colorGradient}
* function.
*
* @private
* @function Highcharts.SVGRenderer#getRadialAttr
*
* @param {Array<number>} radialReference
*
* @param {Highcharts.SVGAttributes} gradAttr
*
* @return {Highcharts.SVGAttributes}
*/
getRadialAttr: function (radialReference, gradAttr) {
return {
cx: (radialReference[0] - radialReference[2] / 2) +
gradAttr.cx * radialReference[2],
cy: (radialReference[1] - radialReference[2] / 2) +
gradAttr.cy * radialReference[2],
r: gradAttr.r * radialReference[2]
};
},
    /**
     * Truncate the text node contents to a given length. Used when the css
     * width is set. If the `textOverflow` is `ellipsis`, the text is truncated
     * character by character to the given length. If not, the text is
     * word-wrapped line by line.
     *
     * @private
     * @function Highcharts.SVGRenderer#truncate
     *
     * @param {Highcharts.SVGElement} wrapper
     *        The SVGElement holding the text.
     *
     * @param {Highcharts.SVGDOMElement} tspan
     *        The tspan node whose content is truncated in place.
     *
     * @param {string} text
     *        The text for character-by-character (ellipsis) truncation, or
     *        null when word-wrapping.
     *
     * @param {Array.<string>} words
     *        The words for word-by-word wrapping, or undefined for ellipsis
     *        truncation.
     *
     * @param {number} startAt
     *        Pixel width already consumed on the current line.
     *
     * @param {number} width
     *        The target pixel width.
     *
     * @param {Function} getString
     *        Builds the candidate string to measure for a given index.
     *
     * @return {boolean}
     *         True if tspan is too long.
     */
    truncate: function (
        wrapper,
        tspan,
        text,
        words,
        startAt,
        width,
        getString
    ) {
        var renderer = this,
            rotation = wrapper.rotation,
            str,
            // Word wrap can not be truncated to shorter than one word, ellipsis
            // text can be completely blank.
            minIndex = words ? 1 : 0,
            maxIndex = (text || words).length,
            currentIndex = maxIndex,
            // Cache the lengths to avoid checking the same twice
            lengths = [],
            // Replace the tspan's single text child with the string s
            updateTSpan = function (s) {
                if (tspan.firstChild) {
                    tspan.removeChild(tspan.firstChild);
                }
                if (s) {
                    tspan.appendChild(doc.createTextNode(s));
                }
            },
            getSubStringLength = function (charEnd, concatenatedEnd) {
                // charEnd is used when finding the character-by-character
                // break for ellipsis, concatenatedEnd is used for word-by-word
                // break for word wrapping.
                var end = concatenatedEnd || charEnd;
                if (lengths[end] === undefined) {
                    // Modern browsers
                    if (tspan.getSubStringLength) {
                        // Fails with DOM exception on unit-tests/legend/members
                        // of unknown reason. Desired width is 0, text content
                        // is "5" and end is 1.
                        try {
                            lengths[end] = startAt + tspan.getSubStringLength(
                                0,
                                words ? end + 1 : end
                            );
                        } catch (e) {}
                        // Legacy
                    } else if (renderer.getSpanWidth) { // #9058 jsdom
                        updateTSpan(getString(text || words, charEnd));
                        lengths[end] = startAt +
                            renderer.getSpanWidth(wrapper, tspan);
                    }
                }
                return lengths[end];
            },
            actualWidth,
            truncated;
        wrapper.rotation = 0; // discard rotation when computing box
        actualWidth = getSubStringLength(tspan.textContent.length);
        truncated = startAt + actualWidth > width;
        if (truncated) {
            // Do a binary search for the index where to truncate the text
            while (minIndex <= maxIndex) {
                currentIndex = Math.ceil((minIndex + maxIndex) / 2);
                // When checking words for word-wrap, we need to build the
                // string and measure the subStringLength at the concatenated
                // word length.
                if (words) {
                    str = getString(words, currentIndex);
                }
                actualWidth = getSubStringLength(
                    currentIndex,
                    str && str.length - 1
                );
                if (minIndex === maxIndex) {
                    // Complete
                    minIndex = maxIndex + 1;
                } else if (actualWidth > width) {
                    // Too large. Set max index to current.
                    maxIndex = currentIndex - 1;
                } else {
                    // Within width. Set min index to current.
                    minIndex = currentIndex;
                }
            }
            // If max index was 0 it means the shortest possible text was also
            // too large. For ellipsis that means only the ellipsis, while for
            // word wrap it means the whole first word.
            if (maxIndex === 0) {
                // Remove ellipsis
                updateTSpan('');
                // If the new text length is one less than the original, we
                // don't need the ellipsis
            } else if (!(text && maxIndex === text.length - 1)) {
                updateTSpan(str || getString(text || words, currentIndex));
            }
        }
        // When doing line wrapping, prepare for the next line by removing the
        // items from this line.
        if (words) {
            words.splice(0, currentIndex);
        }
        wrapper.actualWidth = actualWidth;
        wrapper.rotation = rotation; // Apply rotation again.
        return truncated;
    },
/**
* A collection of characters mapped to HTML entities. When `useHTML` on an
* element is true, these entities will be rendered correctly by HTML. In
* the SVG pseudo-HTML, they need to be unescaped back to simple characters,
* so for example `<` will render as `<`.
*
* @example
* // Add support for unescaping quotes
* Highcharts.SVGRenderer.prototype.escapes['"'] = '"';
*
* @name Highcharts.SVGRenderer#escapes
* @type {Highcharts.Dictionary<string>}
*/
escapes: {
'&': '&',
'<': '<',
'>': '>',
"'": ''', // eslint-disable-line quotes
'"': '"'
},
/**
* Parse a simple HTML string into SVG tspans. Called internally when text
* is set on an SVGElement. The function supports a subset of HTML tags, CSS
* text features like `width`, `text-overflow`, `white-space`, and also
* attributes like `href` and `style`.
*
* @private
* @function Highcharts.SVGRenderer#buildText
*
* @param {Highcharts.SVGElement} wrapper
* The parent SVGElement.
*/
buildText: function (wrapper) {
var textNode = wrapper.element,
renderer = this,
forExport = renderer.forExport,
textStr = pick(wrapper.textStr, '').toString(),
hasMarkup = textStr.indexOf('<') !== -1,
lines,
childNodes = textNode.childNodes,
truncated,
parentX = attr(textNode, 'x'),
textStyles = wrapper.styles,
width = wrapper.textWidth,
textLineHeight = textStyles && textStyles.lineHeight,
textOutline = textStyles && textStyles.textOutline,
ellipsis = textStyles && textStyles.textOverflow === 'ellipsis',
noWrap = textStyles && textStyles.whiteSpace === 'nowrap',
fontSize = textStyles && textStyles.fontSize,
textCache,
isSubsequentLine,
i = childNodes.length,
tempParent = width && !wrapper.added && this.box,
getLineHeight = function (tspan) {
var fontSizeStyle;
if (!renderer.styledMode) {
fontSizeStyle =
/(px|em)$/.test(tspan && tspan.style.fontSize) ?
tspan.style.fontSize :
(fontSize || renderer.style.fontSize || 12);
}
return textLineHeight ?
pInt(textLineHeight) :
renderer.fontMetrics(
fontSizeStyle,
// Get the computed size from parent if not explicit
tspan.getAttribute('style') ? tspan : textNode
).h;
},
unescapeEntities = function (inputStr, except) {
objectEach(renderer.escapes, function (value, key) {
if (!except || except.indexOf(value) === -1) {
inputStr = inputStr.toString().replace(
new RegExp(value, 'g'), // eslint-disable-line security/detect-non-literal-regexp
key
);
}
});
return inputStr;
},
parseAttribute = function (s, attr) {
var start,
delimiter;
start = s.indexOf('<');
s = s.substring(start, s.indexOf('>') - start);
start = s.indexOf(attr + '=');
if (start !== -1) {
start = start + attr.length + 1;
delimiter = s.charAt(start);
if (delimiter === '"' || delimiter === "'") { // eslint-disable-line quotes
s = s.substring(start + 1);
return s.substring(0, s.indexOf(delimiter));
}
}
};
// The buildText code is quite heavy, so if we're not changing something
// that affects the text, skip it (#6113).
textCache = [
textStr,
ellipsis,
noWrap,
textLineHeight,
textOutline,
fontSize,
width
].join(',');
if (textCache === wrapper.textCache) {
return;
}
wrapper.textCache = textCache;
// Remove old text
while (i--) {
textNode.removeChild(childNodes[i]);
}
// Skip tspans, add text directly to text node. The forceTSpan is a hook
// used in text outline hack.
if (
!hasMarkup &&
!textOutline &&
!ellipsis &&
!width &&
textStr.indexOf(' ') === -1
) {
textNode.appendChild(doc.createTextNode(unescapeEntities(textStr)));
// Complex strings, add more logic
} else {
<|fim▁hole|> if (tempParent) {
// attach it to the DOM to read offset width
tempParent.appendChild(textNode);
}
if (hasMarkup) {
lines = renderer.styledMode ? (
textStr.replace(
/<(b|strong)>/g,
'<span class="highcharts-strong">'
)
.replace(
/<(i|em)>/g,
'<span class="highcharts-emphasized">'
)
) : (
textStr
.replace(
/<(b|strong)>/g,
'<span style="font-weight:bold">'
)
.replace(
/<(i|em)>/g,
'<span style="font-style:italic">'
)
);
lines = lines
.replace(/<a/g, '<span')
.replace(/<\/(b|strong|i|em|a)>/g, '</span>')
.split(/<br.*?>/g);
} else {
lines = [textStr];
}
// Trim empty lines (#5261)
lines = lines.filter(function (line) {
return line !== '';
});
// build the lines
lines.forEach(function buildTextLines(line, lineNo) {
var spans,
spanNo = 0,
lineLength = 0;
line = line
// Trim to prevent useless/costly process on the spaces
// (#5258)
.replace(/^\s+|\s+$/g, '')
.replace(/<span/g, '|||<span')
.replace(/<\/span>/g, '</span>|||');
spans = line.split('|||');
spans.forEach(function buildTextSpans(span) {
if (span !== '' || spans.length === 1) {
var attributes = {},
tspan = doc.createElementNS(
renderer.SVG_NS,
'tspan'
),
classAttribute,
styleAttribute, // #390
hrefAttribute;
classAttribute = parseAttribute(span, 'class');
if (classAttribute) {
attr(tspan, 'class', classAttribute);
}
styleAttribute = parseAttribute(span, 'style');
if (styleAttribute) {
styleAttribute = styleAttribute.replace(
/(;| |^)color([ :])/,
'$1fill$2'
);
attr(tspan, 'style', styleAttribute);
}
// Not for export - #1529
hrefAttribute = parseAttribute(span, 'href');
if (hrefAttribute && !forExport) {
attr(
tspan,
'onclick',
'location.href=\"' + hrefAttribute + '\"'
);
attr(tspan, 'class', 'highcharts-anchor');
if (!renderer.styledMode) {
css(tspan, { cursor: 'pointer' });
}
}
// Strip away unsupported HTML tags (#7126)
span = unescapeEntities(
span.replace(/<[a-zA-Z\/](.|\n)*?>/g, '') || ' '
);
// Nested tags aren't supported, and cause crash in
// Safari (#1596)
if (span !== ' ') {
// add the text node
tspan.appendChild(doc.createTextNode(span));
// First span in a line, align it to the left
if (!spanNo) {
if (lineNo && parentX !== null) {
attributes.x = parentX;
}
} else {
attributes.dx = 0; // #16
}
// add attributes
attr(tspan, attributes);
// Append it
textNode.appendChild(tspan);
// first span on subsequent line, add the line
// height
if (!spanNo && isSubsequentLine) {
// allow getting the right offset height in
// exporting in IE
if (!svg && forExport) {
css(tspan, { display: 'block' });
}
// Set the line height based on the font size of
// either the text element or the tspan element
attr(
tspan,
'dy',
getLineHeight(tspan)
);
}
// Check width and apply soft breaks or ellipsis
if (width) {
var words = span.replace(
/([^\^])-/g,
'$1- '
).split(' '), // #1273
hasWhiteSpace = !noWrap && (
spans.length > 1 ||
lineNo ||
words.length > 1
),
wrapLineNo = 0,
dy = getLineHeight(tspan);
if (ellipsis) {
truncated = renderer.truncate(
wrapper,
tspan,
span,
undefined,
0,
// Target width
Math.max(
0,
// Substract the font face to make
// room for the ellipsis itself
width - parseInt(fontSize || 12, 10)
),
// Build the text to test for
function (text, currentIndex) {
return text.substring(
0,
currentIndex
) + '\u2026';
}
);
} else if (hasWhiteSpace) {
while (words.length) {
// For subsequent lines, create tspans
// with the same style attributes as the
// parent text node.
if (
words.length &&
!noWrap &&
wrapLineNo > 0
) {
tspan = doc.createElementNS(
SVG_NS,
'tspan'
);
attr(tspan, {
dy: dy,
x: parentX
});
if (styleAttribute) { // #390
attr(
tspan,
'style',
styleAttribute
);
}
// Start by appending the full
// remaining text
tspan.appendChild(
doc.createTextNode(
words.join(' ')
.replace(/- /g, '-')
)
);
textNode.appendChild(tspan);
}
// For each line, truncate the remaining
// words into the line length.
renderer.truncate(
wrapper,
tspan,
null,
words,
wrapLineNo === 0 ? lineLength : 0,
width,
// Build the text to test for
function (text, currentIndex) {
return words
.slice(0, currentIndex)
.join(' ')
.replace(/- /g, '-');
}
);
lineLength = wrapper.actualWidth;
wrapLineNo++;
}
}
}
spanNo++;
}
}
});
// To avoid beginning lines that doesn't add to the textNode
// (#6144)
isSubsequentLine = (
isSubsequentLine ||
textNode.childNodes.length
);
});
if (ellipsis && truncated) {
wrapper.attr(
'title',
unescapeEntities(wrapper.textStr, ['<', '>']) // #7179
);
}
if (tempParent) {
tempParent.removeChild(textNode);
}
// Apply the text outline
if (textOutline && wrapper.applyTextOutline) {
wrapper.applyTextOutline(textOutline);
}
}
},
/**
* Returns white for dark colors and black for bright colors.
*
* @function Highcharts.SVGRenderer#getContrast
*
* @param {Highcharts.ColorString} rgba
* The color to get the contrast for.
*
* @return {string}
* The contrast color, either `#000000` or `#FFFFFF`.
*/
getContrast: function (rgba) {
rgba = color(rgba).rgba;
// The threshold may be discussed. Here's a proposal for adding
// different weight to the color channels (#6216)
rgba[0] *= 1; // red
rgba[1] *= 1.2; // green
rgba[2] *= 0.5; // blue
return rgba[0] + rgba[1] + rgba[2] > 1.8 * 255 ? '#000000' : '#FFFFFF';
},
    /**
     * Create a button with preset states. State codes used below: 0 = normal,
     * 1 = hover, 2 = pressed, 3 = disabled (per the class-name array in
     * setState).
     *
     * @function Highcharts.SVGRenderer#button
     *
     * @param {string} text
     *        The text or HTML to draw.
     *
     * @param {number} x
     *        The x position of the button's left side.
     *
     * @param {number} y
     *        The y position of the button's top side.
     *
     * @param {Function} callback
     *        The function to execute on button click or touch.
     *
     * @param {Highcharts.SVGAttributes} [normalState]
     *        SVG attributes for the normal state.
     *
     * @param {Highcharts.SVGAttributes} [hoverState]
     *        SVG attributes for the hover state.
     *
     * @param {Highcharts.SVGAttributes} [pressedState]
     *        SVG attributes for the pressed state.
     *
     * @param {Highcharts.SVGAttributes} [disabledState]
     *        SVG attributes for the disabled state.
     *
     * @param {Highcharts.SymbolKey} [shape=rect]
     *        The shape type.
     *
     * @return {Highcharts.SVGElement}
     *         The button element.
     */
    button: function (
        text,
        x,
        y,
        callback,
        normalState,
        hoverState,
        pressedState,
        disabledState,
        shape
    ) {
        var label = this.label(
                text,
                x,
                y,
                shape,
                null,
                null,
                null,
                null,
                'button'
            ),
            curState = 0,
            styledMode = this.styledMode;
        // Default, non-stylable attributes
        label.attr(merge({
            'padding': 8,
            'r': 2
        }, normalState));
        if (!styledMode) {
            // Presentational
            var normalStyle,
                hoverStyle,
                pressedStyle,
                disabledStyle;
            // Normal state - prepare the attributes; the style sub-object is
            // split off each state so it can be applied via css()
            normalState = merge({
                fill: '#f7f7f7',
                stroke: '#cccccc',
                'stroke-width': 1,
                style: {
                    color: '#333333',
                    cursor: 'pointer',
                    fontWeight: 'normal'
                }
            }, normalState);
            normalStyle = normalState.style;
            delete normalState.style;
            // Hover state
            hoverState = merge(normalState, {
                fill: '#e6e6e6'
            }, hoverState);
            hoverStyle = hoverState.style;
            delete hoverState.style;
            // Pressed state
            pressedState = merge(normalState, {
                fill: '#e6ebf5',
                style: {
                    color: '#000000',
                    fontWeight: 'bold'
                }
            }, pressedState);
            pressedStyle = pressedState.style;
            delete pressedState.style;
            // Disabled state
            disabledState = merge(normalState, {
                style: {
                    color: '#cccccc'
                }
            }, disabledState);
            disabledStyle = disabledState.style;
            delete disabledState.style;
        }
        // Add the events. IE9 and IE10 need mouseover and mouseout to function
        // (#667).
        addEvent(label.element, isMS ? 'mouseover' : 'mouseenter', function () {
            if (curState !== 3) {
                label.setState(1);
            }
        });
        addEvent(label.element, isMS ? 'mouseout' : 'mouseleave', function () {
            if (curState !== 3) {
                label.setState(curState);
            }
        });
        label.setState = function (state) {
            // Hover state is temporary, don't record it
            if (state !== 1) {
                label.state = curState = state;
            }
            // Update visuals
            label.removeClass(
                /highcharts-button-(normal|hover|pressed|disabled)/
            )
                .addClass(
                    'highcharts-button-' +
                    ['normal', 'hover', 'pressed', 'disabled'][state || 0]
                );
            if (!styledMode) {
                label.attr([
                    normalState,
                    hoverState,
                    pressedState,
                    disabledState
                ][state || 0])
                    .css([
                        normalStyle,
                        hoverStyle,
                        pressedStyle,
                        disabledStyle
                    ][state || 0]);
            }
        };
        // Presentational attributes
        if (!styledMode) {
            label
                .attr(normalState)
                .css(extend({ cursor: 'default' }, normalStyle));
        }
        return label
            .on('click', function (e) {
                if (curState !== 3) {
                    callback.call(label, e);
                }
            });
    },
/**
* Make a straight line crisper by not spilling out to neighbour pixels.
*
* @function Highcharts.SVGRenderer#crispLine
*
* @param {Highcharts.SVGPathArray} points
* The original points on the format `['M', 0, 0, 'L', 100, 0]`.
*
* @param {number} width
* The width of the line.
*
* @return {Highcharts.SVGPathArray}
* The original points array, but modified to render crisply.
*/
crispLine: function (points, width) {
// normalize to a crisp line
if (points[1] === points[4]) {
// Substract due to #1129. Now bottom and left axis gridlines behave
// the same.
points[1] = points[4] = Math.round(points[1]) - (width % 2 / 2);
}
if (points[2] === points[5]) {
points[2] = points[5] = Math.round(points[2]) + (width % 2 / 2);
}
return points;
},
/**
* Draw a path, wraps the SVG `path` element.
*
* @sample highcharts/members/renderer-path-on-chart/
* Draw a path in a chart
* @sample highcharts/members/renderer-path/
* Draw a path independent from a chart
*
* @example
* var path = renderer.path(['M', 10, 10, 'L', 30, 30, 'z'])
* .attr({ stroke: '#ff00ff' })
* .add();
*
* @function Highcharts.SVGRenderer#path
*
* @param {Highcharts.SVGPathArray} [path]
* An SVG path definition in array form.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*
*//**
* Draw a path, wraps the SVG `path` element.
*
* @function Highcharts.SVGRenderer#path
*
* @param {Highcharts.SVGAttributes} [attribs]
* The initial attributes.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*/
path: function (path) {
var attribs = this.styledMode ? {} : {
fill: 'none'
};
if (isArray(path)) {
attribs.d = path;
} else if (isObject(path)) { // attributes
extend(attribs, path);
}
return this.createElement('path').attr(attribs);
},
/**
* Draw a circle, wraps the SVG `circle` element.
*
* @sample highcharts/members/renderer-circle/
* Drawing a circle
*
* @function Highcharts.SVGRenderer#circle
*
* @param {number} [x]
* The center x position.
*
* @param {number} [y]
* The center y position.
*
* @param {number} [r]
* The radius.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*//**
* Draw a circle, wraps the SVG `circle` element.
*
* @function Highcharts.SVGRenderer#circle
*
* @param {Highcharts.SVGAttributes} [attribs]
* The initial attributes.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*/
circle: function (x, y, r) {
var attribs = (
isObject(x) ?
x :
x === undefined ? {} : { x: x, y: y, r: r }
),
wrapper = this.createElement('circle');
// Setting x or y translates to cx and cy
wrapper.xSetter = wrapper.ySetter = function (value, key, element) {
element.setAttribute('c' + key, value);
};
return wrapper.attr(attribs);
},
/**
* Draw and return an arc.
*
* @sample highcharts/members/renderer-arc/
* Drawing an arc
*
* @function Highcharts.SVGRenderer#arc
*
* @param {number} [x=0]
* Center X position.
*
* @param {number} [y=0]
* Center Y position.
*
* @param {number} [r=0]
* The outer radius of the arc.
*
* @param {number} [innerR=0]
* Inner radius like used in donut charts.
*
* @param {number} [start=0]
* The starting angle of the arc in radians, where 0 is to the right
* and `-Math.PI/2` is up.
*
* @param {number} [end=0]
* The ending angle of the arc in radians, where 0 is to the right
* and `-Math.PI/2` is up.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*//**
* Draw and return an arc. Overloaded function that takes arguments object.
*
* @function Highcharts.SVGRenderer#arc
*
* @param {Highcharts.SVGAttributes} attribs
* Initial SVG attributes.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*/
arc: function (x, y, r, innerR, start, end) {
var arc,
options;
if (isObject(x)) {
options = x;
y = options.y;
r = options.r;
innerR = options.innerR;
start = options.start;
end = options.end;
x = options.x;
} else {
options = {
innerR: innerR,
start: start,
end: end
};
}
// Arcs are defined as symbols for the ability to set
// attributes in attr and animate
arc = this.symbol('arc', x, y, r, r, options);
arc.r = r; // #959
return arc;
},
/**
* Draw and return a rectangle.
*
* @function Highcharts.SVGRenderer#rect
*
* @param {number} [x]
* Left position.
*
* @param {number} [y]
* Top position.
*
* @param {number} [width]
* Width of the rectangle.
*
* @param {number} [height]
* Height of the rectangle.
*
* @param {number} [r]
* Border corner radius.
*
* @param {number} [strokeWidth]
* A stroke width can be supplied to allow crisp drawing.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*//**
* Draw and return a rectangle.
*
* @sample highcharts/members/renderer-rect-on-chart/
* Draw a rectangle in a chart
* @sample highcharts/members/renderer-rect/
* Draw a rectangle independent from a chart
*
* @function Highcharts.SVGRenderer#rect
*
* @param {Highcharts.SVGAttributes} [attributes]
* General SVG attributes for the rectangle.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*/
rect: function (x, y, width, height, r, strokeWidth) {
r = isObject(x) ? x.r : r;
var wrapper = this.createElement('rect'),
attribs = isObject(x) ? x : x === undefined ? {} : {
x: x,
y: y,
width: Math.max(width, 0),
height: Math.max(height, 0)
};
if (!this.styledMode) {
if (strokeWidth !== undefined) {
attribs.strokeWidth = strokeWidth;
attribs = wrapper.crisp(attribs);
}
attribs.fill = 'none';
}
if (r) {
attribs.r = r;
}
wrapper.rSetter = function (value, key, element) {
attr(element, {
rx: value,
ry: value
});
};
return wrapper.attr(attribs);
},
/**
* Resize the {@link SVGRenderer#box} and re-align all aligned child
* elements.
*
* @sample highcharts/members/renderer-g/
* Show and hide grouped objects
*
* @function Highcharts.SVGRenderer#setSize
*
* @param {number} width
* The new pixel width.
*
* @param {number} height
* The new pixel height.
*
* @param {boolean|Highcharts.AnimationOptionsObject} [animate=true]
* Whether and how to animate.
*/
setSize: function (width, height, animate) {
var renderer = this,
alignedObjects = renderer.alignedObjects,
i = alignedObjects.length;
renderer.width = width;
renderer.height = height;
renderer.boxWrapper.animate({
width: width,
height: height
}, {
step: function () {
this.attr({
viewBox: '0 0 ' + this.attr('width') + ' ' +
this.attr('height')
});
},
duration: pick(animate, true) ? undefined : 0
});
while (i--) {
alignedObjects[i].align();
}
},
/**
* Create and return an svg group element. Child
* {@link Highcharts.SVGElement} objects are added to the group by using the
* group as the first parameter in {@link Highcharts.SVGElement#add|add()}.
*
* @function Highcharts.SVGRenderer#g
*
* @param {string} [name]
* The group will be given a class name of `highcharts-{name}`. This
* can be used for styling and scripting.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*/
g: function (name) {
var elem = this.createElement('g');
return name ? elem.attr({ 'class': 'highcharts-' + name }) : elem;
},
/**
* Display an image.
*
* @sample highcharts/members/renderer-image-on-chart/
* Add an image in a chart
* @sample highcharts/members/renderer-image/
* Add an image independent of a chart
*
* @function Highcharts.SVGRenderer#image
*
* @param {string} src
* The image source.
*
* @param {number} [x]
* The X position.
*
* @param {number} [y]
* The Y position.
*
* @param {number} [width]
* The image width. If omitted, it defaults to the image file width.
*
* @param {number} [height]
* The image height. If omitted it defaults to the image file
* height.
*
* @param {Function} [onload]
* Event handler for image load.
*
* @return {Highcharts.SVGElement}
* The generated wrapper element.
*/
    image: function (src, x, y, width, height, onload) {
        var attribs = {
                preserveAspectRatio: 'none'
            },
            elemWrapper,
            dummy,
            setSVGImageSource = function (el, src) {
                // Set the href in the xlink namespace
                if (el.setAttributeNS) {
                    el.setAttributeNS(
                        'http://www.w3.org/1999/xlink', 'href', src
                    );
                } else {
                    // could be exporting in IE
                    // using href throws "not supported" in ie7 and under,
                    // requires regex shim to fix later
                    el.setAttribute('hc-svg-href', src);
                }
            },
            // Runs once the hidden HTML image has loaded: swap the real
            // src into the SVG image and forward the event to the caller.
            onDummyLoad = function (e) {
                setSVGImageSource(elemWrapper.element, src);
                onload.call(elemWrapper, e);
            };
        // optional properties — only set position/size when given
        if (arguments.length > 1) {
            extend(attribs, {
                x: x,
                y: y,
                width: width,
                height: height
            });
        }
        elemWrapper = this.createElement('image').attr(attribs);
        // Add load event if supplied
        if (onload) {
            // We have to use a dummy HTML image since IE support for SVG image
            // load events is very buggy. First set a transparent src, wait for
            // dummy to load, and then add the real src to the SVG image.
            setSVGImageSource(
                elemWrapper.element,
                'data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==' /* eslint-disable-line */
            );
            dummy = new win.Image();
            addEvent(dummy, 'load', onDummyLoad);
            dummy.src = src;
            // Already cached — the 'load' event may never fire, so call
            // the handler immediately.
            if (dummy.complete) {
                onDummyLoad({});
            }
        } else {
            setSVGImageSource(elemWrapper.element, src);
        }
        return elemWrapper;
    },
/**
* Draw a symbol out of pre-defined shape paths from
* {@link SVGRenderer#symbols}.
 * It is used in Highcharts for point markers, which take a `symbol` option,
 * and label and button backgrounds like in the tooltip and stock flags.
*
* @function Highcharts.SVGRenderer#symbol
*
* @param {symbol} symbol
* The symbol name.
*
* @param {number} x
* The X coordinate for the top left position.
*
* @param {number} y
* The Y coordinate for the top left position.
*
* @param {number} width
* The pixel width.
*
* @param {number} height
* The pixel height.
*
* @param {Highcharts.SymbolOptionsObject} [options]
* Additional options, depending on the actual symbol drawn.
*
* @return {Highcharts.SVGElement}
*/
    symbol: function (symbol, x, y, width, height, options) {
        var ren = this,
            obj,
            // 'url(...)' symbol values denote an image instead of a path
            imageRegex = /^url\((.*?)\)$/,
            isImage = imageRegex.test(symbol),
            // Fall back to 'circle' for unknown path symbols
            sym = !isImage && (this.symbols[symbol] ? symbol : 'circle'),
            // get the symbol definition function
            symbolFn = sym && this.symbols[sym],
            // check if there's a path defined for this symbol
            path = defined(x) && symbolFn && symbolFn.call(
                this.symbols,
                Math.round(x),
                Math.round(y),
                width,
                height,
                options
            ),
            imageSrc,
            centerImage;
        if (symbolFn) {
            obj = this.path(path);
            if (!ren.styledMode) {
                obj.attr('fill', 'none');
            }
            // expando properties for use in animate and attr
            extend(obj, {
                symbolName: sym,
                x: x,
                y: y,
                width: width,
                height: height
            });
            if (options) {
                extend(obj, options);
            }
            // Image symbols
        } else if (isImage) {
            imageSrc = symbol.match(imageRegex)[1];
            // Create the image synchronously, add attribs async
            obj = this.image(imageSrc);
            // The image width is not always the same as the symbol width. The
            // image may be centered within the symbol, as is the case when
            // image shapes are used as label backgrounds, for example in flags.
            obj.imgwidth = pick(
                symbolSizes[imageSrc] && symbolSizes[imageSrc].width,
                options && options.width
            );
            obj.imgheight = pick(
                symbolSizes[imageSrc] && symbolSizes[imageSrc].height,
                options && options.height
            );
            /**
             * Set the size and position
             */
            centerImage = function () {
                obj.attr({
                    width: obj.width,
                    height: obj.height
                });
            };
            /**
             * Width and height setters that take both the image's physical size
             * and the label size into consideration, and translates the image
             * to center within the label.
             */
            ['width', 'height'].forEach(function (key) {
                obj[key + 'Setter'] = function (value, key) {
                    var attribs = {},
                        imgSize = this['img' + key],
                        trans = key === 'width' ? 'translateX' : 'translateY';
                    this[key] = value;
                    if (defined(imgSize)) {
                        if (this.element) {
                            this.element.setAttribute(key, imgSize);
                        }
                        if (!this.alignByTranslate) {
                            attribs[trans] = ((this[key] || 0) - imgSize) / 2;
                            this.attr(attribs);
                        }
                    }
                };
            });
            if (defined(x)) {
                obj.attr({
                    x: x,
                    y: y
                });
            }
            obj.isImg = true;
            if (defined(obj.imgwidth) && defined(obj.imgheight)) {
                centerImage();
            } else {
                // Initialize image to be 0 size so export will still function
                // if there's no cached sizes.
                obj.attr({ width: 0, height: 0 });
                // Create a dummy JavaScript image to get the width and height.
                createElement('img', {
                    onload: function () {
                        var chart = charts[ren.chartIndex];
                        // Special case for SVGs on IE11, the width is not
                        // accessible until the image is part of the DOM
                        // (#2854).
                        if (this.width === 0) {
                            css(this, {
                                position: 'absolute',
                                top: '-999em'
                            });
                            doc.body.appendChild(this);
                        }
                        // Center the image
                        symbolSizes[imageSrc] = { // Cache for next
                            width: this.width,
                            height: this.height
                        };
                        obj.imgwidth = this.width;
                        obj.imgheight = this.height;
                        if (obj.element) {
                            centerImage();
                        }
                        // Clean up after #2854 workaround.
                        if (this.parentNode) {
                            this.parentNode.removeChild(this);
                        }
                        // Fire the load event when all external images are
                        // loaded
                        ren.imgCount--;
                        if (!ren.imgCount && chart && chart.onload) {
                            chart.onload();
                        }
                    },
                    src: imageSrc
                });
                this.imgCount++;
            }
        }
        return obj;
    },
/**
* An extendable collection of functions for defining symbol paths.
*
* @name Highcharts.SVGRenderer#symbols
* @type {Highcharts.SymbolDictionary}
*/
    symbols: {
        // All path generators take the top-left corner (x, y) and the
        // bounding width/height, and return an SVG path array.
        'circle': function (x, y, w, h) {
            // Return a full arc
            return this.arc(x + w / 2, y + h / 2, w / 2, h / 2, {
                start: 0,
                end: Math.PI * 2,
                open: false
            });
        },
        'square': function (x, y, w, h) {
            return [
                'M', x, y,
                'L', x + w, y,
                x + w, y + h,
                x, y + h,
                'Z'
            ];
        },
        'triangle': function (x, y, w, h) {
            // Apex at top center, base along the bottom edge
            return [
                'M', x + w / 2, y,
                'L', x + w, y + h,
                x, y + h,
                'Z'
            ];
        },
        'triangle-down': function (x, y, w, h) {
            // Base along the top edge, apex at bottom center
            return [
                'M', x, y,
                'L', x + w, y,
                x + w / 2, y + h,
                'Z'
            ];
        },
        'diamond': function (x, y, w, h) {
            return [
                'M', x + w / 2, y,
                'L', x + w, y + h / 2,
                x + w / 2, y + h,
                x, y + h / 2,
                'Z'
            ];
        },
        'arc': function (x, y, w, h, options) {
            var start = options.start,
                rx = options.r || w,
                ry = options.r || h || w,
                proximity = 0.001,
                fullCircle =
                    Math.abs(options.end - options.start - 2 * Math.PI) <
                    proximity,
                // Substract a small number to prevent cos and sin of start and
                // end from becoming equal on 360 arcs (related: #1561)
                end = options.end - proximity,
                innerRadius = options.innerR,
                open = pick(options.open, fullCircle),
                cosStart = Math.cos(start),
                sinStart = Math.sin(start),
                cosEnd = Math.cos(end),
                sinEnd = Math.sin(end),
                // Proximity takes care of rounding errors around PI (#6971)
                longArc = options.end - start - Math.PI < proximity ? 0 : 1,
                arc;
            arc = [
                'M',
                x + rx * cosStart,
                y + ry * sinStart,
                'A', // arcTo
                rx, // x radius
                ry, // y radius
                0, // slanting
                longArc, // long or short arc
                1, // clockwise
                x + rx * cosEnd,
                y + ry * sinEnd
            ];
            // Donut shape: trace the inner edge back to the start
            if (defined(innerRadius)) {
                arc.push(
                    open ? 'M' : 'L',
                    x + innerRadius * cosEnd,
                    y + innerRadius * sinEnd,
                    'A', // arcTo
                    innerRadius, // x radius
                    innerRadius, // y radius
                    0, // slanting
                    longArc, // long or short arc
                    0, // clockwise
                    x + innerRadius * cosStart,
                    y + innerRadius * sinStart
                );
            }
            arc.push(open ? '' : 'Z'); // close
            return arc;
        },
        /**
         * Callout shape used for default tooltips, also used for rounded
         * rectangles in VML. Draws a rounded rectangle and, depending on
         * where (anchorX, anchorY) falls relative to the box, splices an
         * arrow or connector into the matching side's path segment.
         */
        'callout': function (x, y, w, h, options) {
            var arrowLength = 6,
                halfDistance = 6,
                r = Math.min((options && options.r) || 0, w, h),
                safeDistance = r + halfDistance,
                anchorX = options && options.anchorX,
                anchorY = options && options.anchorY,
                path;
            path = [
                'M', x + r, y,
                'L', x + w - r, y, // top side
                'C', x + w, y, x + w, y, x + w, y + r, // top-right corner
                'L', x + w, y + h - r, // right side
                'C', x + w, y + h, x + w, y + h, x + w - r, y + h, // bottom-rgt
                'L', x + r, y + h, // bottom side
                'C', x, y + h, x, y + h, x, y + h - r, // bottom-left corner
                'L', x, y + r, // left side
                'C', x, y, x, y, x + r, y // top-left corner
            ];
            // Anchor on right side
            if (anchorX && anchorX > w) {
                // Chevron
                if (
                    anchorY > y + safeDistance &&
                    anchorY < y + h - safeDistance
                ) {
                    path.splice(13, 3,
                        'L', x + w, anchorY - halfDistance,
                        x + w + arrowLength, anchorY,
                        x + w, anchorY + halfDistance,
                        x + w, y + h - r
                    );
                    // Simple connector
                } else {
                    // NOTE(review): 'h / 2' here looks like it should be
                    // 'y + h / 2' (only equal when y === 0) — confirm
                    // against upstream before changing.
                    path.splice(13, 3,
                        'L', x + w, h / 2,
                        anchorX, anchorY,
                        x + w, h / 2,
                        x + w, y + h - r
                    );
                }
                // Anchor on left side
            } else if (anchorX && anchorX < 0) {
                // Chevron
                if (
                    anchorY > y + safeDistance &&
                    anchorY < y + h - safeDistance
                ) {
                    path.splice(33, 3,
                        'L', x, anchorY + halfDistance,
                        x - arrowLength, anchorY,
                        x, anchorY - halfDistance,
                        x, y + r
                    );
                    // Simple connector
                } else {
                    // NOTE(review): same 'h / 2' vs 'y + h / 2' question as
                    // the right-side connector above.
                    path.splice(33, 3,
                        'L', x, h / 2,
                        anchorX, anchorY,
                        x, h / 2,
                        x, y + r
                    );
                }
            } else if ( // replace bottom
                anchorY &&
                anchorY > h &&
                anchorX > x + safeDistance &&
                anchorX < x + w - safeDistance
            ) {
                path.splice(23, 3,
                    'L', anchorX + halfDistance, y + h,
                    anchorX, y + h + arrowLength,
                    anchorX - halfDistance, y + h,
                    x + r, y + h
                );
            } else if ( // replace top
                anchorY &&
                anchorY < 0 &&
                anchorX > x + safeDistance &&
                anchorX < x + w - safeDistance
            ) {
                // NOTE(review): final point 'w - r' omits the x offset
                // ('x + w - r'); harmless when x === 0 — confirm.
                path.splice(3, 3,
                    'L', anchorX - halfDistance, y,
                    anchorX, y - arrowLength,
                    anchorX + halfDistance, y,
                    w - r, y
                );
            }
            return path;
        }
    },
/**
* Define a clipping rectangle. The clipping rectangle is later applied
* to {@link SVGElement} objects through the {@link SVGElement#clip}
* function.
*
* @example
* var circle = renderer.circle(100, 100, 100)
* .attr({ fill: 'red' })
* .add();
* var clipRect = renderer.clipRect(100, 100, 100, 100);
*
* // Leave only the lower right quarter visible
* circle.clip(clipRect);
*
* @function Highcharts.SVGRenderer#clipRect
*
     *
* @param {number} x
*
* @param {number} y
*
* @param {number} width
*
* @param {number} height
*
* @return {Highcharts.ClipRectElement}
* A clipping rectangle.
*/
clipRect: function (x, y, width, height) {
var wrapper,
id = H.uniqueKey(),
clipPath = this.createElement('clipPath').attr({
id: id
}).add(this.defs);
wrapper = this.rect(x, y, width, height, 0).add(clipPath);
wrapper.id = id;
wrapper.clipPath = clipPath;
wrapper.count = 0;
return wrapper;
},
/**
* Draw text. The text can contain a subset of HTML, like spans and anchors
* and some basic text styling of these. For more advanced features like
* border and background, use {@link Highcharts.SVGRenderer#label} instead.
* To update the text after render, run `text.attr({ text: 'New text' })`.
*
* @sample highcharts/members/renderer-text-on-chart/
* Annotate the chart freely
* @sample highcharts/members/renderer-on-chart/
* Annotate with a border and in response to the data
* @sample highcharts/members/renderer-text/
* Formatted text
*
* @function Highcharts.SVGRenderer#text
*
* @param {string} str
* The text of (subset) HTML to draw.
*
* @param {number} x
* The x position of the text's lower left corner.
*
* @param {number} y
* The y position of the text's lower left corner.
*
* @param {boolean} [useHTML=false]
* Use HTML to render the text.
*
* @return {Highcharts.SVGElement}
* The text object.
*/
text: function (str, x, y, useHTML) {
// declare variables
var renderer = this,
wrapper,
attribs = {};
if (useHTML && (renderer.allowHTML || !renderer.forExport)) {
return renderer.html(str, x, y);
}
attribs.x = Math.round(x || 0); // X always needed for line-wrap logic
if (y) {
attribs.y = Math.round(y);
}
if (defined(str)) {
attribs.text = str;
}
wrapper = renderer.createElement('text')
.attr(attribs);
if (!useHTML) {
wrapper.xSetter = function (value, key, element) {
var tspans = element.getElementsByTagName('tspan'),
tspan,
parentVal = element.getAttribute(key),
i;
for (i = 0; i < tspans.length; i++) {
tspan = tspans[i];
// If the x values are equal, the tspan represents a
// linebreak
if (tspan.getAttribute(key) === parentVal) {
tspan.setAttribute(key, value);
}
}
element.setAttribute(key, value);
};
}
return wrapper;
},
/**
* Utility to return the baseline offset and total line height from the font
* size.
*
* @function Highcharts.SVGRenderer#fontMetrics
*
* @param {string} [fontSize]
* The current font size to inspect. If not given, the font size
* will be found from the DOM element.
*
* @param {Highcharts.SVGElement|Highcharts.SVGDOMElement} [elem]
* The element to inspect for a current font size.
*
* @return {Highcharts.FontMetricsObject}
* The font metrics.
*/
fontMetrics: function (fontSize, elem) {
var lineHeight,
baseline;
if (this.styledMode) {
fontSize = elem && SVGElement.prototype.getStyle.call(
elem,
'font-size'
);
} else {
fontSize = fontSize ||
// When the elem is a DOM element (#5932)
(elem && elem.style && elem.style.fontSize) ||
// Fall back on the renderer style default
(this.style && this.style.fontSize);
}
// Handle different units
if (/px/.test(fontSize)) {
fontSize = pInt(fontSize);
} else if (/em/.test(fontSize)) {
// The em unit depends on parent items
fontSize = parseFloat(fontSize) *
(elem ? this.fontMetrics(null, elem.parentNode).f : 16);
} else {
fontSize = 12;
}
// Empirical values found by comparing font size and bounding box
// height. Applies to the default font family.
// https://jsfiddle.net/highcharts/7xvn7/
lineHeight = fontSize < 24 ? fontSize + 3 : Math.round(fontSize * 1.2);
baseline = Math.round(lineHeight * 0.8);
return {
h: lineHeight,
b: baseline,
f: fontSize
};
},
/**
* Correct X and Y positioning of a label for rotation (#1764).
*
* @private
* @function Highcharts.SVGRenderer#rotCorr
*
* @param {number} baseline
*
* @param {number} rotation
*
* @param {boolean} alterY
*/
rotCorr: function (baseline, rotation, alterY) {
var y = baseline;
if (rotation && alterY) {
y = Math.max(y * Math.cos(rotation * deg2rad), 4);
}
return {
x: (-baseline / 3) * Math.sin(rotation * deg2rad),
y: y
};
},
/**
* Draw a label, which is an extended text element with support for border
* and background. Highcharts creates a `g` element with a text and a `path`
* or `rect` inside, to make it behave somewhat like a HTML div. Border and
* background are set through `stroke`, `stroke-width` and `fill` attributes
* using the {@link Highcharts.SVGElement#attr|attr} method. To update the
* text after render, run `label.attr({ text: 'New text' })`.
*
* @sample highcharts/members/renderer-label-on-chart/
* A label on the chart
*
* @function Highcharts.SVGRenderer#label
*
* @param {string} str
* The initial text string or (subset) HTML to render.
*
* @param {number} x
* The x position of the label's left side.
*
* @param {number} y
* The y position of the label's top side or baseline, depending on
* the `baseline` parameter.
*
* @param {string} [shape='rect']
* The shape of the label's border/background, if any. Defaults to
* `rect`. Other possible values are `callout` or other shapes
* defined in {@link Highcharts.SVGRenderer#symbols}.
     *
*
* @param {number} [anchorX]
* In case the `shape` has a pointer, like a flag, this is the
* coordinates it should be pinned to.
*
* @param {number} [anchorY]
* In case the `shape` has a pointer, like a flag, this is the
* coordinates it should be pinned to.
*
* @param {boolean} [useHTML=false]
     *        Whether to use HTML to render the label.
*
* @param {boolean} [baseline=false]
* Whether to position the label relative to the text baseline,
* like {@link Highcharts.SVGRenderer#text|renderer.text}, or to the
* upper border of the rectangle.
*
* @param {string} [className]
* Class name for the group.
*
* @return {Highcharts.SVGElement}
* The generated label.
*/
    label: function (
        str,
        x,
        y,
        shape,
        anchorX,
        anchorY,
        useHTML,
        baseline,
        className
    ) {
        var renderer = this,
            styledMode = renderer.styledMode,
            wrapper = renderer.g(className !== 'button' && 'label'),
            text = wrapper.text = renderer.text('', 0, 0, useHTML)
                .attr({
                    zIndex: 1
                }),
            box,
            bBox,
            alignFactor = 0,
            padding = 3,
            paddingLeft = 0,
            width,
            height,
            wrapperX,
            wrapperY,
            textAlign,
            // Box attributes requested before the box exists, applied later
            deferredAttr = {},
            strokeWidth,
            baselineOffset,
            hasBGImage = /^url\((.*?)\)$/.test(shape),
            needsBox = styledMode || hasBGImage,
            // Half-pixel offset so odd stroke widths render crisply
            getCrispAdjust = function () {
                return styledMode ?
                    box.strokeWidth() % 2 / 2 :
                    (strokeWidth ? parseInt(strokeWidth, 10) : 0) % 2 / 2;
            },
            updateBoxSize,
            updateTextPadding,
            boxAttr;
        if (className) {
            wrapper.addClass('highcharts-' + className);
        }
        /* This function runs after the label is added to the DOM (when the
            bounding box is available), and after the text of the label is
            updated to detect the new bounding box and reflect it in the border
            box. */
        updateBoxSize = function () {
            var style = text.element.style,
                crispAdjust,
                attribs = {};
            bBox = (
                (width === undefined || height === undefined || textAlign) &&
                defined(text.textStr) &&
                text.getBBox()
            ); // #3295 && 3514 box failure when string equals 0
            wrapper.width = (
                (width || bBox.width || 0) +
                2 * padding +
                paddingLeft
            );
            wrapper.height = (height || bBox.height || 0) + 2 * padding;
            // Update the label-scoped y offset
            baselineOffset = padding + Math.min(
                renderer.fontMetrics(style && style.fontSize, text).b,
                // Math.min because of inline style (#9400)
                bBox ? bBox.height : Infinity
            );
            if (needsBox) {
                // Create the border box if it is not already present
                if (!box) {
                    // Symbol definition exists (#5324)
                    wrapper.box = box = renderer.symbols[shape] || hasBGImage ?
                        renderer.symbol(shape) :
                        renderer.rect();
                    box.addClass( // Don't use label className for buttons
                        (className === 'button' ? '' : 'highcharts-label-box') +
                        (className ? ' highcharts-' + className + '-box' : '')
                    );
                    box.add(wrapper);
                    crispAdjust = getCrispAdjust();
                    attribs.x = crispAdjust;
                    attribs.y = (baseline ? -baselineOffset : 0) + crispAdjust;
                }
                // Apply the box attributes
                attribs.width = Math.round(wrapper.width);
                attribs.height = Math.round(wrapper.height);
                box.attr(extend(attribs, deferredAttr));
                deferredAttr = {};
            }
        };
        /*
         * This function runs after setting text or padding, but only if padding
         * is changed.
         */
        updateTextPadding = function () {
            var textX = paddingLeft + padding,
                textY;
            // Determine y based on the baseline
            textY = baseline ? 0 : baselineOffset;
            // compensate for alignment
            if (
                defined(width) &&
                bBox &&
                (textAlign === 'center' || textAlign === 'right')
            ) {
                textX += { center: 0.5, right: 1 }[textAlign] *
                    (width - bBox.width);
            }
            // update if anything changed
            if (textX !== text.x || textY !== text.y) {
                text.attr('x', textX);
                // #8159 - prevent misplaced data labels in treemap
                // (useHTML: true)
                if (text.hasBoxWidthChanged) {
                    bBox = text.getBBox(true);
                    updateBoxSize();
                }
                if (textY !== undefined) {
                    text.attr('y', textY);
                }
            }
            // record current values
            text.x = textX;
            text.y = textY;
        };
        /*
         * Set a box attribute, or defer it if the box is not yet created
         */
        boxAttr = function (key, value) {
            if (box) {
                box.attr(key, value);
            } else {
                deferredAttr[key] = value;
            }
        };
        /*
         * After the text element is added, get the desired size of the border
         * box and add it before the text in the DOM.
         */
        wrapper.onAdd = function () {
            text.add(wrapper);
            wrapper.attr({
                // Alignment is available now (#3295, 0 not rendered if given
                // as a value)
                text: (str || str === 0) ? str : '',
                x: x,
                y: y
            });
            if (box && defined(anchorX)) {
                wrapper.attr({
                    anchorX: anchorX,
                    anchorY: anchorY
                });
            }
        };
        /*
         * Add specific attribute setters.
         */
        // only change local variables
        wrapper.widthSetter = function (value) {
            width = H.isNumber(value) ? value : null; // width:auto => null
        };
        wrapper.heightSetter = function (value) {
            height = value;
        };
        wrapper['text-alignSetter'] = function (value) {
            textAlign = value;
        };
        wrapper.paddingSetter = function (value) {
            if (defined(value) && value !== padding) {
                padding = wrapper.padding = value;
                updateTextPadding();
            }
        };
        wrapper.paddingLeftSetter = function (value) {
            if (defined(value) && value !== paddingLeft) {
                paddingLeft = value;
                updateTextPadding();
            }
        };
        // change local variable and prevent setting attribute on the group
        wrapper.alignSetter = function (value) {
            value = { left: 0, center: 0.5, right: 1 }[value];
            if (value !== alignFactor) {
                alignFactor = value;
                // Bounding box exists, means we're dynamically changing
                if (bBox) {
                    wrapper.attr({ x: wrapperX }); // #5134
                }
            }
        };
        // apply these to the box and the text alike
        wrapper.textSetter = function (value) {
            if (value !== undefined) {
                text.textSetter(value);
            }
            updateBoxSize();
            updateTextPadding();
        };
        // apply these to the box but not to the text
        wrapper['stroke-widthSetter'] = function (value, key) {
            if (value) {
                needsBox = true;
            }
            strokeWidth = this['stroke-width'] = value;
            boxAttr(key, value);
        };
        if (styledMode) {
            wrapper.rSetter = function (value, key) {
                boxAttr(key, value);
            };
        } else {
            wrapper.strokeSetter =
            wrapper.fillSetter =
            wrapper.rSetter = function (value, key) {
                if (key !== 'r') {
                    if (key === 'fill' && value) {
                        needsBox = true;
                    }
                    // for animation getter (#6776)
                    wrapper[key] = value;
                }
                boxAttr(key, value);
            };
        }
        wrapper.anchorXSetter = function (value, key) {
            anchorX = wrapper.anchorX = value;
            boxAttr(key, Math.round(value) - getCrispAdjust() - wrapperX);
        };
        wrapper.anchorYSetter = function (value, key) {
            anchorY = wrapper.anchorY = value;
            boxAttr(key, value - wrapperY);
        };
        // rename attributes
        wrapper.xSetter = function (value) {
            wrapper.x = value; // for animation getter
            if (alignFactor) {
                value -= alignFactor * ((width || bBox.width) + 2 * padding);
                // Force animation even when setting to the same value (#7898)
                wrapper['forceAnimate:x'] = true;
            }
            wrapperX = Math.round(value);
            wrapper.attr('translateX', wrapperX);
        };
        wrapper.ySetter = function (value) {
            wrapperY = wrapper.y = Math.round(value);
            wrapper.attr('translateY', wrapperY);
        };
        // Redirect certain methods to either the box or the text
        var baseCss = wrapper.css;
        var wrapperExtension = {
            /**
             * Pick up some properties and apply them to the text instead of the
             * wrapper.
             */
            css: function (styles) {
                if (styles) {
                    var textStyles = {};
                    // Create a copy to avoid altering the original object
                    // (#537)
                    styles = merge(styles);
                    wrapper.textProps.forEach(function (prop) {
                        if (styles[prop] !== undefined) {
                            textStyles[prop] = styles[prop];
                            delete styles[prop];
                        }
                    });
                    text.css(textStyles);
                    // Update existing text and box
                    if ('width' in textStyles) {
                        updateBoxSize();
                    }
                    // Keep updated (#9400)
                    if ('fontSize' in textStyles) {
                        updateBoxSize();
                        updateTextPadding();
                    }
                }
                return baseCss.call(wrapper, styles);
            },
            /*
             * Return the bounding box of the box, not the group.
             */
            getBBox: function () {
                return {
                    width: bBox.width + 2 * padding,
                    height: bBox.height + 2 * padding,
                    x: bBox.x - padding,
                    y: bBox.y - padding
                };
            },
            /**
             * Destroy and release memory.
             */
            destroy: function () {
                // Added by button implementation
                removeEvent(wrapper.element, 'mouseenter');
                removeEvent(wrapper.element, 'mouseleave');
                if (text) {
                    text = text.destroy();
                }
                if (box) {
                    box = box.destroy();
                }
                // Call base implementation to destroy the rest
                SVGElement.prototype.destroy.call(wrapper);
                // Release local pointers (#1298)
                wrapper =
                renderer =
                updateBoxSize =
                updateTextPadding =
                boxAttr = null;
            }
        };
        if (!styledMode) {
            /**
             * Apply the shadow to the box.
             *
             * @ignore
             * @function Highcharts.SVGElement#shadow
             *
             * @return {Highcharts.SVGElement}
             */
            wrapperExtension.shadow = function (b) {
                if (b) {
                    updateBoxSize();
                    if (box) {
                        box.shadow(b);
                    }
                }
                return wrapper;
            };
        }
        return extend(wrapper, wrapperExtension);
    }
}); // end SVGRenderer
// general renderer
H.Renderer = SVGRenderer;<|fim▁end|> | |
<|file_name|>msg_pong.rs<|end_file_name|><|fim▁begin|>use std;
use ::serialize::{self, Serializable};
use super::PingMessage;
use super::BIP0031_VERSION;
/// Payload of the bitcoin p2p `pong` message: echoes back the nonce of a
/// received `ping` so the peer can match the reply (BIP 0031).
#[derive(Debug,Default,Clone)]
pub struct PongMessage
{
    pub nonce: u64,
}
impl PongMessage {
pub fn new(ping:&PingMessage) -> PongMessage {
PongMessage{ nonce: ping.nonce }
}
}
impl super::Message for PongMessage {
    /// Wire command name for this message ("pong", zero-padded).
    fn get_command(&self) -> [u8; super::message_header::COMMAND_SIZE] {
        super::message_header::COMMAND_PONG
    }
}
impl std::fmt::Display for PongMessage {
    /// Human-readable form, e.g. `Pong(nonce=42)`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.write_fmt(format_args!("Pong(nonce={})", self.nonce))
    }
}
impl Serializable for PongMessage {
fn get_serialize_size(&self, ser:&serialize::SerializeParam) -> usize {
if BIP0031_VERSION < ser.version {
self.nonce.get_serialize_size(ser)
} else {<|fim▁hole|> fn serialize(&self, io:&mut std::io::Write, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.serialize(io, ser)
} else {
Ok(0usize)
}
}
fn deserialize(&mut self, io:&mut std::io::Read, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.deserialize(io, ser)
} else {
Ok(0usize)
}
}
}<|fim▁end|> | 0usize
}
} |
<|file_name|>Creator.py<|end_file_name|><|fim▁begin|>#
# Created as part of the StratusLab project (http://stratuslab.eu),
# co-funded by the European Commission under the Grant Agreement
# INFSO-RI-261552."
#
# Copyright (c) 2011, SixSq Sarl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from datetime import datetime
import time
import tempfile
from stratuslab.CloudConnectorFactory import CloudConnectorFactory
from stratuslab.Util import sshCmd
from stratuslab.Util import sshCmdWithOutput
from stratuslab.Util import waitUntilPingOrTimeout
from stratuslab.Util import getHostnameFromUri
import stratuslab.Util as Util
from Exceptions import ValidationException
from Exceptions import ExecutionException
from Authn import AuthnFactory
from stratuslab.system.ubuntu import installCmd as aptInstallCmd
from stratuslab.system.ubuntu import updateCmd as aptUpdateCmd
from stratuslab.system.ubuntu import cleanPackageCacheCmd as aptCleanPackageCacheCmd
from stratuslab.system.centos import installCmd as yumInstallCmd
from stratuslab.system.centos import updateCmd as yumUpdateCmd
from stratuslab.system.centos import cleanPackageCacheCmd as yumCleanPackageCacheCmd
from stratuslab.image.Image import Image
from stratuslab.system import Systems
from stratuslab import Defaults
from stratuslab.marketplace.ManifestDownloader import ManifestDownloader
from stratuslab.Monitor import Monitor
from stratuslab.vm_manager.vm_manager import VmManager
from stratuslab.vm_manager.vm_manager_factory import VmManagerFactory
class Creator(object):
VM_START_TIMEOUT = 60 * 10
VM_PING_TIMEOUT = 60 * 5
excludeFromCreatedImageDefault = ['/tmp/*',
'/etc/ssh/ssh_host_*',
'/root/.ssh/{authorized_keys,known_hosts}']
def __init__(self, image, configHolder):
self.image = image
self.configHolder = configHolder
self.newImageGroupName = ''
self.newInstalledSoftwareName = ''
self.newInstalledSoftwareVersion = ''
self.newImageGroupVersion = ''
self.newImageGroupVersionWithManifestId = False
self.author = ''
self.title = ''
self.comment = ''
self.os = ''
self.authorEmail = ''
self.marketplaceEndpointNewimage = ''
self.endpoint = ''
self.extraOsReposUrls = ''
self.packages = ''
self.scripts = ''
self.prerecipe = ''
self.recipe = ''
self.verboseLevel = ''
self.shutdownVm = True
self.signManifest = True
self.vmStartTimeout = self.VM_START_TIMEOUT
self.vmPingTimeout = self.VM_PING_TIMEOUT
self.options = VmManager.defaultRunOptions()
self.options.update(configHolder.options)
self.configHolder.options.update(self.options)
configHolder.assign(self)
self._set_stdouterr()
credentials = AuthnFactory.getCredentials(self)
self.cloud = CloudConnectorFactory.getCloud(credentials)
self.cloud.setEndpoint(self.endpoint)
self.runner = None
self.vmAddress = None
self.vmId = None
self.vmIp = None
self.vmName = 'creator'
self.userPublicKeyFile = self.options.get('userPublicKeyFile',
Defaults.sshPublicKeyLocation)
self.userPrivateKeyFile = self.userPublicKeyFile.strip('.pub')
self.mainDisk = ''
self.extraDisk = ''
self.mountPointExtraDisk = '/media'
self.imageFile = ''
self.imageFileBundled = ''
self.excludeFromCreatedImage = \
self.excludeFromCreatedImageDefault + \
self.options.get('excludeFromCreatedImage', '').split(',')
self.installer = self.options.get('installer')
self.targetImageUri = ''
self.targetManifestUri = ''
self.manifest = ''
self.manifestObject = None
self.newManifestFileName = None
self.manifestLocalFileName = ''
self.__listener = CreatorBaseListener()
    def _set_stdouterr(self):
        # Bind stdout/stderr handling for this object via the shared helper.
        Util.set_stdouterr(self)
    def printDetail(self, msg):
        # Print msg only when the configured verbosity reaches NORMAL level.
        return Util.printDetail(msg, self.verboseLevel, Util.VERBOSE_LEVEL_NORMAL)
    def create(self):
        """Run the full image creation: start the builder VM, apply the
        increment, then always shut down and clean up (even on failure)."""
        self._printAction('Starting image creation')

        self.startNode()
        try:
            self.buildNodeIncrement()
            self._printAction('Finished building image increment.')
            self._printAction('Please check %s for new image ID and instruction.' %
                              self.authorEmail)
        finally:
            # Guarantee VM shutdown and local cleanup regardless of outcome.
            self._shutdownNode()
        self._localCleanUp()
    def startNode(self):
        """Validate the base image, boot the builder VM and wait until it
        is reachable over the network and SSH."""
        self._imageExists()
        self._retrieveManifest()
        self.__setAttributesFromManifest()
        self.__createRunner()
        self._startMachine()
        self._waitMachineNetworkUpOrAbort()
        self._checkIfCanConnectToMachine()
    def buildNodeIncrement(self):
        """Apply the image increment on the running VM, in fixed order:
        prerecipe, package installation, recipe, then extra scripts."""
        self._executePrerecipe()
        self._installPackages()
        self._executeRecipe()
        self._executeScripts()
    def _printAction(self, msg):
        # Print a top-level action and notify the registered listener.
        Util.printAction(msg)
        self._notifyOnAction(msg)
    def _printStep(self, msg):
        # Print a step within the current action and notify the listener.
        Util.printStep(msg)
        self._notifyOnStep(msg)
    def _printError(self, msg):
        # Notify the listener first: Util.printError may raise/exit.
        self._notifyOnError(msg)
        Util.printError(msg)
    def setListener(self, listener):
        # Replace the default listener; a falsy argument keeps the current one.
        if listener:
            self.__listener = listener
    def _notifyOnAction(self, note):
        # Dispatch to the listener's onAction (or onAny fallback).
        self._notify('Action', note)
    def _notifyOnStep(self, note):
        # Dispatch to the listener's onStep (or onAny fallback).
        self._notify('Step', note)
    def _notifyOnError(self, note):
        # Dispatch to the listener's onError (or onAny fallback).
        self._notify('Error', note)
    def _notify(self, operation, note):
        """Invoke on<operation>(note) on the listener, falling back to
        onAny(note) when the specific handler is absent."""
        def callListener():
            notifyFunction = getattr(self.__listener, onOperation)
            notifyFunction(note)
        onOperation = 'on%s' % operation
        if hasattr(self.__listener, onOperation):
            pass
        elif hasattr(self.__listener, 'onAny'):
            onOperation = 'onAny'
        # NOTE(review): if the listener has neither handler, the getattr
        # below raises AttributeError — presumably CreatorBaseListener
        # always defines onAny; confirm.
        callListener()
def _checkIfCanConnectToMachine(self):
self._printStep('Check if we can connect to the machine')
cmd = 'true'
try:
self._sshCmdWithOutputVerb(cmd)
except ExecutionException:
sleepTime = 6
maxCount = 40
counter = 0
while True:
try:
self.printDetail('Sleeping %i sec. Retry %i out of %i.' % (sleepTime, counter + 1, maxCount))
time.sleep(sleepTime)
self._sshCmdWithOutputVerb(cmd)
break
except ExecutionException, e:
if counter >= maxCount:
raise ExecutionException(e)
counter += 1<|fim▁hole|>
    def _imageExists(self):
        # Abort early if the base image is not resolvable.
        self._printStep('Checking that base image exists')
        self._checkImageExists()
def _checkImageExists(self):
image = Image(self.configHolder)
image.checkImageExists(self.image)
def _getCreateImageTemplateDict(self):
return {VmManager.CREATE_IMAGE_KEY_CREATOR_EMAIL: self.authorEmail,
VmManager.CREATE_IMAGE_KEY_CREATOR_NAME: self.author,
VmManager.CREATE_IMAGE_KEY_NEWIMAGE_TITLE: self.title,
VmManager.CREATE_IMAGE_KEY_NEWIMAGE_COMMENT: self.comment,
VmManager.CREATE_IMAGE_KEY_NEWIMAGE_VERSION: self.newImageGroupVersion,
VmManager.CREATE_IMAGE_KEY_NEWIMAGE_MARKETPLACE: self.marketplaceEndpointNewimage}
    def createRunner(self):
        # Public wrapper around the private (name-mangled) __createRunner.
        self.__createRunner()
    def __createRunner(self):
        """Build self.runner configured to save the VM disk as a new image."""
        # Timestamp the VM name so repeated builds remain distinguishable.
        self.configHolder.set('vmName',
                              '%s: %s' % (self.vmName, Util.getTimeInIso8601()))
        # Skip URL re-validation here — presumably checked earlier in the
        # workflow; confirm before relying on it.
        self.configHolder.set('noCheckImageUrl', True)
        self.configHolder.set('saveDisk', True)
        self.runner = VmManagerFactory.create(self.image, self.configHolder)
        self.runner.updateCreateImageTemplateData(
            self._getCreateImageTemplateDict())
    def _startMachine(self):
        """Start the base-image VM and wait until it is running.

        Sets self.vmId, self.vmIp and self.vmAddress. Failures are routed
        through self._printError. NOTE(review): execution continues past the
        except blocks, which only makes sense if _printError raises or
        exits — confirm against Util.printError.
        """
        self._printStep('Starting base image')
        try:
            self.vmId = self.runner.runInstance()[0]
        except Exception, msg:
            self._printError('An error occurred while starting machine: \n\t%s' % msg)
        try:
            _, self.vmIp = self.runner.getNetworkDetail(self.vmId)
            self.vmAddress = self.vmIp
        except Exception, e:
            self._printError('An error occurred while getting machine network details: \n\t%s' % str(e))
        self._printStep('Waiting for machine to boot')
        vmStarted = self.runner.waitUntilVmRunningOrTimeout(self.vmId,
                                                            self.vmStartTimeout,
                                                            failOn='Failed')
        if not vmStarted:
            # Distinguish an explicit 'Failed' state from a plain timeout.
            if self.runner.getVmState(self.vmId) == 'Failed':
                msg = 'Failed to start VM (id=%s, ip=%s): %s' % \
                    (self.vmId, self.vmAddress,
                     self._getVmFailureMessage(self.vmId))
            else:
                msg = 'Failed to start VM within %i seconds (id=%s, ip=%s)' % \
                    (self.vmStartTimeout, self.vmId, self.vmAddress)
            self.printDetail(msg)
            # Reclaim the half-started VM before reporting the error.
            self._killMachine()
            self._printError(msg)
    def _stopMachine(self):
        """Shut the VM down unless it already ended up in the Failed state."""
        self._printStep('Shutting down machine')
        if self.getVmState() != 'Failed':
            self.cloud.vmStop(self.vmId)
def _killMachine(self):
self._printStep('Killing machine')
if self.vmId:
self.cloud.vmKill(self.vmId)
else:
Util.printWarning('Undefined VM ID, when trying to kill machine.')
    def _getVmFailureMessage(self, vmId):
        # Fetch the cloud-side error message for the VM; defaults to an
        # empty string when the detail object lacks the attribute.
        return getattr(Monitor(self.configHolder)._vmDetail(vmId),
                       'template_error_message', '')
def _shutdownNode(self):
if self.shutdownVm:
self._stopMachine()
else:
self._printStep('Machine ready for use')
msg = '\n\tMachine IP: %s\tRemember to stop the machine when finished' % self.vmIp
Util.printInfo(msg)
    def _waitMachineNetworkUpOrAbort(self):
        """Wait (by pinging) until the VM network is up; abort on timeout."""
        self._printStep('Waiting for machine network to start')
        if not waitUntilPingOrTimeout(self.vmAddress, self.vmPingTimeout):
            msg = 'Unable to ping VM in %i seconds (id=%s, ip=%s)' % \
                (self.vmPingTimeout, self.vmId, self.vmAddress)
            # Report first, then stop the unreachable VM.
            self._printError(msg)
            self._stopMachine()
    def _getPublicAddress(self):
        # The VM's IP doubles as its public address here.
        return self.vmIp
    def _retrieveManifest(self):
        """Download the image manifest and cache it on the instance.

        Sets self.manifestObject (parsed) and self.manifest (serialized).
        """
        self._printStep('Retrieving image manifest')
        # Copy the config so the downloader cannot mutate our settings.
        configHolder = self.configHolder.copy()
        downloader = ManifestDownloader(configHolder)
        self.manifestObject = downloader.getManifestInfo(self.image)
        self.manifest = self.manifestObject.tostring()
    def __setAttributesFromManifest(self):
        # Derive OS first; the installer choice depends on it.
        self._setOsFromManifest()
        self._setInstallerBasedOnOs()
    def _setOsFromManifest(self):
        # Only fill in self.os when it was not set explicitly.
        if not self.os:
            self.os = self._getAttrFromManifest('os').lower()
    def _setInstallerBasedOnOs(self):
        # Only fill in self.installer when it was not set explicitly.
        if not self.installer:
            self.installer = Systems.getInstallerBasedOnOs(self.os)
    def _getAttrFromManifest(self, attr):
        # Raises AttributeError if the manifest lacks the attribute.
        return getattr(self.manifestObject, attr)
def _installPackages(self):
self._printStep('Installing user packages')
if len(self.packages) == 0:
self.printDetail('No packages to install')
return
self._setUpExtraRepositories()
self.printDetail('Updating installer')
ret = self._doInstallerUpdate()
self.printDetail('Installing packages: %s' % self.packages)
ret = self._doInstallPackagesRemotly(self.packages)
if ret != 0:
self._printError('An error occurred while installing packages')
def _setUpExtraRepositories(self):
if not self.extraOsReposUrls:
return
self.printDetail('Adding extra repositories')
if self.installer not in Systems.INSTALLERS:
ValidationException('Unknown installer %s. Bailing out.' %
self.installer)
extraReposList = self.extraOsReposUrls.split(',')
if self.installer == 'yum':
for i, repoUrl in enumerate(extraReposList):
repoName = getHostnameFromUri(repoUrl)
cmd = """cat >> /etc/yum.repos.d/%(name)s.repo << EOF
[%(name)s]
name=%(name)s
baseurl=%(url)s
gpgcheck=0
enabled=1
EOF
""" % {'name': '%s-%i' % (repoName, i), 'id': i, 'url': repoUrl}
elif self.installer == 'apt':
for repoUrl in extraReposList:
repoName = getHostnameFromUri(repoUrl)
cmd = """cat >> /etc/apt/sources.list.d/%(reponame)s.list << EOF
deb %(repourl)s
EOF
""" % {'reponame': repoName, 'repourl': repoUrl}
self._sshCmdWithOutput(cmd)
    def _doInstallPackagesRemotly(self, packages):
        """Install a comma-separated package list on the VM over SSH.

        Returns the remote command's exit code. (The 'Remotly' typo in the
        name is kept for compatibility with existing callers.)
        """
        cmd = self._buildInstallerCommand() + ' '
        cmd += ' '.join(packages.split(','))
        return self._sshCmd(cmd, stderr=self.stderr, stdout=self.stdout)
    def _doInstallerUpdate(self):
        # Refresh the package index on the VM; returns the exit code.
        cmd = self._buildUpdaterCommand()
        return self._sshCmd(cmd, stderr=self.stderr, stdout=self.stdout)
def _buildInstallerCommand(self):
if self.installer == 'yum':
return yumInstallCmd
elif self.installer == 'apt':
return aptInstallCmd
def _buildUpdaterCommand(self):
if self.installer == 'yum':
return yumUpdateCmd
elif self.installer == 'apt':
return aptUpdateCmd
def _buildPackageCacheCleanerCommand(self):
if self.installer == 'yum':
return yumCleanPackageCacheCmd
elif self.installer == 'apt':
return aptCleanPackageCacheCmd
    def _executeScripts(self):
        """Upload and execute each user script (comma-separated list)."""
        self._printStep('Executing user scripts')
        if len(self.scripts) == 0:
            self.printDetail('No scripts to execute')
            return
        self.printDetail('Executing scripts: %s' % self.scripts)
        for script in self.scripts.split(','):
            self._uploadAndExecuteRemoteScript(script)
    def _uploadAndExecuteRemoteScript(self, script):
        """Upload one user script to /tmp on the VM and execute it.

        'script' is a local path, optionally followed by a space and the
        arguments to pass to it (e.g. '/path/run.sh --fast').
        """
        def __tellScriptNameAndArgs(script):
            # Split the basename into [name, args] on the first space.
            scriptNameAndArgs = os.path.basename(script)
            scriptNameAndArgsList = scriptNameAndArgs.split(' ', 1)
            if len(scriptNameAndArgsList) == 1: # no arguments given
                scriptNameAndArgsList = scriptNameAndArgsList + ['']
            return scriptNameAndArgsList
        def _uploadScript(script):
            # Copy the script to /tmp on the VM and make it executable.
            scriptName, args = __tellScriptNameAndArgs(script)
            scriptDirectory = Util.sanitizePath(os.path.dirname(script))
            scriptPathLocal = os.path.abspath(os.path.join(scriptDirectory, scriptName))
            scriptPathRemote = '/tmp/%s' % scriptName
            rc, output = self._scpWithOutput(scriptPathLocal, 'root@%s:%s' % (self.vmAddress, scriptPathRemote))
            if rc != 0:
                self._printError('An error occurred while uploading script %s\n%s' % (script, output))
            self._sshCmdWithOutput('chmod 0755 %s' % scriptPathRemote)
            return scriptPathRemote, args
        def _executeRemoteScript(scriptPathRemote, args=''):
            # pseudoTTY so interactive-ish scripts behave; do not raise on
            # failure — report through _printError instead.
            rc = self._sshCmd('%s %s' % (scriptPathRemote, args), throwOnError=False,
                              pseudoTTY=True)
            if rc != 0:
                self._printError('An error occurred while executing script %s' % script)
        scriptPathRemote, args = _uploadScript(script)
        _executeRemoteScript(scriptPathRemote, args)
    def _executePrerecipe(self):
        """Run the user prerecipe on the VM, if one was provided."""
        self._printStep('Executing user prerecipe')
        if len(self.prerecipe) == 0:
            self.printDetail('No prerecipe to execute')
            return
        self._uploadAndExecuteRemoteRecipe(self.prerecipe)
    def _executeRecipe(self):
        """Run the user recipe on the VM, if one was provided."""
        self._printStep('Executing user recipe')
        if len(self.recipe) == 0:
            self.printDetail('No recipe to execute')
            return
        self._uploadAndExecuteRemoteRecipe(self.recipe)
    def _uploadAndExecuteRemoteRecipe(self, script):
        """Write the recipe text to a temp file, upload it to /tmp on the
        VM, and execute it there.

        'script' is the recipe CONTENT (a string), not a path.
        """
        fd, recipeFile = tempfile.mkstemp()
        try:
            os.write(fd, script)
            os.close(fd)
            # 0755: Python 2 octal literal — executable by everyone.
            os.chmod(recipeFile, 0755)
            scriptPath = '/tmp/%s' % os.path.basename(recipeFile)
            rc = self._scp(recipeFile, 'root@%s:%s' % (self.vmAddress, scriptPath))
            if rc != 0:
                self._printError('An error occurred while uploading recipe')
            self._sshCmdWithOutput('chmod 0755 %s' % scriptPath)
            rc = self._sshCmd(scriptPath, throwOnError=False, pseudoTTY=True)
            if rc != 0:
                self._printError('An error occurred while executing user recipe.')
        finally:
            # Best-effort removal of the local temp file; failure to unlink
            # must not mask the real outcome.
            try:
                os.unlink(recipeFile)
            except:
                pass
    def _localCleanUp(self):
        # Best-effort removal of the locally cached manifest file.
        Util.execute(['rm', '-rf', self.manifestLocalFileName])
    def _scp(self, src, dst, **kwargs):
        """Copy src to dst via scp using the user's private key."""
        return Util.scp(src, dst, self.userPrivateKeyFile,
                        verboseLevel=self.verboseLevel, verboseThreshold=Util.VERBOSE_LEVEL_DETAILED,
                        stderr=self.stderr, stdout=self.stdout, **kwargs)
    def _scpWithOutput(self, src, dst):
        # Variant of _scp that also captures and returns the command output.
        return self._scp(src, dst, withOutput=True)
    def _sshCmd(self, cmd, throwOnError=True, **kwargs):
        """Run 'cmd' on the VM over SSH; return its exit code.

        When throwOnError is True, a non-zero exit code raises
        ExecutionException instead of being returned to the caller.
        """
        ret = sshCmd(cmd, self.vmAddress,
                     sshKey=self.userPrivateKeyFile,
                     verboseLevel=self.verboseLevel,
                     verboseThreshold=Util.VERBOSE_LEVEL_DETAILED,
                     **kwargs)
        if ret and throwOnError:
            raise ExecutionException('Error executing command: %s' % cmd)
        return ret
    def _sshCmdWithOutput(self, cmd, throwOnError=True, **kwargs):
        """Run 'cmd' on the VM over SSH; return (exit code, output).

        When throwOnError is True, a non-zero exit code raises
        ExecutionException (with the captured output in the message).
        """
        rc, output = sshCmdWithOutput(cmd, self.vmAddress,
                                      sshKey=self.userPrivateKeyFile,
                                      verboseLevel=self.verboseLevel,
                                      verboseThreshold=Util.VERBOSE_LEVEL_DETAILED,
                                      **kwargs)
        if rc and throwOnError:
            raise ExecutionException('Error executing command: %s\n%s' % (cmd, output))
        return rc, output
    def _sshCmdWithOutputVerb(self, cmd, **kwargs):
        # Verbose-SSH variant of _sshCmdWithOutput.
        return self._sshCmdWithOutput(cmd, sshVerb=True, **kwargs)
    def _sshCmdWithOutputQuiet(self, cmd, **kwargs):
        # Quiet-SSH variant of _sshCmdWithOutput.
        return self._sshCmdWithOutput(cmd, sshQuiet=True, **kwargs)
    def getNewImageId(self):
        # Identifier of the new image, taken from the retrieved manifest;
        # valid only after _retrieveManifest has run.
        return self.manifestObject.identifier
    def getVmId(self):
        # VM id assigned by the cloud; valid only after _startMachine.
        return self.vmId
    def getVmState(self):
        # Current state string of the VM as reported by the runner.
        return self.runner.getVmState(self.vmId)
# FIXME: This should be treated as a log handler rather than an ad hoc class.
class CreatorBaseListener(object):
    """Default build listener: silent unless constructed with verbose=True.

    Implements the onAction/onStep/onError hooks dispatched by _notify.
    """
    def __init__(self, verbose=False):
        # In verbose mode, shadow the class-level no-op write() with the
        # printing implementation on this instance.
        if verbose:
            self.write = self.__beVerbose
    def write(self, msg):
        # No-op by default; replaced per-instance when verbose=True.
        pass
    def __beVerbose(self, msg):
        # print(msg) behaves identically under Python 2 (parenthesized
        # expression) and keeps the module importable under Python 3,
        # unlike the former 'print msg' statement.
        print(msg)
    def onAction(self, msg):
        self.write('action: %s' % msg)
    def onStep(self, msg):
        self.write('step: %s' % msg)
    def onError(self, msg):
        self.write('error: %s' % msg)
<|file_name|>AddressBookProtos.java<|end_file_name|><|fim▁begin|>// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: qiantao.proto
package com.qiyi.usercloud.carve.demoservice;
public final class AddressBookProtos {
private AddressBookProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface PersonOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string name = 1;
/**
* <code>required string name = 1;</code>
*/
boolean hasName();
/**
* <code>required string name = 1;</code>
*/
java.lang.String getName();
/**
* <code>required string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes();
// required int32 id = 2;
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
boolean hasId();
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
int getId();
// optional string email = 3;
/**
* <code>optional string email = 3;</code>
*/
boolean hasEmail();
/**
* <code>optional string email = 3;</code>
*/
java.lang.String getEmail();
/**
* <code>optional string email = 3;</code>
*/
com.google.protobuf.ByteString
getEmailBytes();
// optional double doubleF = 4;
/**
* <code>optional double doubleF = 4;</code>
*/
boolean hasDoubleF();
/**
* <code>optional double doubleF = 4;</code>
*/
double getDoubleF();
// optional float floatF = 5;
/**
* <code>optional float floatF = 5;</code>
*/
boolean hasFloatF();
/**
* <code>optional float floatF = 5;</code>
*/
float getFloatF();
// optional bytes bytesF = 6;
/**
* <code>optional bytes bytesF = 6;</code>
*/
boolean hasBytesF();
/**
* <code>optional bytes bytesF = 6;</code>
*/
com.google.protobuf.ByteString getBytesF();
// optional bool boolF = 7;
/**
* <code>optional bool boolF = 7;</code>
*/
boolean hasBoolF();
/**
* <code>optional bool boolF = 7;</code>
*/
boolean getBoolF();
}
/**
* Protobuf type {@code tutorial.Person}
*/
public static final class Person extends
com.google.protobuf.GeneratedMessage
implements PersonOrBuilder {
// Use Person.newBuilder() to construct.
private Person(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Person(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Person defaultInstance;
public static Person getDefaultInstance() {
return defaultInstance;
}
public Person getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Person(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
name_ = input.readBytes();
break;
}
case 16: {
bitField0_ |= 0x00000002;
id_ = input.readInt32();
break;
}
case 26: {
bitField0_ |= 0x00000004;
email_ = input.readBytes();
break;
}
case 33: {
bitField0_ |= 0x00000008;
doubleF_ = input.readDouble();
break;
}
case 45: {
bitField0_ |= 0x00000010;
floatF_ = input.readFloat();
break;
}
case 50: {
bitField0_ |= 0x00000020;
bytesF_ = input.readBytes();
break;
}
case 56: {
bitField0_ |= 0x00000040;
boolF_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_Person_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_Person_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.class, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder.class);
}
public static com.google.protobuf.Parser<Person> PARSER =
new com.google.protobuf.AbstractParser<Person>() {
public Person parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Person(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Person> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
private java.lang.Object name_;
/**
* <code>required string name = 1;</code>
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
/**
* <code>required string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required int32 id = 2;
public static final int ID_FIELD_NUMBER = 2;
private int id_;
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
public boolean hasId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
public int getId() {
return id_;
}
// optional string email = 3;
public static final int EMAIL_FIELD_NUMBER = 3;
private java.lang.Object email_;
/**
* <code>optional string email = 3;</code>
*/
public boolean hasEmail() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string email = 3;</code>
*/
public java.lang.String getEmail() {
java.lang.Object ref = email_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
email_ = s;
}
return s;
}
}
/**
* <code>optional string email = 3;</code>
*/
public com.google.protobuf.ByteString
getEmailBytes() {
java.lang.Object ref = email_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
email_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional double doubleF = 4;
public static final int DOUBLEF_FIELD_NUMBER = 4;
private double doubleF_;
/**
* <code>optional double doubleF = 4;</code>
*/
public boolean hasDoubleF() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional double doubleF = 4;</code>
*/
public double getDoubleF() {
return doubleF_;
}
// optional float floatF = 5;
public static final int FLOATF_FIELD_NUMBER = 5;
private float floatF_;
/**
* <code>optional float floatF = 5;</code>
*/
public boolean hasFloatF() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional float floatF = 5;</code>
*/
public float getFloatF() {
return floatF_;
}
// optional bytes bytesF = 6;
public static final int BYTESF_FIELD_NUMBER = 6;
private com.google.protobuf.ByteString bytesF_;
/**
* <code>optional bytes bytesF = 6;</code>
*/
public boolean hasBytesF() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional bytes bytesF = 6;</code>
*/
public com.google.protobuf.ByteString getBytesF() {
return bytesF_;
}
// optional bool boolF = 7;
public static final int BOOLF_FIELD_NUMBER = 7;
private boolean boolF_;
/**
* <code>optional bool boolF = 7;</code>
*/
public boolean hasBoolF() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional bool boolF = 7;</code>
*/
public boolean getBoolF() {
return boolF_;
}
private void initFields() {
name_ = "";
id_ = 0;
email_ = "";
doubleF_ = 0D;
floatF_ = 0F;
bytesF_ = com.google.protobuf.ByteString.EMPTY;
boolF_ = false;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasId()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt32(2, id_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getEmailBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeDouble(4, doubleF_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeFloat(5, floatF_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeBytes(6, bytesF_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeBool(7, boolF_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, id_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getEmailBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(4, doubleF_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(5, floatF_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(6, bytesF_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(7, boolF_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code tutorial.Person}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_Person_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_Person_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.class, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder.class);
}
// Construct using com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
name_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
id_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
email_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
doubleF_ = 0D;
bitField0_ = (bitField0_ & ~0x00000008);
floatF_ = 0F;
bitField0_ = (bitField0_ & ~0x00000010);
bytesF_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000020);
boolF_ = false;
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_Person_descriptor;
}
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person getDefaultInstanceForType() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.getDefaultInstance();
}
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person build() {
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person buildPartial() {
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person result = new com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.name_ = name_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.id_ = id_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.email_ = email_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.doubleF_ = doubleF_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.floatF_ = floatF_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.bytesF_ = bytesF_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
result.boolF_ = boolF_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person) {
return mergeFrom((com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person other) {
if (other == com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.getDefaultInstance()) return this;
if (other.hasName()) {
bitField0_ |= 0x00000001;
name_ = other.name_;
onChanged();
}
if (other.hasId()) {
setId(other.getId());
}
if (other.hasEmail()) {
bitField0_ |= 0x00000004;
email_ = other.email_;
onChanged();
}
if (other.hasDoubleF()) {
setDoubleF(other.getDoubleF());
}
if (other.hasFloatF()) {
setFloatF(other.getFloatF());
}
if (other.hasBytesF()) {
setBytesF(other.getBytesF());
}
if (other.hasBoolF()) {
setBoolF(other.getBoolF());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasName()) {
return false;
}
if (!hasId()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string name = 1;
private java.lang.Object name_ = "";
/**
* <code>required string name = 1;</code>
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string name = 1;</code>
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
/**
* <code>required string name = 1;</code>
*/
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000001);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <code>required string name = 1;</code>
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
// required int32 id = 2;
private int id_ ;
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
public boolean hasId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
public int getId() {
return id_;
}
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
public Builder setId(int value) {
bitField0_ |= 0x00000002;
id_ = value;
onChanged();
return this;
}
/**
* <code>required int32 id = 2;</code>
*
* <pre>
* Unique ID number for this person.
* </pre>
*/
public Builder clearId() {
bitField0_ = (bitField0_ & ~0x00000002);
id_ = 0;
onChanged();
return this;
}
// optional string email = 3;
private java.lang.Object email_ = "";
/**
* <code>optional string email = 3;</code>
*/
public boolean hasEmail() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string email = 3;</code>
*/
public java.lang.String getEmail() {
java.lang.Object ref = email_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
email_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string email = 3;</code>
*/
public com.google.protobuf.ByteString
getEmailBytes() {
java.lang.Object ref = email_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
email_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string email = 3;</code>
*/
public Builder setEmail(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
email_ = value;
onChanged();
return this;
}
/**
* <code>optional string email = 3;</code>
*/
public Builder clearEmail() {
bitField0_ = (bitField0_ & ~0x00000004);
email_ = getDefaultInstance().getEmail();
onChanged();
return this;
}
/**
* <code>optional string email = 3;</code>
*/
public Builder setEmailBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
email_ = value;
onChanged();
return this;
}
// optional double doubleF = 4;
private double doubleF_ ;
/**
* <code>optional double doubleF = 4;</code>
*/
public boolean hasDoubleF() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional double doubleF = 4;</code>
*/
public double getDoubleF() {
return doubleF_;
}
/**
* <code>optional double doubleF = 4;</code>
*/
public Builder setDoubleF(double value) {
bitField0_ |= 0x00000008;
doubleF_ = value;
onChanged();
return this;
}
/**
* <code>optional double doubleF = 4;</code>
*/
public Builder clearDoubleF() {
bitField0_ = (bitField0_ & ~0x00000008);
doubleF_ = 0D;
onChanged();
return this;
}
// optional float floatF = 5;
private float floatF_ ;
/**
* <code>optional float floatF = 5;</code>
*/
public boolean hasFloatF() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional float floatF = 5;</code>
*/
public float getFloatF() {
return floatF_;
}
/**
* <code>optional float floatF = 5;</code>
*/
public Builder setFloatF(float value) {
bitField0_ |= 0x00000010;
floatF_ = value;
onChanged();
return this;
}
/**
* <code>optional float floatF = 5;</code>
*/
public Builder clearFloatF() {
bitField0_ = (bitField0_ & ~0x00000010);
floatF_ = 0F;
onChanged();
return this;
}
// optional bytes bytesF = 6;
private com.google.protobuf.ByteString bytesF_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes bytesF = 6;</code>
*/
public boolean hasBytesF() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional bytes bytesF = 6;</code>
*/
public com.google.protobuf.ByteString getBytesF() {
return bytesF_;
}
/**
* <code>optional bytes bytesF = 6;</code>
*/
public Builder setBytesF(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000020;
bytesF_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes bytesF = 6;</code>
*/
public Builder clearBytesF() {
bitField0_ = (bitField0_ & ~0x00000020);
bytesF_ = getDefaultInstance().getBytesF();
onChanged();
return this;
}
// optional bool boolF = 7;
private boolean boolF_ ;
/**
* <code>optional bool boolF = 7;</code>
*/
public boolean hasBoolF() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional bool boolF = 7;</code>
*/
public boolean getBoolF() {
return boolF_;
}
/**
* <code>optional bool boolF = 7;</code>
*/
public Builder setBoolF(boolean value) {
bitField0_ |= 0x00000040;
boolF_ = value;
onChanged();
return this;
}
/**
* <code>optional bool boolF = 7;</code>
*/
public Builder clearBoolF() {
bitField0_ = (bitField0_ & ~0x00000040);
boolF_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:tutorial.Person)
}
static {
defaultInstance = new Person(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:tutorial.Person)
}
public interface AddressBookOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .tutorial.Person person = 1;
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
java.util.List<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person>
getPersonList();
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person getPerson(int index);
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
int getPersonCount();
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
java.util.List<? extends com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder>
getPersonOrBuilderList();
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder getPersonOrBuilder(<|fim▁hole|> /**
* Protobuf type {@code tutorial.AddressBook}
*
* <pre>
* Our address book file is just one of these.
* </pre>
*/
public static final class AddressBook extends
com.google.protobuf.GeneratedMessage
implements AddressBookOrBuilder {
// Use AddressBook.newBuilder() to construct.
private AddressBook(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private AddressBook(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final AddressBook defaultInstance;
public static AddressBook getDefaultInstance() {
return defaultInstance;
}
public AddressBook getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AddressBook(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
person_ = new java.util.ArrayList<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person>();
mutable_bitField0_ |= 0x00000001;
}
person_.add(input.readMessage(com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
person_ = java.util.Collections.unmodifiableList(person_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_AddressBook_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_AddressBook_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.class, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.Builder.class);
}
public static com.google.protobuf.Parser<AddressBook> PARSER =
new com.google.protobuf.AbstractParser<AddressBook>() {
public AddressBook parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new AddressBook(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<AddressBook> getParserForType() {
return PARSER;
}
// repeated .tutorial.Person person = 1;
public static final int PERSON_FIELD_NUMBER = 1;
private java.util.List<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person> person_;
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public java.util.List<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person> getPersonList() {
return person_;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public java.util.List<? extends com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder>
getPersonOrBuilderList() {
return person_;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public int getPersonCount() {
return person_.size();
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person getPerson(int index) {
return person_.get(index);
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder getPersonOrBuilder(
int index) {
return person_.get(index);
}
private void initFields() {
person_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getPersonCount(); i++) {
if (!getPerson(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < person_.size(); i++) {
output.writeMessage(1, person_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < person_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, person_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code tutorial.AddressBook}
*
* <pre>
* Our address book file is just one of these.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBookOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_AddressBook_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_AddressBook_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.class, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.Builder.class);
}
// Construct using com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getPersonFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (personBuilder_ == null) {
person_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
personBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.internal_static_tutorial_AddressBook_descriptor;
}
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook getDefaultInstanceForType() {
return com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.getDefaultInstance();
}
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook build() {
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook buildPartial() {
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook result = new com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook(this);
int from_bitField0_ = bitField0_;
if (personBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
person_ = java.util.Collections.unmodifiableList(person_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.person_ = person_;
} else {
result.person_ = personBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook) {
return mergeFrom((com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook other) {
if (other == com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook.getDefaultInstance()) return this;
if (personBuilder_ == null) {
if (!other.person_.isEmpty()) {
if (person_.isEmpty()) {
person_ = other.person_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePersonIsMutable();
person_.addAll(other.person_);
}
onChanged();
}
} else {
if (!other.person_.isEmpty()) {
if (personBuilder_.isEmpty()) {
personBuilder_.dispose();
personBuilder_ = null;
person_ = other.person_;
bitField0_ = (bitField0_ & ~0x00000001);
personBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getPersonFieldBuilder() : null;
} else {
personBuilder_.addAllMessages(other.person_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getPersonCount(); i++) {
if (!getPerson(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.qiyi.usercloud.carve.demoservice.AddressBookProtos.AddressBook) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .tutorial.Person person = 1;
private java.util.List<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person> person_ =
java.util.Collections.emptyList();
private void ensurePersonIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
person_ = new java.util.ArrayList<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person>(person_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder> personBuilder_;
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public java.util.List<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person> getPersonList() {
if (personBuilder_ == null) {
return java.util.Collections.unmodifiableList(person_);
} else {
return personBuilder_.getMessageList();
}
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public int getPersonCount() {
if (personBuilder_ == null) {
return person_.size();
} else {
return personBuilder_.getCount();
}
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person getPerson(int index) {
if (personBuilder_ == null) {
return person_.get(index);
} else {
return personBuilder_.getMessage(index);
}
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder setPerson(
int index, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person value) {
if (personBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePersonIsMutable();
person_.set(index, value);
onChanged();
} else {
personBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder setPerson(
int index, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder builderForValue) {
if (personBuilder_ == null) {
ensurePersonIsMutable();
person_.set(index, builderForValue.build());
onChanged();
} else {
personBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder addPerson(com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person value) {
if (personBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePersonIsMutable();
person_.add(value);
onChanged();
} else {
personBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder addPerson(
int index, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person value) {
if (personBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePersonIsMutable();
person_.add(index, value);
onChanged();
} else {
personBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder addPerson(
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder builderForValue) {
if (personBuilder_ == null) {
ensurePersonIsMutable();
person_.add(builderForValue.build());
onChanged();
} else {
personBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder addPerson(
int index, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder builderForValue) {
if (personBuilder_ == null) {
ensurePersonIsMutable();
person_.add(index, builderForValue.build());
onChanged();
} else {
personBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder addAllPerson(
java.lang.Iterable<? extends com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person> values) {
if (personBuilder_ == null) {
ensurePersonIsMutable();
super.addAll(values, person_);
onChanged();
} else {
personBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder clearPerson() {
if (personBuilder_ == null) {
person_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
personBuilder_.clear();
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public Builder removePerson(int index) {
if (personBuilder_ == null) {
ensurePersonIsMutable();
person_.remove(index);
onChanged();
} else {
personBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder getPersonBuilder(
int index) {
return getPersonFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder getPersonOrBuilder(
int index) {
if (personBuilder_ == null) {
return person_.get(index); } else {
return personBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public java.util.List<? extends com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder>
getPersonOrBuilderList() {
if (personBuilder_ != null) {
return personBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(person_);
}
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder addPersonBuilder() {
return getPersonFieldBuilder().addBuilder(
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.getDefaultInstance());
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder addPersonBuilder(
int index) {
return getPersonFieldBuilder().addBuilder(
index, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.getDefaultInstance());
}
/**
* <code>repeated .tutorial.Person person = 1;</code>
*/
public java.util.List<com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder>
getPersonBuilderList() {
return getPersonFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder>
getPersonFieldBuilder() {
if (personBuilder_ == null) {
personBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.Person.Builder, com.qiyi.usercloud.carve.demoservice.AddressBookProtos.PersonOrBuilder>(
person_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
person_ = null;
}
return personBuilder_;
}
// @@protoc_insertion_point(builder_scope:tutorial.AddressBook)
}
static {
defaultInstance = new AddressBook(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:tutorial.AddressBook)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_tutorial_Person_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_tutorial_Person_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_tutorial_AddressBook_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_tutorial_AddressBook_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\rqiantao.proto\022\010tutorial\"q\n\006Person\022\014\n\004n" +
"ame\030\001 \002(\t\022\n\n\002id\030\002 \002(\005\022\r\n\005email\030\003 \001(\t\022\017\n\007" +
"doubleF\030\004 \001(\001\022\016\n\006floatF\030\005 \001(\002\022\016\n\006bytesF\030" +
"\006 \001(\014\022\r\n\005boolF\030\007 \001(\010\"/\n\013AddressBook\022 \n\006p" +
"erson\030\001 \003(\0132\020.tutorial.PersonB@\n+com.bai" +
"du.bjf.remoting.protobuf.complexListB\021Ad" +
"dressBookProtos"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_tutorial_Person_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_tutorial_Person_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_tutorial_Person_descriptor,
new java.lang.String[] { "Name", "Id", "Email", "DoubleF", "FloatF", "BytesF", "BoolF", });
internal_static_tutorial_AddressBook_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_tutorial_AddressBook_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_tutorial_AddressBook_descriptor,
new java.lang.String[] { "Person", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
}<|fim▁end|> | int index);
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import logging
from django.http import HttpResponse
from receiver.submitresponse import SubmitResponse
def duplicate_attachment(way_handled, additional_params):
'''Return a custom http response associated the handling
of the xform. In this case, telling the sender that
they submitted a duplicate
'''
try:
# NOTE: this possibly shouldn't be a "200" code, but it is for
# now because it's not clear how JavaRosa will handle 202.
# see: http://code.dimagi.com/JavaRosa/wiki/ServerResponseFormat
response = SubmitResponse(status_code=200, or_status_code=2020,
or_status="Duplicate Submission.",
submit_id=way_handled.submission.id,
**additional_params)
return response.to_response()
except Exception, e:<|fim▁hole|><|fim▁end|> | logging.error("Problem in properly responding to instance data handling of %s" %
way_handled) |
<|file_name|>request.rs<|end_file_name|><|fim▁begin|>//! Types for the [`m.key.verification.request`] event.
//!
//! [`m.key.verification.request`]: https://spec.matrix.org/v1.2/client-server-api/#mkeyverificationrequest
use ruma_macros::EventContent;
use serde::{Deserialize, Serialize};
use super::VerificationMethod;
use crate::{DeviceId, MilliSecondsSinceUnixEpoch, TransactionId};
/// The content of an `m.key.verification.request` event.
#[derive(Clone, Debug, Deserialize, Serialize, EventContent)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
#[ruma_event(type = "m.key.verification.request", kind = ToDevice)]
pub struct ToDeviceKeyVerificationRequestEventContent {
/// The device ID which is initiating the request.
pub from_device: Box<DeviceId>,
/// An opaque identifier for the verification request.
///
/// Must be unique with respect to the devices involved.
pub transaction_id: Box<TransactionId>,
/// The verification methods supported by the sender.
pub methods: Vec<VerificationMethod>,<|fim▁hole|> /// The time in milliseconds for when the request was made.
///
/// If the request is in the future by more than 5 minutes or more than 10 minutes in
/// the past, the message should be ignored by the receiver.
pub timestamp: MilliSecondsSinceUnixEpoch,
}
impl ToDeviceKeyVerificationRequestEventContent {
/// Creates a new `ToDeviceKeyVerificationRequestEventContent` with the given device ID,
/// transaction ID, methods and timestamp.
pub fn new(
from_device: Box<DeviceId>,
transaction_id: Box<TransactionId>,
methods: Vec<VerificationMethod>,
timestamp: MilliSecondsSinceUnixEpoch,
) -> Self {
Self { from_device, transaction_id, methods, timestamp }
}
}<|fim▁end|> | |
<|file_name|>ECHODate.java<|end_file_name|><|fim▁begin|>/*******
Copyright 2015 NeuroBASE,Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**********/
package com.echopf;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
* An ECHODate is the extended Date object for the SDK.
*/
public class ECHODate extends Date {
private static final long serialVersionUID = 1L;
/**
* {@.en Constructs a new ECHODate.}
* {@.ja 日時オブジェクトを現在時刻で生成します。}
*/
public ECHODate() {
super();
}
/**
* {@.en Constructs a new ECHODate with an acceptable date string for the API.}
* {@.ja APIの仕様に準拠した文字列形式の日時から、日時オブジェクトを生成します。}
* @param s an acceptable date string for the API (e.g. "2015-02-20 00:00:00")
*/
public ECHODate(String s) throws ParseException {
super();
DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
try {
setTime(sdf.parse(s).getTime());
} catch (ParseException e) {
throw e;<|fim▁hole|> }
/**
* {@.en Converts this object to an acceptable date string for the API.}
* {@.ja APIの仕様に準拠した文字列形式の日時へ変換します。}
* @return the formatted date string for the ECHO API.
*/
public String toStringForECHO() {
DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
return sdf.format(this);
}
}<|fim▁end|> | } |
<|file_name|>hello_asyncio.py<|end_file_name|><|fim▁begin|># hello_asyncio.py
import asyncio
import tornado.ioloop
import tornado.web
import tornado.gen
from tornado.httpclient import AsyncHTTPClient
try:
import aioredis
except ImportError:
print("Please install aioredis: pip install aioredis")
exit(0)
class AsyncRequestHandler(tornado.web.RequestHandler):
    """Base class for request handlers with `asyncio` coroutines support.

    It runs methods on Tornado's ``AsyncIOMainLoop`` instance.

    Subclasses have to implement one of `get_async()`, `post_async()`, etc.
    Asynchronous method should be decorated with `@asyncio.coroutine`.

    Usage example::

        class MyAsyncRequestHandler(AsyncRequestHandler):
            @asyncio.coroutine
            def get_async(self):
                html = yield from self.application.http.get('http://python.org')
                self.write({'html': html})

    You may also just re-define `get()` or `post()` methods and they will be simply run
    synchronously. This may be convenient for draft implementation, i.e. for testing
    new libs or concepts.
    """

    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        """Handle GET request asynchronously, delegates to
        ``self.get_async()`` coroutine.
        """
        yield self._run_method('get', *args, **kwargs)

    @tornado.gen.coroutine
    def post(self, *args, **kwargs):
        """Handle POST request asynchronously, delegates to
        ``self.post_async()`` coroutine.
        """
        yield self._run_method('post', *args, **kwargs)

    @asyncio.coroutine
    def _run_async(self, coroutine, future_, *args, **kwargs):
        """Perform coroutine and set result to ``Future`` object."""
        try:
            result = yield from coroutine(*args, **kwargs)
            future_.set_result(result)
        except Exception as e:
            future_.set_exception(e)
            # Local import: the module never imports traceback at top level,
            # so the original code raised NameError here instead of printing
            # the traceback.
            import traceback
            print(traceback.format_exc())

    def _run_method(self, method_, *args, **kwargs):
        """Run ``get_async()`` / ``post_async()`` / etc. coroutine
        wrapping result with ``tornado.concurrent.Future`` for
        compatibility with ``gen.coroutine``.
        """
        coroutine = getattr(self, '%s_async' % method_, None)
        if not coroutine:
            raise tornado.web.HTTPError(405)

        future_ = tornado.concurrent.Future()
        # asyncio.async() was deprecated in 3.4.4 and is a *syntax error*
        # since Python 3.7 ("async" became a keyword); ensure_future() is
        # the drop-in replacement.
        asyncio.ensure_future(
            self._run_async(coroutine, future_, *args, **kwargs)
        )
        return future_
class MainHandler(AsyncRequestHandler):
    # Demo handler: exercises the asyncio-coroutine plumbing with a Redis
    # round-trip (SET then GET) before writing the response.
    @asyncio.coroutine
    def get_async(self):
        # Shared aioredis connection, created by Application.init_with_loop()
        # before the event loop starts.
        redis = self.application.redis
        yield from redis.set('my-key', 'OK')
        val = yield from redis.get('my-key')
        self.write('Hello asyncio.coroutine: %s' % val)
class Application(tornado.web.Application):
    """Tornado application configured to run on the asyncio event loop."""

    def __init__(self):
        # Swap Tornado's IOLoop implementation for the asyncio-backed one
        # before the application object is built.
        tornado.ioloop.IOLoop.configure(
            'tornado.platform.asyncio.AsyncIOMainLoop')
        routes = [
            (r"/", MainHandler),
        ]
        super().__init__(routes, debug=True)

    def init_with_loop(self, loop):
        """Synchronously create the shared aioredis connection on ``loop``."""
        connect = aioredis.create_redis(('localhost', 6379), loop=loop)
        self.redis = loop.run_until_complete(connect)
if __name__ == "__main__":
    # Start the demo server on port 8888. The Redis connection is created
    # synchronously (run_until_complete) before the loop starts serving.
    print("Run hello_asyncio ... http://127.0.0.1:8888")
    application = Application()
    application.listen(8888)
    loop = asyncio.get_event_loop()
    application.init_with_loop(loop)
    # Blocks forever; Tornado requests are dispatched via AsyncIOMainLoop.
    loop.run_forever()
<|file_name|>deployment.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides generic deployment steps for machines post boot.
"""
import os
class Deployment(object):
    """
    Base class for deployment tasks.
    """

    def run(self, node, client):
        """
        Runs this deployment task on C{node} using the C{client} provided.

        @type node: L{Node}
        @keyword node: Node to operate on

        @type client: L{BaseSSHClient}
        @keyword client: Connected SSH client to use.

        @return: L{Node}
        """
        # Call-form raise works on both Python 2 and 3; the original
        # statement form ("raise NotImplementedError, '...'") is a
        # syntax error under Python 3.
        raise NotImplementedError('run not implemented for this deployment')
class SSHKeyDeployment(Deployment):
    """
    Installs a public SSH Key onto a host.
    """

    def __init__(self, key):
        """
        @type key: C{str}
        @keyword key: Contents of the public key write
        """
        self.key = key

    def run(self, node, client):
        """
        Installs SSH key into C{.ssh/authorized_keys}

        See also L{Deployment.run}
        """
        target = ".ssh/authorized_keys"
        client.put(target, contents=self.key)
        return node
class ScriptDeployment(Deployment):
    """
    Runs an arbitrary Shell Script task.
    """

    def __init__(self, script, name=None, delete=False):
        """
        @type script: C{str}
        @keyword script: Contents of the script to run

        @type name: C{str}
        @keyword name: Name of the script to upload it as, if not specified, a random name will be chosen.

        @type delete: C{bool}
        @keyword delete: Whether to delete the script on completion.
        """
        self.script = script
        # Filled in by run(); exposed so callers can inspect the outcome.
        self.stdout = None
        self.stderr = None
        self.exit_status = None
        self.delete = delete
        self.name = name
        if self.name is None:
            # binascii.hexlify works on both Python 2 and 3; the original
            # bytes.encode('hex') idiom is Python-2-only and raises
            # AttributeError on Python 3.
            import binascii
            suffix = binascii.hexlify(os.urandom(4)).decode('ascii')
            self.name = "/root/deployment_%s.sh" % (suffix,)

    def run(self, node, client):
        """
        Uploads the shell script and then executes it.

        See also L{Deployment.run}
        """
        client.put(path=self.name, chmod=755, contents=self.script)
        self.stdout, self.stderr, self.exit_status = client.run(self.name)
        if self.delete:
            client.delete(self.name)
        return node
class MultiStepDeployment(Deployment):
    """
    Runs a chain of Deployment steps.
    """

    def __init__(self, add=None):
        """
        @type add: C{list}
        @keyword add: Deployment steps to add.
        """
        self.steps = []
        self.add(add)

    def add(self, add):
        """Add a deployment to this chain.

        @type add: Single L{Deployment} or a C{list} of L{Deployment}
        @keyword add: Adds this deployment to the others already in this object.
        """
        if add is None:
            return
        if not isinstance(add, (list, tuple)):
            add = [add]
        self.steps.extend(add)

    def run(self, node, client):
        """
        Run each deployment that has been added, threading the node
        through the chain.

        See also L{Deployment.run}
        """
        for step in self.steps:
            node = step.run(node, client)
        return node
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15883
// FIXME: cover these topics:
// path, reader, writer, stream, raii (close not needed),
// stdio, print!, println!, file access, process spawning,
// error handling
/*! I/O, including files, networking, timers, and processes
`std::io` provides Rust's basic I/O types,
for reading and writing to files, TCP, UDP,
and other types of sockets and pipes,
for manipulating the file system, and for spawning processes.
# Examples
Some examples of obvious things you might want to do
* Read lines from stdin
```rust
use std::io;
for line in io::stdin().lines() {
print!("{}", line.unwrap());
}
```
* Read a complete file
```rust
use std::io::File;
let contents = File::open(&Path::new("message.txt")).read_to_end();
```
* Write a line to a file
```rust
# #![allow(unused_must_use)]
use std::io::File;
let mut file = File::create(&Path::new("message.txt"));
file.write(b"hello, file!\n");
# drop(file);
# ::std::io::fs::unlink(&Path::new("message.txt"));
```
<|fim▁hole|> ```rust,no_run
use std::io::BufferedReader;
use std::io::File;
let path = Path::new("message.txt");
let mut file = BufferedReader::new(File::open(&path));
for line in file.lines() {
print!("{}", line.unwrap());
}
```
* Pull the lines of a file into a vector of strings
```rust,no_run
use std::io::BufferedReader;
use std::io::File;
let path = Path::new("message.txt");
let mut file = BufferedReader::new(File::open(&path));
let lines: Vec<String> = file.lines().map(|x| x.unwrap()).collect();
```
* Make a simple TCP client connection and request
```rust
# #![allow(unused_must_use)]
use std::io::TcpStream;
# // connection doesn't fail if a server is running on 8080
# // locally, we still want to be type checking this code, so lets
# // just stop it running (#11576)
# if false {
let mut socket = TcpStream::connect("127.0.0.1:8080").unwrap();
socket.write(b"GET / HTTP/1.0\n\n");
let response = socket.read_to_end();
# }
```
* Make a simple TCP server
```rust
# fn main() { }
# fn foo() {
# #![allow(dead_code)]
use std::io::{TcpListener, TcpStream};
use std::io::{Acceptor, Listener};
let listener = TcpListener::bind("127.0.0.1:80");
// bind the listener to the specified address
let mut acceptor = listener.listen();
fn handle_client(mut stream: TcpStream) {
// ...
# &mut stream; // silence unused mutability/variable warning
}
// accept connections and process them, spawning a new tasks for each one
for stream in acceptor.incoming() {
match stream {
Err(e) => { /* connection failed */ }
Ok(stream) => spawn(proc() {
// connection succeeded
handle_client(stream)
})
}
}
// close the socket server
drop(acceptor);
# }
```
# Error Handling
I/O is an area where nearly every operation can result in unexpected
errors. Errors should be painfully visible when they happen, and handling them
should be easy to work with. It should be convenient to handle specific I/O
errors, and it should also be convenient to not deal with I/O errors.
Rust's I/O employs a combination of techniques to reduce boilerplate
while still providing feedback about errors. The basic strategy:
* All I/O operations return `IoResult<T>` which is equivalent to
`Result<T, IoError>`. The `Result` type is defined in the `std::result`
module.
* If the `Result` type goes unused, then the compiler will by default emit a
warning about the unused result. This is because `Result` has the
`#[must_use]` attribute.
* Common traits are implemented for `IoResult`, e.g.
`impl<R: Reader> Reader for IoResult<R>`, so that error values do not have
to be 'unwrapped' before use.
These features combine in the API to allow for expressions like
`File::create(&Path::new("diary.txt")).write(b"Met a girl.\n")`
without having to worry about whether "diary.txt" exists or whether
the write succeeds. As written, if either `new` or `write_line`
encounters an error then the result of the entire expression will
be an error.
If you wanted to handle the error though you might write:
```rust
# #![allow(unused_must_use)]
use std::io::File;
match File::create(&Path::new("diary.txt")).write(b"Met a girl.\n") {
Ok(()) => (), // succeeded
Err(e) => println!("failed to write to my diary: {}", e),
}
# ::std::io::fs::unlink(&Path::new("diary.txt"));
```
So what actually happens if `create` encounters an error?
It's important to know that what `new` returns is not a `File`
but an `IoResult<File>`. If the file does not open, then `new` will simply
return `Err(..)`. Because there is an implementation of `Writer` (the trait
ultimately required for types to implement `write_line`) there is no
need to inspect or unwrap the `IoResult<File>` and we simply call `write_line`
on it. If `new` returned an `Err(..)` then the followup call to `write_line`
will also return an error.
## `try!`
Explicit pattern matching on `IoResult`s can get quite verbose, especially
when performing many I/O operations. Some examples (like those above) are
alleviated with extra methods implemented on `IoResult`, but others have more
complex interdependencies among each I/O operation.
The `try!` macro from `std::macros` is provided as a method of early-return
inside `Result`-returning functions. It expands to an early-return on `Err`
and otherwise unwraps the contained `Ok` value.
If you wanted to read several `u32`s from a file and return their product:
```rust
use std::io::{File, IoResult};
fn file_product(p: &Path) -> IoResult<u32> {
let mut f = File::open(p);
let x1 = try!(f.read_le_u32());
let x2 = try!(f.read_le_u32());
Ok(x1 * x2)
}
match file_product(&Path::new("numbers.bin")) {
Ok(x) => println!("{}", x),
Err(e) => println!("Failed to read numbers!")
}
```
With `try!` in `file_product`, each `read_le_u32` need not be directly
concerned with error handling; instead its caller is responsible for
responding to errors that may occur while attempting to read the numbers.
*/
#![experimental]
#![deny(unused_must_use)]
pub use self::SeekStyle::*;
pub use self::FileMode::*;
pub use self::FileAccess::*;
pub use self::FileType::*;
pub use self::IoErrorKind::*;
use char::Char;
use clone::Clone;
use default::Default;
use error::{FromError, Error};
use fmt;
use int;
use iter::Iterator;
use mem::transmute;
use ops::{BitOr, BitXor, BitAnd, Sub, Not};
use option::{Option, Some, None};
use os;
use boxed::Box;
use result::{Ok, Err, Result};
use sys;
use slice::{AsSlice, SlicePrelude};
use str::{Str, StrPrelude};
use str;
use string::String;
use uint;
use unicode::char::UnicodeChar;
use vec::Vec;
// Reexports
pub use self::stdio::stdin;
pub use self::stdio::stdout;
pub use self::stdio::stderr;
pub use self::stdio::print;
pub use self::stdio::println;
pub use self::fs::File;
pub use self::timer::Timer;
pub use self::net::ip::IpAddr;
pub use self::net::tcp::TcpListener;
pub use self::net::tcp::TcpStream;
pub use self::net::udp::UdpStream;
pub use self::pipe::PipeStream;
pub use self::process::{Process, Command};
pub use self::tempfile::TempDir;
pub use self::mem::{MemReader, BufReader, MemWriter, BufWriter};
pub use self::buffered::{BufferedReader, BufferedWriter, BufferedStream,
LineBufferedWriter};
pub use self::comm_adapters::{ChanReader, ChanWriter};
mod buffered;
mod comm_adapters;
mod mem;
mod result;
mod tempfile;
pub mod extensions;
pub mod fs;
pub mod net;
pub mod pipe;
pub mod process;
pub mod stdio;
pub mod test;
pub mod timer;
pub mod util;
/// The default buffer size for various I/O operations
// libuv recommends 64k buffers to maximize throughput
// https://groups.google.com/forum/#!topic/libuv/oQO1HJAIDdA
const DEFAULT_BUF_SIZE: uint = 1024 * 64;
/// A convenient typedef of the return value of any I/O action.
pub type IoResult<T> = Result<T, IoError>;
/// The type passed to I/O condition handlers to indicate error
///
/// `desc` is a `&'static str`, so constructing an `IoError` does not
/// allocate; only the optional `detail` carries an owned `String`.
///
/// # FIXME
///
/// Is something like this sufficient? It's kind of archaic
#[deriving(PartialEq, Eq, Clone)]
pub struct IoError {
    /// An enumeration which can be matched against for determining the flavor
    /// of error.
    pub kind: IoErrorKind,
    /// A human-readable description about the error
    pub desc: &'static str,
    /// Detailed information about this error, not always available
    pub detail: Option<String>
}
impl IoError {
    /// Convert an `errno` value into an `IoError`.
    ///
    /// If `detail` is `true`, the `detail` field of the `IoError`
    /// struct is filled with an allocated string describing the error
    /// in more detail, retrieved from the operating system.
    pub fn from_errno(errno: uint, detail: bool) -> IoError {
        let mut err = sys::decode_error(errno as i32);
        if detail && err.kind == OtherIoError {
            // Only generic ("other") errors get the OS-provided message;
            // it is lower-cased so it composes inside larger messages.
            err.detail = Some(os::error_string(errno).as_slice().chars()
                                 .map(|c| c.to_lowercase()).collect())
        }
        err
    }

    /// Retrieve the last error to occur as a (detailed) IoError.
    ///
    /// This uses the OS `errno`, and so there should not be any task
    /// descheduling or migration (other than that performed by the
    /// operating system) between the call(s) for which errors are
    /// being checked and the call of this function.
    pub fn last_error() -> IoError {
        IoError::from_errno(os::errno() as uint, true)
    }
}
impl fmt::Show for IoError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            // A generic error with the placeholder description: the detail
            // string is the only useful information, so print it alone.
            IoError { kind: OtherIoError, desc: "unknown error", detail: Some(ref detail) } =>
                write!(fmt, "{}", detail),
            IoError { detail: None, desc, .. } =>
                write!(fmt, "{}", desc),
            // Otherwise show both: "description (detail)".
            IoError { detail: Some(ref detail), desc, .. } =>
                write!(fmt, "{} ({})", desc, detail)
        }
    }
}
impl Error for IoError {
    // The static description doubles as the generic Error description.
    fn description(&self) -> &str {
        self.desc
    }

    // Clone the optional detail string so the trait object owns its copy.
    fn detail(&self) -> Option<String> {
        self.detail.clone()
    }
}
// Lets `IoError` be converted into the generic boxed `Error` type
// (e.g. by `try!` in functions returning `Result<_, Box<Error>>`).
impl FromError<IoError> for Box<Error> {
    fn from_error(err: IoError) -> Box<Error> {
        box err
    }
}
/// A list specifying general categories of I/O error.
#[deriving(PartialEq, Eq, Clone, Show)]
pub enum IoErrorKind {
    /// Any I/O error not part of this list.
    OtherIoError,
    /// The operation could not complete because end of file was reached.
    EndOfFile,
    /// The file was not found.
    FileNotFound,
    /// The file permissions disallowed access to this file.
    PermissionDenied,
    /// A network connection failed for some reason not specified in this list.
    ConnectionFailed,
    /// The network operation failed because the network connection was closed.
    Closed,
    /// The connection was refused by the remote server.
    ConnectionRefused,
    /// The connection was reset by the remote server.
    ConnectionReset,
    /// The connection was aborted (terminated) by the remote server.
    ConnectionAborted,
    /// The network operation failed because it was not connected yet.
    NotConnected,
    /// The operation failed because a pipe was closed.
    BrokenPipe,
    /// A file already existed with that name.
    PathAlreadyExists,
    /// No file exists at that location.
    PathDoesntExist,
    /// The path did not specify the type of file that this operation required. For example,
    /// attempting to copy a directory with the `fs::copy()` operation will fail with this error.
    MismatchedFileTypeForOperation,
    /// The operation temporarily failed (for example, because a signal was received), and retrying
    /// may succeed.
    ResourceUnavailable,
    /// No I/O functionality is available for this task.
    IoUnavailable,
    /// A parameter was incorrect in a way that caused an I/O error not part of this list.
    InvalidInput,
    /// The I/O operation's timeout expired, causing it to be canceled.
    TimedOut,
    /// This write operation failed to write all of its data.
    ///
    /// Normally the write() method on a Writer guarantees that all of its data
    /// has been written, but some operations may be terminated after only
    /// partially writing some data. An example of this is a timed out write
    /// which successfully wrote a known number of bytes, but bailed out after
    /// doing so.
    ///
    /// The payload contained as part of this variant is the number of bytes
    /// which are known to have been successfully written.
    ShortWrite(uint),
    /// The Reader returned 0 bytes from `read()` too many times.
    /// ("Too many" is the module-private `NO_PROGRESS_LIMIT`, i.e. 1000
    /// consecutive zero-byte reads.)
    NoProgress,
}
/// A trait that lets you add a `detail` to an IoError easily
// The closure parameters receive the *current* error, so the new
// description/detail can be derived from it.
trait UpdateIoError<T> {
    /// Returns an IoError with updated description and detail
    fn update_err(self, desc: &'static str, detail: |&IoError| -> String) -> Self;

    /// Returns an IoError with updated detail
    fn update_detail(self, detail: |&IoError| -> String) -> Self;

    /// Returns an IoError with updated description
    fn update_desc(self, desc: &'static str) -> Self;
}
// Blanket impl: any IoResult can have its error enriched; Ok values pass
// through untouched via map_err.
impl<T> UpdateIoError<T> for IoResult<T> {
    fn update_err(self, desc: &'static str, detail: |&IoError| -> String) -> IoResult<T> {
        self.map_err(|mut e| {
            // Compute the new detail from the old error *before* mutating it.
            let detail = detail(&e);
            e.desc = desc;
            e.detail = Some(detail);
            e
        })
    }

    fn update_detail(self, detail: |&IoError| -> String) -> IoResult<T> {
        self.map_err(|mut e| { e.detail = Some(detail(&e)); e })
    }

    fn update_desc(self, desc: &'static str) -> IoResult<T> {
        self.map_err(|mut e| { e.desc = desc; e })
    }
}
// Number of consecutive zero-byte reads `Reader::read_at_least` tolerates
// before giving up with a `NoProgress` error.
static NO_PROGRESS_LIMIT: uint = 1000;
/// A trait for objects which are byte-oriented streams. Readers are defined by
/// one method, `read`. This function will block until data is available,
/// filling in the provided buffer with any data read.
///
/// Readers are intended to be composable with one another. Many objects
/// throughout the I/O and related libraries take and provide types which
/// implement the `Reader` trait.
pub trait Reader {

    // The only method which needs to be implemented for this trait.

    /// Read bytes, up to the length of `buf` and place them in `buf`.
    /// Returns the number of bytes read. The number of bytes read may
    /// be less than the number requested, even 0. Returns `Err` on EOF.
    ///
    /// # Error
    ///
    /// If an error occurs during this I/O operation, then it is returned as
    /// `Err(IoError)`. Note that end-of-file is considered an error, and can be
    /// inspected for in the error's `kind` field. Also note that reading 0
    /// bytes is not considered an error in all circumstances
    ///
    /// # Implementation Note
    ///
    /// When implementing this method on a new Reader, you are strongly encouraged
    /// not to return 0 if you can avoid it.
    fn read(&mut self, buf: &mut [u8]) -> IoResult<uint>;

    // Convenient helper methods based on the above methods

    /// Reads at least `min` bytes and places them in `buf`.
    /// Returns the number of bytes read.
    ///
    /// This will continue to call `read` until at least `min` bytes have been
    /// read. If `read` returns 0 too many times, `NoProgress` will be
    /// returned.
    ///
    /// # Error
    ///
    /// If an error occurs at any point, that error is returned, and no further
    /// bytes are read.
    fn read_at_least(&mut self, min: uint, buf: &mut [u8]) -> IoResult<uint> {
        if min > buf.len() {
            return Err(IoError {
                detail: Some(String::from_str("the buffer is too short")),
                ..standard_error(InvalidInput)
            });
        }
        let mut read = 0;
        while read < min {
            // The zero-read counter resets after every successful read, so
            // NoProgress only fires after NO_PROGRESS_LIMIT *consecutive*
            // empty reads.
            let mut zeroes = 0;
            loop {
                match self.read(buf[mut read..]) {
                    Ok(0) => {
                        zeroes += 1;
                        if zeroes >= NO_PROGRESS_LIMIT {
                            return Err(standard_error(NoProgress));
                        }
                    }
                    Ok(n) => {
                        read += n;
                        break;
                    }
                    err@Err(_) => return err
                }
            }
        }
        Ok(read)
    }

    /// Reads a single byte. Returns `Err` on EOF.
    fn read_byte(&mut self) -> IoResult<u8> {
        let mut buf = [0];
        try!(self.read_at_least(1, &mut buf));
        Ok(buf[0])
    }

    /// Reads up to `len` bytes and appends them to a vector.
    /// Returns the number of bytes read. The number of bytes read may be
    /// less than the number requested, even 0. Returns Err on EOF.
    ///
    /// # Error
    ///
    /// If an error occurs during this I/O operation, then it is returned
    /// as `Err(IoError)`. See `read()` for more details.
    fn push(&mut self, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
        let start_len = buf.len();
        buf.reserve(len);

        // The slice covers reserved-but-uninitialized capacity; set_len only
        // commits the `n` bytes actually written by `read`.
        let n = {
            let s = unsafe { slice_vec_capacity(buf, start_len, start_len + len) };
            try!(self.read(s))
        };
        unsafe { buf.set_len(start_len + n) };
        Ok(n)
    }

    /// Reads at least `min` bytes, but no more than `len`, and appends them to
    /// a vector.
    /// Returns the number of bytes read.
    ///
    /// This will continue to call `read` until at least `min` bytes have been
    /// read. If `read` returns 0 too many times, `NoProgress` will be
    /// returned.
    ///
    /// # Error
    ///
    /// If an error occurs at any point, that error is returned, and no further
    /// bytes are read.
    fn push_at_least(&mut self, min: uint, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
        if min > len {
            return Err(IoError {
                detail: Some(String::from_str("the buffer is too short")),
                ..standard_error(InvalidInput)
            });
        }

        let start_len = buf.len();
        buf.reserve(len);

        // we can't just use self.read_at_least(min, slice) because we need to push
        // successful reads onto the vector before any returned errors.
        let mut read = 0;
        while read < min {
            read += {
                let s = unsafe { slice_vec_capacity(buf, start_len + read, start_len + len) };
                try!(self.read_at_least(1, s))
            };
            // Commit after every successful chunk so data is retained even
            // if a later iteration errors out.
            unsafe { buf.set_len(start_len + read) };
        }
        Ok(read)
    }

    /// Reads exactly `len` bytes and gives you back a new vector of length
    /// `len`
    ///
    /// # Error
    ///
    /// Fails with the same conditions as `read`. Additionally returns error
    /// on EOF. Note that if an error is returned, then some number of bytes may
    /// have already been consumed from the underlying reader, and they are lost
    /// (not returned as part of the error). If this is unacceptable, then it is
    /// recommended to use the `push_at_least` or `read` methods.
    fn read_exact(&mut self, len: uint) -> IoResult<Vec<u8>> {
        let mut buf = Vec::with_capacity(len);
        match self.push_at_least(len, len, &mut buf) {
            Ok(_) => Ok(buf),
            Err(e) => Err(e),
        }
    }

    /// Reads all remaining bytes from the stream.
    ///
    /// # Error
    ///
    /// Returns any non-EOF error immediately. Previously read bytes are
    /// discarded when an error is returned.
    ///
    /// When EOF is encountered, all bytes read up to that point are returned.
    fn read_to_end(&mut self) -> IoResult<Vec<u8>> {
        let mut buf = Vec::with_capacity(DEFAULT_BUF_SIZE);
        loop {
            // EndOfFile terminates the loop normally; anything else is fatal.
            match self.push_at_least(1, DEFAULT_BUF_SIZE, &mut buf) {
                Ok(_) => {}
                Err(ref e) if e.kind == EndOfFile => break,
                Err(e) => return Err(e)
            }
        }
        return Ok(buf);
    }

    /// Reads all of the remaining bytes of this stream, interpreting them as a
    /// UTF-8 encoded stream. The corresponding string is returned.
    ///
    /// # Error
    ///
    /// This function returns all of the same errors as `read_to_end` with an
    /// additional error if the reader's contents are not a valid sequence of
    /// UTF-8 bytes. Invalid UTF-8 is reported as `InvalidInput`.
    fn read_to_string(&mut self) -> IoResult<String> {
        self.read_to_end().and_then(|s| {
            match String::from_utf8(s) {
                Ok(s)  => Ok(s),
                Err(_) => Err(standard_error(InvalidInput)),
            }
        })
    }

    // Byte conversion helpers

    /// Reads `n` little-endian unsigned integer bytes.
    ///
    /// `n` must be between 1 and 8, inclusive.
    fn read_le_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
        assert!(nbytes > 0 && nbytes <= 8);

        // Accumulate bytes from least- to most-significant position.
        let mut val = 0u64;
        let mut pos = 0;
        let mut i = nbytes;
        while i > 0 {
            val += (try!(self.read_u8()) as u64) << pos;
            pos += 8;
            i -= 1;
        }
        Ok(val)
    }

    /// Reads `n` little-endian signed integer bytes.
    ///
    /// `n` must be between 1 and 8, inclusive.
    fn read_le_int_n(&mut self, nbytes: uint) -> IoResult<i64> {
        self.read_le_uint_n(nbytes).map(|i| extend_sign(i, nbytes))
    }

    /// Reads `n` big-endian unsigned integer bytes.
    ///
    /// `n` must be between 1 and 8, inclusive.
    fn read_be_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
        assert!(nbytes > 0 && nbytes <= 8);

        // Most-significant byte arrives first, so each byte is shifted by
        // the number of bytes still to come.
        let mut val = 0u64;
        let mut i = nbytes;
        while i > 0 {
            i -= 1;
            val += (try!(self.read_u8()) as u64) << i * 8;
        }
        Ok(val)
    }

    /// Reads `n` big-endian signed integer bytes.
    ///
    /// `n` must be between 1 and 8, inclusive.
    fn read_be_int_n(&mut self, nbytes: uint) -> IoResult<i64> {
        self.read_be_uint_n(nbytes).map(|i| extend_sign(i, nbytes))
    }

    /// Reads a little-endian unsigned integer.
    ///
    /// The number of bytes returned is system-dependent.
    fn read_le_uint(&mut self) -> IoResult<uint> {
        self.read_le_uint_n(uint::BYTES).map(|i| i as uint)
    }

    /// Reads a little-endian integer.
    ///
    /// The number of bytes returned is system-dependent.
    fn read_le_int(&mut self) -> IoResult<int> {
        self.read_le_int_n(int::BYTES).map(|i| i as int)
    }

    /// Reads a big-endian unsigned integer.
    ///
    /// The number of bytes returned is system-dependent.
    fn read_be_uint(&mut self) -> IoResult<uint> {
        self.read_be_uint_n(uint::BYTES).map(|i| i as uint)
    }

    /// Reads a big-endian integer.
    ///
    /// The number of bytes returned is system-dependent.
    fn read_be_int(&mut self) -> IoResult<int> {
        self.read_be_int_n(int::BYTES).map(|i| i as int)
    }

    /// Reads a big-endian `u64`.
    ///
    /// `u64`s are 8 bytes long.
    fn read_be_u64(&mut self) -> IoResult<u64> {
        self.read_be_uint_n(8)
    }

    /// Reads a big-endian `u32`.
    ///
    /// `u32`s are 4 bytes long.
    fn read_be_u32(&mut self) -> IoResult<u32> {
        self.read_be_uint_n(4).map(|i| i as u32)
    }

    /// Reads a big-endian `u16`.
    ///
    /// `u16`s are 2 bytes long.
    fn read_be_u16(&mut self) -> IoResult<u16> {
        self.read_be_uint_n(2).map(|i| i as u16)
    }

    /// Reads a big-endian `i64`.
    ///
    /// `i64`s are 8 bytes long.
    fn read_be_i64(&mut self) -> IoResult<i64> {
        self.read_be_int_n(8)
    }

    /// Reads a big-endian `i32`.
    ///
    /// `i32`s are 4 bytes long.
    fn read_be_i32(&mut self) -> IoResult<i32> {
        self.read_be_int_n(4).map(|i| i as i32)
    }

    /// Reads a big-endian `i16`.
    ///
    /// `i16`s are 2 bytes long.
    fn read_be_i16(&mut self) -> IoResult<i16> {
        self.read_be_int_n(2).map(|i| i as i16)
    }

    /// Reads a big-endian `f64`.
    ///
    /// `f64`s are 8 byte, IEEE754 double-precision floating point numbers.
    fn read_be_f64(&mut self) -> IoResult<f64> {
        // Reinterpret the raw bits as a float; no numeric conversion.
        self.read_be_u64().map(|i| unsafe {
            transmute::<u64, f64>(i)
        })
    }

    /// Reads a big-endian `f32`.
    ///
    /// `f32`s are 4 byte, IEEE754 single-precision floating point numbers.
    fn read_be_f32(&mut self) -> IoResult<f32> {
        self.read_be_u32().map(|i| unsafe {
            transmute::<u32, f32>(i)
        })
    }

    /// Reads a little-endian `u64`.
    ///
    /// `u64`s are 8 bytes long.
    fn read_le_u64(&mut self) -> IoResult<u64> {
        self.read_le_uint_n(8)
    }

    /// Reads a little-endian `u32`.
    ///
    /// `u32`s are 4 bytes long.
    fn read_le_u32(&mut self) -> IoResult<u32> {
        self.read_le_uint_n(4).map(|i| i as u32)
    }

    /// Reads a little-endian `u16`.
    ///
    /// `u16`s are 2 bytes long.
    fn read_le_u16(&mut self) -> IoResult<u16> {
        self.read_le_uint_n(2).map(|i| i as u16)
    }

    /// Reads a little-endian `i64`.
    ///
    /// `i64`s are 8 bytes long.
    fn read_le_i64(&mut self) -> IoResult<i64> {
        self.read_le_int_n(8)
    }

    /// Reads a little-endian `i32`.
    ///
    /// `i32`s are 4 bytes long.
    fn read_le_i32(&mut self) -> IoResult<i32> {
        self.read_le_int_n(4).map(|i| i as i32)
    }

    /// Reads a little-endian `i16`.
    ///
    /// `i16`s are 2 bytes long.
    fn read_le_i16(&mut self) -> IoResult<i16> {
        self.read_le_int_n(2).map(|i| i as i16)
    }

    /// Reads a little-endian `f64`.
    ///
    /// `f64`s are 8 byte, IEEE754 double-precision floating point numbers.
    fn read_le_f64(&mut self) -> IoResult<f64> {
        self.read_le_u64().map(|i| unsafe {
            transmute::<u64, f64>(i)
        })
    }

    /// Reads a little-endian `f32`.
    ///
    /// `f32`s are 4 byte, IEEE754 single-precision floating point numbers.
    fn read_le_f32(&mut self) -> IoResult<f32> {
        self.read_le_u32().map(|i| unsafe {
            transmute::<u32, f32>(i)
        })
    }

    /// Read a u8.
    ///
    /// `u8`s are 1 byte.
    fn read_u8(&mut self) -> IoResult<u8> {
        self.read_byte()
    }

    /// Read an i8.
    ///
    /// `i8`s are 1 byte.
    fn read_i8(&mut self) -> IoResult<i8> {
        self.read_byte().map(|i| i as i8)
    }
}
/// A reader which can be converted to a RefReader.
// Deprecated alias of `ByRefReader` below; kept only for compatibility.
#[deprecated = "use ByRefReader instead"]
pub trait AsRefReader {
    /// Creates a wrapper around a mutable reference to the reader.
    ///
    /// This is useful to allow applying adaptors while still
    /// retaining ownership of the original value.
    fn by_ref<'a>(&'a mut self) -> RefReader<'a, Self>;
}
// Blanket impl: every Reader gets the deprecated by_ref() for free.
#[allow(deprecated)]
impl<T: Reader> AsRefReader for T {
    fn by_ref<'a>(&'a mut self) -> RefReader<'a, T> {
        RefReader { inner: self }
    }
}
/// A reader which can be converted to a RefReader.
pub trait ByRefReader {
    /// Creates a wrapper around a mutable reference to the reader.
    ///
    /// This is useful to allow applying adaptors while still
    /// retaining ownership of the original value.
    fn by_ref<'a>(&'a mut self) -> RefReader<'a, Self>;
}
// Blanket impl: every Reader gets by_ref() for free.
impl<T: Reader> ByRefReader for T {
    fn by_ref<'a>(&'a mut self) -> RefReader<'a, T> {
        RefReader { inner: self }
    }
}
/// A reader which can be converted to bytes.
pub trait BytesReader {
    /// Create an iterator that reads a single byte on
    /// each iteration, until EOF.
    ///
    /// # Error
    ///
    /// Any error other than `EndOfFile` that is produced by the underlying Reader
    /// is returned by the iterator and should be handled by the caller.
    fn bytes<'r>(&'r mut self) -> extensions::Bytes<'r, Self>;
}
// Blanket impl: every Reader can be iterated byte-by-byte via bytes().
impl<T: Reader> BytesReader for T {
    fn bytes<'r>(&'r mut self) -> extensions::Bytes<'r, T> {
        extensions::Bytes::new(self)
    }
}
// A boxed Reader trait object is itself a Reader: re-borrow the contents
// and dispatch dynamically.
impl<'a> Reader for Box<Reader+'a> {
    fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
        let reader: &mut Reader = &mut **self;
        reader.read(buf)
    }
}
// A bare `&mut Reader` trait object is itself a Reader, delegating through
// the reference.
impl<'a> Reader for &'a mut Reader+'a {
    fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { (*self).read(buf) }
}
/// Returns a slice of `v` between `start` and `end`.
///
/// Similar to `slice()` except this function only bounds the slice on the
/// capacity of `v`, not the length.
///
/// # Panics
///
/// Panics when `start` or `end` point outside the capacity of `v`, or when
/// `start` > `end`.
// Private function here because we aren't sure if we want to expose this as
// API yet. If so, it should be a method on Vec.
//
// SAFETY NOTE(review): the returned slice may extend past `v.len()` into
// uninitialized capacity; callers in this module only *write* through that
// region and then commit with `set_len` — confirm any new call site does
// the same.
unsafe fn slice_vec_capacity<'a, T>(v: &'a mut Vec<T>, start: uint, end: uint) -> &'a mut [T] {
    use raw::Slice;
    use ptr::RawPtr;

    assert!(start <= end);
    assert!(end <= v.capacity());
    // Construct the slice by hand from a raw pointer + length pair.
    transmute(Slice {
        data: v.as_ptr().offset(start as int),
        len: end - start
    })
}
/// A `RefReader` is a struct implementing `Reader` which contains a reference
/// to another reader. This is often useful when composing streams.
///
/// Created by the `by_ref()` methods of `ByRefReader`/`AsRefReader`.
///
/// # Example
///
/// ```
/// # fn main() {}
/// # fn process_input<R: Reader>(r: R) {}
/// # fn foo() {
/// use std::io;
/// use std::io::ByRefReader;
/// use std::io::util::LimitReader;
///
/// let mut stream = io::stdin();
///
/// // Only allow the function to process at most one kilobyte of input
/// {
///     let stream = LimitReader::new(stream.by_ref(), 1024);
///     process_input(stream);
/// }
///
/// // 'stream' is still available for use here
///
/// # }
/// ```
pub struct RefReader<'a, R:'a> {
    /// The underlying reader which this is referencing
    inner: &'a mut R
}
// Delegate reads straight through to the referenced reader.
impl<'a, R: Reader> Reader for RefReader<'a, R> {
    fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { self.inner.read(buf) }
}
// Delegate buffering straight through to the referenced reader.
impl<'a, R: Buffer> Buffer for RefReader<'a, R> {
    // Use a distinct method-level lifetime: the original `fill_buf<'a>`
    // shadowed the impl's `'a` parameter, which is confusing and is rejected
    // as an error (E0496) by later compilers. The rename is purely cosmetic;
    // the signature callers see is identical.
    fn fill_buf<'b>(&'b mut self) -> IoResult<&'b [u8]> { self.inner.fill_buf() }
    fn consume(&mut self, amt: uint) { self.inner.consume(amt) }
}
// Sign-extend the low `nbytes` bytes of `val` to a full i64 by shifting the
// value to the top of the word and arithmetic-shifting it back down.
// NOTE(review): assumes 1 <= nbytes <= 8; nbytes == 0 would shift by 64 bits,
// which overflows — confirm all callers respect that range.
fn extend_sign(val: u64, nbytes: uint) -> i64 {
    let shift = (8 - nbytes) * 8;
    (val << shift) as i64 >> shift
}
/// A trait for objects which are byte-oriented streams. Writers are defined by
/// one method, `write`. This function will block until the provided buffer of
/// bytes has been entirely written, and it will return any failures which occur.
///
/// Another commonly overridden method is the `flush` method for writers such as
/// buffered writers.
///
/// Writers are intended to be composable with one another. Many objects
/// throughout the I/O and related libraries take and provide types which
/// implement the `Writer` trait.
pub trait Writer {
    /// Write the entirety of a given buffer
    ///
    /// # Errors
    ///
    /// If an error happens during the I/O operation, the error is returned as
    /// `Err`. Note that it is considered an error if the entire buffer could
    /// not be written, and if an error is returned then it is unknown how much
    /// data (if any) was actually written.
    fn write(&mut self, buf: &[u8]) -> IoResult<()>;
    /// Flush this output stream, ensuring that all intermediately buffered
    /// contents reach their destination.
    ///
    /// This is by default a no-op and implementers of the `Writer` trait should
    /// decide whether their stream needs to be buffered or not.
    fn flush(&mut self) -> IoResult<()> { Ok(()) }
    /// Writes a formatted string into this writer, returning any error
    /// encountered.
    ///
    /// This method is primarily used to interface with the `format_args!`
    /// macro, but it is rare that this should explicitly be called. The
    /// `write!` macro should be favored to invoke this method instead.
    ///
    /// # Errors
    ///
    /// This function will return any I/O error reported while formatting.
    fn write_fmt(&mut self, fmt: &fmt::Arguments) -> IoResult<()> {
        // Create a shim which translates a Writer to a FormatWriter and saves
        // off I/O errors. instead of discarding them
        struct Adaptor<'a, T:'a> {
            inner: &'a mut T,
            error: IoResult<()>,
        }
        impl<'a, T: Writer> fmt::FormatWriter for Adaptor<'a, T> {
            fn write(&mut self, bytes: &[u8]) -> fmt::Result {
                match self.inner.write(bytes) {
                    Ok(()) => Ok(()),
                    Err(e) => {
                        // Stash the real I/O error; fmt::Error carries no data.
                        self.error = Err(e);
                        Err(fmt::Error)
                    }
                }
            }
        }
        let mut output = Adaptor { inner: self, error: Ok(()) };
        match fmt::write(&mut output, fmt) {
            Ok(()) => Ok(()),
            // Surface the captured I/O error rather than the opaque fmt error.
            Err(..) => output.error
        }
    }
    /// Write a rust string into this sink.
    ///
    /// The bytes written will be the UTF-8 encoded version of the input string.
    /// If other encodings are desired, it is recommended to compose this stream
    /// with another performing the conversion, or to use `write` with a
    /// converted byte-array instead.
    #[inline]
    fn write_str(&mut self, s: &str) -> IoResult<()> {
        self.write(s.as_bytes())
    }
    /// Writes a string into this sink, and then writes a literal newline (`\n`)
    /// byte afterwards. Note that the writing of the newline is *not* atomic in
    /// the sense that the call to `write` is invoked twice (once with the
    /// string and once with a newline character).
    ///
    /// If other encodings or line ending flavors are desired, it is recommended
    /// that the `write` method is used specifically instead.
    #[inline]
    fn write_line(&mut self, s: &str) -> IoResult<()> {
        self.write_str(s).and_then(|()| self.write(&[b'\n']))
    }
    /// Write a single char, encoded as UTF-8.
    #[inline]
    fn write_char(&mut self, c: char) -> IoResult<()> {
        let mut buf = [0u8, ..4];
        // NOTE(review): a 4-byte buffer fits any UTF-8 char, so encode_utf8
        // should never fail here; the unwrap_or(0) fallback would silently
        // write zero bytes — confirm that is the intended degenerate case.
        let n = c.encode_utf8(buf[mut]).unwrap_or(0);
        self.write(buf[..n])
    }
    /// Write the result of passing n through `int::to_str_bytes`.
    #[inline]
    fn write_int(&mut self, n: int) -> IoResult<()> {
        write!(self, "{}", n)
    }
    /// Write the result of passing n through `uint::to_str_bytes`.
    #[inline]
    fn write_uint(&mut self, n: uint) -> IoResult<()> {
        write!(self, "{}", n)
    }
    /// Write a little-endian uint (number of bytes depends on system).
    #[inline]
    fn write_le_uint(&mut self, n: uint) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, uint::BYTES, |v| self.write(v))
    }
    /// Write a little-endian int (number of bytes depends on system).
    #[inline]
    fn write_le_int(&mut self, n: int) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, int::BYTES, |v| self.write(v))
    }
    /// Write a big-endian uint (number of bytes depends on system).
    #[inline]
    fn write_be_uint(&mut self, n: uint) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, uint::BYTES, |v| self.write(v))
    }
    /// Write a big-endian int (number of bytes depends on system).
    #[inline]
    fn write_be_int(&mut self, n: int) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, int::BYTES, |v| self.write(v))
    }
    /// Write a big-endian u64 (8 bytes).
    #[inline]
    fn write_be_u64(&mut self, n: u64) -> IoResult<()> {
        extensions::u64_to_be_bytes(n, 8u, |v| self.write(v))
    }
    /// Write a big-endian u32 (4 bytes).
    #[inline]
    fn write_be_u32(&mut self, n: u32) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, 4u, |v| self.write(v))
    }
    /// Write a big-endian u16 (2 bytes).
    #[inline]
    fn write_be_u16(&mut self, n: u16) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, 2u, |v| self.write(v))
    }
    /// Write a big-endian i64 (8 bytes).
    #[inline]
    fn write_be_i64(&mut self, n: i64) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, 8u, |v| self.write(v))
    }
    /// Write a big-endian i32 (4 bytes).
    #[inline]
    fn write_be_i32(&mut self, n: i32) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, 4u, |v| self.write(v))
    }
    /// Write a big-endian i16 (2 bytes).
    #[inline]
    fn write_be_i16(&mut self, n: i16) -> IoResult<()> {
        extensions::u64_to_be_bytes(n as u64, 2u, |v| self.write(v))
    }
    /// Write a big-endian IEEE754 double-precision floating-point (8 bytes).
    #[inline]
    fn write_be_f64(&mut self, f: f64) -> IoResult<()> {
        unsafe {
            // SAFETY: f64 and u64 have the same size; this is a bit-level copy.
            self.write_be_u64(transmute(f))
        }
    }
    /// Write a big-endian IEEE754 single-precision floating-point (4 bytes).
    #[inline]
    fn write_be_f32(&mut self, f: f32) -> IoResult<()> {
        unsafe {
            // SAFETY: f32 and u32 have the same size; this is a bit-level copy.
            self.write_be_u32(transmute(f))
        }
    }
    /// Write a little-endian u64 (8 bytes).
    #[inline]
    fn write_le_u64(&mut self, n: u64) -> IoResult<()> {
        extensions::u64_to_le_bytes(n, 8u, |v| self.write(v))
    }
    /// Write a little-endian u32 (4 bytes).
    #[inline]
    fn write_le_u32(&mut self, n: u32) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, 4u, |v| self.write(v))
    }
    /// Write a little-endian u16 (2 bytes).
    #[inline]
    fn write_le_u16(&mut self, n: u16) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, 2u, |v| self.write(v))
    }
    /// Write a little-endian i64 (8 bytes).
    #[inline]
    fn write_le_i64(&mut self, n: i64) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, 8u, |v| self.write(v))
    }
    /// Write a little-endian i32 (4 bytes).
    #[inline]
    fn write_le_i32(&mut self, n: i32) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, 4u, |v| self.write(v))
    }
    /// Write a little-endian i16 (2 bytes).
    #[inline]
    fn write_le_i16(&mut self, n: i16) -> IoResult<()> {
        extensions::u64_to_le_bytes(n as u64, 2u, |v| self.write(v))
    }
    /// Write a little-endian IEEE754 double-precision floating-point
    /// (8 bytes).
    #[inline]
    fn write_le_f64(&mut self, f: f64) -> IoResult<()> {
        unsafe {
            // SAFETY: f64 and u64 have the same size; this is a bit-level copy.
            self.write_le_u64(transmute(f))
        }
    }
    /// Write a little-endian IEEE754 single-precision floating-point
    /// (4 bytes).
    #[inline]
    fn write_le_f32(&mut self, f: f32) -> IoResult<()> {
        unsafe {
            // SAFETY: f32 and u32 have the same size; this is a bit-level copy.
            self.write_le_u32(transmute(f))
        }
    }
    /// Write a u8 (1 byte).
    #[inline]
    fn write_u8(&mut self, n: u8) -> IoResult<()> {
        self.write(&[n])
    }
    /// Write an i8 (1 byte).
    #[inline]
    fn write_i8(&mut self, n: i8) -> IoResult<()> {
        self.write(&[n as u8])
    }
}
/// A writer which can be converted to a RefWriter.
#[deprecated = "use ByRefWriter instead"]
pub trait AsRefWriter {
    /// Creates a wrapper around a mutable reference to the writer.
    ///
    /// This is useful to allow applying wrappers while still
    /// retaining ownership of the original value.
    #[inline]
    fn by_ref<'a>(&'a mut self) -> RefWriter<'a, Self>;
}
// Blanket impl retained under the deprecated name for backwards compatibility.
#[allow(deprecated)]
impl<T: Writer> AsRefWriter for T {
    fn by_ref<'a>(&'a mut self) -> RefWriter<'a, T> {
        RefWriter { inner: self }
    }
}
/// A writer which can be converted to a RefWriter.
pub trait ByRefWriter {
    /// Creates a wrapper around a mutable reference to the writer.
    ///
    /// This is useful to allow applying wrappers while still
    /// retaining ownership of the original value.
    #[inline]
    fn by_ref<'a>(&'a mut self) -> RefWriter<'a, Self>;
}
// Blanket impl: any Writer can be borrowed through a RefWriter adaptor.
impl<T: Writer> ByRefWriter for T {
    fn by_ref<'a>(&'a mut self) -> RefWriter<'a, T> {
        RefWriter { inner: self }
    }
}
// Forward `Writer` through an owning trait object so a `Box<Writer>` can be
// used anywhere a `Writer` is expected.
impl<'a> Writer for Box<Writer+'a> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> IoResult<()> {
        // Reborrow down to the inner trait object so the call dispatches
        // dynamically to the boxed writer instead of recursing into this impl.
        (&mut **self).write(buf)
    }
    #[inline]
    fn flush(&mut self) -> IoResult<()> {
        (&mut **self).flush()
    }
}
// Likewise forward `Writer` through a mutable trait-object reference.
impl<'a> Writer for &'a mut Writer+'a {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> IoResult<()> { (**self).write(buf) }
    #[inline]
    fn flush(&mut self) -> IoResult<()> { (**self).flush() }
}
/// A `RefWriter` is a struct implementing `Writer` which contains a reference
/// to another writer. This is often useful when composing streams.
///
/// # Example
///
/// ```
/// # fn main() {}
/// # fn process_input<R: Reader>(r: R) {}
/// # fn foo () {
/// use std::io::util::TeeReader;
/// use std::io::{stdin, ByRefWriter};
///
/// let mut output = Vec::new();
///
/// {
///     // Don't give ownership of 'output' to the 'tee'. Instead we keep a
///     // handle to it in the outer scope
///     let mut tee = TeeReader::new(stdin(), output.by_ref());
///     process_input(tee);
/// }
///
/// println!("input processed: {}", output);
/// # }
/// ```
pub struct RefWriter<'a, W:'a> {
    /// The underlying writer which this is referencing
    // Private field: instances are constructed via `by_ref`.
    inner: &'a mut W
}
// Delegate writes and flushes straight through to the referenced writer.
impl<'a, W: Writer> Writer for RefWriter<'a, W> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> IoResult<()> { self.inner.write(buf) }
    #[inline]
    fn flush(&mut self) -> IoResult<()> { self.inner.flush() }
}
/// A Stream is a readable and a writable object. Data written is typically
/// received by the object which reads receive data from.
pub trait Stream: Reader + Writer { }
// Blanket impl: anything both readable and writable is automatically a Stream.
impl<T: Reader + Writer> Stream for T {}
/// An iterator that reads a line on each iteration,
/// until `.read_line()` encounters `EndOfFile`.
///
/// # Notes about the Iteration Protocol
///
/// The `Lines` may yield `None` and thus terminate
/// an iteration, but continue to yield elements if iteration
/// is attempted again.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
pub struct Lines<'r, T:'r> {
buffer: &'r mut T,
}
impl<'r, T: Buffer> Iterator<IoResult<String>> for Lines<'r, T> {
fn next(&mut self) -> Option<IoResult<String>> {
match self.buffer.read_line() {
Ok(x) => Some(Ok(x)),
Err(IoError { kind: EndOfFile, ..}) => None,
Err(y) => Some(Err(y))
}
}
}
/// An iterator that reads a utf8-encoded character on each iteration,
/// until `.read_char()` encounters `EndOfFile`.
///
/// # Notes about the Iteration Protocol
///
/// The `Chars` may yield `None` and thus terminate
/// an iteration, but continue to yield elements if iteration
/// is attempted again.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
pub struct Chars<'r, T:'r> {
buffer: &'r mut T
}
impl<'r, T: Buffer> Iterator<IoResult<char>> for Chars<'r, T> {
fn next(&mut self) -> Option<IoResult<char>> {
match self.buffer.read_char() {
Ok(x) => Some(Ok(x)),
Err(IoError { kind: EndOfFile, ..}) => None,
Err(y) => Some(Err(y))
}
}
}
/// A Buffer is a type of reader which has some form of internal buffering to
/// allow certain kinds of reading operations to be more optimized than others.
/// This type extends the `Reader` trait with a few methods that are not
/// possible to reasonably implement with purely a read interface.
pub trait Buffer: Reader {
    /// Fills the internal buffer of this object, returning the buffer contents.
    /// Note that none of the contents will be "read" in the sense that later
    /// calling `read` may return the same contents.
    ///
    /// The `consume` function must be called with the number of bytes that are
    /// consumed from this buffer returned to ensure that the bytes are never
    /// returned twice.
    ///
    /// # Error
    ///
    /// This function will return an I/O error if the underlying reader was
    /// read, but returned an error. Note that it is not an error to return a
    /// 0-length buffer.
    fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]>;
    /// Tells this buffer that `amt` bytes have been consumed from the buffer,
    /// so they should no longer be returned in calls to `read`.
    fn consume(&mut self, amt: uint);
    /// Reads the next line of input, interpreted as a sequence of UTF-8
    /// encoded Unicode codepoints. If a newline is encountered, then the
    /// newline is contained in the returned string.
    ///
    /// # Example
    ///
    /// ```rust
    /// use std::io;
    ///
    /// let mut reader = io::stdin();
    /// let input = reader.read_line().ok().unwrap_or("nothing".to_string());
    /// ```
    ///
    /// # Error
    ///
    /// This function has the same error semantics as `read_until`:
    ///
    /// * All non-EOF errors will be returned immediately
    /// * If an error is returned previously consumed bytes are lost
    /// * EOF is only returned if no bytes have been read
    /// * Reach EOF may mean that the delimiter is not present in the return
    ///   value
    ///
    /// Additionally, this function can fail if the line of input read is not a
    /// valid UTF-8 sequence of bytes.
    fn read_line(&mut self) -> IoResult<String> {
        // Read up to (and including) '\n', then validate the bytes as UTF-8.
        self.read_until(b'\n').and_then(|line|
            match String::from_utf8(line) {
                Ok(s) => Ok(s),
                Err(_) => Err(standard_error(InvalidInput)),
            }
        )
    }
    /// Reads a sequence of bytes leading up to a specified delimiter. Once the
    /// specified byte is encountered, reading ceases and the bytes up to and
    /// including the delimiter are returned.
    ///
    /// # Error
    ///
    /// If any I/O error is encountered other than EOF, the error is immediately
    /// returned. Note that this may discard bytes which have already been read,
    /// and those bytes will *not* be returned. It is recommended to use other
    /// methods if this case is worrying.
    ///
    /// If EOF is encountered, then this function will return EOF if 0 bytes
    /// have been read, otherwise the pending byte buffer is returned. This
    /// is the reason that the byte buffer returned may not always contain the
    /// delimiter.
    fn read_until(&mut self, byte: u8) -> IoResult<Vec<u8>> {
        let mut res = Vec::new();
        let mut used;
        loop {
            {
                // Inner block scopes the borrow taken by fill_buf() so that
                // consume() can be called once the block ends.
                let available = match self.fill_buf() {
                    Ok(n) => n,
                    Err(ref e) if res.len() > 0 && e.kind == EndOfFile => {
                        // EOF after some data was collected: return what we
                        // have; nothing further needs consuming.
                        used = 0;
                        break
                    }
                    Err(e) => return Err(e)
                };
                match available.iter().position(|&b| b == byte) {
                    Some(i) => {
                        // Take everything up to and including the delimiter.
                        res.push_all(available[..i + 1]);
                        used = i + 1;
                        break
                    }
                    None => {
                        // No delimiter yet: keep the whole chunk and refill.
                        res.push_all(available);
                        used = available.len();
                    }
                }
            }
            self.consume(used);
        }
        // The `break` paths above skip the consume inside the loop, so the
        // final chunk (or zero bytes, on the EOF path) is accounted for here.
        self.consume(used);
        Ok(res)
    }
    /// Reads the next utf8-encoded character from the underlying stream.
    ///
    /// # Error
    ///
    /// If an I/O error occurs, or EOF, then this function will return `Err`.
    /// This function will also return error if the stream does not contain a
    /// valid utf-8 encoded codepoint as the next few bytes in the stream.
    fn read_char(&mut self) -> IoResult<char> {
        let first_byte = try!(self.read_byte());
        // The first byte of a UTF-8 sequence determines its total width.
        let width = str::utf8_char_width(first_byte);
        if width == 1 { return Ok(first_byte as char) } // ASCII fast path
        if width == 0 { return Err(standard_error(InvalidInput)) } // not utf8
        let mut buf = [first_byte, 0, 0, 0];
        {
            // Read the remaining continuation bytes of the code point.
            // NOTE(review): a reader that repeatedly returns Ok(0) would loop
            // here forever — confirm implementations never report 0 progress.
            let mut start = 1;
            while start < width {
                match try!(self.read(buf[mut start..width])) {
                    n if n == width - start => break,
                    n if n < width - start => { start += n; }
                    _ => return Err(standard_error(InvalidInput)),
                }
            }
        }
        // Validate the assembled bytes and extract the single code point.
        match str::from_utf8(buf[..width]) {
            Some(s) => Ok(s.char_at(0)),
            None => Err(standard_error(InvalidInput))
        }
    }
}
/// Extension methods for the Buffer trait which are included in the prelude.
pub trait BufferPrelude {
    /// Create an iterator that reads a utf8-encoded character on each iteration
    /// until EOF.
    ///
    /// # Error
    ///
    /// Any error other than `EndOfFile` that is produced by the underlying Reader
    /// is returned by the iterator and should be handled by the caller.
    fn chars<'r>(&'r mut self) -> Chars<'r, Self>;
    /// Create an iterator that reads a line on each iteration until EOF.
    ///
    /// # Error
    ///
    /// Any error other than `EndOfFile` that is produced by the underlying Reader
    /// is returned by the iterator and should be handled by the caller.
    fn lines<'r>(&'r mut self) -> Lines<'r, Self>;
}
// Blanket impl: every Buffer gets the char/line iterator adaptors.
impl<T: Buffer> BufferPrelude for T {
    fn chars<'r>(&'r mut self) -> Chars<'r, T> {
        Chars { buffer: self }
    }
    fn lines<'r>(&'r mut self) -> Lines<'r, T> {
        Lines { buffer: self }
    }
}
/// When seeking, the resulting cursor is offset from a base by the offset given
/// to the `seek` function. The base used is specified by this enumeration.
// Mirrors the traditional SEEK_SET / SEEK_END / SEEK_CUR trio.
pub enum SeekStyle {
    /// Seek from the beginning of the stream
    SeekSet,
    /// Seek from the end of the stream
    SeekEnd,
    /// Seek from the current position
    SeekCur,
}
/// An object implementing `Seek` internally has some form of cursor which can
/// be moved within a stream of bytes. The stream typically has a fixed size,
/// allowing seeking relative to either end.
pub trait Seek {
    /// Return position of file cursor in the stream
    fn tell(&self) -> IoResult<u64>;
    /// Seek to an offset in a stream
    ///
    /// A successful seek clears the EOF indicator. Seeking beyond EOF is
    /// allowed, but seeking before position 0 is not allowed.
    ///
    /// # Errors
    ///
    /// * Seeking to a negative offset is considered an error
    /// * Seeking past the end of the stream does not modify the underlying
    ///   stream, but the next write may cause the previous data to be filled in
    ///   with a bit pattern.
    fn seek(&mut self, pos: i64, style: SeekStyle) -> IoResult<()>;
}
/// A listener is a value that can consume itself to start listening for
/// connections.
///
/// Doing so produces some sort of Acceptor.
pub trait Listener<T, A: Acceptor<T>> {
    /// Spin up the listener and start queuing incoming connections
    ///
    /// # Error
    ///
    /// Returns `Err` if this listener could not be bound to listen for
    /// connections. In all cases, this listener is consumed.
    // Note: takes `self` by value — listening consumes the listener.
    fn listen(self) -> IoResult<A>;
}
/// An acceptor is a value that presents incoming connections
pub trait Acceptor<T> {
    /// Wait for and accept an incoming connection
    ///
    /// # Error
    ///
    /// Returns `Err` if an I/O error is encountered.
    fn accept(&mut self) -> IoResult<T>;
    /// Create an iterator over incoming connection attempts.
    ///
    /// Note that I/O errors will be yielded by the iterator itself.
    // Default method: wraps `accept` in the never-ending iterator below.
    fn incoming<'r>(&'r mut self) -> IncomingConnections<'r, Self> {
        IncomingConnections { inc: self }
    }
}
/// An infinite iterator over incoming connection attempts.
/// Calling `next` will block the task until a connection is attempted.
///
/// Since connection attempts can continue forever, this iterator always returns
/// `Some`. The `Some` contains the `IoResult` representing whether the
/// connection attempt was successful. A successful connection will be wrapped
/// in `Ok`. A failed connection is represented as an `Err`.
pub struct IncomingConnections<'a, A:'a> {
    // The acceptor each `next` call delegates to.
    inc: &'a mut A,
}
impl<'a, T, A: Acceptor<T>> Iterator<IoResult<T>> for IncomingConnections<'a, A> {
    fn next(&mut self) -> Option<IoResult<T>> {
        // Never yields None: each step is one (possibly failed) accept.
        Some(self.inc.accept())
    }
}
/// Creates a standard error for a commonly used flavor of error. The `detail`
/// field of the returned error will always be `None`.
///
/// # Example
///
/// ```
/// use std::io;
///
/// let eof = io::standard_error(io::EndOfFile);
/// let einval = io::standard_error(io::InvalidInput);
/// ```
pub fn standard_error(kind: IoErrorKind) -> IoError {
    // Build the error value directly, looking up the canonical static
    // description for each kind inline.
    IoError {
        kind: kind,
        desc: match kind {
            EndOfFile => "end of file",
            IoUnavailable => "I/O is unavailable",
            InvalidInput => "invalid input",
            OtherIoError => "unknown I/O error",
            FileNotFound => "file not found",
            PermissionDenied => "permission denied",
            ConnectionFailed => "connection failed",
            Closed => "stream is closed",
            ConnectionRefused => "connection refused",
            ConnectionReset => "connection reset",
            ConnectionAborted => "connection aborted",
            NotConnected => "not connected",
            BrokenPipe => "broken pipe",
            PathAlreadyExists => "file already exists",
            PathDoesntExist => "no such file",
            MismatchedFileTypeForOperation => "mismatched file type",
            ResourceUnavailable => "resource unavailable",
            TimedOut => "operation timed out",
            ShortWrite(..) => "short write",
            NoProgress => "no progress",
        },
        detail: None,
    }
}
/// A mode specifies how a file should be opened or created. These modes are
/// passed to `File::open_mode` and are used to control where the file is
/// positioned when it is initially opened.
pub enum FileMode {
    /// Opens a file positioned at the beginning.
    Open,
    /// Opens a file positioned at EOF.
    Append,
    /// Opens a file, truncating it if it already exists.
    Truncate,
}
/// Access permissions with which the file should be opened. `File`s
/// opened with `Read` will return an error if written to.
// Roughly corresponds to the classic read/write/read-write open flags.
pub enum FileAccess {
    /// Read-only access, requests to write will result in an error
    Read,
    /// Write-only access, requests to read will result in an error
    Write,
    /// Read-write access, no requests are denied by default
    ReadWrite,
}
/// Different kinds of files which can be identified by a call to stat
// Variant names map directly onto the POSIX S_IF* file-type constants.
#[deriving(PartialEq, Show, Hash, Clone)]
pub enum FileType {
    /// This is a normal file, corresponding to `S_IFREG`
    TypeFile,
    /// This file is a directory, corresponding to `S_IFDIR`
    TypeDirectory,
    /// This file is a named pipe, corresponding to `S_IFIFO`
    TypeNamedPipe,
    /// This file is a block device, corresponding to `S_IFBLK`
    TypeBlockSpecial,
    /// This file is a symbolic link to another file, corresponding to `S_IFLNK`
    TypeSymlink,
    /// The type of this file is not recognized as one of the other categories
    TypeUnknown,
}
/// A structure used to describe metadata information about a file. This
/// structure is created through the `stat` method on a `Path`.
///
/// # Example
///
/// ```
/// # use std::io::fs::PathExtensions;
/// # fn main() {}
/// # fn foo() {
/// let info = match Path::new("foo.txt").stat() {
///     Ok(stat) => stat,
///     Err(e) => panic!("couldn't read foo.txt: {}", e),
/// };
///
/// println!("byte size: {}", info.size);
/// # }
/// ```
#[deriving(Hash)]
pub struct FileStat {
    /// The size of the file, in bytes
    pub size: u64,
    /// The kind of file this path points to (directory, file, pipe, etc.)
    pub kind: FileType,
    /// The file permissions currently on the file
    pub perm: FilePermission,
    // FIXME(#10301): These time fields are pretty useless without an actual
    //                time representation, what are the milliseconds relative
    //                to?
    // NOTE(review): the epoch for the three timestamps below is
    // platform-dependent and unspecified here (see the FIXME above).
    /// The time that the file was created at, in platform-dependent
    /// milliseconds
    pub created: u64,
    /// The time that this file was last modified, in platform-dependent
    /// milliseconds
    pub modified: u64,
    /// The time that this file was last accessed, in platform-dependent
    /// milliseconds
    pub accessed: u64,
    /// Information returned by stat() which is not guaranteed to be
    /// platform-independent. This information may be useful on some platforms,
    /// but it may have different meanings or no meaning at all on other
    /// platforms.
    ///
    /// Usage of this field is discouraged, but if access is desired then the
    /// fields are located here.
    #[unstable]
    pub unstable: UnstableFileStat,
}
/// This structure represents all of the possible information which can be
/// returned from a `stat` syscall which is not contained in the `FileStat`
/// structure. This information is not necessarily platform independent, and may
/// have different meanings or no meaning at all on some platforms.
// Field names mirror the members of the POSIX `struct stat`.
#[unstable]
#[deriving(Hash)]
pub struct UnstableFileStat {
    /// The ID of the device containing the file.
    pub device: u64,
    /// The file serial number.
    pub inode: u64,
    /// The device ID.
    pub rdev: u64,
    /// The number of hard links to this file.
    pub nlink: u64,
    /// The user ID of the file.
    pub uid: u64,
    /// The group ID of the file.
    pub gid: u64,
    /// The optimal block size for I/O.
    pub blksize: u64,
    /// The blocks allocated for this file.
    pub blocks: u64,
    /// User-defined flags for the file.
    pub flags: u64,
    /// The file generation number.
    pub gen: u64,
}
// File-permission bits, written in octal to mirror the traditional Unix mode
// bits (owner / group / other, each read-write-execute).
bitflags! {
    #[doc = "A set of permissions for a file or directory is represented"]
    #[doc = "by a set of flags which are or'd together."]
    flags FilePermission: u32 {
        const USER_READ     = 0o400,
        const USER_WRITE    = 0o200,
        const USER_EXECUTE  = 0o100,
        const GROUP_READ    = 0o040,
        const GROUP_WRITE   = 0o020,
        const GROUP_EXECUTE = 0o010,
        const OTHER_READ    = 0o004,
        const OTHER_WRITE   = 0o002,
        const OTHER_EXECUTE = 0o001,
        const USER_RWX  = USER_READ.bits | USER_WRITE.bits | USER_EXECUTE.bits,
        const GROUP_RWX = GROUP_READ.bits | GROUP_WRITE.bits | GROUP_EXECUTE.bits,
        const OTHER_RWX = OTHER_READ.bits | OTHER_WRITE.bits | OTHER_EXECUTE.bits,
        #[doc = "Permissions for user owned files, equivalent to 0644 on"]
        #[doc = "unix-like systems."]
        const USER_FILE = USER_READ.bits | USER_WRITE.bits | GROUP_READ.bits | OTHER_READ.bits,
        #[doc = "Permissions for user owned directories, equivalent to 0755 on"]
        #[doc = "unix-like systems."]
        const USER_DIR  = USER_RWX.bits | GROUP_READ.bits | GROUP_EXECUTE.bits |
                          OTHER_READ.bits | OTHER_EXECUTE.bits,
        #[doc = "Permissions for user owned executables, equivalent to 0755"]
        #[doc = "on unix-like systems."]
        const USER_EXEC = USER_DIR.bits,
        #[doc = "All possible permissions enabled."]
        const ALL_PERMISSIONS = USER_RWX.bits | GROUP_RWX.bits | OTHER_RWX.bits,
        // Deprecated names
        // The CamelCase aliases below are retained purely for backwards
        // compatibility with older code; each maps to its SCREAMING_CASE form.
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_READ instead"]
        const UserRead     = USER_READ.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_WRITE instead"]
        const UserWrite    = USER_WRITE.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_EXECUTE instead"]
        const UserExecute  = USER_EXECUTE.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use GROUP_READ instead"]
        const GroupRead    = GROUP_READ.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use GROUP_WRITE instead"]
        const GroupWrite   = GROUP_WRITE.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use GROUP_EXECUTE instead"]
        const GroupExecute = GROUP_EXECUTE.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use OTHER_READ instead"]
        const OtherRead    = OTHER_READ.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use OTHER_WRITE instead"]
        const OtherWrite   = OTHER_WRITE.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use OTHER_EXECUTE instead"]
        const OtherExecute = OTHER_EXECUTE.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_RWX instead"]
        const UserRWX  = USER_RWX.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use GROUP_RWX instead"]
        const GroupRWX = GROUP_RWX.bits,
        #[allow(non_upper_case_globals)]
        #[deprecated = "use OTHER_RWX instead"]
        const OtherRWX = OTHER_RWX.bits,
        #[doc = "Deprecated: use `USER_FILE` instead."]
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_FILE instead"]
        const UserFile = USER_FILE.bits,
        #[doc = "Deprecated: use `USER_DIR` instead."]
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_DIR instead"]
        const UserDir  = USER_DIR.bits,
        #[doc = "Deprecated: use `USER_EXEC` instead."]
        #[allow(non_upper_case_globals)]
        #[deprecated = "use USER_EXEC instead"]
        const UserExec = USER_EXEC.bits,
        #[doc = "Deprecated: use `ALL_PERMISSIONS` instead"]
        #[allow(non_upper_case_globals)]
        #[deprecated = "use ALL_PERMISSIONS instead"]
        const AllPermissions = ALL_PERMISSIONS.bits,
    }
}
// An empty permission set (0o000) is the default.
impl Default for FilePermission {
    #[inline]
    fn default() -> FilePermission { FilePermission::empty() }
}
// Render permissions in the conventional four-digit octal form (e.g. "0644").
impl fmt::Show for FilePermission {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:04o}", self.bits)
    }
}
#[cfg(test)]
mod tests {
    use self::BadReaderBehavior::*;
    use super::{IoResult, Reader, MemReader, NoProgress, InvalidInput};
    use prelude::*;
    use uint;
    // Script entries for BadReader: each entry is consumed in order and
    // controls how many subsequent reads behave well or badly.
    #[deriving(Clone, PartialEq, Show)]
    enum BadReaderBehavior {
        GoodBehavior(uint),
        BadBehavior(uint)
    }
    // A reader wrapper that misbehaves (returns Ok(0), i.e. "no progress")
    // according to a script; used to exercise read_at_least/push_at_least.
    struct BadReader<T> {
        r: T,
        behavior: Vec<BadReaderBehavior>,
    }
    impl<T: Reader> BadReader<T> {
        fn new(r: T, behavior: Vec<BadReaderBehavior>) -> BadReader<T> {
            BadReader { behavior: behavior, r: r }
        }
    }
    impl<T: Reader> Reader for BadReader<T> {
        fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
            let BadReader { ref mut behavior, ref mut r } = *self;
            loop {
                if behavior.is_empty() {
                    // fall back on good
                    return r.read(buf);
                }
                match behavior.as_mut_slice()[0] {
                    GoodBehavior(0) => (),
                    GoodBehavior(ref mut x) => {
                        *x -= 1;
                        return r.read(buf);
                    }
                    BadBehavior(0) => (),
                    BadBehavior(ref mut x) => {
                        *x -= 1;
                        return Ok(0);
                    }
                };
                // An exhausted entry (count 0) is dropped and the next tried.
                behavior.remove(0);
            }
        }
    }
    #[test]
    fn test_read_at_least() {
        // Well-behaved reader: read_at_least/read_exact succeed normally.
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![GoodBehavior(uint::MAX)]);
        let buf = &mut [0u8, ..5];
        assert!(r.read_at_least(1, buf).unwrap() >= 1);
        assert!(r.read_exact(5).unwrap().len() == 5); // read_exact uses read_at_least
        assert!(r.read_at_least(0, buf).is_ok());
        // A finite run of Ok(0) reads must be retried through, not fail.
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![BadBehavior(50), GoodBehavior(uint::MAX)]);
        assert!(r.read_at_least(1, buf).unwrap() >= 1);
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![BadBehavior(1), GoodBehavior(1),
                                        BadBehavior(50), GoodBehavior(uint::MAX)]);
        assert!(r.read_at_least(1, buf).unwrap() >= 1);
        assert!(r.read_at_least(1, buf).unwrap() >= 1);
        // An endless run of Ok(0) reads must surface NoProgress.
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![BadBehavior(uint::MAX)]);
        assert_eq!(r.read_at_least(1, buf).unwrap_err().kind, NoProgress);
        let mut r = MemReader::new(b"hello, world!".to_vec());
        assert_eq!(r.read_at_least(5, buf).unwrap(), 5);
        // Asking for more bytes than the buffer holds is InvalidInput.
        assert_eq!(r.read_at_least(6, buf).unwrap_err().kind, InvalidInput);
    }
    #[test]
    fn test_push_at_least() {
        // Mirrors test_read_at_least, but for the Vec-appending variant.
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![GoodBehavior(uint::MAX)]);
        let mut buf = Vec::new();
        assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
        assert!(r.push_at_least(0, 5, &mut buf).is_ok());
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![BadBehavior(50), GoodBehavior(uint::MAX)]);
        assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![BadBehavior(1), GoodBehavior(1),
                                        BadBehavior(50), GoodBehavior(uint::MAX)]);
        assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
        assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
        let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
                                   vec![BadBehavior(uint::MAX)]);
        assert_eq!(r.push_at_least(1, 5, &mut buf).unwrap_err().kind, NoProgress);
        // min > max is rejected as InvalidInput.
        let mut r = MemReader::new(b"hello, world!".to_vec());
        assert_eq!(r.push_at_least(5, 1, &mut buf).unwrap_err().kind, InvalidInput);
    }
    #[test]
    fn test_show() {
        // FilePermission's Show impl formats as four-digit octal.
        use super::*;
        assert_eq!(format!("{}", USER_READ), "0400".to_string());
        assert_eq!(format!("{}", USER_FILE), "0644".to_string());
        assert_eq!(format!("{}", USER_EXEC), "0755".to_string());
        assert_eq!(format!("{}", USER_RWX), "0700".to_string());
        assert_eq!(format!("{}", GROUP_RWX), "0070".to_string());
        assert_eq!(format!("{}", OTHER_RWX), "0007".to_string());
        assert_eq!(format!("{}", ALL_PERMISSIONS), "0777".to_string());
        assert_eq!(format!("{}", USER_READ | USER_WRITE | OTHER_WRITE), "0602".to_string());
    }
    // Compile-time check that Buffer can be used as a trait object.
    fn _ensure_buffer_is_object_safe<T: Buffer>(x: &T) -> &Buffer {
        x as &Buffer
    }
}
|
<|file_name|>info.js<|end_file_name|><|fim▁begin|>/*
© Copyright Adam Aharony (a.k.a. Cringy Adam)
All rights reserved
Twitter: @AdamAharony, Discord: @Cringy Adam#4611
*/
exports.run = (client, message, args) => {
message.delete();
message.channel.send('', {
embed: {
author: {
name: client.user.username
},
color: 0x008AF3,
title: "CringyBot Selfbot edition info:",
description: 'This selfbot is made by Adam Aharony (bot origionally from @XeliteXirish (a.k.a. Cringy Adam).\n(Twitter: @AdamAharony)\nAiming to make the discord experience much better.\nSpecial thanks to Jayden#5395 for helping me with some commands.',
timestamp: new Date(),
footer: {
text: 'CringyBot Selfbot edition',
<|fim▁hole|> icon_url: client.user.avatarURL
}
}
});
};<|fim▁end|> | |
<|file_name|>middleware.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2012 Rodrigo Alves Lima
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#<|fim▁hole|># GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import resolve
class URLNameMiddleware:
def process_view(self, request, *args):
request.url_name = resolve(request.path).url_name
return None<|fim▁end|> | # This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>commands.rs<|end_file_name|><|fim▁begin|>use types::{FromRedisValue, ToRedisArgs, RedisResult, NumericBehavior};
use client::Client;
use connection::{Connection, ConnectionLike};
use cmd::{cmd, Cmd, Pipeline, Iter};
macro_rules! implement_commands {
(
$(
$(#[$attr:meta])+
fn $name:ident<$($tyargs:ident : $ty:ident),*>(
$($argname:ident: $argty:ty),*) $body:block
)*
) =>
(
/// Implements common redis commands for connection like objects. This
/// allows you to send commands straight to a connection or client. It
/// is also implemented for redis results of clients which makes for
/// very convenient access in some basic cases.
///
/// This allows you to use nicer syntax for some common operations.
/// For instance this code:
///
/// ```rust,no_run
/// # fn do_something() -> redis::RedisResult<()> {
/// let client = try!(redis::Client::open("redis://127.0.0.1/"));
/// let con = try!(client.get_connection());
/// redis::cmd("SET").arg("my_key").arg(42i).execute(&con);
/// assert_eq!(redis::cmd("GET").arg("my_key").query(&con), Ok(42i));
/// # Ok(()) }
/// ```
///
/// Will become this:
///
/// ```rust,no_run
/// # fn do_something() -> redis::RedisResult<()> {
/// use redis::Commands;
/// let client = try!(redis::Client::open("redis://127.0.0.1/"));
/// let con = try!(client.get_connection());
/// assert_eq!(con.get("my_key"), Ok(42i));
/// # Ok(()) }
/// ```
pub trait Commands : ConnectionLike {
$(
$(#[$attr])*
#[inline]
fn $name<$($tyargs: $ty,)* RV: FromRedisValue>(
&self $(, $argname: $argty)*) -> RedisResult<RV>
{ ($body).query(self) }
)*
/// Incrementally iterate the keys space.
#[inline]
fn scan<RV: FromRedisValue>(&self) -> RedisResult<Iter<RV>> {
cmd("SCAN").cursor_arg(0).iter(self)
}
/// Incrementally iterate the keys space for keys matching a pattern.
#[inline]
fn scan_match<P: ToRedisArgs, RV: FromRedisValue>(&self, pattern: P) -> RedisResult<Iter<RV>> {
cmd("SCAN").cursor_arg(0).arg("MATCH").arg(pattern).iter(self)
}
/// Incrementally iterate hash fields and associated values.
#[inline]
fn hscan<K: ToRedisArgs, RV: FromRedisValue>(&self, key: K) -> RedisResult<Iter<RV>> {
cmd("HSCAN").arg(key).cursor_arg(0).iter(self)
}
/// Incrementally iterate hash fields and associated values for
/// field names matching a pattern.
#[inline]
fn hscan_match<K: ToRedisArgs, P: ToRedisArgs, RV: FromRedisValue>
(&self, key: K, pattern: P) -> RedisResult<Iter<RV>> {
cmd("HSCAN").arg(key).cursor_arg(0).arg("MATCH").arg(pattern).iter(self)
}
/// Incrementally iterate set elements.
#[inline]
fn sscan<K: ToRedisArgs, RV: FromRedisValue>(&self, key: K) -> RedisResult<Iter<RV>> {
cmd("SSCAN").arg(key).cursor_arg(0).iter(self)
}
/// Incrementally iterate set elements for elements matching a pattern.
#[inline]
fn sscan_match<K: ToRedisArgs, P: ToRedisArgs, RV: FromRedisValue>
(&self, key: K, pattern: P) -> RedisResult<Iter<RV>> {
cmd("SSCAN").arg(key).cursor_arg(0).arg("MATCH").arg(pattern).iter(self)
}
/// Incrementally iterate sorted set elements.
#[inline]
fn zscan<K: ToRedisArgs, RV: FromRedisValue>(&self, key: K) -> RedisResult<Iter<RV>> {
cmd("ZSCAN").arg(key).cursor_arg(0).iter(self)
}
/// Incrementally iterate sorted set elements for elements matching a pattern.
#[inline]
fn zscan_match<K: ToRedisArgs, P: ToRedisArgs, RV: FromRedisValue>
(&self, key: K, pattern: P) -> RedisResult<Iter<RV>> {
cmd("ZSCAN").arg(key).cursor_arg(0).arg("MATCH").arg(pattern).iter(self)
}
}
/// Implements common redis commands for pipelines. Unlike the regular
/// commands trait, this returns the pipeline rather than a result
/// directly. Other than that it works the same however.
pub trait PipelineCommands {
#[doc(hidden)]
#[inline]
fn perform(&mut self, con: &Cmd) -> &mut Self;
$(
$(#[$attr])*
#[inline]
fn $name<'a $(, $tyargs: $ty)*>(
&mut self $(, $argname: $argty)*) -> &mut Self
{ self.perform($body) }
)*
}
)
}
implement_commands!(
// most common operations
#[doc="Get the value of a key. If key is a vec this becomes an `MGET`."]
fn get<K: ToRedisArgs>(key: K) {
cmd(if key.is_single_arg() { "GET" } else { "MGET" }).arg(key)
}
#[doc="Set the string value of a key."]
fn set<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("SET").arg(key).arg(value)
}
#[doc="Sets multiple keys to their values."]
fn set_multiple<K: ToRedisArgs, V: ToRedisArgs>(items: &[(K, V)]) {
cmd("MSET").arg(items)
}
#[doc="Set the value and expiration of a key."]
fn set_ex<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V, seconds: uint) {
cmd("SETEX").arg(key).arg(value).arg(seconds)
}
#[doc="Set the value of a key, only if the key does not exist"]
fn set_nx<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("SETNX").arg(key).arg(value)
}
#[doc="Sets multiple keys to their values failing if at least one already exists."]
fn mset_nx<K: ToRedisArgs, V: ToRedisArgs>(items: &[(K, V)]) {
cmd("MSETNX").arg(items)
}
#[doc="Set the string value of a key and return its old value."]
fn getset<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("GETSET").arg(key).arg(value)
}
#[doc="Delete one or more keys."]
fn del<K: ToRedisArgs>(key: K) {
cmd("DEL").arg(key)
}
#[doc="Determine if a key exists."]
fn exists<K: ToRedisArgs>(key: K) {
cmd("EXISTS").arg(key)
}
#[doc="Set a key's time to live in seconds."]
fn expire<K: ToRedisArgs>(key: K, seconds: uint) {
cmd("EXPIRE").arg(key).arg(seconds)
}
#[doc="Set the expiration for a key as a UNIX timestamp."]
fn expire_at<K: ToRedisArgs>(key: K, ts: uint) {
cmd("EXPIREAT").arg(key).arg(ts)
}
#[doc="Set a key's time to live in milliseconds."]
fn pexpire<K: ToRedisArgs>(key: K, ms: uint) {
cmd("PEXPIRE").arg(key).arg(ms)
}
#[doc="Set the expiration for a key as a UNIX timestamp in milliseconds."]
fn pexpire_at<K: ToRedisArgs>(key: K, ts: uint) {
cmd("PEXPIREAT").arg(key).arg(ts)
}
#[doc="Remove the expiration from a key."]
fn persist<K: ToRedisArgs>(key: K) {
cmd("PERSIST").arg(key)
}
<|fim▁hole|>
#[doc="Rename a key, only if the new key does not exist."]
fn rename_nx<K: ToRedisArgs>(key: K, new_key: K) {
cmd("RENAMENX").arg(key).arg(new_key)
}
// common string operations
#[doc="Append a value to a key."]
fn append<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("APPEND").arg(key).arg(value)
}
#[doc="Increment the numeric value of a key by the given amount. This
issues a `INCRBY` or `INCRBYFLOAT` depending on the type."]
fn incr<K: ToRedisArgs, V: ToRedisArgs>(key: K, delta: V) {
cmd(if delta.describe_numeric_behavior() == NumericBehavior::NumberIsFloat {
"INCRBYFLOAT"
} else {
"INCRBY"
}).arg(key).arg(delta)
}
#[doc="Sets or clears the bit at offset in the string value stored at key."]
fn setbit<K: ToRedisArgs>(key: K, offset: uint, value: bool) {
cmd("SETBIT").arg(key).arg(offset).arg(value)
}
#[doc="Returns the bit value at offset in the string value stored at key."]
fn getbit<K: ToRedisArgs>(key: K, offset: uint) {
cmd("GETBIT").arg(key).arg(offset)
}
#[doc="Count set bits in a string."]
fn bitcount<K: ToRedisArgs>(key: K) {
cmd("BITCOUNT").arg(key)
}
#[doc="Count set bits in a string in a range."]
fn bitcount_range<K: ToRedisArgs>(key: K, start: uint, end: uint) {
cmd("BITCOUNT").arg(key).arg(start).arg(end)
}
#[doc="Perform a bitwise AND between multiple keys (containing string values)
and store the result in the destination key."]
fn bit_and<K: ToRedisArgs>(dstkey: K, srckeys: K) {
cmd("BITOP").arg("AND").arg(dstkey).arg(srckeys)
}
#[doc="Perform a bitwise OR between multiple keys (containing string values)
and store the result in the destination key."]
fn bit_or<K: ToRedisArgs>(dstkey: K, srckeys: K) {
cmd("BITOP").arg("OR").arg(dstkey).arg(srckeys)
}
#[doc="Perform a bitwise XOR between multiple keys (containing string values)
and store the result in the destination key."]
fn bit_xor<K: ToRedisArgs>(dstkey: K, srckeys: K) {
cmd("BITOP").arg("XOR").arg(dstkey).arg(srckeys)
}
#[doc="Perform a bitwise NOT of the key (containing string values)
and store the result in the destination key."]
fn bit_not<K: ToRedisArgs>(dstkey: K, srckey: K) {
cmd("BITOP").arg("NOT").arg(dstkey).arg(srckey)
}
#[doc="Get the length of the value stored in a key."]
fn strlen<K: ToRedisArgs>(key: K) {
cmd("STRLEN").arg(key)
}
// hash operations
#[doc="Gets a single (or multiple) fields from a hash."]
fn hget<K: ToRedisArgs, F: ToRedisArgs>(key: K, field: F) {
cmd(if field.is_single_arg() { "HGET" } else { "HMGET" }).arg(key).arg(field)
}
#[doc="Deletes a single (or multiple) fields from a hash."]
fn hdel<K: ToRedisArgs, F: ToRedisArgs>(key: K, field: F) {
cmd("HDEL").arg(key).arg(field)
}
#[doc="Sets a single field in a hash."]
fn hset<K: ToRedisArgs, F: ToRedisArgs, V: ToRedisArgs>(key: K, field: F, value: V) {
cmd("HSET").arg(key).arg(field).arg(value)
}
#[doc="Sets a single field in a hash if it does not exist."]
fn hset_nx<K: ToRedisArgs, F: ToRedisArgs, V: ToRedisArgs>(key: K, field: F, value: V) {
cmd("HSETNX").arg(key).arg(field).arg(value)
}
#[doc="Sets a multiple fields in a hash."]
fn hset_multiple<K: ToRedisArgs, F: ToRedisArgs, V: ToRedisArgs>(key: K, items: &[(F, V)]) {
cmd("HMSET").arg(key).arg(items)
}
#[doc="Increments a value."]
fn hincr<K: ToRedisArgs, F: ToRedisArgs, D: ToRedisArgs>(key: K, field: F, delta: D) {
cmd(if delta.describe_numeric_behavior() == NumericBehavior::NumberIsFloat {
"HINCRBYFLOAT"
} else {
"HINCRBY"
}).arg(key).arg(field).arg(delta)
}
#[doc="Checks if a field in a hash exists."]
fn hexists<K: ToRedisArgs, F: ToRedisArgs>(key: K, field: F) {
cmd("HEXISTS").arg(key).arg(field)
}
#[doc="Gets all the keys in a hash."]
fn hkeys<K: ToRedisArgs>(key: K) {
cmd("HKEYS").arg(key)
}
#[doc="Gets all the values in a hash."]
fn hvals<K: ToRedisArgs>(key: K) {
cmd("HVALS").arg(key)
}
#[doc="Gets all the fields and values in a hash."]
fn hgetall<K: ToRedisArgs>(key: K) {
cmd("HGETALL").arg(key)
}
#[doc="Gets the length of a hash."]
fn hlen<K: ToRedisArgs>(key: K) {
cmd("HLEN").arg(key)
}
// list operations
#[doc="Remove and get the first element in a list, or block until one is available."]
fn blpop<K: ToRedisArgs>(key: K, timeout: uint) {
cmd("BLPOP").arg(key).arg(timeout)
}
#[doc="Remove and get the last element in a list, or block until one is available."]
fn brpop<K: ToRedisArgs>(key: K, timeout: uint) {
cmd("BRPOP").arg(key).arg(timeout)
}
#[doc="Pop a value from a list, push it to another list and return it;
or block until one is available."]
fn brpoplpush<K: ToRedisArgs>(srckey: K, dstkey: K, timeout: uint) {
cmd("BRPOPLPUSH").arg(srckey).arg(dstkey).arg(timeout)
}
#[doc="Get an element from a list by its index."]
fn lindex<K: ToRedisArgs>(key: K, index: int) {
cmd("LINDEX").arg(key).arg(index)
}
#[doc="Insert an element before another element in a list."]
fn linsert_before<K: ToRedisArgs, P: ToRedisArgs, V: ToRedisArgs>(
key: K, pivot: P, value: V) {
cmd("LINSERT").arg(key).arg("BEFORE").arg(pivot).arg(value)
}
#[doc="Insert an element after another element in a list."]
fn linsert_after<K: ToRedisArgs, P: ToRedisArgs, V: ToRedisArgs>(
key: K, pivot: P, value: V) {
cmd("LINSERT").arg(key).arg("AFTER").arg(pivot).arg(value)
}
#[doc="Returns the length of the list stored at key."]
fn llen<K: ToRedisArgs>(key: K) {
cmd("LLEN").arg(key)
}
#[doc="Removes and returns the first element of the list stored at key."]
fn lpop<K: ToRedisArgs>(key: K) {
cmd("LPOP").arg(key)
}
#[doc="Insert all the specified values at the head of the list stored at key."]
fn lpush<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("LPUSH").arg(key).arg(value)
}
#[doc="Inserts a value at the head of the list stored at key, only if key already exists and
holds a list."]
fn lpush_exists<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("LPUSHX").arg(key).arg(value)
}
#[doc="Returns the specified elements of the list stored at key."]
fn lrange<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("LRANGE").arg(key).arg(start).arg(stop)
}
#[doc="Removes the first count occurrences of elements equal to value
from the list stored at key."]
fn lrem<K: ToRedisArgs, V: ToRedisArgs>(key: K, count: int, value: V) {
cmd("LREM").arg(key).arg(count).arg(value)
}
#[doc="Trim an existing list so that it will contain only the specified
range of elements specified."]
fn ltrim<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("LTRIM").arg(key).arg(start).arg(stop)
}
#[doc="Removes and returns the last element of the list stored at key."]
fn rpop<K: ToRedisArgs>(key: K) {
cmd("RPOP").arg(key)
}
#[doc="Pop a value from a list, push it to another list and return it."]
fn rpoplpush<K: ToRedisArgs>(key: K, dstkey: K) {
cmd("RPOPLPUSH").arg(key).arg(dstkey)
}
#[doc="Insert all the specified values at the tail of the list stored at key."]
fn rpush<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("RPUSH").arg(key).arg(value)
}
#[doc="Inserts value at the tail of the list stored at key, only if key already exists and
holds a list."]
fn rpush_exists<K: ToRedisArgs, V: ToRedisArgs>(key: K, value: V) {
cmd("RPUSHX").arg(key).arg(value)
}
// set commands
#[doc="Add one or more members to a set."]
fn sadd<K: ToRedisArgs, M: ToRedisArgs>(key: K, member: M) {
cmd("SADD").arg(key).arg(member)
}
#[doc="Get the number of members in a set."]
fn scard<K: ToRedisArgs>(key: K) {
cmd("SCARD").arg(key)
}
#[doc="Subtract multiple sets."]
fn sdiff<K: ToRedisArgs>(keys: K) {
cmd("SDIFF").arg(keys)
}
#[doc="Subtract multiple sets and store the resulting set in a key."]
fn sdiffstore<K: ToRedisArgs>(dstkey: K, keys: K) {
cmd("SDIFFSTORE").arg(dstkey).arg(keys)
}
#[doc="Intersect multiple sets."]
fn sinter<K: ToRedisArgs>(keys: K) {
cmd("SINTER").arg(keys)
}
#[doc="Intersect multiple sets and store the resulting set in a key."]
fn sdinterstore<K: ToRedisArgs>(dstkey: K, keys: K) {
cmd("SINTERSTORE").arg(dstkey).arg(keys)
}
#[doc="Determine if a given value is a member of a set."]
fn sismember<K: ToRedisArgs, M: ToRedisArgs>(key: K, member: M) {
cmd("SISMEMBER").arg(key).arg(member)
}
#[doc="Get all the members in a set."]
fn smembers<K: ToRedisArgs>(key: K) {
cmd("SMEMBERS").arg(key)
}
#[doc="Move a member from one set to another."]
fn smove<K: ToRedisArgs, M: ToRedisArgs>(srckey: K, dstkey: K, member: M) {
cmd("SMOVE").arg(srckey).arg(dstkey).arg(member)
}
#[doc="Remove and return a random member from a set."]
fn spop<K: ToRedisArgs>(key: K) {
cmd("SPOP").arg(key)
}
#[doc="Get one random member from a set."]
fn srandmember<K: ToRedisArgs>(key: K) {
cmd("SRANDMEMBER").arg(key)
}
#[doc="Get multiple random members from a set."]
fn srandmember_multiple<K: ToRedisArgs>(key: K, count: uint) {
cmd("SRANDMEMBER").arg(key).arg(count)
}
#[doc="Remove one or more members from a set."]
fn srem<K: ToRedisArgs, M: ToRedisArgs>(key: K, member: M) {
cmd("SREM").arg(key).arg(member)
}
#[doc="Add multiple sets."]
fn sunion<K: ToRedisArgs>(keys: K) {
cmd("SUNION").arg(keys)
}
#[doc="Add multiple sets and store the resulting set in a key."]
fn sunionstore<K: ToRedisArgs>(dstkey: K, keys: K) {
cmd("SUNIONSTORE").arg(dstkey).arg(keys)
}
// sorted set commands
#[doc="Add one member to a sorted set, or update its score
if it already exists."]
fn zadd<K: ToRedisArgs, S: ToRedisArgs, M: ToRedisArgs>(key: K, member: M, score: S) {
cmd("ZADD").arg(key).arg(score).arg(member)
}
#[doc="Add multiple members to a sorted set, or update its score
if it already exists."]
fn zadd_multiple<K: ToRedisArgs, S: ToRedisArgs, M: ToRedisArgs>(key: K, items: &[(S, M)]) {
cmd("ZADD").arg(key).arg(items)
}
#[doc="Get the number of members in a sorted set."]
fn zcard<K: ToRedisArgs>(key: K) {
cmd("ZCARD").arg(key)
}
#[doc="Count the members in a sorted set with scores within the given values."]
fn zcount<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(key: K, min: M, max: MM) {
cmd("ZCOUNT").arg(key).arg(min).arg(max)
}
#[doc="Count the members in a sorted set with scores within the given values."]
fn zincr<K: ToRedisArgs, M: ToRedisArgs, D: ToRedisArgs>(key: K, member: M, delta: D) {
cmd("ZINCRBY").arg(key).arg(delta).arg(member)
}
#[doc="Intersect multiple sorted sets and store the resulting sorted set in
a new key using SUM as aggregation function."]
fn zinterstore<K: ToRedisArgs>(dstkey: K, keys: &[K]) {
cmd("ZINTERSTORE").arg(dstkey).arg(keys.len()).arg(keys)
}
#[doc="Intersect multiple sorted sets and store the resulting sorted set in
a new key using MIN as aggregation function."]
fn zinterstore_min<K: ToRedisArgs>(dstkey: K, keys: &[K]) {
cmd("ZINTERSTORE").arg(dstkey).arg(keys.len()).arg(keys).arg("AGGREGATE").arg("MIN")
}
#[doc="Intersect multiple sorted sets and store the resulting sorted set in
a new key using MAX as aggregation function."]
fn zinterstore_max<K: ToRedisArgs>(dstkey: K, keys: &[K]) {
cmd("ZINTERSTORE").arg(dstkey).arg(keys.len()).arg(keys).arg("AGGREGATE").arg("MAX")
}
#[doc="Count the number of members in a sorted set between a given lexicographical range."]
fn zlexcount<K: ToRedisArgs, L: ToRedisArgs>(key: K, min: L, max: L) {
cmd("ZLEXCOUNT").arg(key).arg(min).arg(max)
}
#[doc="Return a range of members in a sorted set, by index"]
fn zrange<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("ZRANGE").arg(key).arg(start).arg(stop)
}
#[doc="Return a range of members in a sorted set, by index with scores."]
fn zrange_withscores<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("ZRANGE").arg(key).arg(start).arg(stop).arg("WITHSCORES")
}
#[doc="Return a range of members in a sorted set, by lexicographical range."]
fn zrangebylex<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(key: K, min: M, max: MM) {
cmd("ZRANGEBYLEX").arg(key).arg(min).arg(max)
}
#[doc="Return a range of members in a sorted set, by lexicographical
range with offset and limit."]
fn zrangebylex_limit<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(
key: K, min: M, max: MM, offset: int, count: int) {
cmd("ZRANGEBYLEX").arg(key).arg(min).arg(max).arg("LIMIT").arg(offset).arg(count)
}
#[doc="Return a range of members in a sorted set, by lexicographical range."]
fn zrevrangebylex<K: ToRedisArgs, MM: ToRedisArgs, M: ToRedisArgs>(key: K, max: MM, min: M) {
cmd("ZREVRANGEBYLEX").arg(key).arg(max).arg(min)
}
#[doc="Return a range of members in a sorted set, by lexicographical
range with offset and limit."]
fn zrevrangebylex_limit<K: ToRedisArgs, MM: ToRedisArgs, M: ToRedisArgs>(
key: K, max: MM, min: M, offset: int, count: int) {
cmd("ZREVRANGEBYLEX").arg(key).arg(max).arg(min).arg("LIMIT").arg(offset).arg(count)
}
#[doc="Return a range of members in a sorted set, by score."]
fn zrangebyscore<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(key: K, min: M, max: MM) {
cmd("ZRANGEBYSCORE").arg(key).arg(min).arg(max)
}
#[doc="Return a range of members in a sorted set, by score with scores."]
fn zrangebyscore_withscores<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(key: K, min: M, max: MM) {
cmd("ZRANGEBYSCORE").arg(key).arg(min).arg(max).arg("WITHSCORES")
}
#[doc="Return a range of members in a sorted set, by score with limit."]
fn zrangebyscore_limit<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>
(key: K, min: M, max: MM, offset: int, count: int) {
cmd("ZRANGEBYSCORE").arg(key).arg(min).arg(max).arg("LIMIT").arg(offset).arg(count)
}
#[doc="Return a range of members in a sorted set, by score with limit with scores."]
fn zrangebyscore_limit_withscores<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>
(key: K, min: M, max: MM, offset: int, count: int) {
cmd("ZRANGEBYSCORE").arg(key).arg(min).arg(max).arg("WITHSCORES")
.arg("LIMIT").arg(offset).arg(count)
}
#[doc="Determine the index of a member in a sorted set."]
fn zrank<K: ToRedisArgs, M: ToRedisArgs>(key: K, member: M) {
cmd("ZRANK").arg(key).arg(member)
}
#[doc="Remove one or more members from a sorted set."]
fn zrem<K: ToRedisArgs, M: ToRedisArgs>(key: K, members: M) {
cmd("ZREM").arg(key).arg(members)
}
#[doc="Remove all members in a sorted set between the given lexicographical range."]
fn zrembylex<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(key: K, min: M, max: MM) {
cmd("ZREMBYLEX").arg(key).arg(min).arg(max)
}
#[doc="Remove all members in a sorted set within the given indexes."]
fn zrembyrank<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("ZREMBYRANK").arg(key).arg(start).arg(stop)
}
#[doc="Remove all members in a sorted set within the given scores."]
fn zrembyscore<K: ToRedisArgs, M: ToRedisArgs, MM: ToRedisArgs>(key: K, min: M, max: MM) {
cmd("ZREMBYSCORE").arg(key).arg(min).arg(max)
}
#[doc="Return a range of members in a sorted set, by index, with scores
ordered from high to low."]
fn zrevrange<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("ZREVRANGE").arg(key).arg(start).arg(stop)
}
#[doc="Return a range of members in a sorted set, by index, with scores
ordered from high to low."]
fn zrevrange_withscores<K: ToRedisArgs>(key: K, start: int, stop: int) {
cmd("ZREVRANGE").arg(key).arg(start).arg(stop).arg("WITHSCORES")
}
#[doc="Return a range of members in a sorted set, by score."]
fn zrevrangebyscore<K: ToRedisArgs, MM: ToRedisArgs, M: ToRedisArgs>(key: K, max: MM, min: M) {
cmd("ZREVRANGEBYSCORE").arg(key).arg(max).arg(min)
}
#[doc="Return a range of members in a sorted set, by score with scores."]
fn zrevrangebyscore_withscores<K: ToRedisArgs, MM: ToRedisArgs, M: ToRedisArgs>(key: K, max: MM, min: M) {
cmd("ZREVRANGEBYSCORE").arg(key).arg(max).arg(min).arg("WITHSCORES")
}
#[doc="Return a range of members in a sorted set, by score with limit."]
fn zrevrangebyscore_limit<K: ToRedisArgs, MM: ToRedisArgs, M: ToRedisArgs>
(key: K, max: MM, min: M, offset: int, count: int) {
cmd("ZREVRANGEBYSCORE").arg(key).arg(max).arg(min).arg("LIMIT").arg(offset).arg(count)
}
#[doc="Return a range of members in a sorted set, by score with limit with scores."]
fn zrevrangebyscore_limit_withscores<K: ToRedisArgs, MM: ToRedisArgs, M: ToRedisArgs>
(key: K, max: MM, min: M, offset: int, count: int) {
cmd("ZREVRANGEBYSCORE").arg(key).arg(max).arg(min).arg("WITHSCORES")
.arg("LIMIT").arg(offset).arg(count)
}
#[doc="Determine the index of a member in a sorted set, with scores ordered from high to low."]
fn zrevrank<K: ToRedisArgs, M: ToRedisArgs>(key: K, member: M) {
cmd("ZREVRANK").arg(key).arg(member)
}
#[doc="Get the score associated with the given member in a sorted set."]
fn zscore<K: ToRedisArgs, M: ToRedisArgs>(key: K, member: M) {
cmd("ZSCORE").arg(key).arg(member)
}
#[doc="Unions multiple sorted sets and store the resulting sorted set in
a new key using SUM as aggregation function."]
fn zunionstore<K: ToRedisArgs>(dstkey: K, keys: &[K]) {
cmd("ZUNIONSTORE").arg(dstkey).arg(keys.len()).arg(keys)
}
#[doc="Unions multiple sorted sets and store the resulting sorted set in
a new key using MIN as aggregation function."]
fn zunionstore_min<K: ToRedisArgs>(dstkey: K, keys: &[K]) {
cmd("ZUNIONSTORE").arg(dstkey).arg(keys.len()).arg(keys).arg("AGGREGATE").arg("MIN")
}
#[doc="Unions multiple sorted sets and store the resulting sorted set in
a new key using MAX as aggregation function."]
fn zunionstore_max<K: ToRedisArgs>(dstkey: K, keys: &[K]) {
cmd("ZUNIONSTORE").arg(dstkey).arg(keys.len()).arg(keys).arg("AGGREGATE").arg("MAX")
}
// hyperloglog commands
#[doc="Adds the specified elements to the specified HyperLogLog."]
fn pfadd<K: ToRedisArgs, E: ToRedisArgs>(key: K, element: E) {
cmd("PFADD").arg(key).arg(element)
}
#[doc="Return the approximated cardinality of the set(s) observed by the
HyperLogLog at key(s)."]
fn pfcount<K: ToRedisArgs>(key: K) {
cmd("PFCOUNT").arg(key)
}
#[doc="Merge N different HyperLogLogs into a single one."]
fn pfmerge<K: ToRedisArgs>(dstkey: K, srckeys: K) {
cmd("PFMERGE").arg(dstkey).arg(srckeys)
}
)
impl Commands for Connection {}
impl Commands for Client {}
impl<T: Commands+ConnectionLike> Commands for RedisResult<T> {}
impl PipelineCommands for Pipeline {
fn perform(&mut self, cmd: &Cmd) -> &mut Pipeline {
self.add_command(cmd)
}
}<|fim▁end|> | #[doc="Rename a key."]
fn rename<K: ToRedisArgs>(key: K, new_key: K) {
cmd("RENAME").arg(key).arg(new_key)
} |
<|file_name|>icon.component.ts<|end_file_name|><|fim▁begin|>import { ChangeDetectionStrategy, Component, ElementRef, Input } from '@angular/core';
import { IconLoadingService } from 'app/core/icon-loading.service';
import { empty } from 'app/shared/helper';
import { SvgIcon } from 'app/core/svg-icon';
/**
* Shows an SVG icon
*/
@Component({
// tslint:disable-next-line:component-selector
selector: 'icon',
templateUrl: 'icon.component.html',
changeDetection: ChangeDetectionStrategy.OnPush,<|fim▁hole|>})
export class IconComponent {
@Input() set icon(icon: SvgIcon | string) {
this.element.innerHTML = this.iconLoader.svg(icon);
}
private _size: string;
@Input() get size(): string {
return this._size;
}
set size(size: string) {
if (empty(size)) {
this._size = '1.2rem';
} else if (typeof size === 'number' || /^\d+$/.test(size)) {
this._size = `${size}px`;
} else {
this._size = size;
}
const style = this.element.style;
style.height = this._size;
style.width = this._size;
}
private get element(): HTMLElement {
return this.elementRef.nativeElement as HTMLElement;
}
constructor(
private elementRef: ElementRef,
private iconLoader: IconLoadingService) {
}
}<|fim▁end|> | |
<|file_name|>test_build_auxiliary_coordinate.py<|end_file_name|><|fim▁begin|># (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\
fc_rules_cf_fc.build_auxilliary_coordinate`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import numpy as np
import mock
from iris.coords import AuxCoord
from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
build_auxiliary_coordinate
class TestBoundsVertexDim(tests.IrisTest):
def setUp(self):
# Create coordinate cf variables and pyke engine.
points = np.arange(6).reshape(2, 3)
self.cf_coord_var = mock.Mock(
dimensions=('foo', 'bar'),
cf_name='wibble',
standard_name=None,
long_name='wibble',
units='m',
shape=points.shape,
dtype=points.dtype,
__getitem__=lambda self, key: points[key])
self.engine = mock.Mock(
cube=mock.Mock(),
cf_var=mock.Mock(dimensions=('foo', 'bar')),
filename='DUMMY',
provides=dict(coordinates=[]))
# Create patch for deferred loading that prevents attempted
# file access. This assumes that self.cf_bounds_var is
# defined in the test case.
def patched__getitem__(proxy_self, keys):
variable = None
for var in (self.cf_coord_var, self.cf_bounds_var):
if proxy_self.variable_name == var.cf_name:
return var[keys]
raise RuntimeError()
self.deferred_load_patch = mock.patch(
'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__',
new=patched__getitem__)
def test_slowest_varying_vertex_dim(self):
# Create the bounds cf variable.
bounds = np.arange(24).reshape(4, 2, 3)
self.cf_bounds_var = mock.Mock(
dimensions=('nv', 'foo', 'bar'),
cf_name='wibble_bnds',
shape=bounds.shape,
dtype=bounds.dtype,
__getitem__=lambda self, key: bounds[key])
# Expected bounds on the resulting coordinate should be rolled so that
# the vertex dimension is at the end.
expected_bounds = np.rollaxis(bounds, 0, bounds.ndim)
expected_coord = AuxCoord(
self.cf_coord_var[:],
long_name=self.cf_coord_var.long_name,
var_name=self.cf_coord_var.cf_name,
units=self.cf_coord_var.units,
bounds=expected_bounds)
# Patch the helper function that retrieves the bounds cf variable.
# This avoids the need for setting up further mocking of cf objects.
get_cf_bounds_var_patch = mock.patch(
'iris.fileformats._pyke_rules.compiled_krb.'
'fc_rules_cf_fc.get_cf_bounds_var',
return_value=self.cf_bounds_var)
# Asserts must lie within context manager because of deferred loading.
with self.deferred_load_patch, get_cf_bounds_var_patch:
build_auxiliary_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
self.engine.cube.add_aux_coord.assert_called_with(
expected_coord, [0, 1])
# Test that engine.provides container is correctly populated.
expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
self.assertEqual(self.engine.provides['coordinates'],
expected_list)
def test_fastest_varying_vertex_dim(self):
bounds = np.arange(24).reshape(2, 3, 4)
self.cf_bounds_var = mock.Mock(
dimensions=('foo', 'bar', 'nv'),
cf_name='wibble_bnds',
shape=bounds.shape,
dtype=bounds.dtype,
__getitem__=lambda self, key: bounds[key])
expected_coord = AuxCoord(
self.cf_coord_var[:],
long_name=self.cf_coord_var.long_name,
var_name=self.cf_coord_var.cf_name,
units=self.cf_coord_var.units,
bounds=bounds)
get_cf_bounds_var_patch = mock.patch(
'iris.fileformats._pyke_rules.compiled_krb.'
'fc_rules_cf_fc.get_cf_bounds_var',
return_value=self.cf_bounds_var)
# Asserts must lie within context manager because of deferred loading.
with self.deferred_load_patch, get_cf_bounds_var_patch:
build_auxiliary_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
self.engine.cube.add_aux_coord.assert_called_with(
expected_coord, [0, 1])
# Test that engine.provides container is correctly populated.
expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
self.assertEqual(self.engine.provides['coordinates'],
expected_list)
def test_fastest_with_different_dim_names(self):
# Despite the dimension names ('x', and 'y') differing from the coord's<|fim▁hole|> dimensions=('x', 'y', 'nv'),
cf_name='wibble_bnds',
shape=bounds.shape,
dtype=bounds.dtype,
__getitem__=lambda self, key: bounds[key])
expected_coord = AuxCoord(
self.cf_coord_var[:],
long_name=self.cf_coord_var.long_name,
var_name=self.cf_coord_var.cf_name,
units=self.cf_coord_var.units,
bounds=bounds)
get_cf_bounds_var_patch = mock.patch(
'iris.fileformats._pyke_rules.compiled_krb.'
'fc_rules_cf_fc.get_cf_bounds_var',
return_value=self.cf_bounds_var)
# Asserts must lie within context manager because of deferred loading.
with self.deferred_load_patch, get_cf_bounds_var_patch:
build_auxiliary_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
self.engine.cube.add_aux_coord.assert_called_with(
expected_coord, [0, 1])
# Test that engine.provides container is correctly populated.
expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
self.assertEqual(self.engine.provides['coordinates'],
expected_list)
if __name__ == '__main__':
tests.main()<|fim▁end|> | # which are 'foo' and 'bar' (as permitted by the cf spec),
# this should still work because the vertex dim is the fastest varying.
bounds = np.arange(24).reshape(2, 3, 4)
self.cf_bounds_var = mock.Mock( |
<|file_name|>pipeline_options_validator_test.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the pipeline options validator module."""
from __future__ import absolute_import
import logging
import unittest
from builtins import object
from hamcrest import assert_that
from hamcrest import contains_string
from hamcrest import only_contains
from hamcrest.core.base_matcher import BaseMatcher
from apache_beam.internal import pickler
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options_validator import PipelineOptionsValidator
# Mock runners to use for validations.
class MockRunners(object):
    """Namespace of stub runner classes used as validator inputs.

    NOTE(review): the stubs carry no behaviour, so the validator
    presumably distinguishes runners by their type/name alone --
    confirm against PipelineOptionsValidator.
    """

    class DataflowRunner(object):
        pass

    class TestDataflowRunner(object):
        pass

    class OtherRunner(object):
        pass
# Matcher that always passes for testing on_success_matcher option
class AlwaysPassMatcher(BaseMatcher):
    """Hamcrest matcher that unconditionally matches any item."""

    def _matches(self, item):
        return True
class SetupTest(unittest.TestCase):
def check_errors_for_arguments(self, errors, args):
    """Verify a one-to-one pairing between errors and expected arguments.

    Each argument must occur in exactly one error message. The returned
    list combines complaints about arguments with no matching error and
    errors that matched no argument, so an empty result means the
    pairing was exact.
    """
    unmatched = list(errors)
    problems = []
    for arg in args:
        hit = next((error for error in unmatched if arg in error), None)
        if hit is None:
            problems.append('Missing error for: ' + arg)
        else:
            unmatched.remove(hit)
    return problems + unmatched
def test_local_runner(self):
    """Non-Dataflow runners validate cleanly with no options at all."""
    errors = PipelineOptionsValidator(
        PipelineOptions([]), MockRunners.OtherRunner()).validate()
    self.assertEqual(len(errors), 0)
def test_missing_required_options(self):
    """The Dataflow runner reports each absent required option once."""
    errors = PipelineOptionsValidator(
        PipelineOptions(['']), MockRunners.DataflowRunner()).validate()
    self.assertEqual(
        self.check_errors_for_arguments(
            errors, ['project', 'staging_location', 'temp_location']),
        [])
def test_gcs_path(self):
    """Validates GCS path checks for temp/staging locations.

    staging_location is optional as long as a valid temp_location is
    supplied; both must be well-formed ``gs://bucket/path`` URLs.
    (A duplicated 'gs://ABC/bar' temp_location case was removed.)
    """
    def get_validator(temp_location, staging_location):
        options = ['--project=example:example', '--job_name=job']
        if temp_location is not None:
            options.append('--temp_location=' + temp_location)
        if staging_location is not None:
            options.append('--staging_location=' + staging_location)
        pipeline_options = PipelineOptions(options)
        runner = MockRunners.DataflowRunner()
        return PipelineOptionsValidator(pipeline_options, runner)

    test_cases = [
        # A staging location alone is not enough.
        {'temp_location': None,
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': None,
         'staging_location': None,
         'errors': ['staging_location', 'temp_location']},
        {'temp_location': 'gs://foo/bar',
         'staging_location': None,
         'errors': []},
        # Upper-case bucket names are rejected.
        {'temp_location': 'gs://foo/bar',
         'staging_location': 'gs://ABC/bar',
         'errors': ['staging_location']},
        # Malformed scheme / separators.
        {'temp_location': 'gcs:/foo/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs:/foo/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs://ABC/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        # Bucket-only path without a trailing slash is rejected.
        {'temp_location': 'gs://foo',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs://foo/',
         'staging_location': 'gs://foo/bar',
         'errors': []},
        {'temp_location': 'gs://foo/bar',
         'staging_location': 'gs://foo/bar',
         'errors': []},
    ]

    for case in test_cases:
        errors = get_validator(case['temp_location'],
                               case['staging_location']).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, case['errors']), [])
def test_project(self):
def get_validator(project):
options = ['--job_name=job', '--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar']
if project is not None:<|fim▁hole|> validator = PipelineOptionsValidator(pipeline_options, runner)
return validator
test_cases = [
{'project': None, 'errors': ['project']},
{'project': '12345', 'errors': ['project']},
{'project': 'FOO', 'errors': ['project']},
{'project': 'foo:BAR', 'errors': ['project']},
{'project': 'fo', 'errors': ['project']},
{'project': 'foo', 'errors': []},
{'project': 'foo:bar', 'errors': []},
]
for case in test_cases:
errors = get_validator(case['project']).validate()
self.assertEqual(
self.check_errors_for_arguments(errors, case['errors']), [])
def test_job_name(self):
    """job_name, when present, must satisfy the service naming rules."""
    def make_validator(job_name):
        options = ['--project=example:example',
                   '--staging_location=gs://foo/bar',
                   '--temp_location=gs://foo/bar']
        if job_name is not None:
            options.append('--job_name=' + job_name)
        return PipelineOptionsValidator(
            PipelineOptions(options), MockRunners.DataflowRunner())

    # (job_name, expected error keys); None means "not supplied".
    cases = [
        (None, []),
        ('12345', ['job_name']),
        ('FOO', ['job_name']),
        ('foo:bar', ['job_name']),
        ('fo', []),
        ('foo', []),
    ]
    for job_name, expected in cases:
        errors = make_validator(job_name).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, expected), [])
def test_num_workers(self):
    """num_workers, when supplied, must be a positive integer."""
    def make_validator(num_workers):
        options = ['--project=example:example', '--job_name=job',
                   '--staging_location=gs://foo/bar',
                   '--temp_location=gs://foo/bar']
        if num_workers is not None:
            options.append('--num_workers=' + num_workers)
        return PipelineOptionsValidator(
            PipelineOptions(options), MockRunners.DataflowRunner())

    cases = [
        (None, []),
        ('1', []),
        ('0', ['num_workers']),
        ('-1', ['num_workers']),
    ]
    for num_workers, expected in cases:
        errors = make_validator(num_workers).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, expected), [])
def test_is_service_runner(self):
    """is_service_runner() is True only for DataflowRunner pointed at
    the hosted Dataflow endpoint -- default, or explicit with or
    without a trailing slash. Other runners/endpoints are local."""
    test_cases = [
        {
            'runner': MockRunners.OtherRunner(),
            'options': [],
            'expected': False,
        },
        {
            'runner': MockRunners.OtherRunner(),
            'options': ['--dataflow_endpoint=https://dataflow.googleapis.com'],
            'expected': False,
        },
        {
            'runner': MockRunners.OtherRunner(),
            'options': ['--dataflow_endpoint=https://dataflow.googleapis.com/'],
            'expected': False,
        },
        {
            'runner': MockRunners.DataflowRunner(),
            'options': ['--dataflow_endpoint=https://another.service.com'],
            'expected': False,
        },
        {
            'runner': MockRunners.DataflowRunner(),
            'options': ['--dataflow_endpoint=https://another.service.com/'],
            'expected': False,
        },
        {
            'runner': MockRunners.DataflowRunner(),
            'options': ['--dataflow_endpoint=https://dataflow.googleapis.com'],
            'expected': True,
        },
        {
            'runner': MockRunners.DataflowRunner(),
            'options': ['--dataflow_endpoint=https://dataflow.googleapis.com/'],
            'expected': True,
        },
        {
            'runner': MockRunners.DataflowRunner(),
            'options': [],
            'expected': True,
        },
    ]
    for case in test_cases:
        validator = PipelineOptionsValidator(
            PipelineOptions(case['options']), case['runner'])
        self.assertEqual(validator.is_service_runner(), case['expected'])
def test_dataflow_job_file_and_template_location_mutually_exclusive(self):
    """Supplying both --template_location and --dataflow_job_file fails."""
    errors = PipelineOptionsValidator(
        PipelineOptions(['--template_location', 'abc',
                         '--dataflow_job_file', 'def']),
        MockRunners.OtherRunner()).validate()
    self.assertTrue(errors)
def test_validate_template_location(self):
    """--template_location on its own validates cleanly."""
    errors = PipelineOptionsValidator(
        PipelineOptions(['--template_location', 'abc']),
        MockRunners.OtherRunner()).validate()
    self.assertFalse(errors)
def test_validate_dataflow_job_file(self):
    """--dataflow_job_file on its own validates cleanly."""
    errors = PipelineOptionsValidator(
        PipelineOptions(['--dataflow_job_file', 'abc']),
        MockRunners.OtherRunner()).validate()
    self.assertFalse(errors)
def test_test_matcher(self):
    """on_success_matcher must be a pickled matcher object; raw bytes
    or a pickled non-matcher are rejected."""
    def make_validator(matcher):
        options = ['--project=example:example',
                   '--job_name=job',
                   '--staging_location=gs://foo/bar',
                   '--temp_location=gs://foo/bar',]
        if matcher:
            options.append('--on_success_matcher=' + matcher.decode())
        return PipelineOptionsValidator(
            PipelineOptions(options), MockRunners.TestDataflowRunner())

    cases = [
        (None, []),
        (pickler.dumps(AlwaysPassMatcher()), []),
        (b'abc', ['on_success_matcher']),
        (pickler.dumps(object), ['on_success_matcher']),
    ]
    for matcher, expected in cases:
        errors = make_validator(matcher).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, expected), [])
def test_transform_name_mapping_without_update(self):
    """A transform name mapping requires --update and --streaming."""
    pipeline_options = PipelineOptions(
        ['--project=example:example',
         '--staging_location=gs://foo/bar',
         '--temp_location=gs://foo/bar',
         '--transform_name_mapping={\"fromPardo\":\"toPardo\"}'])
    errors = PipelineOptionsValidator(
        pipeline_options, MockRunners.DataflowRunner()).validate()
    assert_that(errors, only_contains(
        contains_string('Transform name mapping option is only useful when '
                        '--update and --streaming is specified')))
def test_transform_name_mapping_invalid_format(self):
    """A non-string mapping value is reported as an invalid format."""
    pipeline_options = PipelineOptions(
        ['--project=example:example',
         '--staging_location=gs://foo/bar',
         '--temp_location=gs://foo/bar',
         '--update',
         '--job_name=test',
         '--streaming',
         '--transform_name_mapping={\"fromPardo\":123}'])
    errors = PipelineOptionsValidator(
        pipeline_options, MockRunners.DataflowRunner()).validate()
    assert_that(errors, only_contains(
        contains_string('Invalid transform name mapping format.')))
if __name__ == '__main__':
    # Surface INFO-level logs when the test module is run directly.
    logging.getLogger().setLevel(logging.INFO)
    unittest.main()
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner() |
<|file_name|>HtmlAlternateLink.js<|end_file_name|><|fim▁begin|>const pathModule = require('path');
const expect = require('../../unexpected-with-plugins');
const AssetGraph = require('../../../lib/AssetGraph');
<|fim▁hole|> const assetGraph = new AssetGraph({
root: pathModule.resolve(
__dirname,
'../../../testdata/relations/Html/HtmlAlternateLink/'
),
});
await assetGraph.loadAssets('index.html');
await assetGraph.populate();
expect(assetGraph, 'to contain relations', 'HtmlAlternateLink', 4);
expect(assetGraph, 'to contain assets', 'Rss', 2);
expect(assetGraph, 'to contain asset', 'Atom');
expect(assetGraph, 'to contain asset', 'Xml');
});
});<|fim▁end|> | describe('relations/HtmlAlternateLink', function () {
it('should handle a simple test case', async function () { |
<|file_name|>motor_queue_proc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import itertools
import time
import rospy
from std_msgs.msg import String
class QueueProc(object):
    """ROS node that turns motor command strings into timed state changes."""

    def __init__(self):
        rospy.init_node('motor_queue_proc', anonymous=True)
        self.state_change = rospy.Publisher('/motor/state_change', String, queue_size=10)
        rospy.Subscriber("/motor/commands", String, self.parse_string)
        # Pending [state_name, magnitude] pairs consumed by timer_expire.
        self.queue = []
        # Timer implementation; swappable for testing or other strategies.
        self.timer_class = TimeTimer

    #
    # Parses published strings and loads the next item into the queue
    #
    def parse_string(self, data):
        """Parse a command string such as "f10b5r90" into queued motions.

        Tokens alternate between an action ("f", "b", "r" or "flush") and
        a numeric argument. Rotations are normalised to [0, 360) and folded
        into a left/right turn of at most 180 degrees. "flush" empties the
        queue and carries no numeric argument.

        Bug fix: the numeric token is now read only inside the branches
        that need one, so a trailing "flush" no longer raises IndexError
        (the old code read commands[i+1] unconditionally).
        """
        commands = ["".join(x) for _, x in itertools.groupby(data.data, key=str.isdigit)]
        queue_start = len(self.queue)
        i = 0
        while i < len(commands):
            action = commands[i]
            if action == "f":
                self.queue.append(["forward", int(commands[i + 1])])
            elif action == "b":
                self.queue.append(["backward", int(commands[i + 1])])
            elif action == "r":
                rounded = int(commands[i + 1]) % 360
                if rounded > 180:
                    # Turning left is shorter than turning right past 180.
                    self.queue.append(["left", 360 - rounded])
                else:
                    self.queue.append(["right", rounded])
            elif action == "flush":
                self.queue = []
            # Advance past the action and its (possibly absent) argument,
            # matching the original token layout.
            i += 2
        # Only kick off the timer chain when the queue was previously idle.
        if queue_start == 0:
            self.timer_expire()

    #
    # changes the state and sets a timer based on the next item in the queue
    #
    def timer_expire(self):
        """Publish the next queued state and arm a timer for its duration."""
        if len(self.queue) == 0:
            self.state_change.publish("stop")
            return
        nxt = self.queue.pop(0)
        if (nxt[0] == "left" or nxt[0] == "right"):
            # Come to a full stop before turning in place.
            self.state_change.publish("stop")
            time.sleep(2)
        self.state_change.publish(nxt[0])
        tmer = self.timer_class(nxt[1], self.timer_expire, (nxt[0] == "left" or nxt[0] == "right"))
        tmer.start()
#
# General timer class, does nothing
#
class GeneralTimer():
def __init__(self, distance, callback, is_angle):
self.callback = callback<|fim▁hole|> def get_time(self):
return 0
#
# A time based timer
#
class TimeTimer(GeneralTimer):
    """Timer that converts a queued motion into an estimated duration."""

    def __init__(self, distance, callback, is_angle):
        self.callback = callback
        # Linear distance arrives in tenths of a metre (m*10).
        self.distance = distance
        self.is_angle = is_angle
        # Assumed forward speed, metres per second.
        self.mps = 1
        # Assumed turn rate, degrees per second.
        self.aps = 40

    def start(self):
        # One-shot ROS timer: fires timer_callback after get_time() seconds.
        rospy.Timer(rospy.Duration(self.get_time()), self.timer_callback, True)

    def timer_callback(self, tEvent):
        self.callback()

    def get_time(self):
        """Return the estimated duration of the motion, in seconds."""
        if self.is_angle:
            return self.distance/self.aps
        return float(self.distance)/(self.mps*10)
if __name__ == "__main__":
    # Create the node and hand control to the ROS event loop.
    proc = QueueProc()
    rospy.spin()
def start(self):
self.callback()
|
<|file_name|>tlv-msg-list.py<|end_file_name|><|fim▁begin|>msg_list["Echo Request"] = { "type" : "1" }
# Message name -> message type code table. The names match GTPv2-C
# messages (apparently 3GPP TS 29.274, Table 6.1-1) -- NOTE(review):
# confirm the spec reference. msg_list is presumably a dict initialised
# earlier in this file; verify before moving this section.
msg_list["Echo Response"] = { "type" : "2" }
msg_list["Version Not Supported Indication"] = { "type" : "3" }
msg_list["Create Session Request"] = { "type" : "32" }
msg_list["Create Session Response"] = { "type" : "33" }
msg_list["Delete Session Request"] = { "type" : "36" }
msg_list["Delete Session Response"] = { "type" : "37" }
msg_list["Modify Bearer Request"] = { "type" : "34" }
msg_list["Modify Bearer Response"] = { "type" : "35" }
msg_list["Remote UE Report Notification"] = { "type" : "40" }
msg_list["Remote UE Report Acknowledge"] = { "type" : "41" }
msg_list["Change Notification Request"] = { "type" : "38" }
msg_list["Change Notification Response"] = { "type" : "39" }
msg_list["Resume Notification"] = { "type" : "164" }
msg_list["Resume Acknowledge"] = { "type" : "165" }
msg_list["Modify Bearer Command"] = { "type" : "64" }
msg_list["Modify Bearer Failure Indication"] = { "type" : "65" }
msg_list["Delete Bearer Command"] = { "type" : "66" }
msg_list["Delete Bearer Failure Indication"] = { "type" : "67" }
msg_list["Bearer Resource Command"] = { "type" : "68" }
msg_list["Bearer Resource Failure Indication"] = { "type" : "69" }
msg_list["Downlink Data Notification Failure Indication"] = { "type" : "70" }
msg_list["Trace Session Activation"] = { "type" : "71" }
msg_list["Trace Session Deactivation"] = { "type" : "72" }
msg_list["Stop Paging Indication"] = { "type" : "73" }
msg_list["Create Bearer Request"] = { "type" : "95" }
msg_list["Create Bearer Response"] = { "type" : "96" }
msg_list["Update Bearer Request"] = { "type" : "97" }
msg_list["Update Bearer Response"] = { "type" : "98" }
msg_list["Delete Bearer Request"] = { "type" : "99" }
msg_list["Delete Bearer Response"] = { "type" : "100" }
msg_list["Delete PDN Connection Set Request"] = { "type" : "101" }
msg_list["Delete PDN Connection Set Response"] = { "type" : "102" }
msg_list["PGW Downlink Triggering Notification"] = { "type" : "103" }
msg_list["PGW Downlink Triggering Acknowledge"] = { "type" : "104" }
msg_list["Suspend Notification"] = { "type" : "162" }
msg_list["Suspend Acknowledge"] = { "type" : "163" }
msg_list["Create Forwarding Tunnel Request"] = { "type" : "160" }
msg_list["Create Forwarding Tunnel Response"] = { "type" : "161" }
msg_list["Create Indirect Data Forwarding Tunnel Request"] = { "type" : "166" }
msg_list["Create Indirect Data Forwarding Tunnel Response"] = { "type" : "167" }
msg_list["Delete Indirect Data Forwarding Tunnel Request"] = { "type" : "168" }
msg_list["Delete Indirect Data Forwarding Tunnel Response"] = { "type" : "169" }
msg_list["Release Access Bearers Request"] = { "type" : "170" }
msg_list["Release Access Bearers Response"] = { "type" : "171" }
msg_list["Downlink Data Notification"] = { "type" : "176" }
msg_list["Downlink Data Notification Acknowledge"] = { "type" : "177" }
msg_list["PGW Restart Notification"] = { "type" : "179" }
msg_list["PGW Restart Notification Acknowledge"] = { "type" : "180" }
msg_list["Update PDN Connection Set Request"] = { "type" : "200" }
msg_list["Update PDN Connection Set Response"] = { "type" : "201" }
msg_list["Modify Access Bearers Request"] = { "type" : "211" }<|fim▁hole|><|fim▁end|> | msg_list["Modify Access Bearers Response"] = { "type" : "212" } |
<|file_name|>Utils.java<|end_file_name|><|fim▁begin|>package com.uservoice.uservoicesdk.ui;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.support.v4.app.FragmentActivity;
import android.util.TypedValue;
import android.view.View;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings.PluginState;
import android.webkit.WebView;
import android.widget.ImageView;
import android.widget.TextView;
import com.uservoice.uservoicesdk.R;
import com.uservoice.uservoicesdk.Session;
import com.uservoice.uservoicesdk.activity.TopicActivity;
import com.uservoice.uservoicesdk.dialog.ArticleDialogFragment;
import com.uservoice.uservoicesdk.dialog.SuggestionDialogFragment;
import com.uservoice.uservoicesdk.model.Article;
import com.uservoice.uservoicesdk.model.BaseModel;
import com.uservoice.uservoicesdk.model.Suggestion;
import com.uservoice.uservoicesdk.model.Topic;
import java.util.Locale;
public class Utils {
@SuppressLint("SetJavaScriptEnabled")
public static void displayArticle(WebView webView, Article article, Context context) {
String styles = "iframe, img { width: 100%; }";
if (isDarkTheme(context)) {
webView.setBackgroundColor(Color.BLACK);
styles += "body { background-color: #000000; color: #F6F6F6; } a { color: #0099FF; }";
}
String html = String.format("<html><head><meta charset=\"utf-8\"><link rel=\"stylesheet\" type=\"text/css\" href=\"http://cdn.uservoice.com/stylesheets/vendor/typeset.css\"/><style>%s</style></head><body class=\"typeset\" style=\"font-family: sans-serif; margin: 1em\"><h3>%s</h3>%s</body></html>", styles, article.getTitle(), article.getHtml());
webView.setWebChromeClient(new WebChromeClient());
webView.getSettings().setJavaScriptEnabled(true);
webView.getSettings().setPluginState(PluginState.ON);
webView.loadUrl(String.format("data:text/html;charset=utf-8,%s", Uri.encode(html)));<|fim▁hole|> float[] hsv = new float[3];
context.getTheme().resolveAttribute(android.R.attr.textColorPrimary, tv, true);
Color.colorToHSV(context.getResources().getColor(tv.resourceId), hsv);
return hsv[2] > 0.5f;
}
// Formats "<count with thousands separators> <pluralised resource label>".
@SuppressLint("DefaultLocale")
public static String getQuantityString(View view, int id, int count) {
    String label = view.getContext().getResources().getQuantityString(id, count);
    return String.format("%,d %s", count, label);
}
/**
 * Populates an instant-answer list row for either an Article or a
 * Suggestion model; any other model type leaves the row untouched.
 * NOTE(review): assumes the row layout provides uv_title, uv_detail,
 * uv_icon and uv_suggestion_details views -- confirm against the layout.
 */
public static void displayInstantAnswer(View view, BaseModel model) {
    TextView title = (TextView) view.findViewById(R.id.uv_title);
    TextView detail = (TextView) view.findViewById(R.id.uv_detail);
    View suggestionDetails = view.findViewById(R.id.uv_suggestion_details);
    ImageView image = (ImageView) view.findViewById(R.id.uv_icon);
    if (model instanceof Article) {
        Article article = (Article) model;
        image.setImageResource(R.drawable.uv_article);
        title.setText(article.getTitle());
        // Show the owning topic as the secondary line when available.
        if (article.getTopicName() != null) {
            detail.setVisibility(View.VISIBLE);
            detail.setText(article.getTopicName());
        } else {
            detail.setVisibility(View.GONE);
        }
        // Articles never show the suggestion status strip.
        suggestionDetails.setVisibility(View.GONE);
    } else if (model instanceof Suggestion) {
        Suggestion suggestion = (Suggestion) model;
        image.setImageResource(R.drawable.uv_idea);
        title.setText(suggestion.getTitle());
        detail.setVisibility(View.VISIBLE);
        detail.setText(suggestion.getForumName());
        if (suggestion.getStatus() != null) {
            View statusColor = suggestionDetails.findViewById(R.id.uv_suggestion_status_color);
            TextView status = (TextView) suggestionDetails.findViewById(R.id.uv_suggestion_status);
            // Label text and swatch both use the colour supplied by the model.
            int color = Color.parseColor(suggestion.getStatusColor());
            suggestionDetails.setVisibility(View.VISIBLE);
            status.setText(suggestion.getStatus().toUpperCase(Locale.getDefault()));
            status.setTextColor(color);
            statusColor.setBackgroundColor(color);
        } else {
            suggestionDetails.setVisibility(View.GONE);
        }
    }
}
/**
 * Opens the appropriate UI for the given model: a dialog fragment for
 * articles and suggestions, or the topic activity for topics.
 */
public static void showModel(FragmentActivity context, BaseModel model) {
    if (model instanceof Article) {
        new ArticleDialogFragment((Article) model)
                .show(context.getSupportFragmentManager(), "ArticleDialogFragment");
    } else if (model instanceof Suggestion) {
        new SuggestionDialogFragment((Suggestion) model)
                .show(context.getSupportFragmentManager(), "SuggestionDialogFragment");
    } else if (model instanceof Topic) {
        Session.getInstance().setTopic((Topic) model);
        context.startActivity(new Intent(context, TopicActivity.class));
    }
}
}<|fim▁end|> | }
public static boolean isDarkTheme(Context context) {
TypedValue tv = new TypedValue(); |
<|file_name|>poly.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2020 Inderjit Gill <[email protected]>
// This file is part of Seni
// Seni is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Seni is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|>
use crate::error::{Error, Result};
use crate::matrix::Matrix;
use crate::render_list::RenderList;
use crate::rgb::Rgb;
use crate::uvmapper::UvMapping;
use log::error;
pub fn render(
render_list: &mut RenderList,
matrix: &Matrix,
coords: &[(f32, f32)],
colours: &[Rgb],
uvm: &UvMapping,
) -> Result<()> {
let num_vertices = coords.len();
if colours.len() != num_vertices {
error!("render_poly: coords and colours length mismatch");
return Err(Error::Geometry);
} else if num_vertices < 3 {
return Ok(());
}
let (x, y) = coords[0];
render_list.prepare_to_add_triangle_strip(matrix, num_vertices, x, y)?;
let rp = render_list
.render_packets
.last_mut()
.ok_or(Error::Geometry)?;
let rpg = rp.get_mut_render_packet_geometry()?;
for i in 0..num_vertices {
let (x, y) = coords[i];
rpg.add_vertex(matrix, x, y, &colours[i], uvm.map[4], uvm.map[5])
}
Ok(())
}<|fim▁end|> | // GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>. |
<|file_name|>colorfx.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="de_DE">
<context>
<name>ArtisticSolidColor</name>
<message>
<source>Irregular</source>
<translation>Unregelmäßigkeit</translation>
</message>
<message>
<source>Horiz Offset</source>
<translation>Horizontaler Offset</translation>
</message>
<message>
<source>Vert Offset</source>
<translation>Vertikaler Offset</translation>
</message>
<message>
<source>Noise</source>
<translation>Rauschen</translation>
</message>
</context>
<context>
<name>MovingSolidColor</name>
<message>
<source>Offset</source>
<translation>Offset</translation>
</message>
<message>
<source>Horiz Offset</source>
<translation>Horizontaler Offset</translation>
</message>
<message>
<source>Vert Offset</source>
<translation>Vertikaler Offset</translation>
</message>
</context>
<context>
<name>OutlineViewerStyle</name>
<message>
<source>OutlineViewer(OnlyDebug)</source>
<translation>Umriss-Betrachter (Nur Debug)</translation>
</message>
<message>
<source>Control Point</source>
<translation>Kontrollpunkt</translation>
</message>
<message>
<source>Center Line</source>
<translation>Mittellinie</translation>
</message>
<message>
<source>Outline Mode</source>
<translation>Umriss-Modus</translation>
</message>
<message>
<source>Distance</source>
<translation>Distanz</translation>
</message>
<message>
<source>distance</source>
<translation>Abstand</translation>
</message>
</context>
<context>
<name>ShadowStyle</name>
<message>
<source>Hatched Shading</source>
<translation>Schraffur</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Length</source>
<translation>Länge</translation>
</message>
</context>
<context>
<name>ShadowStyle2</name>
<message>
<source>Plain Shadow</source>
<translation>Schatten (Standard)</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
</context>
<context>
<name>TAirbrushRasterStyle</name>
<message>
<source>Airbrush</source>
<translation>Airbrush</translation>
</message>
<message>
<source>Blur value</source>
<translation>Weichheitsgrad</translation>
</message>
</context>
<context>
<name>TBiColorStrokeStyle</name>
<message>
<source>Shade</source>
<translation>Schattierung</translation>
</message>
</context>
<context>
<name>TBlendRasterStyle</name>
<message>
<source>Blend</source>
<translation>Mischen</translation>
</message>
</context>
<context>
<name>TBlendStrokeStyle2</name>
<message>
<source>Fade</source>
<translation>Einblendung</translation>
</message>
<message>
<source>Border Fade</source>
<translation>Rand-Blende</translation>
</message>
<message>
<source>Fade In</source>
<translation>Einblenden</translation>
</message>
<message>
<source>Fade Out</source>
<translation>Ausblenden</translation>
</message>
</context>
<context>
<name>TBraidStrokeStyle</name>
<message>
<source>Plait</source>
<translation>Geflochten</translation>
</message>
<message>
<source>Twirl</source>
<translation>Wirbel</translation>
</message>
</context>
<context>
<name>TBubbleStrokeStyle</name>
<message>
<source>Bubbles</source>
<translation>Blasen</translation>
</message>
</context>
<context>
<name>TChainStrokeStyle</name>
<message>
<source>Chain</source>
<translation>Kette</translation>
</message>
</context>
<context>
<name>TChalkFillStyle</name>
<message>
<source>Chalk</source>
<translation>Kreide</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Dot Size</source>
<translation>Punktgröße</translation>
</message>
</context>
<context>
<name>TChalkStrokeStyle2</name>
<message>
<source>Chalk</source>
<translation>Kreide</translation>
</message>
<message>
<source>Border Fade</source>
<translation>Rand-Blende</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Fade In</source>
<translation>Einblenden</translation>
</message>
<message>
<source>Fade Out</source>
<translation>Ausblenden</translation>
</message>
<message>
<source>Noise</source>
<translation>Rauschen</translation>
</message>
</context>
<context>
<name>TCheckedFillStyle</name>
<message>
<source>Square</source>
<translation>Quadrat</translation>
</message>
<message>
<source>Horiz Dist</source>
<translation>Horizontaler Abstand</translation>
</message>
<message>
<source>Horiz Angle</source>
<translation>Horizontaler Winkel</translation>
</message>
<message>
<source>Vert Dist</source>
<translation>Vertikaler Abstand</translation>
</message>
<message>
<source>Vert Angle</source>
<translation>Vertikaler Winkel</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
<context>
<name>TChessFillStyle</name>
<message>
<source>Chessboard</source>
<translation>Schachbrett</translation>
</message>
<message>
<source>Horiz Size</source>
<translation>Horizontale Größe</translation>
</message>
<message>
<source>Vert Size</source>
<translation>Vertikale Größe</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
</context>
<context>
<name>TCircleStripeFillStyle</name>
<message>
<source>Concentric</source>
<translation>Konzentrisch</translation>
</message>
<message>
<source>X Position</source>
<translation>X-Position</translation>
</message>
<message>
<source>Y Position</source>
<translation>Y-Position</translation>
</message>
<message>
<source>Distance</source>
<translation>Distanz</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
<context>
<name>TCrystallizeStrokeStyle</name>
<message>
<source>Tulle</source>
<translation>Tüll</translation>
</message>
<message>
<source>Crease</source>
<translation>Falte</translation>
</message>
<message>
<source>Opacity</source>
<translation>Opazität</translation>
</message>
</context>
<context>
<name>TDottedFillStyle</name>
<message>
<source>Polka Dots</source>
<translation>Gepunktet</translation>
</message>
<message>
<source>Dot Size</source>
<translation>Punktgröße</translation>
</message>
<message>
<source>Dot Distance</source>
<translation>Punktdistanz</translation>
</message>
</context>
<context>
<name>TDottedLineStrokeStyle</name>
<message>
<source>Vanishing</source>
<translation>Verdünnung</translation>
</message>
<message>
<source>Fade In</source>
<translation>Einblende</translation>
</message>
<message>
<source>Dash</source>
<translation>Gestrichelt</translation>
</message>
<message>
<source>Fade Out</source>
<translation>Ausblenden</translation>
</message>
<message>
<source>Gap</source>
<translation>Lücke</translation>
</message>
</context>
<context>
<name>TDualColorStrokeStyle2</name>
<message>
<source>Striped</source>
<translation>Gestreift</translation>
</message>
<message>
<source>Distance</source>
<translation>Distanz</translation>
</message>
</context>
<context>
<name>TFriezeStrokeStyle2</name>
<message>
<source>Curl</source>
<translation>Kringel</translation>
</message>
<message>
<source>Twirl</source>
<translation>Wirbel</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
<context>
<name>TFurStrokeStyle</name>
<message>
<source>Herringbone</source>
<translation>Fischgrätenstich</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
</context>
<context>
<name>TGraphicPenStrokeStyle</name>
<message>
<source>Dashes</source>
<translation>Strichelung</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
</context>
<context>
<name>TLinGradFillStyle</name>
<message>
<source>Linear Gradient</source>
<translation>Linearer Verlauf</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
<message>
<source>X Position</source>
<translation>X-Position</translation>
</message>
<message>
<source>Y Position</source>
<translation>Y-Position</translation>
</message>
<message>
<source>Smoothness</source>
<translation>Glätte</translation>
</message>
</context>
<context>
<name>TLongBlendStrokeStyle2</name>
<message>
<source>Watercolor</source>
<translation>Wasserfarbe</translation>
</message>
<message>
<source>Distance</source>
<translation>Distanz</translation>
</message>
</context>
<context>
<name>TMatrioskaStrokeStyle</name>
<message>
<source>Toothpaste</source>
<translation>Zahnpasta</translation>
</message>
<message>
<source>Stripes</source>
<translation>Streifen</translation>
</message>
</context>
<context>
<name>TMosaicFillStyle</name>
<message>
<source>Stained Glass</source>
<translation>Farbglas</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
<message>
<source>Distortion</source>
<translation>Verzerrung</translation>
</message>
<message>
<source>Min Thick</source>
<translation>Minimale Dicke</translation>
</message>
<message>
<source>Max Thick</source>
<translation>Maximale Dicke</translation>
</message>
</context>
<context>
<name>TMultiLineStrokeStyle2</name>
<message>
<source>Gouache</source>
<translation>Gouache</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
<message>
<source>Noise</source>
<translation>Rauschen</translation>
</message>
</context>
<context>
<name>TNoColorRasterStyle</name>
<message>
<source>Markup</source>
<translation>Markierung</translation>
</message>
</context>
<context>
<name>TNormal2StrokeStyle</name>
<message>
<source>Bump</source>
<translation>Beule</translation>
</message>
<message>
<source>Light X Pos</source>
<translation>Licht X-Position</translation>
</message>
<message>
<source>Light Y Pos</source>
<translation>Licht Y-Position</translation>
</message>
<message>
<source>Shininess</source>
<translation>Glanz</translation>
</message>
<message>
<source>Plastic</source>
<translation>Plastik</translation>
</message>
</context>
<context>
<name>TPatchFillStyle</name>
<message>
<source>Beehive</source>
<translation>Bienenwaben</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
<message>
<source>Distortion</source>
<translation>Verzerrung</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
<context>
<name>TPointShadowFillStyle</name>
<message>
<source>Sponge Shading</source>
<translation>Schwammschattierung</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
<message>
<source>Point Size</source>
<translation>Punktgröße</translation>
</message>
</context>
<context>
<name>TRadGradFillStyle</name>
<message>
<source>Radial Gradient</source>
<translation>Radialer Verlauf</translation>
</message>
<message>
<source>X Position</source>
<translation>X-Position</translation>
</message>
<message>
<source>Y Position</source>
<translation>Y-Position</translation>
</message>
<message>
<source>Radius</source>
<translation>Radius</translation>
</message>
<message>
<source>Smoothness</source>
<translation>Glätte</translation>
</message>
</context>
<context>
<name>TRopeStrokeStyle</name>
<message>
<source>Rope</source>
<translation>Seil</translation>
</message>
<message>
<source>Tilt</source>
<translation>Neigung</translation>
</message>
</context>
<context>
<name>TRubberFillStyle</name>
<message>
<source>Blob</source>
<translation>Klecks</translation>
</message>
<message>
<source>Intensity</source>
<translation>Intensität</translation>
</message>
</context>
<context>
<name>TSawToothStrokeStyle</name>
<message>
<source>Jagged</source>
<translation>Gezackt</translation>
</message>
<message>
<source>Distance</source>
<translation>Distanz</translation>
</message>
</context>
<context>
<name>TSinStrokeStyle</name>
<message>
<source>Wave</source>
<translation>Welle</translation>
</message>
<message>
<source>Frequency</source>
<translation>Frequenz</translation>
</message>
</context>
<context>
<name>TSketchStrokeStyle</name>
<message>
<source>Fuzz</source>
<translation>Flaum</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
</context>
<context>
<name>TSprayStrokeStyle</name>
<message>
<source>Circlets</source>
<translation>Kreise</translation>
</message>
<message>
<source>Border Fade</source>
<translation>Rand-Blende</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Size</source>
<translation>Größe</translation>
</message>
</context>
<context>
<name>TStripeFillStyle</name>
<message>
<source>Banded</source>
<translation>Gebändert</translation>
</message>
<message>
<source>Distance</source>
<translation>Distanz</translation>
</message>
<message>
<source>Angle</source>
<translation>Winkel</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
<context>
<name>TTissueStrokeStyle</name>
<message>
<source>Gauze</source>
<translation>Gaze</translation>
</message>
<message>
<source>Density</source>
<translation>Dichte</translation>
</message>
<message>
<source>Border Size</source>
<translation>Randgröße</translation>
</message>
</context>
<context>
<name>TTwirlStrokeStyle</name>
<message>
<source>Ribbon</source>
<translation>Schleife</translation>
</message>
<message>
<source>Twirl</source>
<translation>Wirbel</translation>
</message>
<message>
<source>Shade</source>
<translation>Schattierung</translation>
</message>
</context>
<context>
<name>TZigzTSinStrokeStyleagStrokeStyle</name>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
<context>
<name>TZigzagStrokeStyle</name>
<message>
<source>Zigzag</source>
<translation>Zick-Zack</translation>
</message>
<message>
<source>Min Distance</source>
<translation>Minimale Distanz</translation>
</message>
<message>
<source>Max Distance</source>
<translation>Maximale Distanz</translation>
</message>
<message>
<source>Min Angle</source>
<translation>Minimaler Winkel</translation>
</message>
<message>
<source>Max Angle</source>
<translation>Maximaler Winkel</translation>
</message>
<message>
<source>Thickness</source>
<translation>Dicke</translation>
</message>
</context>
</TS>
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012 - INECO PARTNERSHIP LIMITED (<http://www.ineco.co.th>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Module manifest for OpenERP/Odoo. The 'name' key is required by the
    # addon loader; it was lost to an embedded extraction artifact.
    'name': 'Ineco Purchase Sequence',
    'version': '0.1',
    'category': 'Extended',
    'description': """
    """,
    'author': 'Mr.Tititab Srisookco',
    'website': 'http://www.ineco.co.th',
    'depends': ['base', 'purchase', 'stock'],
    'data': [],
    'demo': [],
    'test': [],
    'update_xml': [
        'stock_view.xml',
    ],
    'installable': True,
    'images': [],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
'''
Project: Farnsworth

Author: Karandeep Singh Nagra
'''
from django.contrib.auth.models import User, Group, Permission
from django.core.urlresolvers import reverse
from django.db import models
from base.models import UserProfile
class Thread(models.Model):
    '''
    The Thread model. Used to group messages.

    Denormalized bookkeeping fields (number_of_messages, change_date) are
    maintained by the signal handlers at the bottom of this module.
    '''
    owner = models.ForeignKey(
        UserProfile,
        help_text="The user who started this thread.",
    )
    subject = models.CharField(
        blank=False,
        null=False,
        max_length=254,
        help_text="Subject of this thread.",
    )
    start_date = models.DateTimeField(
        auto_now_add=True,
        help_text="The date this thread was started.",
    )
    # NOTE(review): auto_now_add only stamps creation; subsequent updates
    # come from post_save_message below.
    change_date = models.DateTimeField(
        auto_now_add=True,
        help_text="The last time this thread was modified.",
    )
    # Cached count of related messages, refreshed in pre_save_thread.
    number_of_messages = models.PositiveSmallIntegerField(
        default=1,
        help_text="The number of messages in this thread.",
    )
    active = models.BooleanField(
        default=True,
        help_text="Whether this thread is still active.",
    )
    views = models.PositiveIntegerField(
        default=0,
        help_text="The number times this thread has been viewed.",
    )
    # null=True was removed: per the Django field reference, `null` has no
    # effect on ManyToManyField (the relation lives in a join table).
    followers = models.ManyToManyField(
        User,
        blank=True,
        related_name="following",
        help_text="Users following this thread",
    )

    def __unicode__(self):
        return self.subject

    class Meta:
        # Most recently active threads first.
        ordering = ['-change_date']

    def is_thread(self):
        # Counterpart of Message.is_message(); lets generic code/templates
        # test the object kind without isinstance.
        return True

    def get_view_url(self):
        # Canonical URL for this thread (threads app URLconf).
        return reverse("threads:view_thread", kwargs={"pk": self.pk})
class Message(models.Model):
    '''
    The Message model. Contains a body, owner, and post_date, referenced by thread.
    '''
    # Raw message text as entered by the owner.
    body = models.TextField(
        blank=False,
        null=False,
        help_text="Body of this message.",
    )
    owner = models.ForeignKey(
        UserProfile,
        help_text="The user who posted this message.",
    )
    post_date = models.DateTimeField(
        auto_now_add=True,
        help_text="The date this message was posted.",
    )
    thread = models.ForeignKey(
        Thread,
        help_text="The thread to which this message belongs.",
    )
    # True once the body has been edited after initial posting.
    edited = models.BooleanField(
        default=False,
    )
    def __str__(self):
        # NOTE(review): delegates to __unicode__, so under Python 2 this
        # returns a unicode object from __str__ — confirm no caller
        # requires a byte string here.
        return self.__unicode__()
    def __unicode__(self):
        return self.body
    class Meta:
        # Oldest first, so a thread reads chronologically.
        ordering = ['post_date']
    def is_message(self):
        # Counterpart of Thread.is_thread(); identifies the object kind.
        return True
def pre_save_thread(sender, instance, **kwargs):
    """Refresh the thread's cached message count just before it is saved."""
    instance.number_of_messages = instance.message_set.count()
def post_save_thread(sender, instance, created, **kwargs):
    """Delete a pre-existing thread once it no longer holds any messages."""
    is_empty = instance.number_of_messages == 0
    if is_empty and not created:
        instance.delete()
def post_save_message(sender, instance, created, **kwargs):
    """On first save of a message, stamp its thread's change_date and save it."""
    if not created:
        return
    thread = instance.thread
    thread.change_date = instance.post_date
    thread.save()
def post_delete_message(sender, instance, **kwargs):
    """Re-save the owning thread so its cached state is recomputed."""
    instance.thread.save()
# Connect signals with their respective functions from above.
# - pre_save on Thread refreshes the cached message count;
#   post_save on Thread deletes a thread that has become empty.
# - post_save on Message stamps the thread's change_date with the new
#   message's post_date; post_delete re-saves the thread so its cached
#   state is recomputed.
models.signals.post_save.connect(post_save_message, sender=Message)
models.signals.post_delete.connect(post_delete_message, sender=Message)
models.signals.pre_save.connect(pre_save_thread, sender=Thread)
models.signals.post_save.connect(post_save_thread, sender=Thread)
<|file_name|>tfidfanalysis.py<|end_file_name|><|fim▁begin|>import os
import codecs, re, time, string, logging, math
from operator import itemgetter
from nltk import FreqDist
from nltk.corpus import stopwords
import textmining
from scipy import spatial
from . import filehandler
def most_frequent_terms(*args):
    """For each document given, return its (frequency, term) pairs sorted
    by descending frequency."""
    tdm = textmining.TermDocumentMatrix(simple_tokenize_remove_our_stopwords)
    for doc in args:
        tdm.add_doc(doc)
    freqs = []
    for row in tdm.sparse:
        pairs = [(count, term) for term, count in row.items()]
        freqs.append(sorted(pairs, reverse=True))
    return freqs
def doc_to_words(document):
    '''
    Turn a document into a list of its lower-cased word tokens, with
    stand-alone punctuation tokens removed.
    # TODO: include word stemming
    '''
    t1 = time.time()
    # Tokens are runs of word chars/apostrophes, or single .,!?; marks.
    words = re.findall(r"[\w']+|[.,!?;]", document, re.UNICODE)
    t2 = time.time()
    words = [w.lower() for w in words]
    t3 = time.time()
    # Drop the punctuation-only tokens produced by the regex above.
    # (This line was missing from the garbled source; the timing log
    # below already referenced it.)
    words = [w for w in words if w not in string.punctuation]
    t4 = time.time()
    # %f instead of %d: these are sub-second float durations, and %d
    # truncated them all to 0.
    logging.debug(" tokenize: %f" % (t2 - t1))
    logging.debug(" ignore_case: %f" % (t3 - t2))
    logging.debug(" remove punctuation: %f" % (t4 - t3))
    return words
# TODO add a langauge param to remove spanish stop words too
def term_frequency(words):
    '''
    Turn a list of words into a NLTK frequency distribution object with
    English stopwords removed.
    '''
    t1 = time.time()
    fdist = FreqDist(words)
    # remove stopwords here rather than in corpus text for speed
    # http://stackoverflow.com/questions/7154312/how-do-i-remove-entries-within-a-counter-object-with-a-loop-without-invoking-a-r
    # Build the stopword set once: calling stopwords.words('english')
    # inside the loop re-created the whole list per term (O(n*m)).
    stopword_set = set(stopwords.words('english'))
    for w in list(fdist):
        if w in stopword_set:
            del fdist[w]
    t2 = time.time()
    # %f: the elapsed time is a sub-second float; %d truncated it to 0.
    logging.debug(" create term freq: %f" % (t2 - t1))
    return fdist
def _count_incidence(lookup, term):
if term in lookup:
lookup[term] += 1
else:
lookup[term] = 1
def inverse_document_frequency(list_of_fdist_objects):
    '''
    Compute inverse document frequency from a list of term-frequency
    mappings (one per document). Returns {term: log(N / docs-containing-term)}.
    '''
    doc_count = len(list_of_fdist_objects)
    term_doc_incidence = {}
    t1 = time.time()
    # Explicit loops instead of a side-effecting list comprehension:
    # each term is counted once per document that contains it.
    for fdist in list_of_fdist_objects:
        for term in list(fdist.keys()):
            term_doc_incidence[term] = term_doc_incidence.get(term, 0) + 1
    t2 = time.time()
    idf = {term: math.log(float(doc_count) / float(incidence))
           for term, incidence in term_doc_incidence.items()}
    t3 = time.time()
    # %f: sub-second float durations; %d printed 0.
    logging.debug(" create df: %f" % (t2 - t1))
    logging.debug(" create idf: %f" % (t3 - t2))
    return idf
def tf_idf(list_of_file_paths):
    '''
    Compute and return tf-idf from a list of file paths (sorted by tfidf desc)
    '''
    docs = [filehandler.convert_to_txt(path) for path in list_of_file_paths]
    # One FreqDist per document.
    term_freqs = [term_frequency(doc_to_words(doc)) for doc in docs]
    idf = inverse_document_frequency(term_freqs)
    results = []
    for tf in term_freqs:
        scored = [{'term': term, 'tfidf': frequency * idf[term], 'frequency': frequency}
                  for term, frequency in tf.items()]
        scored.sort(key=itemgetter('tfidf'), reverse=True)
        results.append(scored)
    return results
def simple_tokenize_remove_our_stopwords(document):
    """
    Clean up a document and split into a list of words, removing stopwords.
    Converts document (a string) to lowercase and strips out everything
    which is not a lowercase letter. Then removes stopwords.
    """
    document = document.lower()
    document = re.sub('[^a-z\']', ' ', document)
    words = document.strip().split()
    # Remove stopwords. Build the set once: the original comprehension
    # re-evaluated stopwords.words('english') for every token.
    stopword_set = set(stopwords.words('english'))
    words = [word for word in words if word not in stopword_set]
    return words
def cosine_similarity(list_of_file_paths):
    '''
    Return the pairwise cosine-similarity matrix of the documents at the
    given paths, computed over their term-document vectors.
    '''
    doc_list = [filehandler.convert_to_txt(file_path) for file_path in list_of_file_paths]
    # Initialize class to create term-document matrix
    tdm = textmining.TermDocumentMatrix(tokenizer=simple_tokenize_remove_our_stopwords)
    for doc in doc_list:
        tdm.add_doc(doc)
    # Materialize the rows once and drop the header row. The original
    # regenerated tdm.rows() inside the nested loop and skipped the first
    # row of every pass with boolean flags.
    rows = list(tdm.rows(cutoff=1))[1:]
    return [[1 - spatial.distance.cosine(row1, row2) for row2 in rows]
            for row1 in rows]
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod io;
use io::{BitInputStream, BitOutputStream};
pub const MAX_SYMBOLS: usize = 256;
/// Huffman tree node: a `Leaf` carries (frequency, symbol); a `Node`
/// carries (combined frequency, left child, right child).
#[derive(Clone)]
enum Tree {
    Leaf(usize, u8),
    Node(usize, Box<Tree>, Box<Tree>)
}
/// Frequency stored at the root of a Huffman (sub)tree.
fn freq_tree(t: &Tree) -> usize {
    match *t {
        Tree::Leaf(f, _) | Tree::Node(f, _, _) => f,
    }
}
fn freq(t:&Option<Tree>) -> usize {
match *t {
None => 0,
Some(ref t) => freq_tree(t)
}
}
/// Walk the tree and record, for every leaf symbol, the 0/1 path used to
/// reach it. `prefix` holds the path to the current node and is restored
/// (push/pop) around each recursive call; `codes` is indexed by symbol.
fn dump_codes(tree:&Tree, codes: &mut Vec<String>, prefix: &mut String) {
    match *tree {
        // A leaf: the accumulated path is this symbol's code.
        Tree::Leaf(_, sym) =>
            codes[sym as usize] = prefix.clone(),
        Tree::Node(_, ref left, ref right) => {
            // '0' marks a left edge, '1' a right edge.
            prefix.push('0');
            dump_codes(left, codes, prefix);
            prefix.pop();
            prefix.push('1');
            dump_codes(right, codes, prefix);
            prefix.pop();
        }
    }
}
/// Serialize the tree shape in pre-order: a 1 bit introduces a leaf
/// (followed by its symbol byte), a 0 bit an internal node whose two
/// children follow recursively. Frequencies are not written.
fn write_tree(tree:&Tree, writer:&mut BitOutputStream) {
    match *tree {
        Tree::Leaf(_, sym) => {
            writer.write_bit(1);
            // write_byte takes a u16; the u8 symbol widens losslessly.
            writer.write_byte(u16::from(sym));
        }
        Tree::Node(_, ref left, ref right) => {
            writer.write_bit(0);
            write_tree(left, writer);
            write_tree(right, writer);
        }
    }
}
/// Decode one symbol by walking the tree with the incoming bits:
/// 1 descends right, 0 descends left, until a leaf is reached.
fn read_char(tree: &Tree, reader: &mut BitInputStream) -> u8 {
    let mut node = tree;
    loop {
        match *node {
            Tree::Leaf(_, sym) => return sym,
            Tree::Node(_, ref left, ref right) => {
                node = if reader.read_bool() { &**right } else { &**left };
            }
        }
    }
}
/// Min-heap of trees ordered by frequency, stored 1-based in `data`
/// (index 0 is unused); `last` is the index of the final element.
struct Heap {
    data: Vec<Option<Tree>>,
    last: usize
}
impl Heap {
    /// Create an empty heap able to hold `size` elements (slot 0 unused).
    pub fn new(size:usize) -> Heap {
        Heap {
            data : vec![None;size+1],
            last : 0
        }
    }
    /// Insert `elem`, sifting along the full path to the root so the
    /// minimum frequency ends up at index 1.
    pub fn insert(&mut self, elem:Tree) {
        self.last += 1;
        self.data[self.last] = Some(elem);
        let mut j = self.last;
        // Walks the entire root path without early exit; comparisons above
        // the first non-swap are no-ops because those ancestors already
        // satisfy the heap property.
        while j > 1 {
            if freq(&self.data[j]) < freq(&self.data[j/2]) {
                self.data.swap(j, j/2);
            }
            j /= 2;
        }
    }
    /// Remove and return the minimum-frequency tree (None if empty),
    /// then restore the heap by sifting the moved last element down.
    pub fn extract(&mut self) -> Option<Tree> {
        if self.last == 0 {
            None
        } else {
            let min = self.data[1].clone();
            self.data[1] = self.data[self.last].clone();
            self.last -= 1;
            let mut j = 1;
            while 2 * j <= self.last {
                // k: index of the smaller-frequency child of j.
                let mut k = 2 * j;
                if k < self.last && freq(&self.data[k+1]) < freq(&self.data[k]) {
                    k += 1;
                }
                // Stop once the parent is no larger than its smaller child.
                if freq(&self.data[j]) < freq(&self.data[k]) {
                    break;
                }
                self.data.swap(j, k);
                j = k;
            }
            min
        }
    }
    /// Number of elements currently stored.
    pub fn size(&mut self) -> usize {
        self.last
    }
}
/// A built Huffman tree plus the per-symbol bit-string code table
/// (indexed by byte value; left empty when loaded via `read`).
pub struct HuffTree {
    tree: Tree,
    codes: Vec<String>
}
impl HuffTree {
pub fn build(mut reader: &mut BitInputStream) -> HuffTree {
let freqs :[usize; MAX_SYMBOLS] = HuffTree::calc_frecuencies(&mut reader);
let mut heap = Heap::new(MAX_SYMBOLS);
for (s, &f) in freqs.iter().enumerate() {
if f > 0 {
heap.insert(Tree::Leaf(f, s as u8));
}
}
while heap.size() > 1 {
let l = heap.extract().unwrap();
let r = heap.extract().unwrap();
heap.insert(Tree::Node(freq_tree(&l)+freq_tree(&r), Box::new(l), Box::new(r)))
}
let tree = heap.extract().unwrap();
let codes = HuffTree::build_codes(&tree);
HuffTree {
tree,
codes
}
}
fn read_tree(reader: &mut BitInputStream) -> Tree {
let flag = reader.read_bool();
if flag {
Tree::Leaf(0, reader.read_char())
} else {
let l = HuffTree::read_tree(reader);
let r = HuffTree::read_tree(reader);
Tree::Node(0, Box::new(l), Box::new(r))<|fim▁hole|> }
pub fn read(reader: &mut BitInputStream) -> HuffTree {
HuffTree {
tree: HuffTree::read_tree(reader),
codes: vec![]
}
}
pub fn write_to(&mut self, writer: &mut BitOutputStream) {
write_tree(&self.tree, writer);
}
pub fn write_symbols(&mut self, reader: &mut BitInputStream, writer: &mut BitOutputStream) {
let bytes = reader.get_bytes();
let len = bytes.len();
writer.write_int(len as u32);
for &symbol in bytes.iter() {
let code = &self.codes[symbol as usize];
for c in code.chars() {
writer.write_bit(c.to_digit(2).unwrap() as u8);
}
}
}
pub fn compress(&mut self, mut reader: &mut BitInputStream, mut writer: &mut BitOutputStream) {
self.write_to(&mut writer);
self.write_symbols(&mut reader, &mut writer);
writer.close();
}
pub fn decompress(&mut self, reader: &mut BitInputStream, writer: &mut BitOutputStream) {
let len = reader.read_int() as usize;
for _ in 0..len {
writer.write_byte(u16::from(read_char(&self.tree, reader)));
}
}
fn build_codes(tree: &Tree) -> Vec<String> {
let mut prefix:String = "".into();
let mut codes: Vec<String> = vec!["".into(); MAX_SYMBOLS];
dump_codes(&tree, &mut codes, &mut prefix);
codes.clone()
}
fn calc_frecuencies(reader: &mut BitInputStream) -> [usize; MAX_SYMBOLS] {
let mut freqs : [usize; MAX_SYMBOLS] = [0; MAX_SYMBOLS];
let bytes = reader.get_bytes();
for &b in bytes.iter() {
freqs[b as usize] += 1;
}
freqs
}
}<|fim▁end|> | } |
<|file_name|>tabulate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2011-2019 Sergey Astanin
# https://bitbucket.org/astanin/python-tabulate
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# -*- coding: utf-8 -*-
"""Pretty-print tabular data."""
from __future__ import print_function
from __future__ import unicode_literals
from collections import namedtuple
from platform import python_version_tuple
import re
import math
if python_version_tuple() >= ("3", "3", "0"):
from collections.abc import Iterable
else:
from collections import Iterable
if python_version_tuple()[0] < "3":
from itertools import izip_longest
from functools import partial
_none_type = type(None)
_bool_type = bool
_int_type = int
_long_type = long # noqa
_float_type = float
_text_type = unicode # noqa
_binary_type = str
def _is_file(f):
return isinstance(f, file) # noqa
else:
from itertools import zip_longest as izip_longest
from functools import reduce, partial
_none_type = type(None)
_bool_type = bool
_int_type = int
_long_type = int
_float_type = float
_text_type = str
_binary_type = bytes
basestring = str
import io
def _is_file(f):
return isinstance(f, io.IOBase)
try:
import wcwidth # optional wide-character (CJK) support
except ImportError:
wcwidth = None
__all__ = ["tabulate", "tabulate_formats", "simple_separated_format"]
__version__ = "0.8.4"
# minimum extra space in headers
MIN_PADDING = 2
# Whether or not to preserve leading/trailing whitespace in data.
PRESERVE_WHITESPACE = False
_DEFAULT_FLOATFMT = "g"
_DEFAULT_MISSINGVAL = ""
# if True, enable wide-character (CJK) support
WIDE_CHARS_MODE = wcwidth is not None
Line = namedtuple("Line", ["begin", "hline", "sep", "end"])
DataRow = namedtuple("DataRow", ["begin", "sep", "end"])
# A table structure is suppposed to be:
#
# --- lineabove ---------
# headerrow
# --- linebelowheader ---
# datarow
# --- linebewteenrows ---
# ... (more datarows) ...
# --- linebewteenrows ---
# last datarow
# --- linebelow ---------
#
# TableFormat's line* elements can be
#
# - either None, if the element is not used,
# - or a Line tuple,
# - or a function: [col_widths], [col_alignments] -> string.
#
# TableFormat's *row elements can be
#
# - either None, if the element is not used,
# - or a DataRow tuple,
# - or a function: [cell_values], [col_widths], [col_alignments] -> string.
#
# padding (an integer) is the amount of white space around data values.
#
# with_header_hide:
#
# - either None, to display all table elements unconditionally,
# - or a list of elements not to be displayed if the table has column
# headers.
#
TableFormat = namedtuple("TableFormat", ["lineabove", "linebelowheader",
"linebetweenrows", "linebelow",
"headerrow", "datarow",
"padding", "with_header_hide"])
def _pipe_segment_with_colons(align, colwidth):
"""Return a segment of a horizontal line with optional colons which
indicate column's alignment (as in `pipe` output format)."""
w = colwidth
if align in ["right", "decimal"]:
return ('-' * (w - 1)) + ":"
elif align == "center":
return ":" + ('-' * (w - 2)) + ":"
elif align == "left":
return ":" + ('-' * (w - 1))
else:
return '-' * w
def _pipe_line_with_colons(colwidths, colaligns):
    """Return a horizontal line with optional colons to indicate column's
    alignment (as in `pipe` output format)."""
    segments = (_pipe_segment_with_colons(align, width)
                for align, width in zip(colaligns, colwidths))
    return "|" + "|".join(segments) + "|"
def _mediawiki_row_with_attrs(separator, cell_values, colwidths, colaligns):
alignment = {"left": '',
"right": 'align="right"| ',
"center": 'align="center"| ',
"decimal": 'align="right"| '}
# hard-coded padding _around_ align attribute and value together
# rather than padding parameter which affects only the value
values_with_attrs = [' ' + alignment.get(a, '') + c + ' '
for c, a in zip(cell_values, colaligns)]
colsep = separator*2
return (separator + colsep.join(values_with_attrs)).rstrip()
def _textile_row_with_attrs(cell_values, colwidths, colaligns):
cell_values[0] += ' '
alignment = {"left": "<.", "right": ">.", "center": "=.", "decimal": ">."}
values = (alignment.get(a, '') + v for a, v in zip(colaligns, cell_values))
return '|' + '|'.join(values) + '|'
def _html_begin_table_without_header(colwidths_ignore, colaligns_ignore):
# this table header will be suppressed if there is a header row
return "\n".join(["<table>", "<tbody>"])
def _html_row_with_attrs(celltag, cell_values, colwidths, colaligns):
alignment = {"left": '',
"right": ' style="text-align: right;"',
"center": ' style="text-align: center;"',
"decimal": ' style="text-align: right;"'}
values_with_attrs = ["<{0}{1}>{2}</{0}>".format(
celltag, alignment.get(a, ''), c)
for c, a in zip(cell_values, colaligns)]
rowhtml = "<tr>" + "".join(values_with_attrs).rstrip() + "</tr>"
if celltag == "th": # it's a header row, create a new table header
rowhtml = "\n".join(["<table>",
"<thead>",
rowhtml,
"</thead>",
"<tbody>"])
return rowhtml
def _moin_row_with_attrs(celltag, cell_values, colwidths, colaligns,
header=''):
alignment = {"left": '',
"right": '<style="text-align: right;">',
"center": '<style="text-align: center;">',
"decimal": '<style="text-align: right;">'}
values_with_attrs = ["{0}{1} {2} ".format(celltag,
alignment.get(a, ''),
header+c+header)
for c, a in zip(cell_values, colaligns)]
return "".join(values_with_attrs)+"||"
def _latex_line_begin_tabular(colwidths, colaligns, booktabs=False):
alignment = {"left": "l", "right": "r", "center": "c", "decimal": "r"}
tabular_columns_fmt = "".join([alignment.get(a, "l") for a in colaligns])
return "\n".join(["\\begin{tabular}{" + tabular_columns_fmt + "}",
"\\toprule" if booktabs else "\\hline"])
LATEX_ESCAPE_RULES = {r"&": r"\&", r"%": r"\%", r"$": r"\$", r"#": r"\#",
r"_": r"\_", r"^": r"\^{}", r"{": r"\{", r"}": r"\}",
r"~": r"\textasciitilde{}", "\\": r"\textbackslash{}",
r"<": r"\ensuremath{<}", r">": r"\ensuremath{>}"}
def _latex_row(cell_values, colwidths, colaligns, escrules=LATEX_ESCAPE_RULES):
    """Format one LaTeX table row, escaping special characters per
    `escrules` (pass {} for raw output)."""
    escaped_values = ["".join(escrules.get(ch, ch) for ch in cell)
                      for cell in cell_values]
    rowfmt = DataRow("", "&", "\\\\")
    return _build_simple_row(escaped_values, rowfmt)
def _rst_escape_first_column(rows, headers):
    """Replace empty first-column cells with ".." so reStructuredText does
    not misparse the row; other cells are untouched."""
    def escape_empty(val):
        stringy = isinstance(val, (_text_type, _binary_type))
        if stringy and not val.strip():
            return ".."
        return val
    new_headers = list(headers)
    if headers:
        new_headers[0] = escape_empty(headers[0])
    new_rows = []
    for row in rows:
        new_row = list(row)
        if new_row:
            new_row[0] = escape_empty(row[0])
        new_rows.append(new_row)
    return new_rows, new_headers
_table_formats = {"simple":
TableFormat(lineabove=Line("", "-", " ", ""),
linebelowheader=Line("", "-", " ", ""),
linebetweenrows=None,
linebelow=Line("", "-", " ", ""),
headerrow=DataRow("", " ", ""),
datarow=DataRow("", " ", ""),
padding=0,
with_header_hide=["lineabove", "linebelow"]),
"plain":
TableFormat(lineabove=None, linebelowheader=None,
linebetweenrows=None, linebelow=None,
headerrow=DataRow("", " ", ""),
datarow=DataRow("", " ", ""),
padding=0, with_header_hide=None),
"grid":
TableFormat(lineabove=Line("+", "-", "+", "+"),
linebelowheader=Line("+", "=", "+", "+"),
linebetweenrows=Line("+", "-", "+", "+"),
linebelow=Line("+", "-", "+", "+"),
headerrow=DataRow("|", "|", "|"),
datarow=DataRow("|", "|", "|"),
padding=1, with_header_hide=None),
"fancy_grid":
TableFormat(lineabove=Line("â•’", "â•", "╤", "â••"),
linebelowheader=Line("╞", "â•", "╪", "â•¡"),
linebetweenrows=Line("├", "─", "┼", "┤"),
linebelow=Line("╘", "â•", "â•§", "â•›"),
headerrow=DataRow("│", "│", "│"),
datarow=DataRow("│", "│", "│"),
padding=1, with_header_hide=None),
"github":
TableFormat(lineabove=Line("|", "-", "|", "|"),
linebelowheader=Line("|", "-", "|", "|"),
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("|", "|", "|"),
datarow=DataRow("|", "|", "|"),
padding=1,
with_header_hide=["lineabove"]),
"pipe":
TableFormat(lineabove=_pipe_line_with_colons,
linebelowheader=_pipe_line_with_colons,
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("|", "|", "|"),
datarow=DataRow("|", "|", "|"),
padding=1,
with_header_hide=["lineabove"]),
"orgtbl":
TableFormat(lineabove=None,
linebelowheader=Line("|", "-", "+", "|"),
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("|", "|", "|"),
datarow=DataRow("|", "|", "|"),
padding=1, with_header_hide=None),
"jira":
TableFormat(lineabove=None,
linebelowheader=None,
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("||", "||", "||"),
datarow=DataRow("|", "|", "|"),
padding=1, with_header_hide=None),
"presto":
TableFormat(lineabove=None,
linebelowheader=Line("", "-", "+", ""),
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("", "|", ""),
datarow=DataRow("", "|", ""),
padding=1, with_header_hide=None),
"psql":
TableFormat(lineabove=Line("+", "-", "+", "+"),
linebelowheader=Line("|", "-", "+", "|"),
linebetweenrows=None,
linebelow=Line("+", "-", "+", "+"),
headerrow=DataRow("|", "|", "|"),
datarow=DataRow("|", "|", "|"),
padding=1, with_header_hide=None),
"rst":
TableFormat(lineabove=Line("", "=", " ", ""),
linebelowheader=Line("", "=", " ", ""),
linebetweenrows=None,
linebelow=Line("", "=", " ", ""),
headerrow=DataRow("", " ", ""),
datarow=DataRow("", " ", ""),
padding=0, with_header_hide=None),
"mediawiki":
TableFormat(lineabove=Line(
"{| class=\"wikitable\" style=\"text-align: left;\"",
"", "", "\n|+ <!-- caption -->\n|-"),
linebelowheader=Line("|-", "", "", ""),
linebetweenrows=Line("|-", "", "", ""),
linebelow=Line("|}", "", "", ""),
headerrow=partial(
_mediawiki_row_with_attrs, "!"),
datarow=partial(_mediawiki_row_with_attrs, "|"),
padding=0, with_header_hide=None),
"moinmoin":
TableFormat(lineabove=None,
linebelowheader=None,
linebetweenrows=None,
linebelow=None,
headerrow=partial(_moin_row_with_attrs, "||",
header="'''"),
datarow=partial(_moin_row_with_attrs, "||"),
padding=1, with_header_hide=None),
"youtrack":
TableFormat(lineabove=None,
linebelowheader=None,
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("|| ", " || ", " || "),
datarow=DataRow("| ", " | ", " |"),
padding=1, with_header_hide=None),
"html":
TableFormat(lineabove=_html_begin_table_without_header,
linebelowheader="",
linebetweenrows=None,
linebelow=Line("</tbody>\n</table>", "", "", ""),
headerrow=partial(_html_row_with_attrs, "th"),
datarow=partial(_html_row_with_attrs, "td"),
padding=0, with_header_hide=["lineabove"]),
"latex":
TableFormat(lineabove=_latex_line_begin_tabular,
linebelowheader=Line("\\hline", "", "", ""),
linebetweenrows=None,
linebelow=Line("\\hline\n\\end{tabular}", "", "",
""),
headerrow=_latex_row,
datarow=_latex_row,
padding=1, with_header_hide=None),
"latex_raw":
TableFormat(lineabove=_latex_line_begin_tabular,
linebelowheader=Line("\\hline", "", "", ""),
linebetweenrows=None,
linebelow=Line("\\hline\n\\end{tabular}", "", "",
""),
headerrow=partial(_latex_row, escrules={}),
datarow=partial(_latex_row, escrules={}),
padding=1, with_header_hide=None),
"latex_booktabs":
TableFormat(lineabove=partial(_latex_line_begin_tabular,
booktabs=True),
linebelowheader=Line("\\midrule", "", "", ""),
linebetweenrows=None,
linebelow=Line("\\bottomrule\n\\end{tabular}",
"", "", ""),
headerrow=_latex_row,
datarow=_latex_row,
padding=1, with_header_hide=None),
"tsv":
TableFormat(lineabove=None, linebelowheader=None,
linebetweenrows=None, linebelow=None,
headerrow=DataRow("", "\t", ""),
datarow=DataRow("", "\t", ""),
padding=0, with_header_hide=None),
"textile":
TableFormat(lineabove=None, linebelowheader=None,
linebetweenrows=None, linebelow=None,
headerrow=DataRow("|_. ", "|_.", "|"),
datarow=_textile_row_with_attrs,
padding=1, with_header_hide=None)}
tabulate_formats = list(sorted(_table_formats.keys()))
# The table formats for which multiline cells will be folded into subsequent
# table rows. The key is the original format specified at the API. The value is
# the format that will be used to represent the original format.
multiline_formats = {
"plain": "plain",
"simple": "simple",
"grid": "grid",
"fancy_grid": "fancy_grid",
"pipe": "pipe",
"orgtbl": "orgtbl",
"jira": "jira",
"presto": "presto",
"psql": "psql",
"rst": "rst",
}
# TODO: Add multiline support for the remaining table formats:
# - mediawiki: Replace \n with <br>
# - moinmoin: TBD
# - youtrack: TBD
# - html: Replace \n with <br>
# - latex*: Use "makecell" package: In header, replace X\nY with
# \thead{X\\Y} and in data row, replace X\nY with \makecell{X\\Y}
# - tsv: TBD
# - textile: Replace \n with <br/> (must be well-formed XML)
_multiline_codes = re.compile(r"\r|\n|\r\n")
_multiline_codes_bytes = re.compile(b"\r|\n|\r\n")
# ANSI color codes
_invisible_codes = re.compile(r"\x1b\[\d+[;\d]*m|\x1b\[\d*\;\d*\;\d*m")
# ANSI color codes
_invisible_codes_bytes = re.compile(b"\x1b\[\d+[;\d]*m|\x1b\[\d*\;\d*\;\d*m")
def simple_separated_format(separator):
    """Build a minimal TableFormat: no rules, columns split by `separator`.

    >>> tsv = simple_separated_format("\\t") ; \
        tabulate([["foo", 1], ["spam", 23]], tablefmt=tsv) == 'foo \\t 1\\nspam\\t23'
    True

    """  # noqa
    row_format = DataRow('', separator, '')
    return TableFormat(None, None, None, None,
                       headerrow=row_format,
                       datarow=row_format,
                       padding=0, with_header_hide=None)
def _isconvertible(conv, string):
try:
conv(string)
return True
except (ValueError, TypeError):
return False
def _isnumber(string):
    """
    >>> _isnumber("123.45")
    True
    >>> _isnumber("123")
    True
    >>> _isnumber("spam")
    False
    >>> _isnumber("123e45678")
    False
    >>> _isnumber("inf")
    True
    """
    if not _isconvertible(float, string):
        return False
    if isinstance(string, (_text_type, _binary_type)):
        value = float(string)
        if math.isinf(value) or math.isnan(value):
            # Overflowed literals like "123e45678" parse to inf but are not
            # numbers; only the explicit spellings are accepted.
            return string.lower() in ('inf', '-inf', 'nan')
    return True
def _isint(string, inttype=int):
    """
    >>> _isint("123")
    True
    >>> _isint("123.45")
    False
    """
    # An actual int (exact type match) is always accepted; otherwise only
    # str/bytes values that parse cleanly as `inttype` qualify.
    if type(string) is inttype:
        return True
    return (isinstance(string, (_binary_type, _text_type))
            and _isconvertible(inttype, string))
def _isbool(string):
    """
    >>> _isbool(True)
    True
    >>> _isbool("False")
    True
    >>> _isbool(1)
    False
    """
    # Accept real bools and the literal strings "True"/"False"; note that
    # 1/0 are deliberately NOT booleans here.
    if type(string) is _bool_type:
        return True
    return (isinstance(string, (_binary_type, _text_type))
            and string in ("True", "False"))
def _type(string, has_invisible=True, numparse=True):
    """The least generic type (type(None), int, float, str, unicode).

    >>> _type(None) is type(None)
    True
    >>> _type("foo") is type("")
    True
    >>> _type("1") is type(1)
    True
    >>> _type('\x1b[31m42\x1b[0m') is type(42)
    True
    >>> _type('\x1b[31m42\x1b[0m') is type(42)
    True

    """
    # Strip ANSI escapes first so colored values are typed by their
    # visible content ('\x1b[31m42\x1b[0m' -> int).
    if has_invisible and \
       (isinstance(string, _text_type) or isinstance(string, _binary_type)):
        string = _strip_invisible(string)

    # Checks run from most specific to most generic; numeric detection is
    # suppressed entirely when numparse is False.
    if string is None:
        return _none_type
    elif hasattr(string, "isoformat"):  # datetime.datetime, date, and time
        return _text_type
    elif _isbool(string):
        return _bool_type
    elif _isint(string) and numparse:
        return int
    elif _isint(string, _long_type) and numparse:
        # Python 2 long also maps to int for alignment purposes.
        return int
    elif _isnumber(string) and numparse:
        return float
    elif isinstance(string, _binary_type):
        return _binary_type
    else:
        return _text_type
def _afterpoint(string):
    """Symbols after a decimal point, -1 if the string lacks the decimal point.

    >>> _afterpoint("123.45")
    2
    >>> _afterpoint("1001")
    -1
    >>> _afterpoint("eggs")
    -1
    >>> _afterpoint("123e45")
    2
    """
    # Non-numbers and plain integers have no fractional part.
    if not _isnumber(string) or _isint(string):
        return -1
    # Prefer the decimal point; fall back to the exponent marker so that
    # "123e45" counts the digits after 'e'.
    pos = string.rfind(".")
    if pos < 0:
        pos = string.lower().rfind("e")
    return len(string) - pos - 1 if pos >= 0 else -1
def _padleft(width, s):
"""Flush right.
>>> _padleft(6, '\u044f\u0439\u0446\u0430') == ' \u044f\u0439\u0446\u0430'
True
"""
fmt = "{0:>%ds}" % width
return fmt.format(s)
def _padright(width, s):
"""Flush left.
>>> _padright(6, '\u044f\u0439\u0446\u0430') == '\u044f\u0439\u0446\u0430 '
True
""" # noqa
fmt = "{0:<%ds}" % width
return fmt.format(s)
def _padboth(width, s):
"""Center string.
>>> _padboth(6, '\u044f\u0439\u0446\u0430') == ' \u044f\u0439\u0446\u0430 '
True
"""
fmt = "{0:^%ds}" % width
return fmt.format(s)
def _padnone(ignore_width, s):
    # Padding stub used when alignment is disabled: returns s unchanged.
    # The unused width parameter keeps the signature compatible with the
    # other pad functions (_padleft/_padright/_padboth).
    return s
def _strip_invisible(s):
    """Remove invisible ANSI color codes from a string or bytestring.

    The replacement value must match the input type: on Python 3,
    ``re.sub`` with a bytes pattern and a ``str`` replacement raises
    TypeError, which the previous bytes branch did.
    """
    if isinstance(s, _text_type):
        return re.sub(_invisible_codes, "", s)
    else:  # a bytestring — replacement must be bytes, not ""
        return re.sub(_invisible_codes_bytes, b"", s)
def _visible_width(s):
    """Visible width of a printed string. ANSI color codes are removed.

    >>> _visible_width('\x1b[31mhello\x1b[0m'), _visible_width("world")
    (5, 5)

    """
    # Optional wide-character support: use wcswidth when available/enabled.
    use_widechars = wcwidth is not None and WIDE_CHARS_MODE
    len_fn = wcwidth.wcswidth if use_widechars else len
    if isinstance(s, (_text_type, _binary_type)):
        return len_fn(_strip_invisible(s))
    # Non-string values are measured via their text representation.
    return len_fn(_text_type(s))
def _is_multiline(s):
    """True when the str/bytes value contains a CR or LF."""
    pattern = _multiline_codes if isinstance(s, _text_type) \
        else _multiline_codes_bytes
    return re.search(pattern, s) is not None
def _multiline_width(multiline_s, line_width_fn=len):
"""Visible width of a potentially multiline content."""
return max(map(line_width_fn, re.split("[\r\n]", multiline_s)))
def _choose_width_fn(has_invisible, enable_widechars, is_multiline):
    """Return a function to calculate visible cell width."""
    # Pick the per-line measure first: ANSI-aware, wide-char-aware, or len.
    if has_invisible:
        line_width_fn = _visible_width
    elif enable_widechars:  # optional wide-character support if available
        line_width_fn = wcwidth.wcswidth
    else:
        line_width_fn = len
    if not is_multiline:
        return line_width_fn
    # Multiline cells measure as the widest of their physical lines.
    return lambda s: _multiline_width(s, line_width_fn)
def _align_column_choose_padfn(strings, alignment, has_invisible):
    # Map an alignment name to a padding function, pre-processing the cell
    # strings where required. Returns (possibly transformed strings, padfn).
    if alignment == "right":
        if not PRESERVE_WHITESPACE:
            strings = [s.strip() for s in strings]
        padfn = _padleft
    elif alignment == "center":
        if not PRESERVE_WHITESPACE:
            strings = [s.strip() for s in strings]
        padfn = _padboth
    elif alignment == "decimal":
        # NOTE: decimal alignment intentionally skips whitespace stripping.
        if has_invisible:
            decimals = [_afterpoint(_strip_invisible(s)) for s in strings]
        else:
            decimals = [_afterpoint(s) for s in strings]
        # Right-pad every cell so all decimal points land in the same
        # column, then flush the whole column right.
        maxdecimals = max(decimals)
        strings = [s + (maxdecimals - decs) * " "
                   for s, decs in zip(strings, decimals)]
        padfn = _padleft
    elif not alignment:
        # Falsy alignment (None/"") disables padding altogether.
        padfn = _padnone
    else:
        # Any other value (e.g. "left") flushes left.
        if not PRESERVE_WHITESPACE:
            strings = [s.strip() for s in strings]
        padfn = _padright
    return strings, padfn
def _align_column(strings, alignment, minwidth=0,
                  has_invisible=True, enable_widechars=False,
                  is_multiline=False):
    """[string] -> [padded_string]

    Pad every cell of a column to the same visible width, honoring the
    requested alignment, ANSI escapes, wide characters and multiline cells.
    """
    strings, padfn = _align_column_choose_padfn(strings, alignment,
                                                has_invisible)
    width_fn = _choose_width_fn(has_invisible, enable_widechars, is_multiline)

    s_widths = list(map(width_fn, strings))
    # Target width: widest cell, but never below the caller's minimum
    # (usually derived from the header width).
    maxwidth = max(max(s_widths), minwidth)
    # TODO: refactor column alignment in single-line and multiline modes
    if is_multiline:
        if not enable_widechars and not has_invisible:
            padded_strings = [
                "\n".join([padfn(maxwidth, s) for s in ms.splitlines()])
                for ms in strings]
        else:
            # enable wide-character width corrections:
            # visible width differs from len(), so widen the pad target by
            # the per-cell difference (w - l) between the two measures.
            s_lens = [max((len(s) for s in re.split("[\r\n]", ms)))
                      for ms in strings]
            visible_widths = [maxwidth - (w - l)
                              for w, l in zip(s_widths, s_lens)]
            # wcswidth and _visible_width don't count invisible characters;
            # padfn doesn't need to apply another correction
            # NOTE(review): "(ms.splitlines() or ms)" falls back to iterating
            # the string itself when splitlines() is empty (i.e. ms == "") —
            # presumably to keep empty cells empty; confirm before changing.
            padded_strings = ["\n".join([padfn(w, s)
                                         for s in (ms.splitlines() or ms)])
                              for ms, w in zip(strings, visible_widths)]
    else:  # single-line cell values
        if not enable_widechars and not has_invisible:
            padded_strings = [padfn(maxwidth, s) for s in strings]
        else:
            # enable wide-character width corrections (same scheme as above)
            s_lens = list(map(len, strings))
            visible_widths = [maxwidth - (w - l)
                              for w, l in zip(s_widths, s_lens)]
            # wcswidth and _visible_width don't count invisible characters;
            # padfn doesn't need to apply another correction
            padded_strings = [padfn(w, s)
                              for s, w in zip(strings, visible_widths)]
    return padded_strings
def _more_generic(type1, type2):
    """Return the more generic of two types according to the fixed
    hierarchy none < bool < int < float < bytes < str; any unknown type
    ranks as str (most generic)."""
    hierarchy = [_none_type, _bool_type, int, float, _binary_type, _text_type]
    rank = {t: i for i, t in enumerate(hierarchy)}
    most_generic = len(hierarchy) - 1
    winner = max(rank.get(type1, most_generic),
                 rank.get(type2, most_generic))
    return hierarchy[winner]
def _column_type(strings, has_invisible=True, numparse=True):
    """The least generic type all column values are convertible to.

    >>> _column_type([True, False]) is _bool_type
    True
    >>> _column_type(["1", "2"]) is _int_type
    True
    >>> _column_type(["1", "2.3"]) is _float_type
    True
    >>> _column_type(["1", "2.3", "four"]) is _text_type
    True
    >>> _column_type(["four", '\u043f\u044f\u0442\u044c']) is _text_type
    True
    >>> _column_type([None, "brux"]) is _text_type
    True
    >>> _column_type([1, 2, None]) is _int_type
    True
    >>> import datetime as dt
    >>> _column_type([dt.datetime(1991,2,19), dt.time(17,35)]) is _text_type
    True

    """
    # Type each cell, then fold pairwise up the generality ladder,
    # starting from bool (so an all-None column also widens correctly).
    types = [_type(s, has_invisible, numparse)
             for s in strings]
    return reduce(_more_generic, types, _bool_type)
def _format(val, valtype, floatfmt, missingval="", has_invisible=True):
    """Format a value according to its (column-wide) type.

    Unicode is supported:

    >>> hrow = ['\u0431\u0443\u043a\u0432\u0430', '\u0446\u0438\u0444\u0440\u0430'] ; \
        tbl = [['\u0430\u0437', 2], ['\u0431\u0443\u043a\u0438', 4]] ; \
        good_result = '\\u0431\\u0443\\u043a\\u0432\\u0430 \\u0446\\u0438\\u0444\\u0440\\u0430\\n------- -------\\n\\u0430\\u0437 2\\n\\u0431\\u0443\\u043a\\u0438 4' ; \
        tabulate(tbl, headers=hrow) == good_result
    True

    """  # noqa
    # None cells render as the caller-supplied placeholder.
    if val is None:
        return missingval

    if valtype in [int, _text_type]:
        return "{0}".format(val)
    elif valtype is _binary_type:
        # Try to decode bytes as ASCII; fall back to str() of the value
        # (TypeError is raised when val is not actually bytes).
        try:
            return _text_type(val, "ascii")
        except TypeError:
            return _text_type(val)
    elif valtype is float:
        is_a_colored_number = (has_invisible and
                               isinstance(val, (_text_type, _binary_type)))
        if is_a_colored_number:
            # Reformat only the visible digits and splice them back so the
            # surrounding ANSI escape codes are preserved.
            raw_val = _strip_invisible(val)
            formatted_val = format(float(raw_val), floatfmt)
            return val.replace(raw_val, formatted_val)
        else:
            return format(float(val), floatfmt)
    else:
        return "{0}".format(val)
def _align_header(header, alignment, width, visible_width, is_multiline=False,
                  width_fn=None):
    "Pad string header to width chars given known visible_width of the header."
    if is_multiline:
        # Align each physical line of the header independently, then rejoin.
        header_lines = re.split(_multiline_codes, header)
        padded_lines = [_align_header(h, alignment, width, width_fn(h))
                        for h in header_lines]
        return "\n".join(padded_lines)
    # else: not multiline
    # Widen the pad target by the number of invisible (ANSI escape)
    # characters so the visible text still lines up after padding.
    ninvisible = len(header) - visible_width
    width += ninvisible
    if alignment == "left":
        return _padright(width, header)
    elif alignment == "center":
        return _padboth(width, header)
    elif not alignment:
        # Falsy alignment: leave the header unpadded.
        return "{0}".format(header)
    else:
        # Default (including "right" and "decimal"): flush right.
        return _padleft(width, header)
def _prepend_row_index(rows, index):
"""Add a left-most index column."""
if index is None or index is False:
return rows
if len(index) != len(rows):
print('index=', index)
print('rows=', rows)
raise ValueError('index must be as long as the number of data rows')
rows = [[v]+list(row) for v, row in zip(index, rows)]
return rows
def _bool(val):
"A wrapper around standard bool() which doesn't throw on NumPy arrays"
try:
return bool(val)
except ValueError: # val is likely to be a numpy array with many elements
return False
def _normalize_tabular_data(tabular_data, headers, showindex="default"):
    """Transform a supported data type to a list of lists, and a list of headers.

    Supported tabular data types:

    * list-of-lists or another iterable of iterables
    * list of named tuples (usually used with headers="keys")
    * list of dicts (usually used with headers="keys")
    * list of OrderedDicts (usually used with headers="keys")
    * 2D NumPy arrays
    * NumPy record arrays (usually used with headers="keys")
    * dict of iterables (usually used with headers="keys")
    * pandas.DataFrame (usually used with headers="keys")

    The first row can be used as headers if headers="firstrow",
    column indices can be used as headers if headers="keys".

    If showindex="default", show row indices of the pandas.DataFrame.
    If showindex="always", show row indices for all types of data.
    If showindex="never", don't show row indices for all types of data.
    If showindex is an iterable, show its values as row indices.

    Returns (rows, headers) where rows is a list of lists and headers is a
    list of strings.
    """
    # Some header containers (ndarray, pandas Index) raise on bool();
    # coerce them to a plain list first.
    try:
        bool(headers)
    except ValueError:  # numpy.ndarray, pandas.core.index.Index, ...
        headers = list(headers)

    index = None
    if hasattr(tabular_data, "keys") and hasattr(tabular_data, "values"):
        # dict-like and pandas.DataFrame?
        if hasattr(tabular_data.values, "__call__"):
            # likely a conventional dict
            keys = tabular_data.keys()
            # columns have to be transposed
            rows = list(izip_longest(*tabular_data.values()))
        elif hasattr(tabular_data, "index"):
            # values is a property, has .index => it's likely a
            # pandas.DataFrame (pandas 0.11.0)
            keys = list(tabular_data)
            if tabular_data.index.name is not None:
                # A named (possibly MultiIndex) index contributes extra
                # leading header columns.
                if isinstance(tabular_data.index.name, list):
                    keys[:0] = tabular_data.index.name
                else:
                    keys[:0] = [tabular_data.index.name]
            # values matrix doesn't need to be transposed
            vals = tabular_data.values
            # for DataFrames add an index per default
            index = list(tabular_data.index)
            rows = [list(row) for row in vals]
        else:
            raise ValueError(
                "tabular data doesn't appear to be a dict or a DataFrame")

        if headers == "keys":
            headers = list(map(_text_type, keys))  # headers should be strings

    else:  # it's a usual an iterable of iterables, or a NumPy array
        rows = list(tabular_data)

        if (headers == "keys" and not rows):
            # an empty table (issue #81)
            headers = []
        elif (headers == "keys" and
              hasattr(tabular_data, "dtype") and
              getattr(tabular_data.dtype, "names")):
            # numpy record array
            headers = tabular_data.dtype.names
        elif (headers == "keys"
              and len(rows) > 0
              and isinstance(rows[0], tuple)
              and hasattr(rows[0], "_fields")):
            # namedtuple
            headers = list(map(_text_type, rows[0]._fields))
        elif (len(rows) > 0
              and isinstance(rows[0], dict)):
            # dict or OrderedDict
            uniq_keys = set()  # implements hashed lookup
            keys = []  # storage for set
            if headers == "firstrow":
                # The first dict fixes the initial key order; its values
                # become the header labels further below.
                firstdict = rows[0] if len(rows) > 0 else {}
                keys.extend(firstdict.keys())
                uniq_keys.update(keys)
                rows = rows[1:]
            for row in rows:
                for k in row.keys():
                    # Save unique items in input order
                    if k not in uniq_keys:
                        keys.append(k)
                        uniq_keys.add(k)
            if headers == 'keys':
                headers = keys
            elif isinstance(headers, dict):
                # a dict of headers for a list of dicts
                headers = [headers.get(k, k) for k in keys]
                headers = list(map(_text_type, headers))
            elif headers == "firstrow":
                if len(rows) > 0:
                    headers = [firstdict.get(k, k) for k in keys]
                    headers = list(map(_text_type, headers))
                else:
                    headers = []
            elif headers:
                raise ValueError(
                    'headers for a list of dicts is not a dict or a keyword')
            # Missing keys in a row render as None (later -> missingval).
            rows = [[row.get(k) for k in keys] for row in rows]

        elif (headers == "keys"
              and hasattr(tabular_data, "description")
              and hasattr(tabular_data, "fetchone")
              and hasattr(tabular_data, "rowcount")):
            # Python Database API cursor object (PEP 0249)
            # print tabulate(cursor, headers='keys')
            headers = [column[0] for column in tabular_data.description]

        elif headers == "keys" and len(rows) > 0:
            # keys are column indices
            headers = list(map(_text_type, range(len(rows[0]))))

    # take headers from the first row if necessary
    if headers == "firstrow" and len(rows) > 0:
        if index is not None:
            # The first index value labels the (prepended) index column.
            headers = [index[0]] + list(rows[0])
            index = index[1:]
        else:
            headers = rows[0]
        headers = list(map(_text_type, headers))  # headers should be strings
        rows = rows[1:]

    headers = list(map(_text_type, headers))
    rows = list(map(list, rows))

    # add or remove an index column
    showindex_is_a_str = type(showindex) in [_text_type, _binary_type]
    if showindex == "default" and index is not None:
        rows = _prepend_row_index(rows, index)
    elif isinstance(showindex, Iterable) and not showindex_is_a_str:
        rows = _prepend_row_index(rows, list(showindex))
    elif showindex == "always" or (_bool(showindex) and
                                   not showindex_is_a_str):
        if index is None:
            index = list(range(len(rows)))
        rows = _prepend_row_index(rows, index)
    elif showindex == "never" or (not _bool(showindex) and
                                  not showindex_is_a_str):
        pass

    # pad with empty headers for initial columns if necessary
    if headers and len(rows) > 0:
        nhs = len(headers)
        ncols = len(rows[0])
        if nhs < ncols:
            headers = [""]*(ncols - nhs) + headers

    return rows, headers
def tabulate(tabular_data, headers=(), tablefmt="simple",
             floatfmt=_DEFAULT_FLOATFMT, numalign="decimal", stralign="left",
             missingval=_DEFAULT_MISSINGVAL, showindex="default",
             disable_numparse=False, colalign=None):
    """Format a fixed width table for pretty printing.

    >>> print(tabulate([[1, 2.34], [-56, "8.999"], ["2", "10001"]]))
    ---  ---------
      1      2.34
    -56      8.999
      2  10001
    ---  ---------

    The first required argument (`tabular_data`) can be a
    list-of-lists (or another iterable of iterables), a list of named
    tuples, a dictionary of iterables, an iterable of dictionaries,
    a two-dimensional NumPy array, NumPy record array, or a Pandas'
    dataframe.

    Table headers
    -------------

    To print nice column headers, supply the second argument (`headers`):

      - `headers` can be an explicit list of column headers
      - if `headers="firstrow"`, then the first row of data is used
      - if `headers="keys"`, then dictionary keys or column indices are used

    Otherwise a headerless table is produced.

    If the number of headers is less than the number of columns, they
    are supposed to be names of the last columns. This is consistent
    with the plain-text format of R and Pandas' dataframes.

    >>> print(tabulate([["sex","age"],["Alice","F",24],["Bob","M",19]],
    ...       headers="firstrow"))
           sex      age
    -----  -----  -----
    Alice  F         24
    Bob    M         19

    By default, pandas.DataFrame data have an additional column called
    row index. To add a similar column to all other types of data,
    use `showindex="always"` or `showindex=True`. To suppress row indices
    for all types of data, pass `showindex="never" or `showindex=False`.
    To add a custom row index column, pass `showindex=some_iterable`.

    >>> print(tabulate([["F",24],["M",19]], showindex="always"))
    -  -  --
    0  F  24
    1  M  19
    -  -  --

    Column alignment
    ----------------

    `tabulate` tries to detect column types automatically, and aligns
    the values properly. By default it aligns decimal points of the
    numbers (or flushes integer numbers to the right), and flushes
    everything else to the left. Possible column alignments
    (`numalign`, `stralign`) are: "right", "center", "left", "decimal"
    (only for `numalign`), and None (to disable alignment).

    Table formats
    -------------

    `floatfmt` is a format specification used for columns which
    contain numeric data with a decimal point. This can also be
    a list or tuple of format strings, one per column.

    `None` values are replaced with a `missingval` string (like
    `floatfmt`, this can also be a list of values for different
    columns):

    >>> print(tabulate([["spam", 1, None],
    ...                 ["eggs", 42, 3.14],
    ...                 ["other", None, 2.7]], missingval="?"))
    -----  --  ----
    spam    1  ?
    eggs   42  3.14
    other   ?  2.7
    -----  --  ----

    Various plain-text table formats (`tablefmt`) are supported:
    'plain', 'simple', 'grid', 'pipe', 'orgtbl', 'rst', 'mediawiki',
    'latex', 'latex_raw' and 'latex_booktabs'. Variable `tabulate_formats`
    contains the list of currently supported formats.

    "plain" format doesn't use any pseudographics to draw tables,
    it separates columns with a double space:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                 ["strings", "numbers"], "plain"))
    strings      numbers
    spam         41.9999
    eggs        451

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="plain"))
    spam   41.9999
    eggs  451

    "simple" format is like Pandoc simple_tables:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                 ["strings", "numbers"], "simple"))
    strings      numbers
    ---------  ---------
    spam         41.9999
    eggs        451

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="simple"))
    ----  --------
    spam   41.9999
    eggs  451
    ----  --------

    "grid" is similar to tables produced by Emacs table.el package or
    Pandoc grid_tables:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                ["strings", "numbers"], "grid"))
    +-----------+-----------+
    | strings   |   numbers |
    +===========+===========+
    | spam      |   41.9999 |
    +-----------+-----------+
    | eggs      |  451      |
    +-----------+-----------+

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="grid"))
    +------+----------+
    | spam |  41.9999 |
    +------+----------+
    | eggs | 451      |
    +------+----------+

    "fancy_grid" draws a grid using box-drawing characters:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                ["strings", "numbers"], "fancy_grid"))
    ╒═══════════╤═══════════╕
    │ strings   │   numbers │
    ╞═══════════╪═══════════╡
    │ spam      │   41.9999 │
    ├───────────┼───────────┤
    │ eggs      │  451      │
    ╘═══════════╧═══════════╛

    "pipe" is like tables in PHP Markdown Extra extension or Pandoc
    pipe_tables:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                ["strings", "numbers"], "pipe"))
    | strings   |   numbers |
    |:----------|----------:|
    | spam      |   41.9999 |
    | eggs      |  451      |

    "presto" is like tables produce by the Presto CLI:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                ["strings", "numbers"], "presto"))
     strings   |   numbers
    -----------+-----------
     spam      |   41.9999
     eggs      |  451

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="pipe"))
    |:-----|---------:|
    | spam |  41.9999 |
    | eggs | 451      |

    "orgtbl" is like tables in Emacs org-mode and orgtbl-mode. They
    are slightly different from "pipe" format by not using colons to
    define column alignment, and using a "+" sign to indicate line
    intersections:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                ["strings", "numbers"], "orgtbl"))
    | strings   |   numbers |
    |-----------+-----------|
    | spam      |   41.9999 |
    | eggs      |  451      |

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="orgtbl"))
    | spam |  41.9999 |
    | eggs | 451      |

    "rst" is like a simple table format from reStructuredText; please
    note that reStructuredText accepts also "grid" tables:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]],
    ...                ["strings", "numbers"], "rst"))
    =========  =========
    strings      numbers
    =========  =========
    spam         41.9999
    eggs        451
    =========  =========

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="rst"))
    ====  ========
    spam   41.9999
    eggs  451
    ====  ========

    "mediawiki" produces a table markup used in Wikipedia and on other
    MediaWiki-based sites:

    >>> print(tabulate([["strings", "numbers"], ["spam", 41.9999], ["eggs", "451.0"]],
    ...                headers="firstrow", tablefmt="mediawiki"))
    {| class="wikitable" style="text-align: left;"
    |+ <!-- caption -->
    |-
    ! strings   !! align="right"|   numbers
    |-
    | spam      || align="right"|   41.9999
    |-
    | eggs      || align="right"|  451
    |}

    "html" produces HTML markup:

    >>> print(tabulate([["strings", "numbers"], ["spam", 41.9999], ["eggs", "451.0"]],
    ...                headers="firstrow", tablefmt="html"))
    <table>
    <thead>
    <tr><th>strings  </th><th style="text-align: right;">  numbers</th></tr>
    </thead>
    <tbody>
    <tr><td>spam     </td><td style="text-align: right;">  41.9999</td></tr>
    <tr><td>eggs     </td><td style="text-align: right;"> 451     </td></tr>
    </tbody>
    </table>

    "latex" produces a tabular environment of LaTeX document markup:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="latex"))
    \\begin{tabular}{lr}
    \\hline
     spam &  41.9999 \\\\
     eggs & 451      \\\\
    \\hline
    \\end{tabular}

    "latex_raw" is similar to "latex", but doesn't escape special characters,
    such as backslash and underscore, so LaTeX commands may embedded into
    cells' values:

    >>> print(tabulate([["spam$_9$", 41.9999], ["\\\\emph{eggs}", "451.0"]], tablefmt="latex_raw"))
    \\begin{tabular}{lr}
    \\hline
     spam$_9$    &  41.9999 \\\\
     \\emph{eggs} & 451      \\\\
    \\hline
    \\end{tabular}

    "latex_booktabs" produces a tabular environment of LaTeX document markup
    using the booktabs.sty package:

    >>> print(tabulate([["spam", 41.9999], ["eggs", "451.0"]], tablefmt="latex_booktabs"))
    \\begin{tabular}{lr}
    \\toprule
     spam &  41.9999 \\\\
     eggs & 451      \\\\
    \\bottomrule
    \\end{tabular}

    Number parsing
    --------------
    By default, anything which can be parsed as a number is a number.
    This ensures numbers represented as strings are aligned properly.
    This can lead to weird results for particular strings such as
    specific git SHAs e.g. "42992e1" will be parsed into the number
    429920 and aligned as such.

    To completely disable number parsing (and alignment), use
    `disable_numparse=True`. For more fine grained control, a list column
    indices is used to disable number parsing only on those columns
    e.g. `disable_numparse=[0, 2]` would disable number parsing only on the
    first and third columns.
    """  # noqa
    if tabular_data is None:
        tabular_data = []
    # Normalize any supported input type to (list-of-lists, header list).
    list_of_lists, headers = _normalize_tabular_data(
        tabular_data, headers, showindex=showindex)

    # empty values in the first column of RST tables should be
    # escaped (issue #82)
    # "" should be escaped as "\\ " or ".."
    if tablefmt == 'rst':
        list_of_lists, headers = _rst_escape_first_column(list_of_lists,
                                                          headers)

    # optimization: look for ANSI control codes once,
    # enable smart width functions only if a control code is found
    plain_text = '\t'.join(['\t'.join(map(_text_type, headers))] +
                           ['\t'.join(map(_text_type, row))
                            for row in list_of_lists])
    has_invisible = re.search(_invisible_codes, plain_text)
    enable_widechars = wcwidth is not None and WIDE_CHARS_MODE
    # Fall back to the multiline-capable variant of the format when any
    # cell spans several lines.
    if tablefmt in multiline_formats and _is_multiline(plain_text):
        tablefmt = multiline_formats.get(tablefmt, tablefmt)
        is_multiline = True
    else:
        is_multiline = False
    width_fn = _choose_width_fn(has_invisible, enable_widechars, is_multiline)

    # format rows and columns, convert numeric values to strings
    cols = list(izip_longest(*list_of_lists))
    numparses = _expand_numparse(disable_numparse, len(cols))
    coltypes = [_column_type(col, numparse=np) for col, np in
                zip(cols, numparses)]
    # floatfmt/missingval may each be a single value or one value per column.
    if isinstance(floatfmt, basestring):  # old version
        # just duplicate the string to use in each column
        float_formats = len(cols) * [floatfmt]
    else:  # if floatfmt is list, tuple etc we have one per column
        float_formats = list(floatfmt)
        if len(float_formats) < len(cols):
            float_formats.extend((len(cols)-len(float_formats)) *
                                 [_DEFAULT_FLOATFMT])
    if isinstance(missingval, basestring):
        missing_vals = len(cols) * [missingval]
    else:
        missing_vals = list(missingval)
        if len(missing_vals) < len(cols):
            missing_vals.extend((len(cols)-len(missing_vals)) *
                                [_DEFAULT_MISSINGVAL])
    cols = [[_format(v, ct, fl_fmt, miss_v, has_invisible) for v in c]
            for c, ct, fl_fmt, miss_v in zip(cols, coltypes, float_formats,
                                             missing_vals)]

    # align columns: numeric columns use numalign, everything else stralign;
    # explicit colalign entries override per column.
    aligns = [numalign if ct in [int, float] else stralign for ct in coltypes]
    if colalign is not None:
        assert isinstance(colalign, Iterable)
        for idx, align in enumerate(colalign):
            aligns[idx] = align
    minwidths = [width_fn(h) + MIN_PADDING
                 for h in headers] if headers else [0]*len(cols)
    cols = [_align_column(c, a, minw, has_invisible, enable_widechars,
                          is_multiline)
            for c, a, minw in zip(cols, aligns, minwidths)]

    if headers:
        # align headers and add headers
        t_cols = cols or [['']] * len(headers)
        t_aligns = aligns or [stralign] * len(headers)
        minwidths = [max(minw, max(width_fn(cl)
                                   for cl in c))
                     for minw, c in zip(minwidths, t_cols)]
        headers = [_align_header(h, a, minw, width_fn(h), is_multiline,
                                 width_fn)
                   for h, a, minw in zip(headers, t_aligns, minwidths)]
        rows = list(zip(*cols))
    else:
        minwidths = [max(width_fn(cl) for cl in c) for c in cols]
        rows = list(zip(*cols))

    # A format name falls back to "simple" when unrecognized; a TableFormat
    # instance is used as-is.
    if not isinstance(tablefmt, TableFormat):
        tablefmt = _table_formats.get(tablefmt, _table_formats["simple"])

    return _format_table(tablefmt, headers, rows, minwidths, aligns,
                         is_multiline)
def _expand_numparse(disable_numparse, column_count):
"""
Return a list of bools of length `column_count` which indicates whether
number parsing should be used on each column.
If `disable_numparse` is a list of indices, each of those indices are
False, and everything else is True.
If `disable_numparse` is a bool, then the returned list is all the same.
"""
if isinstance(disable_numparse, Iterable):
numparses = [True] * column_count
for index in disable_numparse:
numparses[index] = False
return numparses
else:
return [not disable_numparse] * column_count
def _pad_row(cells, padding):
if cells:
pad = " "*padding
padded_cells = [pad + cell + pad for cell in cells]
return padded_cells
else:
return cells
def _build_simple_row(padded_cells, rowfmt):
"Format row according to DataRow format without padding."
begin, sep, end = rowfmt
return (begin + sep.join(padded_cells) + end).rstrip()
def _build_row(padded_cells, colwidths, colaligns, rowfmt):
"Return a string which represents a row of data cells."
if not rowfmt:
return None
if hasattr(rowfmt, "__call__"):
return rowfmt(padded_cells, colwidths, colaligns)
else:
return _build_simple_row(padded_cells, rowfmt)
def _append_basic_row(lines, padded_cells, colwidths, colaligns, rowfmt):
    """Render one data row via _build_row and append it to `lines`;
    returns `lines` for chaining."""
    rendered = _build_row(padded_cells, colwidths, colaligns, rowfmt)
    lines.append(rendered)
    return lines
def _append_multiline_row(lines, padded_multiline_cells, padded_widths,
                          colaligns, rowfmt, pad):
    # Render one logical row whose cells may span several physical lines:
    # each physical line becomes one output row.
    colwidths = [w - 2*pad for w in padded_widths]
    cells_lines = [c.splitlines() for c in padded_multiline_cells]
    nlines = max(map(len, cells_lines))  # number of lines in the row
    # vertically pad cells where some lines are missing
    cells_lines = [(cl + [' '*w]*(nlines - len(cl)))
                   for cl, w in zip(cells_lines, colwidths)]
    # Transpose: i-th physical line of every cell forms output row i.
    lines_cells = [[cl[i] for cl in cells_lines] for i in range(nlines)]
    for ln in lines_cells:
        # Horizontal padding was deferred from _format_table to here.
        padded_ln = _pad_row(ln, pad)
        _append_basic_row(lines, padded_ln, colwidths, colaligns, rowfmt)
    return lines
def _build_line(colwidths, colaligns, linefmt):
"Return a string which represents a horizontal line."
if not linefmt:
return None
if hasattr(linefmt, "__call__"):
return linefmt(colwidths, colaligns)
else:
begin, fill, sep, end = linefmt
cells = [fill*w for w in colwidths]
return _build_simple_row(cells, (begin, sep, end))
def _append_line(lines, colwidths, colaligns, linefmt):
    # Render a horizontal rule via _build_line, append it to `lines`,
    # and return `lines` for chaining.
    lines.append(_build_line(colwidths, colaligns, linefmt))
    return lines
def _format_table(fmt, headers, rows, colwidths, colaligns, is_multiline):
    """Produce a plain-text representation of the table.

    `fmt` is a TableFormat; `hidden` lists the rule names suppressed when
    a header is present (with_header_hide).
    """
    lines = []
    hidden = fmt.with_header_hide if (headers and fmt.with_header_hide) else []
    pad = fmt.padding
    headerrow = fmt.headerrow

    padded_widths = [(w + 2*pad) for w in colwidths]
    if is_multiline:
        # do it later, in _append_multiline_row
        pad_row = lambda row, _: row  # noqa
        append_row = partial(_append_multiline_row, pad=pad)
    else:
        pad_row = _pad_row
        append_row = _append_basic_row

    padded_headers = pad_row(headers, pad)
    padded_rows = [pad_row(row, pad) for row in rows]

    if fmt.lineabove and "lineabove" not in hidden:
        _append_line(lines, padded_widths, colaligns, fmt.lineabove)

    if padded_headers:
        append_row(lines, padded_headers, padded_widths, colaligns, headerrow)
        if fmt.linebelowheader and "linebelowheader" not in hidden:
            _append_line(lines, padded_widths, colaligns, fmt.linebelowheader)

    if padded_rows and fmt.linebetweenrows and "linebetweenrows" not in hidden:
        # initial rows with a line below
        for row in padded_rows[:-1]:
            append_row(lines, row, padded_widths, colaligns, fmt.datarow)
            _append_line(lines, padded_widths, colaligns, fmt.linebetweenrows)
        # the last row without a line below
        append_row(lines, padded_rows[-1], padded_widths, colaligns,
                   fmt.datarow)
    else:
        for row in padded_rows:
            append_row(lines, row, padded_widths, colaligns, fmt.datarow)

    if fmt.linebelow and "linebelow" not in hidden:
        _append_line(lines, padded_widths, colaligns, fmt.linebelow)

    if headers or rows:
        return "\n".join(lines)
    else:  # a completely empty table
        return ""
def _main():
    """\
    Usage: tabulate [options] [FILE ...]

    Pretty-print tabular data.
    See also https://bitbucket.org/astanin/python-tabulate

    FILE                      a filename of the file with tabular data;
                              if "-" or missing, read data from stdin.

    Options:

    -h, --help                show this message
    -1, --header              use the first row of data as a table header
    -o FILE, --output FILE    print table to FILE (default: stdout)
    -s REGEXP, --sep REGEXP   use a custom column separator (default: whitespace)
    -F FPFMT, --float FPFMT   floating point number format (default: g)
    -C ALIGNS, --colalign ALIGNS
                              space-separated list of column alignments
    -f FMT, --format FMT      set output table format; supported formats:
                              plain, simple, grid, fancy_grid, pipe, orgtbl,
                              rst, mediawiki, html, latex, latex_raw,
                              latex_booktabs, tsv
                              (default: simple)
    """  # noqa
    import getopt
    import sys
    import textwrap
    usage = textwrap.dedent(_main.__doc__)
    try:
        # Fixes over the previous spec: "output" was missing "=" although
        # -o/--output takes a value, and the handler below checked
        # -C/--colalign while the spec declared A:/align= (the option was
        # unreachable). Short and long specs must stay in sync with the
        # handlers.
        opts, args = getopt.getopt(sys.argv[1:],
                                   "h1o:s:F:C:f:",
                                   ["help", "header", "output=", "sep=",
                                    "float=", "colalign=", "format="])
    except getopt.GetoptError as e:
        print(e)
        print(usage)
        sys.exit(2)
    headers = []
    floatfmt = _DEFAULT_FLOATFMT
    colalign = None
    tablefmt = "simple"
    sep = r"\s+"
    outfile = "-"
    for opt, value in opts:
        if opt in ["-1", "--header"]:
            headers = "firstrow"
        elif opt in ["-o", "--output"]:
            outfile = value
        elif opt in ["-F", "--float"]:
            floatfmt = value
        elif opt in ["-C", "--colalign"]:
            colalign = value.split()
        elif opt in ["-f", "--format"]:
            if value not in tabulate_formats:
                print("%s is not a supported table format" % value)
                print(usage)
                sys.exit(3)
            tablefmt = value
        elif opt in ["-s", "--sep"]:
            sep = value
        elif opt in ["-h", "--help"]:
            print(usage)
            sys.exit(0)
    files = [sys.stdin] if not args else args
    # Open the output once; never treat sys.stdout as a context manager —
    # exiting a `with sys.stdout` block would close the process's stdout.
    out = sys.stdout if outfile == "-" else open(outfile, "w")
    try:
        for f in files:
            if f == "-":
                f = sys.stdin
            if _is_file(f):
                _pprint_file(f, headers=headers, tablefmt=tablefmt,
                             sep=sep, floatfmt=floatfmt, file=out,
                             colalign=colalign)
            else:
                with open(f) as fobj:
                    _pprint_file(fobj, headers=headers, tablefmt=tablefmt,
                                 sep=sep, floatfmt=floatfmt, file=out,
                                 colalign=colalign)
    finally:
        # Close only files this function opened.
        if out is not sys.stdout:
            out.close()
def _pprint_file(fobject, headers, tablefmt, sep, floatfmt, file, colalign):
    """Split each non-blank line of *fobject* on the regex *sep* and
    pretty-print the resulting table with tabulate() to *file*."""
    table = [re.split(sep, line.rstrip()) for line in fobject if line.strip()]
    print(tabulate(table, headers, tablefmt, floatfmt=floatfmt,
                   colalign=colalign), file=file)
if __name__ == "__main__":
_main()<|fim▁end|> | |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>#
import os
import unittest
<|fim▁hole|>
@classmethod
def datafilename(cls, name):
fname = os.path.join(
os.path.dirname(__file__),
"data",
name,
)
assert os.path.exists(fname)
return fname<|fim▁end|> | class TestBase(unittest.TestCase): |
<|file_name|>forecast.py<|end_file_name|><|fim▁begin|>import forecastio
class ForecastAPI:
_API_KEY = "8eefab4d187a39b993ca9c875fef6159"
_LAZY = False
_LAT = 0
_LNG = 0
_forecast = ()
def __init__(self,key,lat,lng,lazy=False):
self._LAT = lat
self._LNG = lng
self._API_KEY = key
self._LAZY = lazy
self._forecast = forecastio.load_forecast(self._API_KEY,self._LAT,self._LNG,lazy=lazy)
def get_7day_forecast_detailed(self):
return self._forecast.daily().data
"""
Help getting cloud data from the future
"""
def get_7_day_cloudCover(self):
c_data = self._forecast.daily().data
cloud_results = {}
for day in c_data:
cloud_results[day.time.isoformat()] = day.cloudCover
return cloud_results
"""
Helper on getting cloud sunrise and sunset data
"""<|fim▁hole|> count = 0
for day in c_data:
if count < n_days:
sun_results[day.time.isoformat()] = {"sunrise":day.sunriseTime,"sunset":day.sunsetTime,"stat":day.icon,"cloudcover":day.cloudCover}
count = count + 1
return sun_results
"""
Helper on getting cloud sunrise and sunset data from the past
"""
def get_historical_day_minimal_solar(self,days):
#TODO get temp just for reference
sun_results = {}
for day in days:
print "getting date for %s"%day
self._forecast = forecastio.load_forecast(self._API_KEY,self._LAT,self._LNG,lazy=self._LAZY,time=day)
c_data = self._forecast.daily().data
for f_day in c_data:
print "adding date for %s"%f_day
sun_results[day.isoformat()] = {"sunrise":f_day.sunriseTime,"sunset":f_day.sunsetTime,"stat":f_day.icon,"cloudcover":f_day.cloudCover}
return sun_results<|fim▁end|> | def get_n_day_minimal_solar(self,n_days):
c_data = self._forecast.daily().data
sun_results = {} |
<|file_name|>regions-variance-contravariant-use-covariant.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that a type which is covariant with respect to its region
// parameter yields an error when used in a contravariant way.
//
// Note: see variance-regions-*.rs for the tests that check that the
// variance inference works in the first place.
// This is contravariant with respect to 'a, meaning that
// Contravariant<'long> <: Contravariant<'short> iff
// 'short <= 'long
struct Contravariant<'a> {
    f: &'a int // pre-1.0 Rust `int` type; this borrowed field is what variance inference acts on
}
fn use_<'short,'long>(c: Contravariant<'short>,
s: &'short int,
l: &'long int,
_where:Option<&'short &'long ()>) {
// Test whether Contravariant<'short> <: Contravariant<'long>. Since
// 'short <= 'long, this would be true if the Contravariant type were
// covariant with respect to its parameter 'a.
let _: Contravariant<'long> = c; //~ ERROR mismatched types
//~^ ERROR cannot infer an appropriate lifetime
}
fn main() {}<|fim▁end|> | |
<|file_name|>keys_to_twist_with_ramps.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# BEGIN ALL
import rospy
import math
from std_msgs.msg import String
from geometry_msgs.msg import Twist
key_mapping = { 'w': [ 0, 1], 'x': [ 0, -1],
'a': [ 1, 0], 'd': [-1, 0],
's': [ 0, 0] }
g_twist_pub = None
g_target_twist = None
g_last_twist = None
g_last_send_time = None
g_vel_scales = [0.1, 0.1] # default to very slow
g_vel_ramps = [1, 1] # units: meters per second^2
# BEGIN RAMP
def ramped_vel(v_prev, v_target, t_prev, t_now, ramp_rate):
# compute maximum velocity step
step = ramp_rate * (t_now - t_prev).to_sec()
sign = 1.0 if (v_target > v_prev) else -1.0
error = math.fabs(v_target - v_prev)
if error < step: # we can get there within this timestep. we're done.
return v_target
else:
return v_prev + sign * step # take a step towards the target
# END RAMP
def ramped_twist(prev, target, t_prev, t_now, ramps):
tw = Twist()
tw.angular.z = ramped_vel(prev.angular.z, target.angular.z, t_prev,
t_now, ramps[0])
tw.linear.x = ramped_vel(prev.linear.x, target.linear.x, t_prev,
t_now, ramps[1])
return tw
def send_twist():
global g_last_twist_send_time, g_target_twist, g_last_twist,\
g_vel_scales, g_vel_ramps, g_twist_pub<|fim▁hole|> g_last_twist_send_time = t_now
g_twist_pub.publish(g_last_twist)
def keys_cb(msg):
global g_target_twist, g_last_twist, g_vel_scales
if len(msg.data) == 0 or not key_mapping.has_key(msg.data[0]):
return # unknown key.
vels = key_mapping[msg.data[0]]
g_target_twist.angular.z = vels[0] * g_vel_scales[0]
g_target_twist.linear.x = vels[1] * g_vel_scales[1]
def fetch_param(name, default):
if rospy.has_param(name):
return rospy.get_param(name)
else:
print "parameter [%s] not defined. Defaulting to %.3f" % (name, default)
return default
if __name__ == '__main__':
rospy.init_node('keys_to_twist')
g_last_twist_send_time = rospy.Time.now()
g_twist_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
rospy.Subscriber('keys', String, keys_cb)
g_target_twist = Twist() # initializes to zero
g_last_twist = Twist()
g_vel_scales[0] = fetch_param('~angular_scale', 0.1)
g_vel_scales[1] = fetch_param('~linear_scale', 0.1)
g_vel_ramps[0] = fetch_param('~angular_accel', 1.0)
g_vel_ramps[1] = fetch_param('~linear_accel', 1.0)
rate = rospy.Rate(20)
while not rospy.is_shutdown():
send_twist()
rate.sleep()
# END ALL<|fim▁end|> | t_now = rospy.Time.now()
g_last_twist = ramped_twist(g_last_twist, g_target_twist,
g_last_twist_send_time, t_now, g_vel_ramps) |
<|file_name|>AccessibleText.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
<|fim▁hole|> * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Dennis Ushakov
*/
package javax.accessibility;
import com.gaecompat.javax.swing.text.AttributeSet;
import com.google.code.appengine.awt.Point;
import com.google.code.appengine.awt.Rectangle;
public interface AccessibleText {
    /** Granularity constant: operate on a single character. */
    static final int CHARACTER = 1;
    /** Granularity constant: operate on a whole word. */
    static final int WORD = 2;
    /** Granularity constant: operate on a whole sentence. */
    static final int SENTENCE = 3;
    /** Index of the character rendered at point {@code p} (local coordinates). */
    int getIndexAtPoint(Point p);
    /** Bounding box of the character at index {@code i}. */
    Rectangle getCharacterBounds(int i);
    /** Total number of characters in the accessible text. */
    int getCharCount();
    /** Zero-based offset of the caret. */
    int getCaretPosition();
    /** Text chunk (CHARACTER/WORD/SENTENCE, per {@code part}) at {@code index}. */
    String getAtIndex(int part, int index);
    /** Text chunk of granularity {@code part} after {@code index}. */
    String getAfterIndex(int part, int index);
    /** Text chunk of granularity {@code part} before {@code index}. */
    String getBeforeIndex(int part, int index);
    /** Style attributes of the character at index {@code i}. */
    AttributeSet getCharacterAttribute(int i);
    /** Start offset of the current selection. */
    int getSelectionStart();
    /** End offset of the current selection. */
    int getSelectionEnd();
    /** The currently selected portion of the text. */
    String getSelectedText();
}
* (the "License"); you may not use this file except in compliance with
|
<|file_name|>make-autosuspend-rules.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1+
# Generate autosuspend rules for devices that have been tested to work properly
# with autosuspend by the Chromium OS team. Based on
# https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/udev/gen_autosuspend_rules.py<|fim▁hole|>
print('# pci:v<00VENDOR>d<00DEVICE> (8 uppercase hexadecimal digits twice)')
for entry in chromiumos.gen_autosuspend_rules.PCI_IDS:
vendor, device = entry.split(':')
vendor = int(vendor, 16)
device = int(device, 16)
print('pci:v{:08X}d{:08X}*'.format(vendor, device))
print('# usb:v<VEND>p<PROD> (4 uppercase hexadecimal digits twice')
for entry in chromiumos.gen_autosuspend_rules.USB_IDS:
vendor, product = entry.split(':')
vendor = int(vendor, 16)
product = int(product, 16)
print('usb:v{:04X}p{:04X}*'.format(vendor, product))
print(' ID_AUTOSUSPEND=1')<|fim▁end|> |
import chromiumos.gen_autosuspend_rules |
<|file_name|>main_filters.cc<|end_file_name|><|fim▁begin|><|fim▁hole|>#include <string>
#include <utility>
#include "DotMapOutput.hh"
#include "EdgeType.hh"
#include "Filter.hh"
#include "Map.hh"
#include "RawTextMapInput.hh"
using namespace std;
// Edge predicates used below to build composite Filter objects.
bool small_edges_(Edge<double>& e) { return e.distance() < 450.0; } // edges shorter than 450 (km, per the message printed in main)
bool roads_(Edge<double>& e) { return e.type() == EdgeType::Road; } // road edges only
bool planes_(Edge<double>& e) { return e.type() == EdgeType::Plane; } // plane edges only
int main(void) {
  // Demo: load a map, combine edge filters, list the filtered neighbors of one city.
  Map m;
  RawTextMapInput in(m);
  // Read the map from test.yolo
  in << "tests/graphtests/test.yolo";
  // Base city, arbitrarily chosen for the test
  City* base = m.getCity(0);
  // Create filters : accept only small edges (<450km), only roads and only planes
  GenericFilter<double>* small_edges = new Filter<double>(small_edges_);
  GenericFilter<double>* roads = new Filter<double>(roads_);
  GenericFilter<double>* planes = new Filter<double>(planes_);
  // Accepted edges must be either planes or both small and roads.
  // NOTE(review): the overloaded && takes a pointer on the right-hand side
  // (*small_edges && roads) while || dereferences both — presumably a
  // project-defined operator signature; confirm against GenericFilter.
  GenericFilter<double>* global_filter = *planes || (*small_edges && roads);
  // City and its neighbors
  cout << *base << " neighbors accessible by plane or {with distance lower than 450 km and accessible by road}: ";
  for (auto city : m.neighbors(base, global_filter))
    cout << *city << " ";
  cout << endl;
  // Free memory, it's ecological! :-)
  delete global_filter;
  delete small_edges;
  delete roads;
  delete planes;
  return 0;
}
<|file_name|>prepare_dpf4.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
#
#
# $Header: /opt/cvs/python/packages/share1.5/AutoDockTools/Utilities24/prepare_dpf4.py,v 1.14.4.1 2011/12/01 17:16:33 rhuey Exp $
#
import string
import os.path
from MolKit import Read
from AutoDockTools.DockingParameters import DockingParameters, DockingParameter4FileMaker, genetic_algorithm_list, \
genetic_algorithm_local_search_list4, local_search_list4,\
simulated_annealing_list4
def usage():
    # Print the command-line help text for prepare_dpf4.py to stdout.
    # (Python 2 print statements; the text itself is runtime output and
    # must not be reworded.)
    print "Usage: prepare_dpf4.py -l pdbqt_file -r pdbqt_file"
    print "    -l ligand_filename"
    print "    -r receptor_filename"
    print
    print "Optional parameters:"
    print "    [-o output dpf_filename]"
    print "    [-i template dpf_filename]"
    print "    [-x flexres_filename]"
    print "    [-p parameter_name=new_value]"
    print "    [-k list of parameters to write]"
    print "    [-e write epdb dpf ]"
    print "    [-v] verbose output"
    print "    [-L] use local search parameters"
    print "    [-S] use simulated annealing search parameters"
    print "    [-s] seed population using ligand's present conformation"
    print
    print "Prepare a docking parameter file (DPF) for AutoDock4."
    print
    print "   The DPF will by default be <ligand>_<receptor>.dpf. This"
    print "may be overridden using the -o flag."
if __name__ == '__main__':
import getopt
import sys
try:
opt_list, args = getopt.getopt(sys.argv[1:], 'sLShvl:r:i:o:x:p:k:e')
except getopt.GetoptError, msg:
print 'prepare_dpf4.py: %s' % msg
usage()
sys.exit(2)
receptor_filename = ligand_filename = None
dpf_filename = None
template_filename = None
flexres_filename = None
parameters = []
parameter_list = genetic_algorithm_local_search_list4
pop_seed = False
verbose = None
epdb_output = False
for o, a in opt_list:
if verbose: print "o=", o, ' a=', a
if o in ('-v', '--v'):
verbose = 1
if verbose: print 'verbose output'
if o in ('-l', '--l'): #ligand filename
ligand_filename = a
if verbose: print 'ligand_filename =', ligand_filename
if o in ('-r', '--r'): #receptor filename
receptor_filename = a
if verbose: print 'receptor_filename =', receptor_filename
if o in ('-x', '--x'): #flexres_filename
flexres_filename = a
if verbose: print 'flexres_filename =', flexres_filename
if o in ('-i', '--i'): #input reference
template_filename = a
if verbose: print 'template_filename =', template_filename
if o in ('-o', '--o'): #output filename
dpf_filename = a
if verbose: print 'output dpf_filename =', dpf_filename
if o in ('-p', '--p'): #parameter
parameters.append(a)
if verbose: print 'parameters =', parameters
if o in ('-e', '--e'):
epdb_output = True
if verbose: print 'output epdb file'
parameter_list = epdb_list4_2
if o in ('-k', '--k'): #parameter_list_to_write
parameter_list = a
if verbose: print 'parameter_list =', parameter_list
if o in ('-L', '--L'): #parameter_list_to_write
local_search = 1
parameter_list = local_search_list4
if verbose: print 'parameter_list =', parameter_list
if o in ('-S', '--S'): #parameter_list_to_write
parameter_list = simulated_annealing_list4
if verbose: print 'parameter_list =', parameter_list
if o in ('-h', '--'):
usage()
sys.exit()
if o in ('-s'):
pop_seed = True
if (not receptor_filename) or (not ligand_filename):
print "prepare_dpf4.py: ligand and receptor filenames"
print " must be specified."
usage()
sys.exit()
#9/2011: fixing local_search bugs:
# specifically:
# 1. quaternion0 0 0 0 0
# 2. dihe0 0 0 0 0 0 <one per rotatable bond>
# 3. about == tran0
# 4. remove tstep qstep and dstep
# 5. remove ls_search_freq
local_search = parameter_list==local_search_list4
dm = DockingParameter4FileMaker(verbose=verbose)
if template_filename is not None: #setup values by reading dpf
dm.dpo.read(template_filename)
dm.set_ligand(ligand_filename)
dm.set_receptor(receptor_filename)
if flexres_filename is not None:
flexmol = Read(flexres_filename)[0]
flexres_types = flexmol.allAtoms.autodock_element
lig_types = dm.dpo['ligand_types']['value'].split()
all_types = lig_types
for t in flexres_types:
if t not in all_types:
all_types.append(t)
all_types_string = all_types[0]
if len(all_types)>1:
for t in all_types[1:]:
all_types_string = all_types_string + " " + t
if verbose: print "adding ", t, " to all_types->", all_types_string
dm.dpo['ligand_types']['value'] = all_types_string
dm.dpo['flexres']['value'] = flexres_filename
dm.dpo['flexres_flag']['value'] = True
#dm.set_docking_parameters( ga_num_evals=1750000,ga_pop_size=150, ga_run=20, rmstol=2.0)
kw = {}
for p in parameters:
key,newvalue = string.split(p, '=')
#detect string reps of lists: eg "[1.,1.,1.]"
if newvalue[0]=='[':
nv = []
for item in newvalue[1:-1].split(','):
nv.append(float(item))
#print "nv=", nv
newvalue = nv
if key=='epdb_flag':
print "setting epdb_flag to", newvalue
kw['epdb_flag'] = 1
elif key=='set_psw1':
print "setting psw1_flag to", newvalue
kw['set_psw1'] = 1
kw['set_sw1'] = 0
elif key=='set_sw1':
print "setting set_sw1 to", newvalue
kw['set_sw1'] = 1
kw['set_psw1'] = 0
elif key=='include_1_4_interactions_flag':
kw['include_1_4_interactions'] = 1
elif 'flag' in key:
if newvalue in ['1','0']:
newvalue = int(newvalue)
if newvalue =='False':
newvalue = False
if newvalue =='True':
newvalue = True
elif local_search and 'about' in key:
kw['about'] = newvalue
kw['tran0'] = newvalue
else:
kw[key] = newvalue<|fim▁hole|> #special hack for output_pop_file
if key=='output_pop_file':
parameter_list.insert(parameter_list.index('set_ga'), key)
else:
parameter_list.append(key)
dm.write_dpf(dpf_filename, parameter_list, pop_seed)
#prepare_dpf4.py -l indinavir.pdbq -r 1hsg.pdbqs -p ga_num_evals=20000000 -p ga_pop_size=150 -p ga_run=17 -i ref.dpf -o testing.dpf<|fim▁end|> | apply(dm.set_docking_parameters, (), kw)
if key not in parameter_list: |
<|file_name|>Cn3d_backbone_style.hpp<|end_file_name|><|fim▁begin|>/* $Id$
* ===========================================================================
*
* PUBLIC DOMAIN NOTICE
* National Center for Biotechnology Information
*
* This software/database is a "United States Government Work" under the
* terms of the United States Copyright Act. It was written as part of
* the author's official duties as a United States Government employee and
* thus cannot be copyrighted. This software/database is freely available
* to the public for use. The National Library of Medicine and the U.S.
* Government have not placed any restriction on its use or reproduction.
*
* Although all reasonable efforts have been taken to ensure the accuracy
* and reliability of the software and data, the NLM and the U.S.
* Government do not and cannot warrant the performance or results that
* may be obtained by using this software or data. The NLM and the U.S.
* Government disclaim all warranties, express or implied, including
* warranties of performance, merchantability or fitness for any particular
* purpose.
*
* Please cite the author in any work or product based on this material.<|fim▁hole|> *
* ===========================================================================
*
*/
/// @file Cn3d_backbone_style.hpp
/// User-defined methods of the data storage class.
///
/// This file was originally generated by application DATATOOL
/// using the following specifications:
/// 'cn3d.asn'.
///
/// New methods or data members can be added to it if needed.
/// See also: Cn3d_backbone_style_.hpp
#ifndef OBJECTS_CN3D_CN3D_BACKBONE_STYLE_HPP
#define OBJECTS_CN3D_CN3D_BACKBONE_STYLE_HPP
// generated includes
#include <objects/cn3d/Cn3d_backbone_style_.hpp>
// generated classes
BEGIN_NCBI_SCOPE
BEGIN_objects_SCOPE // namespace ncbi::objects::
/////////////////////////////////////////////////////////////////////////////
class NCBI_CN3D_EXPORT CCn3d_backbone_style : public CCn3d_backbone_style_Base
{
typedef CCn3d_backbone_style_Base Tparent;
public:
// constructor
CCn3d_backbone_style(void);
// destructor
~CCn3d_backbone_style(void);
private:
// Prohibit copy constructor and assignment operator
CCn3d_backbone_style(const CCn3d_backbone_style& value);
CCn3d_backbone_style& operator=(const CCn3d_backbone_style& value);
};
/////////////////// CCn3d_backbone_style inline methods
// constructor
inline
CCn3d_backbone_style::CCn3d_backbone_style(void)
{
}
/////////////////// end of CCn3d_backbone_style inline methods
END_objects_SCOPE // namespace ncbi::objects::
END_NCBI_SCOPE
#endif // OBJECTS_CN3D_CN3D_BACKBONE_STYLE_HPP
/* Original file checksum: lines: 86, chars: 2588, CRC32: dfafc7fa */<|fim▁end|> | |
<|file_name|>skip.ts<|end_file_name|><|fim▁begin|>import { AsyncIterableX } from '../../asynciterable/asynciterablex';
import { SkipAsyncIterable } from '../../asynciterable/operators/skip';
/**
* @ignore
*/
export function skipProto<T>(this: AsyncIterableX<T>, count: number): AsyncIterableX<T> {
  // Wrap the source in a SkipAsyncIterable that lazily drops the first
  // `count` items; no iteration happens until the result is consumed.
  return new SkipAsyncIterable<T>(this, count);
}
AsyncIterableX.prototype.skip = skipProto;
<|fim▁hole|>}<|fim▁end|> | declare module '../../asynciterable/asynciterablex' {
interface AsyncIterableX<T> {
skip: typeof skipProto;
} |
<|file_name|>generate_stripmap_index.py<|end_file_name|><|fim▁begin|>#!python
# coding: utf-8
# edit by gistnu
# reference from lejedi76
# https://gis.stackexchange.com/questions/173127/generating-equal-sized-polygons-along-line-with-pyqgis
from qgis.core import QgsMapLayerRegistry, QgsGeometry, QgsField, QgsFeature, QgsPoint
from PyQt4.QtCore import QVariant
def getAllbbox(layer, width, height, srid, overlap):
for feature in layer.selectedFeatures():
geom = feature.geometry()
if geom.type() <> QGis.Line:
print "Geometry type should be a LineString"
return 2
bbox = QgsVectorLayer("Polygon?crs=epsg:"+str(srid),
layer.name()+'_id_'+str(feature.id()),
"memory")
gid = QgsField("gid", QVariant.Int, "int")
angle = QgsField("angle", QVariant.Double, "double")
attributes = [gid, angle]
bbox.startEditing()
bboxProvider = bbox.dataProvider()
bboxProvider.addAttributes(attributes)
curs = 0
numbbox = geom.length()/(width)
step = 1.0/numbbox
stepnudge = (1.0-overlap) * step
pageFeatures = []
r = 1
currangle = 0
while curs <= 1:
# print 'r =' + str(r)
# print 'curs = ' + str(curs)
startpoint = geom.interpolate(curs*geom.length())
endpoint = geom.interpolate((curs+step)*geom.length())
x_start = startpoint.asPoint().x()
y_start = startpoint.asPoint().y()
x_end = endpoint.asPoint().x()
y_end = endpoint.asPoint().y()
print 'x_start :' + str(x_start)
print 'y_start :' + str(y_start)
currline = QgsGeometry().fromWkt('LINESTRING({} {}, {} {})'.format(x_start, y_start, x_end, y_end))
currpoly = QgsGeometry().fromWkt(<|fim▁hole|>
currpoly.rotate(currangle, QgsPoint(0,0))
currpoly.translate(x_start, y_start)
currpoly.asPolygon()
page = currpoly
curs = curs + stepnudge
feat = QgsFeature()
feat.setAttributes([r, currangle])
feat.setGeometry(page)
pageFeatures.append(feat)
r = r + 1
bboxProvider.addFeatures(pageFeatures)
bbox.commitChanges()
QgsMapLayerRegistry.instance().addMapLayer(bbox)
return 0
layer = iface.activeLayer()
getAllbbox(layer, 100, 200, 32647, 0.2) #layer, width, height, crs, overlap<|fim▁end|> | 'POLYGON((0 0, 0 {height},{width} {height}, {width} 0, 0 0))'.format(height=height, width=width))
currpoly.translate(0,-height/2)
azimuth = startpoint.asPoint().azimuth(endpoint.asPoint())
currangle = (startpoint.asPoint().azimuth(endpoint.asPoint())+270)%360 |
<|file_name|>treemap.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Creates an html treemap of disk usage, using the Google Charts API
'''
import functools
import json
import os
import subprocess
import sys
def memoize(fn):
    """Decorator that caches ``fn``'s results keyed by its arguments.

    Repeated calls with the same (hashable) arguments return the cached
    value instead of recomputing — here it makes the recursive
    ``get_folder_size`` linear instead of re-walking subtrees.

    Improvements over the original:
    - ``functools.wraps`` preserves the wrapped function's name/docstring.
    - Keyword arguments now participate in the cache key (the original
      silently ignored them by accepting only ``*args``).
    """
    stored_results = {}

    @functools.wraps(fn)
    def memoized(*args, **kwargs):
        # Fold kwargs into a hashable, canonical key (sorted for stable order).
        key = (args, tuple(sorted(kwargs.items())))
        try:
            return stored_results[key]
        except KeyError:
            result = stored_results[key] = fn(*args, **kwargs)
            return result

    return memoized
@memoize
def get_folder_size(folder):
    """Total on-disk size of *folder* in bytes, counting the directory's own
    entry plus every file and subdirectory beneath it (recursive)."""
    size = os.path.getsize(folder)
    for name in os.listdir(folder):
        entry = os.path.join(folder, name)
        if os.path.isdir(entry):
            size += get_folder_size(entry)
        elif os.path.isfile(entry):
            size += os.path.getsize(entry)
    return size
def usage_iter(root):
    """Yield ``[label, parent_label, size]`` rows for *root* and every
    subdirectory under it, in the shape the Google TreeMap chart expects.

    Labels embed the size on a second line so node names stay unique.
    Directories that vanish or become unreadable mid-walk are skipped.
    """
    root = os.path.abspath(root)
    root_size = get_folder_size(root)
    yield ["{0}\n{1}".format(root, root_size), None, root_size]
    for parent, dirs, _files in os.walk(root):
        for name in dirs:
            child = os.path.join(parent, name)
            try:
                child_size = get_folder_size(child)
                parent_label = "{0}\n{1}".format(parent, get_folder_size(parent))
            except OSError:
                continue
            yield ["{0}\n{1}".format(child, child_size), parent_label, child_size]
def json_usage(root):
    """Return the disk-usage table for *root* as a JSON string.

    The first row is the TreeMap header ``['Path', 'Parent', 'Usage']``;
    the remaining rows come from :func:`usage_iter`.
    """
    rows = [['Path', 'Parent', 'Usage']]
    rows.extend(usage_iter(os.path.abspath(root)))
    return json.dumps(rows)
def main(args):
    '''Render an HTML treemap of disk usage under ``args[0]`` and print it
    to stdout.

    The data table now comes from json_usage()/get_folder_size(); an earlier
    revision used the Linux 'du' utility (see the commented-out du2json call
    below), which is why the old docstring mentioned it.
    '''
    # NOTE(review): the %s placeholder below is filled with a JSON array
    # literal, which is also valid JavaScript, so it can be inlined directly.
    html = '''
    <html>
    <head>
    <script type="text/javascript" src="https://www.google.com/jsapi"></script>
    <script type="text/javascript">
      google.load("visualization", "1", {packages:["treemap"]});
      google.setOnLoadCallback(drawChart);
      function drawChart() {
        // Create and populate the data table.
        var data = google.visualization.arrayToDataTable(%s);
        // Create and draw the visualization.
        var tree = new google.visualization.TreeMap(document.getElementById('chart_div'));
        tree.draw(data, { headerHeight: 15, fontColor: 'black' });
      }
    </script>
    </head>
    <body>
    <div id="chart_div" style="width: 900px; height: 500px;"></div>
    <p style="text-align: center">Click to descend. Right-click to ascend.</p>
    </body>
    </html>
    ''' % json_usage(args[0])
#    ''' % du2json(get_usage(args[0]))
    # Python 2 print statement — this script predates Python 3.
    print html
if __name__ == "__main__":<|fim▁hole|><|fim▁end|> | main(sys.argv[1:] or ['.']) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | """backtest.py, backunttest.py and coveragetest.py are all taken from coverage.py version 3.7.1""" |
<|file_name|>ID1server_request.hpp<|end_file_name|><|fim▁begin|>/* $Id$
* ===========================================================================
*
* PUBLIC DOMAIN NOTICE
* National Center for Biotechnology Information
*
* This software/database is a "United States Government Work" under the
* terms of the United States Copyright Act. It was written as part of
* the author's official duties as a United States Government employee and
* thus cannot be copyrighted. This software/database is freely available
* to the public for use. The National Library of Medicine and the U.S.
* Government have not placed any restriction on its use or reproduction.
*
* Although all reasonable efforts have been taken to ensure the accuracy
* and reliability of the software and data, the NLM and the U.S.
* Government do not and cannot warrant the performance or results that
* may be obtained by using this software or data. The NLM and the U.S.
* Government disclaim all warranties, express or implied, including
* warranties of performance, merchantability or fitness for any particular
* purpose.
*
* Please cite the author in any work or product based on this material.
*
* ===========================================================================
*
*/
/// @file ID1server_request.hpp
/// User-defined methods of the data storage class.
///
/// This file was originally generated by application DATATOOL
/// using the following specifications:
/// 'id1.asn'.
///
/// New methods or data members can be added to it if needed.
/// See also: ID1server_request_.hpp
#ifndef OBJECTS_ID1_ID1SERVER_REQUEST_HPP
#define OBJECTS_ID1_ID1SERVER_REQUEST_HPP
// generated includes
#include <objects/id1/ID1server_request_.hpp>
// generated classes
BEGIN_NCBI_SCOPE
BEGIN_objects_SCOPE // namespace ncbi::objects::
/////////////////////////////////////////////////////////////////////////////
class NCBI_ID1_EXPORT CID1server_request : public CID1server_request_Base
{
typedef CID1server_request_Base Tparent;
public:
// constructor
CID1server_request(void);
// destructor
~CID1server_request(void);
private:<|fim▁hole|>
};
/////////////////// CID1server_request inline methods
// constructor
inline
CID1server_request::CID1server_request(void)
{
}
/////////////////// end of CID1server_request inline methods
END_objects_SCOPE // namespace ncbi::objects::
END_NCBI_SCOPE
#endif // OBJECTS_ID1_ID1SERVER_REQUEST_HPP
/* Original file checksum: lines: 86, chars: 2544, CRC32: 2b307884 */<|fim▁end|> | // Prohibit copy constructor and assignment operator
CID1server_request(const CID1server_request& value);
CID1server_request& operator=(const CID1server_request& value); |
<|file_name|>server.cc<|end_file_name|><|fim▁begin|>#include "server.h"
// Render `num` as uppercase hexadecimal digits into `str` (no NUL terminator)
// and return the number of characters written.
//
// Bug fix: the original wrote a sign character into str[0] and then let the
// digit loop start writing at str[0] as well, so the sign was always
// overwritten — negative values silently serialized as their magnitude.
// Negative numbers now produce a leading '-' followed by the magnitude's
// digits (and the returned length includes the '-'). Non-negative output is
// unchanged: digits only, no '+' (matching what the original actually
// emitted), and 0 still yields the single character '0'.
int ConvertNumberToString(int num,char *str)
{
    if (num==0)
    {
        str[0]='0';
        return 1;
    }
    int offset=0;
    if (num<0)
    {
        str[0]='-';
        offset=1;
        // Note: -num is undefined for INT_MIN; callers pass sensor-scale
        // values that are well inside range.
        num=-num;
    }
    // Count the hex digits of the magnitude.
    int digits=0,tmp=num;
    while (tmp>0)
    {
        tmp=tmp/16;
        digits++;
    }
    // Emit digits most-significant first, after the optional sign.
    int i;
    for (i=digits-1;i>=0;i--)
    {
        int d=num%16;
        str[offset+i]=(d<10) ? (char)('0'+d) : (char)('A'+d-10);
        num=num/16;
    }
    return offset+digits;
}
// Serialize the current pose / landmark / scan state of `sp` into the
// caller-provided buffer `res` as an ASCII telegram, returning the number of
// bytes written. The layout (STX=0x02 / ETX=0x03 framing, "sMA"/"sAN"
// keywords, "mNPOSGetData" method name) looks like a SICK NAV-style CoLa-A
// reply — TODO confirm against the device's telegram listing.
//
// `mask` selects the payload sections (per the branches below):
//   '0' -> landmark data, no distance data
//   '2' -> landmark data and distance data
//   anything else (e.g. '1') -> distance data only
//
// NOTE(review): `res` is written without any bounds check — the caller must
// supply a buffer large enough for the worst case. The commented-out
// printf's are leftover debug traces (Croatian: "gotov" = "done",
// "slijedi" = "next follows").
int GetPoseData(ServerPacket *sp,char *res,char mask)
{
    int count=0,i;
    // First frame: acknowledge the command ("sMA mNPOSGetData").
    res[count++]=2;
    res[count++]='s';
    res[count++]='M';
    res[count++]='A';
    res[count++]=' ';
    res[count++]='m';
    res[count++]='N';
    res[count++]='P';
    res[count++]='O';
    res[count++]='S';
    res[count++]='G';
    res[count++]='e';
    res[count++]='t';
    res[count++]='D';
    res[count++]='a';
    res[count++]='t';
    res[count++]='a';
    res[count++]=3;
    // Second frame: the answer telegram ("sAN mNPOSGetData <fields...>").
    res[count++]=2;
    res[count++]='s';
    res[count++]='A';
    res[count++]='N';
    res[count++]=' ';
    res[count++]='m';
    res[count++]='N';
    res[count++]='P';
    res[count++]='O';
    res[count++]='S';
    res[count++]='G';
    res[count++]='e';
    res[count++]='t';
    res[count++]='D';
    res[count++]='a';
    res[count++]='t';
    res[count++]='a';
    res[count++]=' ';
    res[count++]='0'; //version
    res[count++]=' ';
    res[count++]='0'; //error
    res[count++]=' ';
    res[count++]='0'; //wait
    res[count++]=' ';
    res[count++]=mask; //mask
    res[count++]=' ';
    res[count++]='1'; //pose data follow
    res[count++]=' ';
    // Scratch buffer for ConvertNumberToString (hex digits, no terminator).
    char str[100];
    int size;
    // --- Pose section: x, y, phi, then optional extended pose fields. ---
    size=ConvertNumberToString(sp->m_.PoseData_.x,str);
    for (i=0;i<size;i++)
    {
        res[count++]=str[i];
    }
    res[count++]=' ';
    size=ConvertNumberToString(sp->m_.PoseData_.y,str);
    for (i=0;i<size;i++)
    {
        res[count++]=str[i];
    }
    res[count++]=' ';
    size=ConvertNumberToString(sp->m_.PoseData_.phi,str);
    for (i=0;i<size;i++)
    {
        res[count++]=str[i];
    }
    res[count++]=' ';
    size=ConvertNumberToString(sp->m_.PoseData_.optionalPoseData,str);
    for (i=0;i<size;i++)
    {
        res[count++]=str[i];
    }
    if (sp->m_.PoseData_.optionalPoseData==1)
    {
        // Extended pose block, present only when the flag above is 1.
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.PoseData_.outputMode,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.PoseData_.timeStamp,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.PoseData_.meanDeviation,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.PoseData_.positionMode,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.PoseData_.infoState,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.PoseData_.numUsedReflectors,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
    }
//    printf("gotov pose data (slijedi reflector data)\n");
    res[count++]=' ';
    int j;
    // --- Landmark (reflector) section: only for mask '0' or '2'. ---
    if (mask=='0' || mask=='2')
    {
        res[count++]='1'; //landmark data follow
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.ReflectorData_.filter,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.ReflectorData_.num_reflector,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
//        printf("ref count= %d\n",sp->m_.ReflectorData_.num_reflector);
        for (j=0;j<sp->m_.ReflectorData_.num_reflector;j++)
        {
            // Per-reflector: cart flag, x, y, polar flag, dist, phi, optional flag.
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.cart[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.x[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.y[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.polar[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.dist[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.phi[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            res[count++]=' ';
            size=ConvertNumberToString(sp->m_.ReflectorData_.optional[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
            if (sp->m_.ReflectorData_.optional[j]==1)
            {
                // Optional per-reflector block (IDs, quality, geometry indices).
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.LocalID[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.GlobalID[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.type[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.subtype[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.quality[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.timestamp[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.size[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.hitCount[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.meanEchoAmplitude[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                // NOTE(review): meanEchoAmplitude is emitted twice in a row —
                // one of these is probably meant to be a different field;
                // confirm against the telegram specification.
                size=ConvertNumberToString(sp->m_.ReflectorData_.meanEchoAmplitude[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.indexStart[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
                res[count++]=' ';
                size=ConvertNumberToString(sp->m_.ReflectorData_.indexEnd[j],str);
                for (i=0;i<size;i++)
                {
                    res[count++]=str[i];
                }
            }
        }
    }
    else
    {
        res[count++]='0';
    }
//    printf("gotov landmark data (slijedi distance data)\n");
    res[count++]=' ';
    // --- Distance (scan) section: omitted only when mask is '0'. ---
    if (mask=='0')
    {
        res[count++]='0';
    }
    else
    {
        res[count++]='1';
        res[count++]=' ';
        // Channel label "DIST1" followed by scale multiplier and offset.
        res[count++]='D';
        res[count++]='I';
        res[count++]='S';
        res[count++]='T';
        res[count++]='1';
        res[count++]=' '; //multiplier
        res[count++]='1';
        res[count++]=' ';
        res[count++]='0'; //offset
        res[count++]=' ';
        // Angles are scaled by 1000 before serialization (fixed-point).
        size=ConvertNumberToString((int) (sp->m_.start_angle*1000),str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
//        printf("gotov start %d\n",count);
        res[count++]=' ';
//        printf("%d\n",(int)(sp->m_.step_angle*1000));
        size=ConvertNumberToString((int)(sp->m_.step_angle*1000),str);
//        printf("%d\n",size);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
//        printf("gotov step\n");
        res[count++]=' ';
        size=ConvertNumberToString((int) (sp->m_.timestamp),str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
//        printf("gotov timestamp\n");
        res[count++]=' ';
        size=ConvertNumberToString(sp->m_.meas_num,str);
        for (i=0;i<size;i++)
        {
            res[count++]=str[i];
        }
//        printf("dist count=%d\n",sp->m_.meas_num);
        for (j=0;j<sp->m_.meas_num;j++)
        {
            res[count++]=' ';
            size=ConvertNumberToString((int) sp->m_.distance[j],str);
            for (i=0;i<size;i++)
            {
                res[count++]=str[i];
            }
        }
        res[count++]=' ';
        res[count++]='0'; //remission
        // NOTE(review): dead debug loop — its body is commented out, so it
        // does nothing; kept as-is (behavior-preserving review).
        for (i=0;i<count;i++)
        {
//            printf("%c",res[i]);
        }
//        printf("\n");
    }
    // Close the answer frame with ETX and report the total telegram length.
    res[count++]=3;
    return count;
}
/* Local helper macros: NAV_PUT_NUM converts one numeric field with
   ConvertNumberToString() and appends the produced characters to res;
   NAV_PUT_STR appends a literal string.  Both rely on the enclosing
   locals str/size/res/count/i and are #undef'ed right after the
   function so they cannot leak into the rest of the file. */
#define NAV_PUT_NUM(val) \
    do { size = ConvertNumberToString((val), str); \
         for (i = 0; i < size; i++) res[count++] = str[i]; } while (0)
#define NAV_PUT_STR(s) \
    do { const char *p_ = (s); while (*p_) res[count++] = *p_++; } while (0)

/* Builds the reply to "sMN mNMAPDoMapping": an acknowledge telegram
   ("sMA ...") followed by the answer telegram ("sAN ...") carrying the
   mapped reflector data and the raw distance scan.
   Telegram framing: 0x02 (STX) ... 0x03 (ETX).
   Returns the number of bytes written into res. */
int DoMapping(ServerPacket *sp, char *res)
{
    char str[100];
    int size;
    int count = 0, i, j;

    res[count++] = 2;                        /* STX */
    NAV_PUT_STR("sMA mNMAPDoMapping");       /* acknowledge */
    res[count++] = 3;                        /* ETX */

    res[count++] = 2;
    /* error = 0, landmark data follow = 1 */
    NAV_PUT_STR("sAN mNMAPDoMapping 0 1 ");
    NAV_PUT_NUM(sp->m_.ReflectorData_.filter);
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.ReflectorData_.num_reflector);

    for (j = 0; j < sp->m_.ReflectorData_.num_reflector; j++) {
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.cart[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.x[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.y[j]);
        NAV_PUT_STR(" 0 ");                  /* no polar data follow */
        NAV_PUT_NUM(sp->m_.ReflectorData_.optional[j]);
        if (sp->m_.ReflectorData_.optional[j] == 1) {
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.LocalID[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.GlobalID[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.type[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.subtype[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.quality[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.timestamp[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.size[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.hitCount[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.meanEchoAmplitude[j]);
            res[count++] = ' ';
            /* NOTE(review): meanEchoAmplitude is emitted twice here and in
               the other telegram builders of this file; kept as-is to
               preserve the wire format -- confirm against the client's
               expected field list. */
            NAV_PUT_NUM(sp->m_.ReflectorData_.meanEchoAmplitude[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.indexStart[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.indexEnd[j]);
        }
    }

    /* scan data block: follows = 1, content "DIST1", multiplier 1, offset 0 */
    NAV_PUT_STR(" 1 DIST1 1 0 ");
    NAV_PUT_NUM((int) (sp->m_.start_angle * 1000));
    res[count++] = ' ';
    NAV_PUT_NUM((int) (sp->m_.step_angle * 1000));
    res[count++] = ' ';
    NAV_PUT_NUM((int) (sp->m_.timestamp));
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.meas_num);
    for (j = 0; j < sp->m_.meas_num; j++) {
        res[count++] = ' ';
        NAV_PUT_NUM((int) sp->m_.distance[j]);
    }
    NAV_PUT_STR(" 0");                       /* no remission data */
    res[count++] = 3;                        /* ETX */
    return count;
}
#undef NAV_PUT_NUM
#undef NAV_PUT_STR
/* Local helper macros (see DoMapping): append a converted number or a
   literal string to res; #undef'ed immediately after this function. */
#define NAV_PUT_NUM(val) \
    do { size = ConvertNumberToString((val), str); \
         for (i = 0; i < size; i++) res[count++] = str[i]; } while (0)
#define NAV_PUT_STR(s) \
    do { const char *p_ = (s); while (*p_) res[count++] = *p_++; } while (0)

/* Builds the reply to "sMN mNPOSGetPose": acknowledge telegram plus the
   answer telegram with the current pose (x, y, phi) and, when
   optionalPoseData == 1, the extended pose information.
   Telegram framing: 0x02 (STX) ... 0x03 (ETX).
   Returns the number of bytes written into res. */
int GetPose(ServerPacket *sp, char *res)
{
    char str[100];
    int size;
    int count = 0, i;

    res[count++] = 2;                        /* STX */
    NAV_PUT_STR("sMA mNPOSGetPose");         /* acknowledge */
    res[count++] = 3;                        /* ETX */

    res[count++] = 2;
    /* version 0, error 0, wait 0, pose data follow = 1 */
    NAV_PUT_STR("sAN mNPOSGetPose 0 0 0 1 ");
    NAV_PUT_NUM(sp->m_.PoseData_.x);
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.PoseData_.y);
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.PoseData_.phi);
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.PoseData_.optionalPoseData);
    if (sp->m_.PoseData_.optionalPoseData == 1) {
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.PoseData_.outputMode);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.PoseData_.timeStamp);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.PoseData_.meanDeviation);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.PoseData_.positionMode);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.PoseData_.infoState);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.PoseData_.numUsedReflectors);
    }
    res[count++] = 3;                        /* ETX */
    return count;
}
#undef NAV_PUT_NUM
#undef NAV_PUT_STR
/* Local helper macros (see DoMapping): append a converted number or a
   literal string to res; #undef'ed immediately after this function. */
#define NAV_PUT_NUM(val) \
    do { size = ConvertNumberToString((val), str); \
         for (i = 0; i < size; i++) res[count++] = str[i]; } while (0)
#define NAV_PUT_STR(s) \
    do { const char *p_ = (s); while (*p_) res[count++] = *p_++; } while (0)

/* Builds the reply to "sMN mNLMDGetData": acknowledge telegram plus the
   answer telegram carrying landmark (reflector) data -- including polar
   coordinates -- and the raw distance scan.
   Telegram framing: 0x02 (STX) ... 0x03 (ETX).
   Returns the number of bytes written into res. */
int GetLandmarkData(ServerPacket *sp, char *res)
{
    char str[100];
    int size;
    int count = 0, i, j;

    res[count++] = 2;                        /* STX */
    NAV_PUT_STR("sMA mNLMDGetData");         /* acknowledge */
    res[count++] = 3;                        /* ETX */

    res[count++] = 2;
    /* version 0, error 0, wait 0, mask 1, landmark data follow = 1 */
    NAV_PUT_STR("sAN mNLMDGetData 0 0 0 1 1 ");
    NAV_PUT_NUM(sp->m_.ReflectorData_.filter);
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.ReflectorData_.num_reflector);

    for (j = 0; j < sp->m_.ReflectorData_.num_reflector; j++) {
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.cart[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.x[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.y[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.polar[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.dist[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.phi[j]);
        res[count++] = ' ';
        NAV_PUT_NUM(sp->m_.ReflectorData_.optional[j]);
        if (sp->m_.ReflectorData_.optional[j] == 1) {
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.LocalID[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.GlobalID[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.type[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.subtype[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.quality[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.timestamp[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.size[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.hitCount[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.meanEchoAmplitude[j]);
            res[count++] = ' ';
            /* NOTE(review): meanEchoAmplitude is emitted twice in every
               telegram builder of this file; kept as-is to preserve the
               wire format -- confirm against the client's field list. */
            NAV_PUT_NUM(sp->m_.ReflectorData_.meanEchoAmplitude[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.indexStart[j]);
            res[count++] = ' ';
            NAV_PUT_NUM(sp->m_.ReflectorData_.indexEnd[j]);
        }
    }

    /* scan data block: follows = 1, content "DIST1", multiplier 1, offset 0 */
    NAV_PUT_STR(" 1 DIST1 1 0 ");
    NAV_PUT_NUM((int) (sp->m_.start_angle * 1000));
    res[count++] = ' ';
    NAV_PUT_NUM((int) (sp->m_.step_angle * 1000));
    res[count++] = ' ';
    /* BUGFIX: the copy loop that appends the converted timestamp digits was
       missing (clobbered by stray dataset-marker text), so the timestamp was
       converted but never written and the file did not compile. */
    NAV_PUT_NUM((int) (sp->m_.timestamp));
    res[count++] = ' ';
    NAV_PUT_NUM(sp->m_.meas_num);
    for (j = 0; j < sp->m_.meas_num; j++) {
        res[count++] = ' ';
        NAV_PUT_NUM((int) sp->m_.distance[j]);
    }
    NAV_PUT_STR(" 0");                       /* no remission data */
    res[count++] = 3;                        /* ETX */
    return count;
}
#undef NAV_PUT_NUM
#undef NAV_PUT_STR
/* Builds the reply to "sMN mNEVAChangeState": acknowledge telegram plus
   the answer telegram echoing the requested state character c.
   Layout: STX "sMA mNEVAChangeState" ETX
           STX "sAN mNEVAChangeState 0 <c>" ETX
   Returns the number of bytes written into res (always 48). */
int ChangeState(char *res, char c)
{
    const char name[] = "mNEVAChangeState";  /* shared command name */
    int count = 0, i;

    res[count++] = 2;                        /* STX */
    res[count++] = 's';
    res[count++] = 'M';
    res[count++] = 'A';
    res[count++] = ' ';
    for (i = 0; name[i] != '\0'; i++)
        res[count++] = name[i];
    res[count++] = 3;                        /* ETX */

    res[count++] = 2;                        /* STX */
    res[count++] = 's';
    res[count++] = 'A';
    res[count++] = 'N';
    res[count++] = ' ';
    for (i = 0; name[i] != '\0'; i++)
        res[count++] = name[i];
    res[count++] = ' ';
    res[count++] = '0';                      /* error code: none */
    res[count++] = ' ';
    res[count++] = c;                        /* echoed state */
    res[count++] = 3;                        /* ETX */
    return count;
}
int CheckRequest(char* buffer,int count,ServerPacket *sp,char *res,int &last)
{
int i,t=0;
int poc=0;
int kr=-1;
char *mes;
int mescount=0;
int sval;
int resp_size=0;
for (i=0;i<count;i++)
{
if (buffer[i]==2 && t==0)
{
poc=i;
t=1;
}
if (buffer[i]==3 && t==1)
{
kr=i;
t=2;
break;
}
}
// printf("t=%d\n",t);
if (t==2)
{
last=kr+1;
mes=buffer+poc;
mescount=kr-poc+1;
// if (mescount>17)
// {
if (strncmp(&mes[1],"sMN mNLMDGetData",16)==0)
{
printf("get landmark data\n");
sem_wait(&(sp->sem1_));
resp_size=GetLandmarkData(sp,res);
sem_post(&(sp->sem1_));
return resp_size;
}
else if (strncmp(&mes[1],"sMN mNPOSGetData",16)==0)
{
printf("get pose data\n");
sem_wait(&(sp->sem1_));
resp_size=GetPoseData(sp,res,mes[mescount-2]);
sem_post(&(sp->sem1_));
return resp_size;
}
else if (strncmp(&mes[1],"SMN MNPOSGetPose",16)==0)
{
printf("get pose\n");
sem_wait(&(sp->sem1_));
resp_size=GetPose(sp,res);
sem_post(&(sp->sem1_));
return resp_size;
}
else if (strncmp(&mes[1],"SMN MNMAPDoMapping",17)==0)
{
}
else if (strncmp(&mes[1],"sMN mNEVAChangeState",17)==0)
{
printf("change state %c\n",mes[mescount-2]);
resp_size=ChangeState(res,mes[mescount-2]);
return resp_size;
}
else
{
sem_wait(&(sp->sem2_));
for (i=0;i<mescount;i++)
{
sp->req[i]=mes[i];
}
sp->req_size=mescount;
sp->request=1;
sem_post(&(sp->sem2_));
// printf("test\n");
while (1)
{
// printf("request=%d\n",sp->request);
sem_wait(&(sp->sem3_));
if (sp->request==0)
{
// printf("request=%d\n",sp->request);
// printf("%d\n",sp->resp_size);
// sem_wait(&(sp->sem3_));
for (i=0;i<sp->resp_size;i++)
{
res[i]=sp->resp[i];
// printf ("%c %d\n",res[i],i);
}
// printf("\n");
resp_size=sp->resp_size;
sem_post(&(sp->sem3_));
return resp_size;
}
sem_post(&(sp->sem3_));
for (i=0;i<10000;i++)
{
}
}
// }
}
}
else
{
last=0;
return 0;
}
}
void *Server(void *arg)
{
ServerPacket *sp=(ServerPacket *) arg;
/* while (1)
{
sem_wait(&(sp->sem1_));
printf("ja1\n");
sem_post(&(sp->sem1_));
}
*/
int sockfd=sp->sockfd;
struct sockaddr_in self;
char buffer[MAXBUF];
char buffer1[1000];
char res[20000];
/*---Create streaming socket---*/
/* if ( (sockfd = socket(AF_INET, SOCK_STREAM, 0)) < 0 )
{
perror("Socket");
exit(errno);
}
/*---Initialize address/port structure---*/
/* bzero(&self, sizeof(self));
self.sin_family = AF_INET;
self.sin_port = htons(MY_PORT);
self.sin_addr.s_addr = INADDR_ANY;
/*---Assign a port number to the socket---*/
/* if ( bind(sockfd, (struct sockaddr*)&self, sizeof(self)) != 0 )
{
perror("socket--bind");
exit(errno);
}
/*---Make it a "listening socket"---*/
/* if ( listen(sockfd, 20) != 0 )
{
perror("socket--listen");
exit(errno);
}
/*---Forever... ---*/
int count=0,count1=0,count2=0,countres=0;
int i,last;
int clientfd;
struct sockaddr_in client_addr;
int addrlen=sizeof(client_addr);
while (1)
{
/*---accept a connection (creating a data pipe)---*/
clientfd = accept(sockfd, (struct sockaddr*)&client_addr, (socklen_t*) &addrlen);
sem_wait(&(sp->sem4_));
sp->accepted=1;
sem_post(&(sp->sem4_));
printf("%s:%d connected\n", inet_ntoa(client_addr.sin_addr), ntohs(client_addr.sin_port));
while (1)
{
/*---Echo back anything sent---*/
/*send(clientfd, buffer, */count=recv(clientfd, buffer, MAXBUF, MSG_DONTWAIT);//, 0);
for (i=0;i<count;i++)
{
// printf("%d %c\n",clientfd,buffer[i]);
}
if (count>0)
{
// printf("duljina klijentske poruke = %d\n",count);
for (i=0;i<count;i++)
{
buffer1[count1]= buffer[i];
count1++;
}
countres=CheckRequest(buffer1,count1,sp,res,last);
if (last>0){
for (int i=last;i<count1;i++)
{
buffer1[i-last]= buffer1[i];
}
count1=count1-last;
}
// printf("countres=%d\n",countres);
}
if ( countres>0)
{
// printf("clientfd=%d\n",clientfd);
send(clientfd,res,countres,0);
// printf("poslana poruka duljine %d\n",countres);
countres=0;
}
usleep(5000);
/* for (i=0;i<10000;i++)
{
}*/
}
/*---Close data connection---*/
close(clientfd);
}
/*---Clean up (should never get here!)---*/
close(sockfd);
}<|fim▁end|> | for (i=0;i<size;i++)
{
res[count++]=str[i];
} |
<|file_name|>context.py<|end_file_name|><|fim▁begin|>"""Askbot template context processor that makes some parameters
from the django settings, all parameters from the askbot livesettings
and the application available for the templates
"""
from django.conf import settings
import askbot
from askbot import api
from askbot.conf import settings as askbot_settings
from askbot.skins.loaders import get_skin
from askbot.utils import url_utils
def application_settings(request):
    """Django context processor for askbot templates.

    Exposes all askbot livesettings plus a few selected django settings
    under the ``settings`` template variable, along with the active skin
    and the moderation queue summary for the current user.

    BUGFIX: the function body had been scrambled by stray dataset-marker
    text, leaving several ``my_settings`` assignments after the ``return``
    statement (dead code); they are restored to run before it.
    """
    my_settings = askbot_settings.as_dict()
    my_settings['LANGUAGE_CODE'] = settings.LANGUAGE_CODE
    my_settings['ASKBOT_URL'] = settings.ASKBOT_URL
    my_settings['DEBUG'] = settings.DEBUG
    my_settings['ASKBOT_VERSION'] = askbot.get_version()
    my_settings['LOGIN_URL'] = url_utils.get_login_url()
    my_settings['LOGOUT_URL'] = url_utils.get_logout_url()
    my_settings['LOGOUT_REDIRECT_URL'] = url_utils.get_logout_redirect_url()
    return {
        'settings': my_settings,
        'skin': get_skin(request),
        'moderation_items': api.get_info_on_moderation_items(request.user)
    }
<|file_name|>test_guest_vlan_range.py<|end_file_name|><|fim▁begin|>""" P1 tests for Dedicating Guest Vlan Ranges
"""
# Import Local Modules
from marvin.cloudstackAPI import *
from marvin.cloudstackTestCase import *
from marvin.lib.base import *
from marvin.lib.common import *
from marvin.lib.utils import *
from nose.plugins.attrib import attr
class TestDedicateGuestVlanRange(cloudstackTestCase):
    """P1 tests for dedicating a guest VLAN range to an account and
    releasing it back to the system."""

    @classmethod
    def setUpClass(cls):
        testClient = super(TestDedicateGuestVlanRange, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()

        # Get Zone and Domain
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())

        # Create the account the VLAN range will be dedicated to
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            domainid=cls.domain.id
        )
        cls._cleanup = [
            cls.account,
        ]
        # Remember the physical network and a currently-free VLAN range
        cls.physical_network, cls.free_vlan = setNonContiguousVlanIds(cls.apiclient, cls.zone.id)
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Restore the physical network's original VLAN range, then
            # clean up the resources created for the test.
            # BUGFIX: the assignment line with its backslash continuation had
            # been clobbered by stray dataset-marker text, breaking the file.
            removeGuestVlanRangeResponse = \
                cls.physical_network.update(cls.apiclient,
                                            id=cls.physical_network.id,
                                            vlan=cls.physical_network.vlan)
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up resources created by the individual test
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "guestvlanrange", "dedicate", "release"], required_hardware="false")
    def test_dedicateGuestVlanRange(self):
        """Test guest vlan range dedication

        Assumes a physical network is available.

        Validates the following:
        1. List the available physical network using ListPhysicalNetwork
        2. Add a guest VLAN range to the physical network via UpdatePhysicalNetwork
        3. Dedicate the created guest VLAN range to the user account
        4. Verify the range is dedicated with listDedicatedGuestVlanRanges
        5. Release the dedicated guest VLAN range back to the system
        6. Verify the release with listDedicatedGuestVlanRanges
        7. Remove the added guest VLAN range via UpdatePhysicalNetwork
        """
        self.debug("Adding guest vlan range")
        new_vlan = self.physical_network.vlan + "," + self.free_vlan["partial_range"][0]
        addGuestVlanRangeResponse = self.physical_network.update(self.apiclient,
                                                                 id=self.physical_network.id, vlan=new_vlan)

        self.debug("Dedicating guest vlan range");
        dedicate_guest_vlan_range_response = PhysicalNetwork.dedicate(
            self.apiclient,
            self.free_vlan["partial_range"][0],
            physicalnetworkid=self.physical_network.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        list_dedicated_guest_vlan_range_response = PhysicalNetwork.listDedicated(
            self.apiclient,
            id=dedicate_guest_vlan_range_response.id
        )
        dedicated_guest_vlan_response = list_dedicated_guest_vlan_range_response[0]
        self.assertEqual(
            dedicated_guest_vlan_response.account,
            self.account.name,
            "Check account name is in listDedicatedGuestVlanRanges as the account the range is dedicated to"
        )

        self.debug("Releasing guest vlan range");
        dedicate_guest_vlan_range_response.release(self.apiclient)
        list_dedicated_guest_vlan_range_response = PhysicalNetwork.listDedicated(self.apiclient)
        self.assertEqual(
            list_dedicated_guest_vlan_range_response,
            None,
            "Check vlan range is not available in listDedicatedGuestVlanRanges"
        )
<|file_name|>search.py<|end_file_name|><|fim▁begin|>"""
Copyright (C) 2017 João Barroca <[email protected]>
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Importing the libraries
from domainIndependent import *
from operator import itemgetter
import itertools


# General graph search: the goal test is applied only when a leaf node is
# chosen for expansion (not when it is generated).
def gs(problem, strategy):
    """Run general graph search on ``problem`` using ``strategy``.

    Returns a 4-tuple ``(solution, iterations, frontier_size, generated)``
    where ``solution`` is ``execute(goal_node)`` on success or ``None`` on
    failure (empty frontier).

    BUGFIX: stray dataset-marker text that had been appended to the last
    statement (breaking the file) was removed; the logic is unchanged.
    """
    node = {'state': problem.initialState, 'parent': [], 'actions': [], 'g': 0, 'f': 0}
    frontier = [node]
    exploredSet = []
    iterCounter = itertools.count(start=0)
    nodesCounter = itertools.count(start=1)
    iteration = 0
    generatedNodes = 1
    while True:
        # no more nodes to explore and no solution found -> failure
        if not frontier:
            iteration = next(iterCounter)
            return None, iteration, len(frontier), generatedNodes
        # choose the node with the lowest cost: sort descending by f and take
        # the last element, so ties go to the most recently appended node
        sortedFrontier = sorted(frontier, key=itemgetter('f'), reverse=True)
        node = sortedFrontier[-1]
        # and remove it from the frontier
        frontier.remove(node)
        # goal test happens on the chosen node, before expansion
        if problem.goalState(node) is True:
            iteration = next(iterCounter)
            return execute(node), iteration, len(frontier), generatedNodes
        iteration = next(iterCounter)
        # mark the node as explored, then expand it
        exploredSet.append(node)
        childNodes = childNodesGetter(problem, node, strategy)
        for child in childNodes:
            generatedNodes = next(nodesCounter)
            # skip children already explored or already queued
            if not inExploredList(problem, child, exploredSet) and not inFrontier(problem, child, frontier):
                frontier.append(child)
<|file_name|>queryservice.go<|end_file_name|><|fim▁begin|>// Copyright 2015, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package queryservice contains the interface for the service definition
// of the Query Service.
package queryservice
import (
"io"
"golang.org/x/net/context"
"github.com/youtube/vitess/go/sqltypes"
"github.com/youtube/vitess/go/vt/tabletserver/querytypes"
querypb "github.com/youtube/vitess/go/vt/proto/query"
)
// QueryService is the interface implemented by the tablet's query service.
// All streaming methods accept a callback function that will be called for
// each response. If the callback returns an error, that error is returned
// back by the function, except in the case of io.EOF in which case the stream
// will be terminated with no error. Streams can also be terminated by canceling
// the context.
// This API is common for both server and client implementations. All functions
// must be safe to be called concurrently.
type QueryService interface {
	// Transaction management

	// Begin returns the transaction id to use for further operations
	Begin(ctx context.Context, target *querypb.Target) (int64, error)

	// Commit commits the current transaction
	Commit(ctx context.Context, target *querypb.Target, transactionID int64) error

	// Rollback aborts the current transaction
	Rollback(ctx context.Context, target *querypb.Target, transactionID int64) error

	// Prepare prepares the specified transaction.
	Prepare(ctx context.Context, target *querypb.Target, transactionID int64, dtid string) (err error)

	// CommitPrepared commits the prepared transaction.
	CommitPrepared(ctx context.Context, target *querypb.Target, dtid string) (err error)

	// RollbackPrepared rolls back the prepared transaction.
	RollbackPrepared(ctx context.Context, target *querypb.Target, dtid string, originalID int64) (err error)

	// CreateTransaction creates the metadata for a 2PC transaction.
	CreateTransaction(ctx context.Context, target *querypb.Target, dtid string, participants []*querypb.Target) (err error)

	// StartCommit atomically commits the transaction along with the
	// decision to commit the associated 2pc transaction.
	StartCommit(ctx context.Context, target *querypb.Target, transactionID int64, dtid string) (err error)

	// SetRollback transitions the 2pc transaction to the Rollback state.
	// If a transaction id is provided, that transaction is also rolled back.
	SetRollback(ctx context.Context, target *querypb.Target, dtid string, transactionID int64) (err error)

	// ConcludeTransaction deletes the 2pc transaction metadata
	// essentially resolving it.
	ConcludeTransaction(ctx context.Context, target *querypb.Target, dtid string) (err error)

	// ReadTransaction returns the metadata for the specified dtid.
	ReadTransaction(ctx context.Context, target *querypb.Target, dtid string) (metadata *querypb.TransactionMetadata, err error)

	// Query execution
	Execute(ctx context.Context, target *querypb.Target, sql string, bindVariables map[string]interface{}, transactionID int64, options *querypb.ExecuteOptions) (*sqltypes.Result, error)
	StreamExecute(ctx context.Context, target *querypb.Target, sql string, bindVariables map[string]interface{}, options *querypb.ExecuteOptions, callback func(*sqltypes.Result) error) error
	ExecuteBatch(ctx context.Context, target *querypb.Target, queries []querytypes.BoundQuery, asTransaction bool, transactionID int64, options *querypb.ExecuteOptions) ([]sqltypes.Result, error)

	// Combo methods, they also return the transactionID from the
	// Begin part. If err != nil, the transactionID may still be
	// non-zero, and needs to be propagated back (like for a DB
	// Integrity Error)
	BeginExecute(ctx context.Context, target *querypb.Target, sql string, bindVariables map[string]interface{}, options *querypb.ExecuteOptions) (*sqltypes.Result, int64, error)
	BeginExecuteBatch(ctx context.Context, target *querypb.Target, queries []querytypes.BoundQuery, asTransaction bool, options *querypb.ExecuteOptions) ([]sqltypes.Result, int64, error)

	// Messaging methods.
	MessageStream(ctx context.Context, target *querypb.Target, name string, callback func(*sqltypes.Result) error) error
	MessageAck(ctx context.Context, target *querypb.Target, name string, ids []*querypb.Value) (count int64, err error)

	// SplitQuery is a MapReduce helper function
	// This version of SplitQuery supports multiple algorithms and multiple split columns.
	// See the documentation of SplitQueryRequest in 'proto/vtgate.proto' for more information.
	SplitQuery(ctx context.Context, target *querypb.Target, query querytypes.BoundQuery, splitColumns []string, splitCount int64, numRowsPerQueryPart int64, algorithm querypb.SplitQueryRequest_Algorithm) ([]querytypes.QuerySplit, error)

	// UpdateStream streams updates from the provided position or timestamp.
	UpdateStream(ctx context.Context, target *querypb.Target, position string, timestamp int64, callback func(*querypb.StreamEvent) error) error

	// StreamHealth streams health status.
	StreamHealth(ctx context.Context, callback func(*querypb.StreamHealthResponse) error) error

	// HandlePanic will be called if any of the functions panic.
	HandlePanic(err *error)

	// Close must be called for releasing resources.
	Close(ctx context.Context) error
}
// resultStreamer adapts a push-style StreamExecute callback into a
// pull-style sqltypes.ResultStream (see ExecuteWithStreamer).
type resultStreamer struct {
	// done is closed when the producing goroutine finishes; after that,
	// err holds the terminal error (io.EOF on clean completion).
	done chan struct{}
	ch   chan *sqltypes.Result
	err  error
}
// Recv implements sqltypes.ResultStream. It blocks until the next result
// arrives from the producing goroutine, or until that goroutine finishes,
// in which case the terminal error (io.EOF on clean completion) is returned.
func (rs *resultStreamer) Recv() (*sqltypes.Result, error) {
	select {
	case result := <-rs.ch:
		return result, nil
	case <-rs.done:
		return nil, rs.err
	}
}
// ExecuteWithStreamer performs a StreamExecute, but returns a *sqltypes.ResultStream to iterate on.
// This function should only be used for legacy code. New usage should directly use StreamExecute.
func ExecuteWithStreamer(ctx context.Context, conn QueryService, target *querypb.Target, sql string, bindVariables map[string]interface{}, options *querypb.ExecuteOptions) sqltypes.ResultStream {
rs := &resultStreamer{
done: make(chan struct{}),
ch: make(chan *sqltypes.Result),
}
go func() {
defer close(rs.done)
rs.err = conn.StreamExecute(ctx, target, sql, bindVariables, options, func(qr *sqltypes.Result) error {
select {
case <-ctx.Done():
return io.EOF
case rs.ch <- qr:
}
return nil
})
if rs.err == nil {
rs.err = io.EOF
}
}()
return rs
}<|fim▁end|> | |
<|file_name|>JSONArray.hpp<|end_file_name|><|fim▁begin|>/*================================================================================
code generated by: java2cpp
author: Zoran Angelov, mailto://[email protected]
class: org.json.JSONArray
<|fim▁hole|>#ifndef J2CPP_INCLUDE_IMPLEMENTATION
#ifndef J2CPP_ORG_JSON_JSONARRAY_HPP_DECL
#define J2CPP_ORG_JSON_JSONARRAY_HPP_DECL
namespace j2cpp { namespace java { namespace lang { class String; } } }
namespace j2cpp { namespace java { namespace lang { class Object; } } }
namespace j2cpp { namespace java { namespace util { class Collection; } } }
namespace j2cpp { namespace org { namespace json { class JSONObject; } } }
namespace j2cpp { namespace org { namespace json { class JSONTokener; } } }
#include <java/lang/Object.hpp>
#include <java/lang/String.hpp>
#include <java/util/Collection.hpp>
#include <org/json/JSONObject.hpp>
#include <org/json/JSONTokener.hpp>
namespace j2cpp {
namespace org { namespace json {
class JSONArray;
// Generated J2CPP binding: wraps a JNI jobject referencing org.json.JSONArray
// and exposes its Java API as C++ methods. Each call is dispatched through the
// method table declared below; index N corresponds to the
// J2CPP_DEFINE_METHOD(...,N,...) entry in the implementation section.
class JSONArray
	: public object<JSONArray>
{
public:

	J2CPP_DECLARE_CLASS

	J2CPP_DECLARE_METHOD(0)
	J2CPP_DECLARE_METHOD(1)
	J2CPP_DECLARE_METHOD(2)
	J2CPP_DECLARE_METHOD(3)
	J2CPP_DECLARE_METHOD(4)
	J2CPP_DECLARE_METHOD(5)
	J2CPP_DECLARE_METHOD(6)
	J2CPP_DECLARE_METHOD(7)
	J2CPP_DECLARE_METHOD(8)
	J2CPP_DECLARE_METHOD(9)
	J2CPP_DECLARE_METHOD(10)
	J2CPP_DECLARE_METHOD(11)
	J2CPP_DECLARE_METHOD(12)
	J2CPP_DECLARE_METHOD(13)
	J2CPP_DECLARE_METHOD(14)
	J2CPP_DECLARE_METHOD(15)
	J2CPP_DECLARE_METHOD(16)
	J2CPP_DECLARE_METHOD(17)
	J2CPP_DECLARE_METHOD(18)
	J2CPP_DECLARE_METHOD(19)
	J2CPP_DECLARE_METHOD(20)
	J2CPP_DECLARE_METHOD(21)
	J2CPP_DECLARE_METHOD(22)
	J2CPP_DECLARE_METHOD(23)
	J2CPP_DECLARE_METHOD(24)
	J2CPP_DECLARE_METHOD(25)
	J2CPP_DECLARE_METHOD(26)
	J2CPP_DECLARE_METHOD(27)
	J2CPP_DECLARE_METHOD(28)
	J2CPP_DECLARE_METHOD(29)
	J2CPP_DECLARE_METHOD(30)
	J2CPP_DECLARE_METHOD(31)
	J2CPP_DECLARE_METHOD(32)
	J2CPP_DECLARE_METHOD(33)
	J2CPP_DECLARE_METHOD(34)
	J2CPP_DECLARE_METHOD(35)
	J2CPP_DECLARE_METHOD(36)
	J2CPP_DECLARE_METHOD(37)
	J2CPP_DECLARE_METHOD(38)
	J2CPP_DECLARE_METHOD(39)
	J2CPP_DECLARE_METHOD(40)
	J2CPP_DECLARE_METHOD(41)
	J2CPP_DECLARE_METHOD(42)

	// Wrap an existing JNI reference as a JSONArray handle.
	explicit JSONArray(jobject jobj)
	: object<JSONArray>(jobj)
	{
	}

	// Upcast to java.lang.Object (shares the same underlying jobject).
	operator local_ref<java::lang::Object>() const;

	// Constructors mirroring org.json.JSONArray's Java constructors.
	JSONArray();
	JSONArray(local_ref< java::util::Collection > const&);
	JSONArray(local_ref< org::json::JSONTokener > const&);
	JSONArray(local_ref< java::lang::String > const&);

	jint length();

	// put(value): append; put(index, value): set at index (Java semantics).
	local_ref< org::json::JSONArray > put(jboolean);
	local_ref< org::json::JSONArray > put(jdouble);
	local_ref< org::json::JSONArray > put(jint);
	local_ref< org::json::JSONArray > put(jlong);
	local_ref< org::json::JSONArray > put(local_ref< java::lang::Object > const&);
	local_ref< org::json::JSONArray > put(jint, jboolean);
	local_ref< org::json::JSONArray > put(jint, jdouble);
	local_ref< org::json::JSONArray > put(jint, jint);
	local_ref< org::json::JSONArray > put(jint, jlong);
	local_ref< org::json::JSONArray > put(jint, local_ref< java::lang::Object > const&);

	jboolean isNull(jint);

	// getX(i) throws (Java-side) on missing/mistyped values; optX(i[, fallback])
	// returns a default instead.
	local_ref< java::lang::Object > get(jint);
	local_ref< java::lang::Object > opt(jint);
	jboolean getBoolean(jint);
	jboolean optBoolean(jint);
	jboolean optBoolean(jint, jboolean);
	jdouble getDouble(jint);
	jdouble optDouble(jint);
	jdouble optDouble(jint, jdouble);
	jint getInt(jint);
	jint optInt(jint);
	jint optInt(jint, jint);
	jlong getLong(jint);
	jlong optLong(jint);
	jlong optLong(jint, jlong);
	local_ref< java::lang::String > getString(jint);
	local_ref< java::lang::String > optString(jint);
	local_ref< java::lang::String > optString(jint, local_ref< java::lang::String > const&);
	local_ref< org::json::JSONArray > getJSONArray(jint);
	local_ref< org::json::JSONArray > optJSONArray(jint);
	local_ref< org::json::JSONObject > getJSONObject(jint);
	local_ref< org::json::JSONObject > optJSONObject(jint);
	local_ref< org::json::JSONObject > toJSONObject(local_ref< org::json::JSONArray > const&);
	local_ref< java::lang::String > join(local_ref< java::lang::String > const&);
	local_ref< java::lang::String > toString();
	local_ref< java::lang::String > toString(jint);
	jboolean equals(local_ref< java::lang::Object > const&);
	jint hashCode();
}; //class JSONArray
} //namespace json
} //namespace org
} //namespace j2cpp
#endif //J2CPP_ORG_JSON_JSONARRAY_HPP_DECL
#else //J2CPP_INCLUDE_IMPLEMENTATION
#ifndef J2CPP_ORG_JSON_JSONARRAY_HPP_IMPL
#define J2CPP_ORG_JSON_JSONARRAY_HPP_IMPL
namespace j2cpp {
org::json::JSONArray::operator local_ref<java::lang::Object>() const
{
return local_ref<java::lang::Object>(get_jobject());
}
org::json::JSONArray::JSONArray()
: object<org::json::JSONArray>(
call_new_object<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(0),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(0)
>()
)
{
}
org::json::JSONArray::JSONArray(local_ref< java::util::Collection > const &a0)
: object<org::json::JSONArray>(
call_new_object<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(1),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(1)
>(a0)
)
{
}
org::json::JSONArray::JSONArray(local_ref< org::json::JSONTokener > const &a0)
: object<org::json::JSONArray>(
call_new_object<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(2),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(2)
>(a0)
)
{
}
org::json::JSONArray::JSONArray(local_ref< java::lang::String > const &a0)
: object<org::json::JSONArray>(
call_new_object<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(3),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(3)
>(a0)
)
{
}
jint org::json::JSONArray::length()
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(4),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(4),
jint
>(get_jobject());
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jboolean a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(5),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(5),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jdouble a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(6),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(6),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(7),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(7),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jlong a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(8),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(8),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(local_ref< java::lang::Object > const &a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(9),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(9),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jint a0, jboolean a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(10),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(10),
local_ref< org::json::JSONArray >
>(get_jobject(), a0, a1);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jint a0, jdouble a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(11),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(11),
local_ref< org::json::JSONArray >
>(get_jobject(), a0, a1);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jint a0, jint a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(12),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(12),
local_ref< org::json::JSONArray >
>(get_jobject(), a0, a1);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jint a0, jlong a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(13),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(13),
local_ref< org::json::JSONArray >
>(get_jobject(), a0, a1);
}
local_ref< org::json::JSONArray > org::json::JSONArray::put(jint a0, local_ref< java::lang::Object > const &a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(14),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(14),
local_ref< org::json::JSONArray >
>(get_jobject(), a0, a1);
}
jboolean org::json::JSONArray::isNull(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(15),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(15),
jboolean
>(get_jobject(), a0);
}
local_ref< java::lang::Object > org::json::JSONArray::get(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(16),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(16),
local_ref< java::lang::Object >
>(get_jobject(), a0);
}
local_ref< java::lang::Object > org::json::JSONArray::opt(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(17),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(17),
local_ref< java::lang::Object >
>(get_jobject(), a0);
}
jboolean org::json::JSONArray::getBoolean(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(18),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(18),
jboolean
>(get_jobject(), a0);
}
jboolean org::json::JSONArray::optBoolean(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(19),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(19),
jboolean
>(get_jobject(), a0);
}
jboolean org::json::JSONArray::optBoolean(jint a0, jboolean a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(20),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(20),
jboolean
>(get_jobject(), a0, a1);
}
jdouble org::json::JSONArray::getDouble(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(21),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(21),
jdouble
>(get_jobject(), a0);
}
jdouble org::json::JSONArray::optDouble(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(22),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(22),
jdouble
>(get_jobject(), a0);
}
jdouble org::json::JSONArray::optDouble(jint a0, jdouble a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(23),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(23),
jdouble
>(get_jobject(), a0, a1);
}
jint org::json::JSONArray::getInt(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(24),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(24),
jint
>(get_jobject(), a0);
}
jint org::json::JSONArray::optInt(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(25),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(25),
jint
>(get_jobject(), a0);
}
jint org::json::JSONArray::optInt(jint a0, jint a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(26),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(26),
jint
>(get_jobject(), a0, a1);
}
jlong org::json::JSONArray::getLong(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(27),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(27),
jlong
>(get_jobject(), a0);
}
jlong org::json::JSONArray::optLong(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(28),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(28),
jlong
>(get_jobject(), a0);
}
jlong org::json::JSONArray::optLong(jint a0, jlong a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(29),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(29),
jlong
>(get_jobject(), a0, a1);
}
local_ref< java::lang::String > org::json::JSONArray::getString(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(30),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(30),
local_ref< java::lang::String >
>(get_jobject(), a0);
}
local_ref< java::lang::String > org::json::JSONArray::optString(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(31),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(31),
local_ref< java::lang::String >
>(get_jobject(), a0);
}
local_ref< java::lang::String > org::json::JSONArray::optString(jint a0, local_ref< java::lang::String > const &a1)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(32),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(32),
local_ref< java::lang::String >
>(get_jobject(), a0, a1);
}
local_ref< org::json::JSONArray > org::json::JSONArray::getJSONArray(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(33),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(33),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONArray > org::json::JSONArray::optJSONArray(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(34),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(34),
local_ref< org::json::JSONArray >
>(get_jobject(), a0);
}
local_ref< org::json::JSONObject > org::json::JSONArray::getJSONObject(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(35),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(35),
local_ref< org::json::JSONObject >
>(get_jobject(), a0);
}
local_ref< org::json::JSONObject > org::json::JSONArray::optJSONObject(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(36),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(36),
local_ref< org::json::JSONObject >
>(get_jobject(), a0);
}
local_ref< org::json::JSONObject > org::json::JSONArray::toJSONObject(local_ref< org::json::JSONArray > const &a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(37),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(37),
local_ref< org::json::JSONObject >
>(get_jobject(), a0);
}
local_ref< java::lang::String > org::json::JSONArray::join(local_ref< java::lang::String > const &a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(38),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(38),
local_ref< java::lang::String >
>(get_jobject(), a0);
}
local_ref< java::lang::String > org::json::JSONArray::toString()
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(39),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(39),
local_ref< java::lang::String >
>(get_jobject());
}
local_ref< java::lang::String > org::json::JSONArray::toString(jint a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(40),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(40),
local_ref< java::lang::String >
>(get_jobject(), a0);
}
jboolean org::json::JSONArray::equals(local_ref< java::lang::Object > const &a0)
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(41),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(41),
jboolean
>(get_jobject(), a0);
}
jint org::json::JSONArray::hashCode()
{
return call_method<
org::json::JSONArray::J2CPP_CLASS_NAME,
org::json::JSONArray::J2CPP_METHOD_NAME(42),
org::json::JSONArray::J2CPP_METHOD_SIGNATURE(42),
jint
>(get_jobject());
}
J2CPP_DEFINE_CLASS(org::json::JSONArray,"org/json/JSONArray")
J2CPP_DEFINE_METHOD(org::json::JSONArray,0,"<init>","()V")
J2CPP_DEFINE_METHOD(org::json::JSONArray,1,"<init>","(Ljava/util/Collection;)V")
J2CPP_DEFINE_METHOD(org::json::JSONArray,2,"<init>","(Lorg/json/JSONTokener;)V")
J2CPP_DEFINE_METHOD(org::json::JSONArray,3,"<init>","(Ljava/lang/String;)V")
J2CPP_DEFINE_METHOD(org::json::JSONArray,4,"length","()I")
J2CPP_DEFINE_METHOD(org::json::JSONArray,5,"put","(Z)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,6,"put","(D)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,7,"put","(I)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,8,"put","(J)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,9,"put","(Ljava/lang/Object;)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,10,"put","(IZ)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,11,"put","(ID)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,12,"put","(II)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,13,"put","(IJ)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,14,"put","(ILjava/lang/Object;)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,15,"isNull","(I)Z")
J2CPP_DEFINE_METHOD(org::json::JSONArray,16,"get","(I)Ljava/lang/Object;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,17,"opt","(I)Ljava/lang/Object;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,18,"getBoolean","(I)Z")
J2CPP_DEFINE_METHOD(org::json::JSONArray,19,"optBoolean","(I)Z")
J2CPP_DEFINE_METHOD(org::json::JSONArray,20,"optBoolean","(IZ)Z")
J2CPP_DEFINE_METHOD(org::json::JSONArray,21,"getDouble","(I)D")
J2CPP_DEFINE_METHOD(org::json::JSONArray,22,"optDouble","(I)D")
J2CPP_DEFINE_METHOD(org::json::JSONArray,23,"optDouble","(ID)D")
J2CPP_DEFINE_METHOD(org::json::JSONArray,24,"getInt","(I)I")
J2CPP_DEFINE_METHOD(org::json::JSONArray,25,"optInt","(I)I")
J2CPP_DEFINE_METHOD(org::json::JSONArray,26,"optInt","(II)I")
J2CPP_DEFINE_METHOD(org::json::JSONArray,27,"getLong","(I)J")
J2CPP_DEFINE_METHOD(org::json::JSONArray,28,"optLong","(I)J")
J2CPP_DEFINE_METHOD(org::json::JSONArray,29,"optLong","(IJ)J")
J2CPP_DEFINE_METHOD(org::json::JSONArray,30,"getString","(I)Ljava/lang/String;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,31,"optString","(I)Ljava/lang/String;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,32,"optString","(ILjava/lang/String;)Ljava/lang/String;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,33,"getJSONArray","(I)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,34,"optJSONArray","(I)Lorg/json/JSONArray;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,35,"getJSONObject","(I)Lorg/json/JSONObject;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,36,"optJSONObject","(I)Lorg/json/JSONObject;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,37,"toJSONObject","(Lorg/json/JSONArray;)Lorg/json/JSONObject;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,38,"join","(Ljava/lang/String;)Ljava/lang/String;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,39,"toString","()Ljava/lang/String;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,40,"toString","(I)Ljava/lang/String;")
J2CPP_DEFINE_METHOD(org::json::JSONArray,41,"equals","(Ljava/lang/Object;)Z")
J2CPP_DEFINE_METHOD(org::json::JSONArray,42,"hashCode","()I")
} //namespace j2cpp
#endif //J2CPP_ORG_JSON_JSONARRAY_HPP_IMPL
#endif //J2CPP_INCLUDE_IMPLEMENTATION<|fim▁end|> | ================================================================================*/
|
<|file_name|>test_summary.py<|end_file_name|><|fim▁begin|>from datetime import datetime, date
import pytest
from pytz import UTC
from uber.config import c
from uber.models import Attendee, Session
from uber.site_sections import summary
@pytest.fixture
def birthdays():
    """Insert 17 placeholder volunteer attendees whose birthdates straddle
    year, month, and leap-day boundaries; yield their ids and delete the
    rows again on teardown."""
    dates = [
        date(1964, 12, 30),
        date(1964, 12, 31),
        date(1964, 1, 1),
        date(1964, 1, 2),
        date(1964, 1, 9),
        date(1964, 1, 10),
        date(1964, 1, 11),
        date(1964, 1, 12),
        date(1964, 1, 30),
        date(1964, 1, 31),
        date(1964, 2, 1),
        date(1964, 2, 2),
        date(1964, 2, 27),
        date(1964, 2, 28),
        date(1964, 2, 29),
        date(1964, 3, 1),
        date(1964, 3, 2)]

    attendees = [
        Attendee(
            placeholder=True,
            first_name='Born on',
            last_name=d.strftime('%B %-d, %Y'),
            ribbon=c.VOLUNTEER_RIBBON,
            staffing=True,
            birthdate=d)
        for d in dates]

    with Session() as session:
        session.bulk_insert(attendees)
        ids = [a.id for a in attendees]

    yield ids

    # Teardown: remove the fixture rows so later tests see a clean table.
    with Session() as session:
        session.query(Attendee).filter(Attendee.id.in_(ids)).delete(
            synchronize_session=False)
class TestBirthdayCalendar(object):
    # Renders the attendee birthday calendar CSV for an explicit year (or
    # the current year when omitted) and expects one row per fixture row.
    @pytest.mark.parametrize('year', [None, 2027, 2028])
    def test_attendee_birthday_calendar(
            self,
            admin_attendee,
            year,
            birthdays,
            monkeypatch):
        if year:
            # NOTE(review): vacuous assert — str(year) is always truthy.
            # Presumably the intent was to check the year appears in the
            # response; confirm and tighten.
            assert str(year)
            response = summary.Root().attendee_birthday_calendar(year=year)
        else:
            # NOTE(review): vacuous assert, same as above.
            assert str(datetime.now(UTC).year)
            response = summary.Root().attendee_birthday_calendar()
        if isinstance(response, bytes):
            response = response.decode('utf-8')
        lines = response.strip().split('\n')
        # 17 birthdays from the fixture, plus the CSV header row.
        assert len(lines) == (17 + 1)  # Extra line for the header
    # Pins the event window via config and counts the calendar rows whose
    # birthday (matched by month/day) falls inside [EPOCH, ESCHATON].
    @pytest.mark.parametrize('epoch,eschaton,expected', [
        (datetime(2018, 1, 10), datetime(2018, 1, 11), 2),  # Normal dates
        (datetime(2017, 12, 31), datetime(2018, 1, 1), 2),  # Crossing the year
        (datetime(2018, 1, 31), datetime(2018, 2, 1), 2),  # Crossing the month
        (datetime(2018, 2, 28), datetime(2018, 3, 1), 3),  # Leap day
        (datetime(2018, 1, 1), datetime(2018, 3, 4), 15),  # Multi-month
        (datetime(2017, 12, 28), datetime(2018, 3, 4), 17),  # Everybody
    ])
    def test_event_birthday_calendar(
            self,
            admin_attendee,
            epoch,
            eschaton,
            expected,
            birthdays,
            monkeypatch):
        monkeypatch.setattr(c, 'EPOCH', epoch)
        monkeypatch.setattr(c, 'ESCHATON', eschaton)
        response = summary.Root().event_birthday_calendar()
        if isinstance(response, bytes):
            response = response.decode('utf-8')
        lines = response.strip().split('\n')
        assert len(lines) == (expected + 1)  # Extra line for the header
def test_event_birthday_calendar_correct_birthday_years(
self,
admin_attendee,
birthdays,
monkeypatch):<|fim▁hole|>
monkeypatch.setattr(c, 'EPOCH', datetime(2017, 12, 31))
monkeypatch.setattr(c, 'ESCHATON', datetime(2018, 1, 1))
response = summary.Root().event_birthday_calendar()
if isinstance(response, bytes):
response = response.decode('utf-8')
assert '"Born on December 31, 1964\'s Birthday",2017-12-31' in response
assert '"Born on January 1, 1964\'s Birthday",2018-01-01' in response
lines = response.strip().split('\n')
assert len(lines) == (2 + 1) # Extra line for the header<|fim▁end|> | |
<|file_name|>thread.rs<|end_file_name|><|fim▁begin|>/* Waylos, a kernel built in rust
Copyright (C) 2015 Waylon Cude
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use memory;
use io;
use screen::SCREEN;
use core::fmt::Write;
// Fixed physical address where the global Thread_Table lives
// (initialized by thread_table_create).
const THREAD_TABLE_ADDR: u64 = 0x300000;

// Context-switch helpers implemented in assembly elsewhere in the kernel:
// register save/restore, stack setup, and CR3 (page-table base) control.
extern {
    fn setup_registers(instruction_addr: u64);
    fn save_registers();
    fn restore_registers();
    fn setup_stack_kernel();
    fn setup_stack_user();
    fn reset_cr3();
    fn set_cr3();
    fn setup_stack_register();
}
// Per-thread record stored in the fixed-address thread table.
// repr(packed): layout is presumably shared with assembly/raw-memory code —
// do not reorder or resize fields without auditing that side. (TODO confirm)
#[derive(Clone,Copy)]
#[repr(packed)]
struct Thread {
    enabled: u8, //I'm not sure if this can be a bool
    page_addr: u64, //Pointer to the PML4
}
// Global scheduler table placed at THREAD_TABLE_ADDR. Accessed exclusively
// through raw pointer casts of that fixed address; never constructed on the
// Rust side.
#[repr(packed)]
pub struct Thread_Table { //Im pretty sure this shouldn't implement Copy
    current_page_table: u64,
    current_process_id: usize, //This has to be 64 bits, but I can only index with usize
    greatest_process_id: usize, //For assigning new threads
    threads: [Thread; 5000], //This should be around 1 MiB
}
#[no_mangle]
pub extern fn create_thread_memory_area(paddr: u64) -> u64{
unsafe {
let page_addr = paddr & 0xFFFFFFFFF000;
(*(page_addr as *mut memory::PageTable)).set_entry(511,*(0x100000 as *const u64));
memory::create_page(0xFFFFF00000000000,page_addr);
loop{}
memory::create_page(0xFFFFF00000003000,page_addr);
memory::create_page(0xFFFFF00000002000,page_addr);
let index = (*(0x300000 as *const Thread_Table)).greatest_process_id;
(*(0x300000 as *mut Thread_Table)).threads[index] = Thread {enabled: 1, page_addr: page_addr};
(*(0x300000 as *mut Thread_Table)).greatest_process_id +=1;
page_addr}
//*(0x100008 as *mut u64) = page_addr;//For some reason passing this in the stack doesn't work
// setup_stack_register(); //Rust tries to restore a non-existent stack-frame here
// loop {}
// setup_registers(addr); //Also sets up the stack and cr3
}
/*#[no_mangle]
pub extern fn user_thread_create(addr: u64) {
unsafe {
save_registers();
create_thread_memory_area(memory::palloc(),addr); //Setup page table
setup_stack_user();
reset_cr3();<|fim▁hole|>}*/
/*#[no_mangle]
pub extern fn first_thread_create(addr: u64) {
unsafe {
//save_registers();
create_thread_memory_area(memory::palloc(),addr); //Setup page table
loop {}
setup_stack_kernel();
//reset_cr3();
//restore_registers();
}
}*/
/// Initializes the global thread table at THREAD_TABLE_ADDR: marks every
/// slot free and resets both process-id counters. Must run before any
/// thread creation or switching.
#[no_mangle]
pub extern fn thread_table_create() {
    unsafe {
        for i in 0..5000 {
            (*(THREAD_TABLE_ADDR as *mut Thread_Table)).threads[i].enabled=0;
        }
        (*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id = 0;
        (*(THREAD_TABLE_ADDR as *mut Thread_Table)).greatest_process_id = 0;
    }
}
/// Round-robin scheduler step: advances current_process_id to the next
/// enabled thread (wrapping past greatest_process_id back to 0) and loads
/// that thread's PML4 address into current_page_table for the context
/// switch code to consume.
///
/// NOTE(review): if no thread is enabled this loop never terminates, and
/// the first `+= 1` can step past greatest_process_id before the wrap
/// check runs, reading an out-of-range (but zero-initialized) slot —
/// confirm both are acceptable invariants.
#[no_mangle]
pub extern fn thread_table_switch() {
    unsafe{
        (*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id += 1;
        while (*(THREAD_TABLE_ADDR as *mut Thread_Table)).threads[(*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id].enabled != 1 {
            if (*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id >= (*(THREAD_TABLE_ADDR as *mut Thread_Table)).greatest_process_id {
                (*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id = 0;
            } else {
                (*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id += 1;
            }
        }
        //write!(SCREEN,"PID {}\n",(*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id);
        (*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_page_table = (*(THREAD_TABLE_ADDR as *const Thread_Table)).threads[(*(THREAD_TABLE_ADDR as *const Thread_Table)).current_process_id].page_addr;
        //io::outb(0x3F8,(48+(*(THREAD_TABLE_ADDR as *mut Thread_Table)).current_process_id) as u8);
    }
}
} |
<|file_name|>test_class.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import pytest
from eos_data_distribution import DirTools
from gi.repository import GLib
ITER_COUNT = 10
class TestClass:
    # Watches a fresh directory with DirTools.Monitor and checks that exactly
    # one 'created' signal fires for each subdirectory made beneath it.
    @pytest.mark.timeout(timeout=3, method='thread')
    def test_0(self, tmpdir):
        loop = GLib.MainLoop()
        self.__called = 0

        def cb_changed(M, p, m, f, o, evt, d=None, e=None):
            # `e` is the signal name passed as user data in connect() below;
            # only 'created' is ever connected, so anything else is a bug.
            print('signal', e, p, f, o, evt, d)
            assert e == 'created'
            self.__called += 1

        d = tmpdir.mkdir("ndn")
        m = DirTools.Monitor(str(d))
        [m.connect(s, cb_changed, s) for s in ['created']]
        [d.mkdir(str(i)) for i in range(ITER_COUNT)]

        # Give the monitor 2 s to deliver all events, then quit the loop;
        # the timeout decorator above is the hard backstop.
        GLib.timeout_add_seconds(2, lambda: loop.quit())
        loop.run()
        assert self.__called == ITER_COUNT
<|file_name|>lossy.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use char;
use str as core_str;
use fmt;
use fmt::Write;
use mem;
/// Lossy UTF-8 string.
///
/// An unsized wrapper over raw bytes that may contain invalid UTF-8.
/// Decoding is deferred: invalid sequences are surfaced chunk-by-chunk via
/// `chunks()` and rendered as U+FFFD by the `Display`/`Debug` impls below.
#[unstable(feature = "str_internals", issue = "0")]
pub struct Utf8Lossy {
    bytes: [u8]
}
impl Utf8Lossy {
    /// Views an already-valid `str` through the lossy wrapper.
    pub fn from_str(s: &str) -> &Utf8Lossy {
        Utf8Lossy::from_bytes(s.as_bytes())
    }

    /// Wraps arbitrary bytes without validation.
    pub fn from_bytes(bytes: &[u8]) -> &Utf8Lossy {
        // SAFETY: `Utf8Lossy` is a single-field wrapper around `[u8]`, so the
        // fat-pointer reinterpretation matches layouts. (NOTE(review): the
        // struct is not marked #[repr(transparent)] here — confirm this
        // layout guarantee is intended.)
        unsafe { mem::transmute(bytes) }
    }

    /// Iterates (valid, broken) chunk pairs over the underlying bytes.
    pub fn chunks(&self) -> Utf8LossyChunksIter {
        Utf8LossyChunksIter { source: &self.bytes }
    }
}
/// Iterator over lossy UTF-8 string
#[unstable(feature = "str_internals", issue = "0")]
#[allow(missing_debug_implementations)]
pub struct Utf8LossyChunksIter<'a> {
    // Remaining unprocessed bytes; shrinks from the front as chunks are yielded.
    source: &'a [u8],
}
// One item of Utf8LossyChunksIter: a maximal run of valid UTF-8 followed by
// at most one invalid byte sequence.
#[unstable(feature = "str_internals", issue = "0")]
#[derive(PartialEq, Eq, Debug)]
pub struct Utf8LossyChunk<'a> {
    /// Sequence of valid chars.
    /// Can be empty between broken UTF-8 chars.
    pub valid: &'a str,

    /// Single broken char, empty if none.
    /// Empty iff iterator item is last.
    pub broken: &'a [u8],
}
impl<'a> Iterator for Utf8LossyChunksIter<'a> {
type Item = Utf8LossyChunk<'a>;
fn next(&mut self) -> Option<Utf8LossyChunk<'a>> {
if self.source.len() == 0 {
return None;
}
const TAG_CONT_U8: u8 = 128;
fn safe_get(xs: &[u8], i: usize) -> u8 {
*xs.get(i).unwrap_or(&0)
}
let mut i = 0;
while i < self.source.len() {
let i_ = i;
let byte = unsafe { *self.source.get_unchecked(i) };
i += 1;
if byte < 128 {
} else {
let w = core_str::utf8_char_width(byte);
macro_rules! error { () => ({
unsafe {
let r = Utf8LossyChunk {
valid: core_str::from_utf8_unchecked(&self.source[0..i_]),
broken: &self.source[i_..i],
};
self.source = &self.source[i..];
return Some(r);
}
})}
<|fim▁hole|> }
i += 1;
}
3 => {
match (byte, safe_get(self.source, i)) {
(0xE0, 0xA0 ..= 0xBF) => (),
(0xE1 ..= 0xEC, 0x80 ..= 0xBF) => (),
(0xED, 0x80 ..= 0x9F) => (),
(0xEE ..= 0xEF, 0x80 ..= 0xBF) => (),
_ => {
error!();
}
}
i += 1;
if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
error!();
}
i += 1;
}
4 => {
match (byte, safe_get(self.source, i)) {
(0xF0, 0x90 ..= 0xBF) => (),
(0xF1 ..= 0xF3, 0x80 ..= 0xBF) => (),
(0xF4, 0x80 ..= 0x8F) => (),
_ => {
error!();
}
}
i += 1;
if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
error!();
}
i += 1;
if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
error!();
}
i += 1;
}
_ => {
error!();
}
}
}
}
let r = Utf8LossyChunk {
valid: unsafe { core_str::from_utf8_unchecked(self.source) },
broken: &[],
};
self.source = &[];
Some(r)
}
}
// Renders the bytes as text, substituting U+FFFD for each maximal invalid
// UTF-8 sequence (same convention as String::from_utf8_lossy).
impl fmt::Display for Utf8Lossy {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // If we're the empty string then our iterator won't actually yield
        // anything, so perform the formatting manually
        if self.bytes.len() == 0 {
            return "".fmt(f)
        }

        for Utf8LossyChunk { valid, broken } in self.chunks() {
            // If we successfully decoded the whole chunk as a valid string then
            // we can return a direct formatting of the string which will also
            // respect various formatting flags if possible.
            if valid.len() == self.bytes.len() {
                assert!(broken.is_empty());
                return valid.fmt(f)
            }

            f.write_str(valid)?;
            if !broken.is_empty() {
                // One replacement char per broken sequence, not per byte.
                f.write_char(char::REPLACEMENT_CHARACTER)?;
            }
        }
        Ok(())
    }
}
// Quoted debug rendering: valid text is escaped like str's Debug impl,
// broken bytes are written as \xNN hex escapes.
impl fmt::Debug for Utf8Lossy {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_char('"')?;

        for Utf8LossyChunk { valid, broken } in self.chunks() {

            // Valid part.
            // Here we partially parse UTF-8 again which is suboptimal.
            {
                let mut from = 0;
                for (i, c) in valid.char_indices() {
                    let esc = c.escape_debug();
                    // If char needs escaping, flush backlog so far and write, else skip
                    if esc.len() != 1 {
                        f.write_str(&valid[from..i])?;
                        for c in esc {
                            f.write_char(c)?;
                        }
                        from = i + c.len_utf8();
                    }
                }
                // Flush the trailing run of unescaped chars.
                f.write_str(&valid[from..])?;
            }

            // Broken parts of string as hex escape.
            for &b in broken {
                write!(f, "\\x{:02x}", b)?;
            }
        }

        f.write_char('"')
    }
}
2 => {
if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
error!(); |
<|file_name|>measures.py<|end_file_name|><|fim▁begin|># -*-coding: utf-8 -*-
import math
import numpy as np
import relations
def _avg_difference(npiece, side):
    """Average pixel difference between the outermost row/column on `side`
    and the one just inside it (per channel for color pieces)."""
    if side == relations.LEFT:
        edge, inner = npiece[:, 0], npiece[:, 1]
    elif side == relations.RIGHT:
        edge, inner = npiece[:, -1], npiece[:, -2]
    elif side == relations.UP:
        edge, inner = npiece[0, :], npiece[1, :]
    else:
        edge, inner = npiece[-1, :], npiece[-2, :]
    delta = edge - inner
    return sum(delta) / float(len(delta))
def _gradient(pieces_difference, average_side_difference):
grad = pieces_difference - average_side_difference
grad_t = np.transpose(grad)
cov = np.cov(grad_t)
try:
cov_inv = np.linalg.inv(cov)
except np.linalg.LinAlgError as e:
cov_inv = np.ones((3, 3))
<|fim▁hole|> return grad.dot(cov_inv).dot(grad_t)
def mgc(np1, np2, relation):
    """Mahalanobis Gradient Compatibility measure between two pieces.

    Compares the gradient across the shared boundary in both directions
    (piece1 -> piece2 and piece2 -> piece1) against each piece's own
    average edge gradient, and sums the two scores.

    :param np1: first piece as a numpy array.
    :param np2: second piece as a numpy array.
    :param relation: relations.LEFT for horizontal adjacency (np1's right
        edge against np2's left edge); anything else is treated as vertical
        (np1's bottom edge against np2's top edge).
    :return: sum of the two directional gradient terms.
    """
    if relation == relations.LEFT:
        # Horizontal neighbors: np1's rightmost column vs np2's leftmost column.
        grad_12 = _gradient(np2[:, 0] - np1[:, -1], _avg_difference(np1, relations.RIGHT))
        grad_21 = _gradient(np1[:, -1] - np2[:, 0], _avg_difference(np2, relations.LEFT))
    else:
        # Vertical neighbors: np1's bottom row vs np2's top row.
        grad_12 = _gradient(np2[0, :] - np1[-1, :], _avg_difference(np1, relations.DOWN))
        grad_21 = _gradient(np1[-1, :] - np2[0, :], _avg_difference(np2, relations.UP))
    return np.sum(grad_12 + grad_21)
def rgb(np1, np2, relation):
    """Euclidean (L2) dissimilarity between the touching edges of two pieces.

    :param np1: first piece as a numpy array.
    :param np2: second piece as a numpy array.
    :param relation: relations.LEFT for horizontal adjacency (np1's right
        edge against np2's left edge); anything else is treated as vertical.
    :return: sqrt of the sum of squared edge differences (a float).
    """
    if relation == relations.LEFT:
        difference = np1[:, -1] - np2[:, 0]
    else:
        difference = np1[-1, :] - np2[0, :]
    # NOTE(review): if the inputs are unsigned-integer images the subtraction
    # above may wrap around; cast to a signed/float dtype upstream if that is
    # not intended (the original code had the same behavior).
    # Square in float64 and reduce. This replaces the original
    # np.vectorize(lambda x: math.pow(x, 2)) which ran a Python-level loop
    # per element; the values produced are the same.
    dissimilarity = np.sum(np.asarray(difference, dtype=np.float64) ** 2)
    return math.sqrt(dissimilarity)
def rgb_mgc(*args):
    """Combined measure: product of the RGB and MGC dissimilarities.

    Accepts the same (np1, np2, relation) arguments as rgb() and mgc().
    """
    return rgb(*args)*mgc(*args)
<|file_name|>navbar-directive.js<|end_file_name|><|fim▁begin|>'use strict';
import angular from 'angular';
import NavbarTpl from './navbar.html';
import NavbarService from './navbar-service';
import NavbarCtrl from './navbar-ctrl';
/**
 * Directive factory for the <navbar> element.
 *
 * Declares `name`, `version` and `linkTo` as string attribute bindings on
 * an isolate scope, renders the navbar template, and publishes the
 * NavbarService state on the scope as `navbar`.
 */
function navbar(NavbarService) {
  const link = (scope, element, attrs) => {
    scope.navbar = NavbarService.getNavbar();
  };

  const definition = {
    restrict: 'E',
    scope: {
      name: '@',
      version: '@',
      linkTo: '@'
    },
    templateUrl: NavbarTpl,
    link
  };

  return definition;
}
.directive('navbar', ['NavbarService', navbar])
.controller('NavbarCtrl', ['$scope', '$document', NavbarCtrl])
.name;<|fim▁end|> | |
<|file_name|>api.traversing.js<|end_file_name|><|fim▁begin|>(function() {
var $, expect, fruits;
$ = require('../');
expect = require('expect.js');
/*
Examples
*/
fruits = '<ul id = "fruits">\n <li class = "apple">Apple</li>\n <li class = "orange">Orange</li>\n <li class = "pear">Pear</li>\n</ul>'.replace(/(\n|\s{2})/g, '');
/*
Tests
*/
describe('$(...)', function() {
describe('.find', function() {
it('() : should return this', function() {
return expect($('ul', fruits).find()[0].name).to.equal('ul');
});
it('(single) : should find one descendant', function() {
return expect($('#fruits', fruits).find('.apple')[0].attribs["class"]).to.equal('apple');
});
it('(many) : should find all matching descendant', function() {
return expect($('#fruits', fruits).find('li')).to.have.length(3);
});
it('(many) : should merge all selected elems with matching descendants');
it('(invalid single) : should return empty if cant find', function() {
return expect($('ul', fruits).find('blah')).to.have.length(0);
});
return it('should return empty if search already empty result', function() {
return expect($('#fruits').find('li')).to.have.length(0);
});
});
describe('.children', function() {
it('() : should get all children', function() {
return expect($('ul', fruits).children()).to.have.length(3);
});
it('(selector) : should return children matching selector', function() {
return expect($('ul', fruits).children('.orange').hasClass('orange')).to.be.ok;
});
it('(invalid selector) : should return empty', function() {
return expect($('ul', fruits).children('.lulz')).to.have.length(0);
});
return it('should only match immediate children, not ancestors');
});
describe('.next', function() {
it('() : should return next element', function() {
return expect($('.orange', fruits).next().hasClass('pear')).to.be.ok;
});
return it('(no next) : should return null (?)');
});
describe('.prev', function() {
it('() : should return previous element', function() {
return expect($('.orange', fruits).prev().hasClass('apple')).to.be.ok;
});
return it('(no prev) : should return null (?)');
});
describe('.siblings', function() {
it('() : should get all the siblings', function() {
return expect($('.orange', fruits).siblings()).to.have.length(2);
});
return it('(selector) : should get all siblings that match the selector', function() {
return expect($('.orange', fruits).siblings('li')).to.have.length(2);
});
});
describe('.each', function() {<|fim▁hole|> $('li', fruits).each(function(i, elem) {
return items[i] = elem;
});
expect(items[0].attribs["class"]).to.equal('apple');
expect(items[1].attribs["class"]).to.equal('orange');
return expect(items[2].attribs["class"]).to.equal('pear');
});
});
describe('.first', function() {
it('() : should return the first item', function() {
var elem, src;
src = $("<span>foo</span><span>bar</span><span>baz</span>");
elem = src.first();
expect(elem.length).to.equal(1);
return expect(elem.html()).to.equal('foo');
});
return it('() : should return an empty object for an empty object', function() {
var first, src;
src = $();
first = src.first();
expect(first.length).to.equal(0);
return expect(first.html()).to.be(null);
});
});
describe('.last', function() {
it('() : should return the last element', function() {
var elem, src;
src = $("<span>foo</span><span>bar</span><span>baz</span>");
elem = src.last();
expect(elem.length).to.equal(1);
return expect(elem.html()).to.equal('baz');
});
return it('() : should return an empty object for an empty object', function() {
var last, src;
src = $();
last = src.last();
expect(last.length).to.equal(0);
return expect(last.html()).to.be(null);
});
});
describe('.first & .last', function() {
return it('() : should return same object if only one object', function() {
var first, last, src;
src = $("<span>bar</span>");
first = src.first();
last = src.last();
expect(first.html()).to.equal(last.html());
expect(first.length).to.equal(1);
expect(first.html()).to.equal('bar');
expect(last.length).to.equal(1);
return expect(last.html()).to.equal('bar');
});
});
return describe('.eq', function() {
return it('(i) : should return the element at the specified index', function() {
expect($('li', fruits).eq(0).text()).to.equal('Apple');
expect($('li', fruits).eq(1).text()).to.equal('Orange');
expect($('li', fruits).eq(2).text()).to.equal('Pear');
expect($('li', fruits).eq(3).text()).to.equal('');
return expect($('li', fruits).eq(-1).text()).to.equal('Pear');
});
});
});
}).call(this);<|fim▁end|> | return it('( (i, elem) -> ) : should loop selected returning fn with (i, elem)', function() {
var items;
items = []; |
<|file_name|>request_service.ts<|end_file_name|><|fim▁begin|>import {Injectable} from 'angular2/core';
import {Http} from 'angular2/http';
import {Request} from '../models/request'
import {Observable} from 'rxjs/Observable';
import {Jsonp} from 'angular2/http';
import 'rxjs/add/operator/map'
@Injectable()
export class RequestService {
constructor(private _http: Http, private _jsonp: Jsonp) {
}
public list_requests(company_id) {
var request_list: Request[] = [];
return this._http.get(`/companies/${company_id}/requests`)<|fim▁hole|> }
}<|fim▁end|> | // .toPromise()
// .then(response => response.json()) |
<|file_name|>0020_remove_old_attr_value_field.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: collapse the value columns into a single 'db_value'.

        Drops the legacy text column 'db_value', then renames the newer
        pickled column 'db_value2' (a PickledObjectField per the frozen
        models below) to take its place.
        """
        # Deleting field 'ObjAttribute.db_value'
        db.delete_column('objects_objattribute', 'db_value')
        db.rename_column('objects_objattribute', 'db_value2', 'db_value')
def backwards(self, orm):
# Adding field 'ObjAttribute.db_value'
db.add_column('objects_objattribute', 'db_value',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
db.rename_column('objects_objattribute', 'db_value', 'db_value2')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'objects.alias': {
'Meta': {'object_name': 'Alias'},
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'objects.objattribute': {
'Meta': {'object_name': 'ObjAttribute'},
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']"}),
'db_value2': ('src.utils.picklefield.PickledObjectField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'objects.objectdb': {
'Meta': {'object_name': 'ObjectDB'},
'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'db_player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}),
'db_sessid': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'objects.objectnick': {
'Meta': {'unique_together': "(('db_nick', 'db_type', 'db_obj'),)", 'object_name': 'ObjectNick'},
'db_nick': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']"}),
'db_real': ('django.db.models.fields.TextField', [], {}),
'db_type': ('django.db.models.fields.CharField', [], {'default': "'inputline'", 'max_length': '16', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},<|fim▁hole|> 'Meta': {'object_name': 'PlayerDB'},
'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_is_connected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['objects']<|fim▁end|> | 'players.playerdb': { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub use self::os::{cc_t};
pub use self::os::{speed_t};
pub use self::os::{tcflag_t};
pub use self::os::{termios};
pub use self::os::{NCCS};
pub use self::os::{VEOF};
pub use self::os::{VEOL};
pub use self::os::{VERASE};
pub use self::os::{VINTR};
pub use self::os::{VKILL};
pub use self::os::{VMIN};
pub use self::os::{VQUIT};
pub use self::os::{VSTART};
pub use self::os::{VSTOP};
pub use self::os::{VSUSP};
pub use self::os::{VTIME};
pub use self::os::{BRKINT};
pub use self::os::{ICRNL};
pub use self::os::{IGNBRK};
pub use self::os::{IGNCR};
pub use self::os::{IGNPAR};
pub use self::os::{INLCR};
pub use self::os::{INPCK};
pub use self::os::{ISTRIP};
pub use self::os::{IXANY};
pub use self::os::{IXOFF};<|fim▁hole|>pub use self::os::{PARMRK};
pub use self::os::{OPOST};
pub use self::os::{ONLCR};
pub use self::os::{OCRNL};
pub use self::os::{ONOCR};
pub use self::os::{ONLRET};
pub use self::os::{OFDEL};
pub use self::os::{OFILL};
pub use self::os::{NLDLY};
pub use self::os::{NL0};
pub use self::os::{NL1};
pub use self::os::{CRDLY};
pub use self::os::{CR0};
pub use self::os::{CR1};
pub use self::os::{CR2};
pub use self::os::{CR3};
pub use self::os::{TABDLY};
pub use self::os::{TAB0};
pub use self::os::{TAB1};
pub use self::os::{TAB2};
pub use self::os::{TAB3};
pub use self::os::{BSDLY};
pub use self::os::{BS0};
pub use self::os::{BS1};
pub use self::os::{VTDLY};
pub use self::os::{VT0};
pub use self::os::{VT1};
pub use self::os::{FFDLY};
pub use self::os::{FF0};
pub use self::os::{FF1};
pub use self::os::{B0};
pub use self::os::{B50};
pub use self::os::{B75};
pub use self::os::{B110};
pub use self::os::{B134};
pub use self::os::{B150};
pub use self::os::{B200};
pub use self::os::{B300};
pub use self::os::{B600};
pub use self::os::{B1200};
pub use self::os::{B1800};
pub use self::os::{B2400};
pub use self::os::{B4800};
pub use self::os::{B9600};
pub use self::os::{B19200};
pub use self::os::{B38400};
pub use self::os::{CSIZE};
pub use self::os::{CS5};
pub use self::os::{CS6};
pub use self::os::{CS7};
pub use self::os::{CS8};
pub use self::os::{CSTOPB};
pub use self::os::{CREAD};
pub use self::os::{PARENB};
pub use self::os::{PARODD};
pub use self::os::{HUPCL};
pub use self::os::{CLOCAL};
pub use self::os::{ECHO};
pub use self::os::{ECHOE};
pub use self::os::{ECHOK};
pub use self::os::{ECHONL};
pub use self::os::{ICANON};
pub use self::os::{IEXTEN};
pub use self::os::{ISIG};
pub use self::os::{NOFLSH};
pub use self::os::{TOSTOP};
pub use self::os::{TCSANOW};
pub use self::os::{TCSADRAIN};
pub use self::os::{TCSAFLUSH};
pub use self::os::{TCIFLUSH};
pub use self::os::{TCIOFLUSH};
pub use self::os::{TCOFLUSH};
pub use self::os::{TCIOFF};
pub use self::os::{TCION};
pub use self::os::{TCOOFF};
pub use self::os::{TCOON};
use {int_t};
use sys::types::{pid_t};
#[cfg(target_os = "linux")]
#[path = "linux/mod.rs"]
mod os;
// Safe wrappers around the POSIX termios(3) C functions. Each wrapper
// declares the corresponding extern symbol locally and converts Rust
// references to raw pointers at the call site.
//
// SAFETY (all wrappers below): the raw pointers handed to the C functions
// are derived from valid Rust references, so they are non-null, properly
// aligned, and live for the duration of the call.
/// Returns the input baud rate stored in `termios_p`.
pub fn cfgetispeed(termios_p: &termios) -> speed_t {
    extern { fn cfgetispeed(termios_p: *const termios) -> speed_t; }
    unsafe { cfgetispeed(termios_p as *const _) }
}
/// Returns the output baud rate stored in `termios_p`.
pub fn cfgetospeed(termios_p: &termios) -> speed_t {
    extern { fn cfgetospeed(termios_p: *const termios) -> speed_t; }
    unsafe { cfgetospeed(termios_p as *const _) }
}
/// Sets the input baud rate in `termios_p`; returns the C status code.
pub fn cfsetispeed(termios_p: &mut termios, speed: speed_t) -> int_t {
    extern { fn cfsetispeed(termios_p: *mut termios, speed: speed_t) -> int_t; }
    unsafe { cfsetispeed(termios_p as *mut _, speed) }
}
/// Sets the output baud rate in `termios_p`; returns the C status code.
pub fn cfsetospeed(termios_p: &mut termios, speed: speed_t) -> int_t {
    extern { fn cfsetospeed(termios_p: *mut termios, speed: speed_t) -> int_t; }
    unsafe { cfsetospeed(termios_p as *mut _, speed) }
}
/// Waits until all output written to `fd` has been transmitted.
pub fn tcdrain(fd: int_t) -> int_t {
    extern { fn tcdrain(fd: int_t) -> int_t; }
    unsafe { tcdrain(fd) }
}
/// Suspends/restarts transmission or reception on `fd` (TCOOFF/TCOON/...).
pub fn tcflow(fd: int_t, action: int_t) -> int_t {
    extern { fn tcflow(fd: int_t, action: int_t) -> int_t; }
    unsafe { tcflow(fd, action) }
}
/// Discards queued data on `fd` per `queue_selector` (TCIFLUSH/...).
pub fn tcflush(fd: int_t, queue_selector: int_t) -> int_t {
    extern { fn tcflush(fd: int_t, queue_selector: int_t) -> int_t; }
    unsafe { tcflush(fd, queue_selector) }
}
/// Reads the current terminal attributes of `fd` into `termios_p`.
pub fn tcgetattr(fd: int_t, termios_p: &mut termios) -> int_t {
    extern { fn tcgetattr(fd: int_t, termios_p: *mut termios) -> int_t; }
    unsafe { tcgetattr(fd, termios_p as *mut _) }
}
/// Returns the session ID of the session associated with terminal `fd`.
pub fn tcgetsid(fd: int_t) -> pid_t {
    extern { fn tcgetsid(fd: int_t) -> pid_t; }
    unsafe { tcgetsid(fd) }
}
/// Transmits a break condition on `fd` for the given `duration`.
pub fn tcsendbreak(fd: int_t, duration: int_t) -> int_t {
    extern { fn tcsendbreak(fd: int_t, duration: int_t) -> int_t; }
    unsafe { tcsendbreak(fd, duration) }
}
/// Applies the attributes in `termios_p` to `fd` (TCSANOW/TCSADRAIN/...).
pub fn tcsetattr(fd: int_t, optional_actions: int_t, termios_p: &termios) -> int_t {
    extern { fn tcsetattr(fd: int_t, optional_actions: int_t,
                          termios_p: *const termios) -> int_t; }
    unsafe { tcsetattr(fd, optional_actions, termios_p as *const _) }
}
<|file_name|>op.rs<|end_file_name|><|fim▁begin|>#[derive(Debug, PartialEq)]
pub enum InfixOp {
// A + B
Add,
// A - B
Sub,
// A / B
Div,
// A * B
Mul,
// A % B
Mod,
// A ^ B
Pow,
// A equals B (traditionally A == B)
Equ,
// A < B
Lt,
// A <= B
Lte,
// A > B
Gt,
// A >= B
Gte,
}
impl InfixOp {
pub fn get_precedence(&self) -> u8 {
// Precedence(High -> Low):
// 9. () | [] .
// 8. not | negate
// 7. * / %
// 6. + -
// 5. < | <= | > | >=
// 4. == !=
// 3. bitwise and | bitwise or | bitwise xor | ^ (pow - not sure where this goes)
// 2. logical and | logical or
// 1. ,
match *self {
InfixOp::Mul => 7,
InfixOp::Div => 7,
InfixOp::Mod => 7,
InfixOp::Add => 6,
InfixOp::Sub => 6,
InfixOp::Lt => 5,
InfixOp::Lte => 5,
InfixOp::Gt => 5,
InfixOp::Gte => 5,<|fim▁hole|> InfixOp::Equ => 4,
InfixOp::Pow => 3 // Not sure about this one
}
}
}
/// Unary (prefix) operators understood by the expression parser.
#[derive(Debug, PartialEq)]
pub enum UnaryOp {
    /// -A
    Negate,
    /// not A (traditionally !A)
    Not
}
<|file_name|>foobar.py<|end_file_name|><|fim▁begin|>import os
import webapp2
from mako.template import Template
from mako.lookup import TemplateLookup
class MainHandler(webapp2.RequestHandler):
    """Handler for /foobar: renders the foobar Mako template."""

    def get(self):
        """Render templates/foobar.tmpl with a fixed context."""
        context = {
            'some_foo': 'foo',
            'some_bar': 'bar'
        }
        # The template file lives in our GAE app directory.
        template_path = os.path.join(os.path.dirname(__file__), 'templates/foobar.tmpl')
        # Build a template instance for that file.
        template = Template(filename=template_path)
        # The context dictionary is unpacked into keyword arguments for render.
        self.response.out.write(template.render(**context))


app = webapp2.WSGIApplication([('/foobar', MainHandler)], debug=True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.