prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---
<|file_name|>gtypes.cpp<|end_file_name|><|fim▁begin|>/*
* File: gtypes.cpp
* ----------------
* This file implements the classes in the gtypes.h interface.
*
* @version 2016/10/14
* - modified floating-point equality tests to use floatingPointEqual function
* @version 2015/07/05
* - using global hashing functions rather than global variables
* @version 2014/10/08
* - removed 'using namespace' statement
*/
#include "gtypes.h"
#include <cmath>
#include <string>
#include "error.h"
#include "gmath.h"
#include "hashcode.h"
#include "strlib.h"
/*
* Implementation notes: GPoint class
* ----------------------------------
* The GPoint class itself is entirely straightforward. The relational
 * operators compare the x component first, followed by the y component.
* The hashCode function computes the exclusive-or of the individual words.
*/
GPoint::GPoint() {
x = 0;
y = 0;
}
GPoint::GPoint(double x, double y) {
this->x = x;
this->y = y;
}
double GPoint::getX() const {
return x;
}
double GPoint::getY() const {
return y;
}
std::string GPoint::toString() const {
return "(" + realToString(x) + ", " + realToString(y) + ")";
}
std::ostream& operator <<(std::ostream& os, const GPoint& pt) {
return os << pt.toString();
}
bool operator ==(const GPoint& p1, const GPoint& p2) {
return floatingPointEqual(p1.x, p2.x)
&& floatingPointEqual(p1.y, p2.y);
}
bool operator !=(const GPoint& p1, const GPoint& p2) {
return !(p1 == p2);
}
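/*
 * Note on the hashing below: each double's raw bit pattern is reinterpreted
 * as sizeof(double) / sizeof(int) machine words and the words are XOR-ed
 * together; hashMask() then clears the sign bit so the result is non-negative.
 */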
int hashCode(const GPoint& pt) {
int hash = 0;
for (size_t i = 0; i < sizeof(double) / sizeof(int); i++) {
hash ^= ((int *) &pt.x)[i] ^ ((int *) &pt.y)[i];
}
return hashMask() & hash;
}
/*
* Implementation notes: GDimension class
* --------------------------------------
* The GDimension class itself is entirely straightforward. The
* relational operators compare the width first, followed by the height.
* The hashCode function computes the exclusive-or of the individual words.
*/
GDimension::GDimension() {
width = 0;
height = 0;
}
GDimension::GDimension(double width, double height) {
this->width = width;
this->height = height;
}
double GDimension::getWidth() const {
return width;
}
double GDimension::getHeight() const {
return height;
}
std::string GDimension::toString() const {
return "(" + realToString(width) + ", " + realToString(height) + ")";
}
std::ostream& operator <<(std::ostream& os, const GDimension& dim) {
return os << dim.toString();
}
bool operator ==(const GDimension& d1, const GDimension& d2) {
return floatingPointEqual(d1.width, d2.width)
&& floatingPointEqual(d1.height, d2.height);
}<|fim▁hole|>}
int hashCode(const GDimension& dim) {
int hash = 0;
for (size_t i = 0; i < sizeof(double) / sizeof(int); i++) {
hash ^= ((int *) &dim.width)[i] ^ ((int *) &dim.height)[i];
}
return hashMask() & hash;
}
/*
* Implementation notes: GRectangle class
* --------------------------------------
* The GRectangle class itself is entirely straightforward. The
* relational operators compare the components in the following order:
* x, y, width, height. The hashCode function computes the exclusive-or
* of the individual words.
*/
GRectangle::GRectangle() {
x = 0;
y = 0;
width = 0;
height = 0;
}
GRectangle::GRectangle(double x, double y, double width, double height) {
this->x = x;
this->y = y;
this->width = width;
this->height = height;
}
double GRectangle::getX() const {
return x;
}
double GRectangle::getY() const {
return y;
}
double GRectangle::getWidth() const {
return width;
}
double GRectangle::getHeight() const {
return height;
}
bool GRectangle::isEmpty() const {
return width <= 0 || height <= 0;
}
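/*
 * Containment uses half-open bounds: points on the left/top edges are inside,
 * points on the right/bottom edges are not.
 */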
bool GRectangle::contains(double x, double y) const {
return x >= this->x && y >= this->y
&& x < this->x + width
&& y < this->y + height;
}
bool GRectangle::contains(GPoint pt) const {
return contains(pt.getX(), pt.getY());
}
std::string GRectangle::toString() const {
return "(" + realToString(x) + ", " + realToString(y) + ", "
+ realToString(width) + ", " + realToString(height) + ")";
}
std::ostream& operator <<(std::ostream& os, const GRectangle& rect) {
return os << rect.toString();
}
bool operator ==(const GRectangle& r1, const GRectangle& r2) {
return floatingPointEqual(r1.x, r2.x)
&& floatingPointEqual(r1.y, r2.y)
&& floatingPointEqual(r1.width, r2.width)
&& floatingPointEqual(r1.height, r2.height);
}
bool operator !=(const GRectangle& r1, const GRectangle& r2) {
return !(r1 == r2);
}
int hashCode(const GRectangle& r) {
int hash = 0;
for (size_t i = 0; i < sizeof(double) / sizeof(int); i++) {
hash ^= ((int *) &r.x)[i] ^ ((int *) &r.y)[i];
hash ^= ((int *) &r.width)[i] ^ ((int *) &r.height)[i];
}
return hashMask() & hash;
}<|fim▁end|>
|
bool operator !=(const GDimension& d1, const GDimension& d2) {
return !(d1 == d2);
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate crypto;
extern crate regex;
use std::env;
use std::str::FromStr;
use std::io::{BufRead, BufReader};
use std::fs::File;
use std::collections::{HashMap, HashSet};
use crypto::digest::Digest;
use crypto::md5::Md5;
use regex::Regex;
#[derive(Debug)]
enum Lights {
On,
Off,
Toggle
}
#[derive(Debug)]
enum Ops {
And,
Or,
Not,
Lsh,
Rsh,
    //this is not in the file, but simplifies some things
Id
}
//this is a neat lil feature, hm
//look into whether I can map directly to fn pointers or some such
impl FromStr for Ops {
type Err = ();
fn from_str(s: &str) -> Result<Ops, ()> {
match s {
"AND" => Ok(Ops::And),
"OR" => Ok(Ops::Or),
"NOT" => Ok(Ops::Not),
"LSHIFT" => Ok(Ops::Lsh),
"RSHIFT" => Ok(Ops::Rsh),
_ => Err(())
}
}
}
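//a sketch of the fn-pointer idea from the note above (hypothetical, not used below):
//non-capturing closures coerce to fn pointers, so the ops could map directly, e.g.
//  fn op_fn(op: &Ops) -> fn(u16, u16) -> u16 {
//      match *op {
//          Ops::And => |a, b| a & b,
//          Ops::Or => |a, b| a | b,
//          Ops::Lsh => |a, b| a << b,
//          Ops::Rsh => |a, b| a >> b,
//          //Not and Id are unary; ignore the left operand here
//          Ops::Not => |_, b| !b,
//          Ops::Id => |_, b| b,
//      }
//  }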
struct Wire {
name: String,
op: Ops,
//I am... not a fan of this
//there is probably a Right Way using generics
//need to read more docs, variable type _and_ number is... idk
//for now accept this is "a (good|bad) programmer can write javascript in any language"
//essentially faking dynamic typing by parsing out ints on the fly lol
items: (String, String)
}
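//one possible cleanup for the note above (hypothetical, not wired in): an enum
//makes the "literal or wire name" split explicit instead of re-parsing strings
//at lookup time:
//  enum Operand {
//      Literal(u16),
//      Wire(String),
//  }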
impl Wire {
fn new(line: &str) -> (String, Wire) {
let words = line.split(" ").collect::<Vec<_>>();
let name: &str = words[words.len() - 1];
let (op, items) = match words.len() {
3 => (Ops::Id, ("", words[0])),
4 => (Ops::Not, ("", words[1])),
5 => (Ops::from_str(words[1]).unwrap(), (words[0], words[2])),
_ => panic!("this shouldn't happen")
};
let wire = Wire {
name: name.to_string(),
op: op,
//this to_string shit is clearly The Wrong Thing
//but &'static str seems Even More Wrong
//really I want uh... str, but scoped to the function instantiating the struct
//rather than this function
items: (items.0.to_string(), items.1.to_string())
};
(name.to_string(), wire)
}
//so this takes something from the items tuple and returns a u16
//either parsed out, or by calling the parent
//the clever/dangerous bit is in output, matching on ops
//where I get to make assumptions about whether the left item is empty
fn input(&self, item: &str, map: &HashMap<String, Wire>, cache: &mut HashMap<String, u16>) -> u16 {
if cache.contains_key(item) {
return *cache.get(item).unwrap();
}
//this was... a lot more clever before the cache
let val = item.parse::<u16>();
match val {
Ok(val) => {
                //key the cache by the item itself, not self.name, so a literal
                //operand is never cached as this wire's value
                cache.insert(item.to_string(), val);
val
},
Err(_) => {
let val = map.get(item).unwrap().output(map, cache);
cache.insert(item.to_string(), val);
val
}
}
}
fn output(&self, map: &HashMap<String, Wire>, cache: &mut HashMap<String, u16>) -> u16 {
//println!("{} {:?} {}", self.items.0, self.op, self.items.1);
match self.op {
Ops::Id => self.input(&self.items.1, map, cache),
Ops::Not => !self.input(&self.items.1, map, cache),
Ops::And => self.input(&self.items.0, map, cache) & self.input(&self.items.1, map, cache),
Ops::Or => self.input(&self.items.0, map, cache) | self.input(&self.items.1, map, cache),
Ops::Rsh => self.input(&self.items.0, map, cache) >> self.input(&self.items.1, map, cache),
Ops::Lsh => self.input(&self.items.0, map, cache) << self.input(&self.items.1, map, cache)
}
}
}
struct Advent;
impl Advent {
fn d1(&self) -> (i32, usize) {
let input = include_bytes!("../data/d1");
let mut floor = 0;
let mut basement = 0;
for i in 0..input.len() {
            if input[i] == 0x28 { //'('
                floor += 1;
            } else if input[i] == 0x29 { //')'
floor -= 1;
}
if floor == -1 && basement == 0 {
basement = i+1;
}
}
(floor, basement)
}
fn d2(&self) -> (i32, i32) {
let f = File::open("data/d2").unwrap();
let f = BufReader::new(f);
let mut paper = 0;
let mut ribbon = 0;
for line in f.lines() {
let line = line.unwrap();
//zzz can't [l,w,h] yet sadly
let lwh = line.split("x")
.map(|v| v.parse::<i32>().unwrap())
.collect::<Vec<i32>>();
let (a, b, c) = (lwh[0] * lwh[1], lwh[0] * lwh[2], lwh[1] * lwh[2]);
paper += 2 * (a + b + c) + least(a, b, c);
let (p1, p2, p3) = (2 * (lwh[0] + lwh[1]), 2 * (lwh[0] + lwh[2]), 2 * (lwh[1] + lwh[2]));
ribbon += least(p1, p2, p3) + lwh[0] * lwh[1] * lwh[2];
}
(paper, ribbon)
}
fn d3(&self) -> (usize, usize) {
let input = include_bytes!("../data/d3");
//part 1
let mut pos = (0, 0);
let mut visited = HashSet::with_capacity(input.len());
visited.insert((0, 0));
//part 2
let mut pos_s = (0, 0);
let mut pos_r = (0, 0);
let mut visited_sr = HashSet::with_capacity(input.len());
visited_sr.insert((0, 0));
for i in 0..input.len() {
//part 1
            match input[i] {
                0x5e => pos = (pos.0, pos.1 + 1), //'^' north
                0x76 => pos = (pos.0, pos.1 - 1), //'v' south
                0x3e => pos = (pos.0 + 1, pos.1), //'>' east
                0x3c => pos = (pos.0 - 1, pos.1), //'<' west
                _ => ()
            }
visited.insert(pos);
//part 2
//lol yy pp yy pp oh well
if i % 2 == 0 {
match input[i] {
0x5e => pos_s = (pos_s.0, pos_s.1 + 1),
0x76 => pos_s = (pos_s.0, pos_s.1 - 1),
0x3e => pos_s = (pos_s.0 + 1, pos_s.1),
0x3c => pos_s = (pos_s.0 - 1, pos_s.1),
_ => ()
}
visited_sr.insert(pos_s);
} else {
match input[i] {
0x5e => pos_r = (pos_r.0, pos_r.1 + 1),
0x76 => pos_r = (pos_r.0, pos_r.1 - 1),
0x3e => pos_r = (pos_r.0 + 1, pos_r.1),
0x3c => pos_r = (pos_r.0 - 1, pos_r.1),
_ => ()
}
visited_sr.insert(pos_r);
}
}
(visited.len(), visited_sr.len())
}
fn d4(&self, hash_start: &str) -> usize {
let key = "iwrupvqb";
let mut md5 = Md5::new();
let mut i = 0;
let mut result = String::new();
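        //brute force: hash key + i for i = 1, 2, ... until the hex digest
        //starts with hash_start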
while !result.starts_with(&hash_start) {
i += 1;
let attempt = format!("{}{}", key, i);
md5.input_str(&attempt);
result = md5.result_str();
md5.reset();
}
i
}
fn d5(&self) -> (i32, i32) {
let f = File::open("data/d5").unwrap();
let f = BufReader::new(f);
//regex macro sadly also gated atm
let vowel = Regex::new("[aeiou]").unwrap();
let blacklist = Regex::new("ab|cd|pq|xy").unwrap();
let mut hits = 0;
let mut hits_2 = 0;
for line in f.lines() {
let line = line.unwrap();
let line_b = line.as_bytes();
//part 1
let mut test_double = false;
for i in 0..line_b.len()-1 {
if line_b[i] == line_b[i+1] {
test_double = true;
break;
}
}
if vowel.find_iter(&line).count() > 2 && !blacklist.is_match(&line) && test_double {
hits += 1;
}
//part 2
//this is so absurd w/o backrefs lol
let mut got_dubs = false;
let mut got_split = false;
for i in 0..line_b.len()-1 {
if !got_dubs {
let dubs = Regex::new(&format!("{}{}", line_b[i] as char, line_b[i+1] as char)).unwrap();
let dubs_count = dubs.find_iter(&line).count();
//I'm angry that this works because it shouldn't
//the dataset doesn't have an item to break it
if dubs_count > 1 {
got_dubs = true;
}
}
if i < line_b.len()-2 && line_b[i] == line_b[i+2] {
got_split = true;
}
}
if got_dubs && got_split {
hits_2 += 1;
}
}
(hits, hits_2)
}
//rather just keep one fn than yp the whole thing to change like five lines
fn d6(&self) -> (usize, usize) {
let mut grid = vec![false; 1000000];
let mut grid_scalar = vec![0; 1000000];
let nums = Regex::new("[0-9]+").unwrap();
let w = 1000;
let f = File::open("data/d6").unwrap();
let f = BufReader::new(f);
for line in f.lines() {
let line = line.unwrap();
//this is driving me nuts, can prolly be accomplished in one line
//but I can't figure out how to get it to do the alloc in the map
let coords: Vec<_> = nums.find_iter(&line).map(|tupl| &line[tupl.0..tupl.1]).collect();
let coords: Vec<_> = coords.iter().map(|val| val.parse::<usize>().unwrap()).collect();
let action = if line.starts_with("turn on") {
Lights::On
} else if line.starts_with("turn off") {
Lights::Off
} else if line.starts_with("toggle") {
Lights::Toggle
} else {
panic!("this should not happen");
};
for i in coords[0]..coords[2]+1 {
let m = i * w;
for j in coords[1]..coords[3]+1 {
let n = m + j;
grid[n] = match action {
Lights::On => true,
Lights::Off => false,
Lights::Toggle => !grid[n]
};
match action {
Lights::On => {
grid_scalar[n] += 1;
},
Lights::Off => {
if grid_scalar[n] > 0 {
grid_scalar[n] -= 1;
}
},
Lights::Toggle => {
grid_scalar[n] += 2;
}
}
}
}
}
let mut count = 0;
let mut count_scalar = 0;
for i in 0..1000000 {
if grid[i] {
count += 1;
}
count_scalar += grid_scalar[i];
}
(count, count_scalar)
}
fn d7(&self) -> (u16, u16) {
let f = File::open("data/d7").unwrap();
let f = BufReader::new(f);
let mut map = HashMap::new();
let mut cache = HashMap::new();
for line in f.lines() {
let line = line.unwrap();
let (name, wire) = Wire::new(&line);
map.insert(name, wire);
}
let map = map;
let a1 = map.get("a").unwrap().output(&map, &mut cache);
cache.clear();
//cf line 75, String is obviously not what I actually want
cache.insert("b".to_string(), a1);
let a2 = map.get("a").unwrap().output(&map, &mut cache);
(a1, a2)
}
fn d8(&self) -> (usize, usize) {
let f = File::open("data/d8").unwrap();
let f = BufReader::new(f);
let mut count = 0;
let mut count2 = 0;
for line in f.lines() {
let line = line.unwrap();
let line = line.as_bytes();
//quotes
count += 2;
count2 += 4;
let mut i = 0;
while i < line.len() {
//backslash, step ahead
if line[i] == 0x5c {
i += 1;
//hex escape
if line[i] == 0x78 {
count += 3;
count2 += 1;
//quote or backslash
} else {
count += 1;
count2 += 2;
}
}
//step ahead. two steps cleanly avoids catching escaped backslash
i += 1;
}
}
(count, count2)
}
fn d9(&self) -> (usize, usize) {
let f = File::open("data/d9").unwrap();
let f = BufReader::new(f);
let mut graph: [[usize; 8]; 8] = [[0; 8]; 8];
//this is naive but the input is well-ordered so eh
let mut x = 0;
let mut y = 1;
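        //fill the symmetric 8x8 distance matrix; the input lists pairs in
        //upper-triangle order (0,1), (0,2), ..., which (x, y) walks below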
for line in f.lines() {
let cost = line.unwrap()
.split(" ")
.last().unwrap()
.parse::<usize>().unwrap();
graph[x][y] = cost;<|fim▁hole|> graph[y][x] = cost;
if y >= 7 {
x += 1;
y = x + 1;
} else {
y += 1;
}
}
let mut vec = vec!(0,1,2,3,4,5,6,7);
let mut routes = Vec::new();
permute(8, &mut vec, &mut routes);
let routes = routes;
let mut shortest = std::usize::MAX;
let mut longest = 0;
for route in routes {
let mut sum = 0;
for node in 0..route.len()-1 {
sum += graph[route[node] as usize][route[node+1] as usize];
}
if sum < shortest {
shortest = sum;
}
if sum > longest {
longest = sum;
}
}
(shortest, longest)
}
fn d10(&self) -> (usize, usize) {
        let mut vec = vec![1, 3, 2, 1, 1, 3, 1, 1, 1, 2];
for _ in 0..40 {
vec = look_n_say(&vec);
}
let len1 = vec.len();
for _ in 0..10 {
vec = look_n_say(&vec);
}
let len2 = vec.len();
(len1, len2)
}
fn d11(&self, input: &str) -> String {
let mut pw = input.as_bytes().to_vec();
let mut dubs = false;
let mut trips = false;
let mut clean = true;
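        //keep incrementing until the password has an ascending straight of
        //three, two different doubled letters, and none of i, l, o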
while !(dubs && trips && clean) {
dubs = false;
trips = false;
clean = true;
let mut first_dub = 0;
pw[7] += 1;
for i in (0..8).rev() {
//above z
if pw[i] == 0x7b {
pw[i-1] += 1;
for j in i..8 {
pw[j] = 0x61;
}
}
}
//wanted to avoid 2 passes but, laziest bug fix
for i in (0..8).rev() {
//iol
if pw[i] == 0x69 || pw[i] == 0x6c || pw[i] == 0x6f {
pw[i] += 1;
if i != 7 {
for j in i+1..8 {
pw[j] = 0x61;
}
pw[7] -= 1;
}
clean = false;
break;
}
if i > 1 && pw[i] == (pw[i-1] + 1) && pw[i] == (pw[i-2] + 2) {
trips = true;
}
if i > 0 && pw[i] == pw[i-1] {
if first_dub == 0 {
first_dub = pw[i];
} else if pw[i] != first_dub {
dubs = true;
}
}
}
}
std::str::from_utf8(&pw).unwrap().to_string()
}
fn d12(&self) -> i32 {
let json = include_str!("../data/d12.json");
        let num = Regex::new("-?[0-9]+").unwrap();
let sum = num.find_iter(&json)
.map(|tupl| json[tupl.0..tupl.1].parse::<i32>().unwrap())
.fold(0i32, |acc, val| acc + val);
println!("{}", sum);
sum
}
}
fn least(x: i32, y: i32, z: i32) -> i32 {
if x < y && x < z {x} else if y < z {y} else {z}
}
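//Heap's algorithm: generates every permutation of vec, pushing a copy of each into acc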
fn permute(n: usize, vec: &mut Vec<u8>, acc: &mut Vec<Vec<u8>>) {
if n == 1 {
acc.push((*vec).to_vec());
} else {
for i in 0..n-1 {
permute(n-1, vec, acc);
if n % 2 == 0 {
let swap = vec[i];
vec[i] = vec[n-1];
vec[n-1] = swap;
} else {
let swap = vec[0];
vec[0] = vec[n-1];
vec[n-1] = swap;
}
}
permute(n-1, vec, acc);
}
}
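//one look-and-say step: run-length encodes arr, emitting (count, digit) for each run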
fn look_n_say(arr: &Vec<u8>) -> Vec<u8> {
let mut prev = 0;
let mut count = 0;
let mut buff = Vec::new();
for n in arr {
if *n == prev {
count += 1;
} else {
if count > 0 {
buff.push(count);
buff.push(prev);
}
prev = *n;
count = 1;
}
}
if count > 0 {
buff.push(count);
buff.push(prev);
}
buff
}
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() > 1 {
match &*args[1] {
"d1" => {
let (x, y) = Advent.d1();
println!("floor: {}\nbasement: {}", x, y);
},
"d2" => {
let (x, y) = Advent.d2();
println!("paper: {}\nribbon: {}", x, y);
},
"d3" => {
let (x, y) = Advent.d3();
println!("presents:\n y1: {}\n y2: {}", x, y);
},
"d4" => {
println!("this might take awhile");
let x = Advent.d4("00000");
let y = Advent.d4("000000");
println!("five-char: {}\nsix-char: {}", x, y);
},
"d5" => {
let (x, y) = Advent.d5();
println!("hits1: {}\nhits2: {}", x, y);
},
"d6" => {
let (x, y) = Advent.d6();
println!("count: {}\nscalar: {}", x, y);
},
"d7" => {
let (x, y) = Advent.d7();
println!("first pass: {}\nsecond pass: {}", x, y);
},
"d8" => {
let (x, y) = Advent.d8();
println!("count: {}\ncount2: {}", x, y);
},
"d9" => {
let (x, y) = Advent.d9();
println!("shortest: {}\nlongest: {}", x, y);
},
"d10" => {
let (x, y) = Advent.d10();
println!("length 40: {}\nlength 50: {}", x, y);
},
"d11" => {
let x = Advent.d11("vzbxkghb");
let y = Advent.d11("vzbxxyzz");
println!("pw 1: {}\npw 2: {}", x, y);
},
"d12" => {
let x = Advent.d12();
println!("sum: {}", x);
},
"scratch" => {
},
_ => println!("something happened")
}
}
}
#[test]
fn test_d1() {
let (x, y) = Advent.d1();
assert_eq!(x, 232);
assert_eq!(y, 1783);
}
#[test]
fn test_d2() {
let (x, y) = Advent.d2();
assert_eq!(x, 1606483);
assert_eq!(y, 3842356);
}
#[test]
fn test_d3() {
let (x, y) = Advent.d3();
assert_eq!(x, 2572);
assert_eq!(y, 2631);
}
//the problem itself calls for five- and six-char collisions
//that is uh, like ten million hashes to compute
//so, this instead
#[test]
fn test_d4_three() {
let x = Advent.d4("000");
assert_eq!(x, 2215);
}
#[test]
#[ignore]
fn test_d4_five() {
let x = Advent.d4("00000");
assert_eq!(x, 346386);
}
#[test]
#[ignore]
fn test_d4_six() {
let x = Advent.d4("000000");
assert_eq!(x, 9958218);
}
#[test]
fn test_d5() {
let (x, y) = Advent.d5();
assert_eq!(x, 238);
assert_eq!(y, 69);
}
#[test]
fn test_d6() {
let (x, y) = Advent.d6();
assert_eq!(x, 400410);
assert_eq!(y, 15343601);
}
#[test]
fn test_d7() {
let (x, y) = Advent.d7();
assert_eq!(x, 3176);
assert_eq!(y, 14710);
}
#[test]
fn test_d8() {
let (x, y) = Advent.d8();
assert_eq!(x, 1333);
assert_eq!(y, 2046);
}
#[test]
fn test_d9() {
let (x, y) = Advent.d9();
assert_eq!(x, 207);
assert_eq!(y, 804);
}
#[test]
fn test_d10() {
let (x, y) = Advent.d10();
assert_eq!(x, 492982);
assert_eq!(y, 6989950);
}
#[test]
fn test_d11() {
let x = Advent.d11("vzbxkghb");
let y = Advent.d11("vzbxxyzz");
assert_eq!(x, "vzbxxyzz");
assert_eq!(y, "vzcaabcc");
}<|fim▁end|>
| |
<|file_name|>ff_profile_tests.py<|end_file_name|><|fim▁begin|># Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.<|fim▁hole|>import zipfile
try:
from io import BytesIO
except ImportError:
from cStringIO import StringIO as BytesIO
try:
unicode
except NameError:
unicode = str
from selenium import webdriver
from selenium.webdriver.common.proxy import Proxy, ProxyType
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer
class TestFirefoxProfile:
def setup_method(self, method):
self.capabilities = {'marionette': False}
self.driver = webdriver.Firefox(capabilities=self.capabilities)
self.webserver = SimpleWebServer()
self.webserver.start()
def test_that_we_can_accept_a_profile(self):
profile1 = webdriver.FirefoxProfile()
profile1.set_preference("browser.startup.homepage_override.mstone", "")
profile1.set_preference("startup.homepage_welcome_url", self.webserver.where_is('simpleTest.html'))
profile1.update_preferences()
profile2 = webdriver.FirefoxProfile(profile1.path)
driver = webdriver.Firefox(
capabilities=self.capabilities,
firefox_profile=profile2)
title = driver.title
driver.quit()
assert "Hello WebDriver" == title
def test_that_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference("sample.preference", "hi there")
profile.update_preferences()
assert 'hi there' == profile.default_preferences["sample.preference"]
encoded = profile.encoded
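        # decodestring is the old py2-era name; it survives in py3 as a
        # deprecated alias of base64.decodebytes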
decoded = base64.decodestring(encoded)
fp = BytesIO(decoded)
zip = zipfile.ZipFile(fp, "r")
for entry in zip.namelist():
if entry.endswith("user.js"):
user_js = zip.read(entry)
for line in user_js.splitlines():
if line.startswith(b'user_pref("sample.preference",'):
assert line.endswith(b'hi there");')
# there should be only one user.js
break
fp.close()
def test_that_unicode_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference('sample.preference.2', unicode('hi there'))
profile.update_preferences()
assert 'hi there' == profile.default_preferences["sample.preference.2"]
encoded = profile.encoded
decoded = base64.decodestring(encoded)
fp = BytesIO(decoded)
zip = zipfile.ZipFile(fp, "r")
for entry in zip.namelist():
if entry.endswith('user.js'):
user_js = zip.read(entry)
for line in user_js.splitlines():
if line.startswith(b'user_pref("sample.preference.2",'):
assert line.endswith(b'hi there");')
# there should be only one user.js
break
fp.close()
def test_that_integer_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference("sample.int.preference", 12345)
profile.update_preferences()
assert 12345 == profile.default_preferences["sample.int.preference"]
def test_that_boolean_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference("sample.bool.preference", True)
profile.update_preferences()
assert profile.default_preferences["sample.bool.preference"] is True
def test_that_we_delete_the_profile(self):
path = self.driver.firefox_profile.path
self.driver.quit()
assert not os.path.exists(path)
def test_profiles_do_not_share_preferences(self):
self.profile1 = webdriver.FirefoxProfile()
self.profile1.accept_untrusted_certs = False
self.profile2 = webdriver.FirefoxProfile()
# Default is true. Should remain so.
assert self.profile2.default_preferences["webdriver_accept_untrusted_certs"] is True
def test_none_proxy_is_set(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = None
try:
self.profile.set_proxy(proxy)
assert False, "exception after passing empty proxy is expected"
except ValueError:
pass
assert "network.proxy.type" not in self.profile.default_preferences
def test_unspecified_proxy_is_set(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
self.profile.set_proxy(proxy)
assert "network.proxy.type" not in self.profile.default_preferences
def test_manual_proxy_is_set_in_profile(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
proxy.no_proxy = 'localhost, foo.localhost'
proxy.http_proxy = 'some.url:1234'
proxy.ftp_proxy = None
proxy.sslProxy = 'some2.url'
self.profile.set_proxy(proxy)
assert self.profile.default_preferences["network.proxy.type"] == ProxyType.MANUAL['ff_value']
assert self.profile.default_preferences["network.proxy.no_proxies_on"] == 'localhost, foo.localhost'
assert self.profile.default_preferences["network.proxy.http"] == 'some.url'
assert self.profile.default_preferences["network.proxy.http_port"] == 1234
assert self.profile.default_preferences["network.proxy.ssl"] == 'some2.url'
assert "network.proxy.ssl_port" not in self.profile.default_preferences
assert "network.proxy.ftp" not in self.profile.default_preferences
def test_pac_proxy_is_set_in_profile(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
proxy.proxy_autoconfig_url = 'http://some.url:12345/path'
self.profile.set_proxy(proxy)
assert self.profile.default_preferences["network.proxy.type"] == ProxyType.PAC['ff_value']
assert self.profile.default_preferences["network.proxy.autoconfig_url"] == 'http://some.url:12345/path'
def test_autodetect_proxy_is_set_in_profile(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
proxy.auto_detect = True
self.profile.set_proxy(proxy)
assert self.profile.default_preferences["network.proxy.type"] == ProxyType.AUTODETECT['ff_value']
def teardown_method(self, method):
try:
self.driver.quit()
except:
pass # don't care since we may have killed the browser above
self.webserver.stop()
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
def teardown_module(module):
try:
TestFirefoxProfile.driver.quit()
except:
        pass # don't care since we may have killed the browser above<|fim▁end|>
|
import base64
import os
|
<|file_name|>minification.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
"""
JS and CSS minification
============================
Author: Toni Heittola ([email protected])
This script minifies the plugin's JS and CSS resources and bundles them into single files.
"""
import os
import sys
import io
import argparse
import textwrap
__version_info__ = ('0', '1', '0')
__version__ = '.'.join(__version_info__)
def main(argv):
parser = argparse.ArgumentParser(
prefix_chars='-+',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''\
JS and CSS minification
---------------------------------------------
Author: Toni Heittola ( [email protected] )
'''))
parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + __version__)
args = parser.parse_args()
print("JS and CSS minification")
print("-----------------------")
minify_css_directory2(source='css', target='css.min')
minify_js_directory(source='js', target='js.min')
def minify_css_directory(source, target):
"""
Move CSS resources from source directory to target directory and minify. Using csscompressor.
"""
from csscompressor import compress
if os.path.isdir(source):
if not os.path.exists(target):
os.makedirs(target)
for root, dirs, files in os.walk(source):
for current_file in files:
if current_file.endswith(".css"):
current_file_path = os.path.join(root, current_file)
print(" " + current_file_path)
with open(current_file_path) as css_file:
with open(os.path.join(target, current_file.replace('.css', '.min.css')), "w") as minified_file:
minified_file.write(compress(css_file.read()))
def minify_css_directory2(source, target):
"""
Move CSS resources from source directory to target directory and minify. Using rcssmin.
"""
import rcssmin
if os.path.isdir(source):
if not os.path.exists(target):
os.makedirs(target)
for root, dirs, files in os.walk(source):
for current_file in files:
if current_file.endswith(".css"):
current_file_path = os.path.join(root, current_file)
print(" " + current_file_path)
with open(current_file_path) as css_file:
with open(os.path.join(target, current_file.replace('.css', '.min.css')), "w") as minified_file:
minified_file.write(rcssmin.cssmin(css_file.read(), keep_bang_comments=True))
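    # concatenate every minified file (except the bundle itself) into one bundle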
bundle_data = []
for root, dirs, files in os.walk(target):
for current_file in files:
if current_file.endswith(".css") and current_file != 'datatable.bundle.min.css':
current_file_path = os.path.join(root, current_file)
css_file = open(current_file_path, "r")
bundle_data += css_file.readlines()
css_file.close()
bundle_filename = os.path.join(target, 'datatable.bundle.min.css')
bundle_file = open(bundle_filename, 'w+')
bundle_file.write(''.join(bundle_data))
bundle_file.close()
print(" " + bundle_filename)
def minify_js_directory(source, target):
"""
Move JS resources from source directory to target directory and minify.
"""
from jsmin import jsmin
if os.path.isdir(source):
if not os.path.exists(target):
os.makedirs(target)
for root, dirs, files in os.walk(source):
for current_file in files:
if current_file.endswith(".js"):
current_file_path = os.path.join(root, current_file)
print(" " + current_file_path)
with open(current_file_path) as js_file:
with open(os.path.join(target, current_file.replace('.js', '.min.js')), "w") as minified_file:
minified_file.write(jsmin(js_file.read()))
bundle_data = []
for root, dirs, files in os.walk(target):
for current_file in files:
if current_file.endswith(".js") and current_file != 'datatable.bundle.min.js':
current_file_path = os.path.join(root, current_file)
js_file = open(current_file_path, "r")
bundle_data += js_file.readlines()
js_file.close()
bundle_filename = os.path.join(target, 'datatable.bundle.min.js')
bundle_file = open(bundle_filename, 'w+')
bundle_file.write(''.join(bundle_data))
bundle_file.close()
print(" " + bundle_filename)
if __name__ == "__main__":
try:
sys.exit(main(sys.argv))
except (ValueError, IOError) as e:
sys.exit(e)<|fim▁end|>
|
#!/usr/bin/env python
|
<|file_name|>bitcoin_eo.ts<|end_file_name|><|fim▁begin|><TS language="eo" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Create a new address</source>
<translation>Krei novan adreson</translation>
</message>
<message>
<source>&New</source>
<translation>&Nova</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopii elektitan adreson al la tondejo</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopii</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Fermi</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Kopii Adreson</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Forigi la elektitan adreson el la listo</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporti al dosiero la datumojn el la aktuala langeto</translation>
</message>
<message>
<source>&Export</source>
<translation>&Eksporti</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Forigi</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Elektu la alsendotan adreson</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Elektu la ricevontan adreson</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&Elekti</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Sendaj adresoj</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Ricevaj adresoj</translation>
</message>
<message>
<source>These are your Emercoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Jen viaj Emermon-adresoj por sendi pagojn. Zorge kontrolu la sumon kaj la alsendan adreson antaŭ ol sendi.</translation>
</message>
<message>
<source>These are your Emercoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Jen viaj emermonaj adresoj por ricevi pagojn. Estas konsilinde uzi apartan ricevan adreson por ĉiu transakcio.</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Kopii &Etikedon</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Redakti</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Eksporti Adresliston</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Perkome disigita dosiero (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>eksportado malsukcesinta</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Dialogo pri pasfrazo</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Enigu pasfrazon</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nova pasfrazo</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Ripetu la novan pasfrazon</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Ĉifri la monujon</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ĉi tiu operacio bezonas vian monujan pasfrazon, por malŝlosi la monujon.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Malŝlosi la monujon</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ĉi tiu operacio bezonas vian monujan pasfrazon, por malĉifri la monujon.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Malĉifri la monujon</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Ŝanĝi la pasfrazon</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Tajpu la malnovan kaj novan monujajn pasfrazojn.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Konfirmo de ĉifrado de la monujo</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR EMERCOINS</b>!</source>
<translation>Atentu! Se vi ĉifras vian monujon kaj perdas la pasfrazon, vi <b>PERDOS LA TUTON DE VIA EMERMONO</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Ĉu vi certas, ke vi volas ĉifri la monujon?</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>GRAVE: antaŭaj sekur-kopioj de via monujo-dosiero estas forigindaj kiam vi havas nove kreitan ĉifritan monujo-dosieron. Pro sekureco, antaŭaj kopioj de la neĉifrita dosiero ne plu funkcios tuj kiam vi ekuzos la novan ĉifritan dosieron.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Atentu: la majuskla baskulo estas ŝaltita!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>La monujo estas ĉifrita</translation>
</message>
<message>
<source>Emercoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your emercoins from being stolen by malware infecting your computer.</source>
<translation>Emermono nun fermiĝos por fini la ĉifradon. Memoru, ke eĉ ĉifrado ne protektas kontraŭ ĉiu atako, ekz. se viruso infektus vian komputilon.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Ĉifrado de la monujo fiaskis</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Ĉifrado de monujo fiaskis pro interna eraro. Via monujo ne estas ĉifrita.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>La pasfrazoj entajpitaj ne samas.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Malŝloso de la monujo fiaskis</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La pasfrazo enigita por ĉifrado de monujo ne ĝustas.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Malĉifrado de la monujo fiaskis</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Vi sukcese ŝanĝis la pasfrazon de la monujo.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Subskribi &mesaĝon...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Sinkronigante kun reto...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Superrigardo</translation>
</message>
<message>
<source>Node</source>
<translation>Nodo</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Vidigi ĝeneralan superrigardon de la monujo</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transakcioj</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Esplori historion de transakcioj</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Eliri</translation>
</message>
<message>
<source>Quit application</source>
<translation>Eliri la aplikaĵon</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Pri &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Vidigi informojn pri Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Agordoj...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>Ĉifri &Monujon...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Krei sekurkopion de la monujo...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>Ŝanĝi &Pasfrazon...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Sendaj adresoj...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Ricevaj adresoj...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Malfermi &URI-on...</translation>
</message>
<message>
<source>Emercoin Core client</source>
<translation>kliento de emermon-kerno</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Importado de blokoj el disko...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindeksado de blokoj sur disko...</translation>
</message>
<message>
<source>Send coins to a Emercoin address</source>
<translation>Sendi monon al Emermon-adreso</translation>
</message>
<message>
<source>Modify configuration options for Emercoin</source>
<translation>Modifi agordaĵojn por Emermono</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Krei alilokan sekurkopion de monujo</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ŝanĝi la pasfrazon por ĉifri la monujon</translation>
</message>
<message>
<source>&Debug window</source>
<translation>Sen&cimiga fenestro</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Malfermi konzolon de sencimigo kaj diagnozo</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Kontroli mesaĝon...</translation>
</message>
<message>
<source>Emercoin</source>
<translation>Emermono</translation>
</message>
<message>
<source>Wallet</source>
<translation>Monujo</translation>
</message>
<message>
<source>&Send</source>
<translation>&Sendi</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Ricevi</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Montri / Kaŝi</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Montri aŭ kaŝi la ĉefan fenestron</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Ĉifri la privatajn ŝlosilojn de via monujo</translation>
</message>
<message>
<source>Sign messages with your Emercoin addresses to prove you own them</source>
<translation>Subskribi mesaĝojn per via Emermon-adresoj por pravigi, ke vi estas la posedanto</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Emercoin addresses</source>
<translation>Kontroli mesaĝojn por kontroli ĉu ili estas subskribitaj per specifaj Emermon-adresoj</translation>
</message>
<message>
<source>&File</source>
<translation>&Dosiero</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Agordoj</translation>
</message>
<message>
<source>&Help</source>
<translation>&Helpo</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Langeto-breto</translation>
</message>
<message>
<source>Emercoin Core</source>
<translation>Kerno de Emermono</translation>
</message>
<message>
<source>Request payments (generates QR codes and emercoin: URIs)</source>
<translation>Peti pagon (kreas QR-kodojn kaj URI-ojn kun prefikso emercoin:)</translation>
</message>
<message>
<source>&About Emercoin Core</source>
<translation>&Pri la Emermona Kerno</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Vidigi la liston de uzitaj sendaj adresoj kaj etikedoj</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Vidigi la liston de uzitaj ricevaj adresoj kaj etikedoj</translation>
</message>
<message>
<source>Open a emercoin: URI or payment request</source>
<translation>Malfermi emercoin:-URI-on aŭ pagpeton</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Komandliniaj agordaĵoj</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to Emercoin network</source>
<translation><numerusform>%n aktiva konekto al la emermona reto</numerusform><numerusform>%n aktivaj konektoj al la emermona reto</numerusform></translation>
</message>
<message>
<source>No block source available...</source>
<translation>Neniu fonto de blokoj trovebla...</translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n horo</numerusform><numerusform>%n horoj</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n tago</numerusform><numerusform>%n tagoj</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n semajno</numerusform><numerusform>%n semajnoj</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 kaj %2</translation>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation><numerusform>%n jaro</numerusform><numerusform>%n jaroj</numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>mankas %1</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Lasta ricevita bloko kreiĝis antaŭ %1.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transakcioj por tio ankoraŭ ne videblas.</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
<message>
<source>Warning</source>
<translation>Averto</translation>
</message>
<message>
<source>Information</source>
<translation>Informoj</translation>
</message>
<message>
<source>Up to date</source>
<translation>Ĝisdata</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Ĝisdatigante...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Sendita transakcio</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Envenanta transakcio</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Dato: %1
Sumo: %2
Tipo: %3
Adreso: %4
</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Monujo estas <b>ĉifrita</b> kaj aktuale <b>malŝlosita</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Monujo estas <b>ĉifrita</b> kaj aktuale <b>ŝlosita</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Reta Averto</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Quantity:</source>
<translation>Kvanto:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bajtoj:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Sumo:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioritato:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Krompago:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Post krompago:</translation>
</message>
<message>
<source>Change:</source>
<translation>Restmono:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(mal)elekti ĉion</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Arboreĝimo</translation>
</message>
<message>
<source>List mode</source>
<translation>Listreĝimo</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Konfirmoj</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Konfirmita</translation>
</message>
<message>
<source>Priority</source>
<translation>Prioritato</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopii adreson</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopii etikedon</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopii transakcian ID-on</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Ŝlosi la neelspezitajn</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Malŝlosi la neelspezitajn</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopii kvanton</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopii krompagon</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopii post krompago</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopii bajtojn</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopii prioritaton</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopii restmonon</translation>
</message>
<message>
<source>highest</source>
<translation>plej alta</translation>
</message>
<message>
<source>higher</source>
<translation>pli alta</translation>
</message>
<message>
<source>high</source>
<translation>alta</translation>
</message>
<message>
<source>medium-high</source>
<translation>mezalta</translation>
</message>
<message>
<source>medium</source>
<translation>meza</translation>
</message>
<message>
<source>low-medium</source>
<translation>mezmalalta</translation>
</message>
<message>
<source>low</source>
<translation>malalta</translation>
</message>
<message>
<source>lower</source>
<translation>pli malalta</translation>
</message>
<message>
<source>lowest</source>
<translation>plej malalta</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 ŝlosita)</translation>
</message>
<message>
<source>none</source>
<translation>neniu</translation>
</message>
<message>
<source>yes</source>
<translation>jes</translation>
</message>
<message>
<source>no</source>
<translation>ne</translation>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Tiu ĉi etikedo ruĝiĝas se la grando de la transakcio estas pli ol 1000 bajtoj.</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Tio signifas, ke krompago de almenaŭ po %1 por ĉiu kB estas deviga.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Povas varii po +/- 1 bajton por ĉiu enigo.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transakcioj kun pli alta prioritato havas pli altan ŝancon inkluziviĝi en bloko.</translation>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Tiu ĉi etikedo ruĝiĝas se iu ajn ricevonto ricevos sumon malpli ol %1.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>restmono de %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(restmono)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Redakti Adreson</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etikedo</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>La etikedo ligita al tiu ĉi adreslistero</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>La adreso ligita al tiu ĉi adreslistero. Eblas modifi tion nur por sendaj adresoj.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adreso</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Nova adreso por ricevi</translation>
</message>
<message>
<source>New sending address</source>
<translation>Nova adreso por sendi</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Redakti adreson por ricevi</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Redakti adreson por sendi</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>La adreso enigita "%1" jam ekzistas en la adresaro.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Emercoin address.</source>
<translation>La adreso enigita "%1" ne estas valida Emermon-adreso.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Ne eblis malŝlosi monujon.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Fiaskis kreo de nova ŝlosilo.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Kreiĝos nova dosierujo por la datumoj.</translation>
</message>
<message>
<source>name</source>
<translation>nomo</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Tiu dosierujo jam ekzistas. Aldonu %1 si vi volas krei novan dosierujon ĉi tie.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Vojo jam ekzistas, kaj ne estas dosierujo.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Ne eblas krei dosierujon por datumoj ĉi tie.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Emercoin Core</source>
<translation>Kerno de Emermono</translation>
</message>
<message>
<source>version</source>
<translation>versio</translation>
</message>
<message>
<source>About Emercoin Core</source>
<translation>Pri la Emermona Kerno</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Komandliniaj agordaĵoj</translation>
</message>
<message>
<source>Usage:</source>
<translation>Uzado:</translation>
</message>
<message>
<source>command-line options</source>
<translation>komandliniaj agordaĵoj</translation>
</message>
<message>
<source>UI options</source>
<translation>UI-agordaĵoj</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Agordi lingvon, ekzemple "de_DE" (defaŭlte: tiu de la sistemo)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Lanĉiĝi plejete</translation>
</message>
<message>
<source>Show splash screen on startup (default: 1)</source>
<translation>Montri salutŝildon dum lanĉo (defaŭlte: 1)</translation>
</message>
<message>
<source>Choose data directory on startup (default: 0)</source>
<translation>Elekti dosierujon por datumoj dum lanĉo (defaŭlte: 0)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Bonvenon</translation>
</message>
<message>
<source>Welcome to Emercoin Core.</source>
<translation>Bonvenon al la emermona kerno, Emercoin Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Emercoin Core will store its data.</source>
<translation>Dum tiu ĉi unua uzo de la programo, vi povas elekti lokon, kie Emercoin Core stokos siajn datumojn.</translation>
</message>
<message>
<source>Emercoin Core will download and store a copy of the Emercoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>Emercoin Core elŝutos kaj konservos kopion de la emermona blokĉeno. Almenaŭ %1GB da datumoj konserviĝos en tiu loko, kaj tio poiome kreskos. Ankaŭ via monujo konserviĝos en tiu dosierujo.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Uzi la defaŭltan dosierujon por datumoj</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Uzi alian dosierujon por datumoj:</translation>
</message>
<message>
<source>Emercoin Core</source>
<translation>Kerno de Emermono</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Malfermi URI-on</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Malfermi pagpeton el URI aŭ dosiero</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Elektu la dosieron de la pagpeto</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Elektu la malfermotan dosieron de la pagpeto</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Agordaĵoj</translation>
</message>
<message>
<source>&Main</source>
<translation>Ĉ&efa</translation>
</message>
<message>
<source>Automatically start Emercoin after logging in to the system.</source>
<translation>Aŭtomate lanĉi Emermonon post ensaluto al la sistemo.</translation>
</message>
<message>
<source>&Start Emercoin on system login</source>
<translation>&Lanĉi Emermonon tuj post ensaluto al la sistemo</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Grando de &datumbaza kaŝmemoro</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Reagordi ĉion al defaŭltaj valoroj.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Rekomenci agordadon</translation>
</message>
<message>
<source>&Network</source>
<translation>&Reto</translation>
</message>
<message>
<source>Automatically open the Emercoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Aŭtomate malfermi la kursilan pordon por Emermono. Tio funkcias nur se via kursilo havas la UPnP-funkcion, kaj se tiu ĉi estas ŝaltita.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Mapigi pordon per &UPnP</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Prokurila &IP:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Pordo:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>la pordo de la prokurilo (ekz. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Fenestro</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Montri nur sistempletan piktogramon post minimumigo de la fenestro.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimumigi al la sistempleto anstataŭ al la taskopleto</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimumigi la aplikaĵon anstataŭ eliri kaj ĉesi kiam la fenestro estas fermita. Se tiu ĉi estas agordita, la aplikaĵo ĉesas nur kiam oni elektas "Eliri" el la menuo.</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimumigi je fermo</translation>
</message>
<message>
<source>&Display</source>
<translation>&Aspekto</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>&Lingvo de la fasado:</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting Emercoin.</source>
<translation>Vi povas elekti ĉi tie la lingvon uzatan en la aplikaĵo. Tiu agordo ekefikos nur post relanĉo de Emermono.</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Unuo por vidigi sumojn:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Elekti la defaŭltan manieron por montri emermonajn sumojn en la interfaco, kaj kiam vi sendos emermonon.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Ĉu montri detalan adres-regilon, aŭ ne.</translation>
</message>
<message>
<source>&OK</source>
<translation>&Bone</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Nuligi</translation>
</message>
<message>
<source>default</source>
<translation>defaŭlta</translation>
</message>
<message>
<source>none</source>
<translation>neniu</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Konfirmi reŝargon de agordoj</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>La prokurila adreso estas malvalida.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formularo</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Emercoin network after a connection is established, but this process has not completed yet.</source>
<translation>Eblas, ke la informoj videblaj ĉi tie estas eksdataj. Via monujo aŭtomate sinkroniĝas kun la emermona reto post kiam konekto stariĝas, sed tiu procezo ankoraŭ ne finfariĝis.</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>via aktuala elspezebla saldo</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>la sumo de transakcioj ankoraŭ ne konfirmitaj, kiuj ankoraŭ ne elspezeblas</translation>
</message>
<message>
<source>Immature:</source>
<translation>Nematura:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Minita saldo, kiu ankoraŭ ne maturiĝis</translation>
</message>
<message>
<source>Total:</source>
<translation>Totalo:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>via aktuala totala saldo</translation>
</message>
<message>
<source>out of sync</source>
<translation>nesinkronigita</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>URI handling</source>
<translation>Traktado de URI-oj</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Nevalida pagadreso %1</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>La petita pagosumo de %1 estas tro malgranda (konsiderata kiel polvo).</translation>
</message>
<message>
<source>Payment request error</source>
<translation>Eraro dum pagopeto</translation>
</message>
<message>
<source>Cannot start emercoin: click-to-pay handler</source>
<translation>Ne eblas lanĉi la ilon 'klaki-por-pagi'</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Repago de %1</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Eraro dum komunikado kun %1: %2</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Malbona respondo de la servilo %1</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Pago agnoskita</translation>
</message>
<message>
<source>Network request error</source>
<translation>Eraro dum ret-peto</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>N/A</source>
<translation>neaplikebla</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Konservi Bildon...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>&Kopii Bildon</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>Konservi QR-kodon</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>PNG-bildo (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>Nomo de kliento</translation>
</message>
<message>
<source>N/A</source>
<translation>neaplikebla</translation>
</message>
<message>
<source>Client version</source>
<translation>Versio de kliento</translation>
</message>
<message>
<source>&Information</source>
<translation>&Informoj</translation>
</message>
<message>
<source>Debug window</source>
<translation>Sencimiga fenestro</translation>
</message>
<message>
<source>General</source>
<translation>Ĝenerala</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>uzas OpenSSL-version</translation>
</message>
<message>
<source>Startup time</source>
<translation>Horo de lanĉo</translation>
</message>
<message>
<source>Network</source>
<translation>Reto</translation>
</message>
<message>
<source>Name</source>
<translation>Nomo</translation>
</message><|fim▁hole|> <message>
<source>Number of connections</source>
<translation>Nombro de konektoj</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blokĉeno</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Aktuala nombro de blokoj</translation>
</message>
<message>
<source>Last block time</source>
<translation>Horo de la lasta bloko</translation>
</message>
<message>
<source>&Open</source>
<translation>&Malfermi</translation>
</message>
<message>
<source>&Console</source>
<translation>&Konzolo</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Reta Trafiko</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Forigi ĉion</translation>
</message>
<message>
<source>Totals</source>
<translation>Totaloj</translation>
</message>
<message>
<source>In:</source>
<translation>En:</translation>
</message>
<message>
<source>Out:</source>
<translation>El:</translation>
</message>
<message>
<source>Build date</source>
<translation>Dato de kompilado</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Sencimiga protokoldosiero</translation>
</message>
<message>
<source>Open the Emercoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Malfermi la sencimigan protokoldosieron de Emermono el la aktuala dosierujo por datumoj. Tio eble daŭros plurajn sekundojn por grandaj protokoldosieroj.</translation>
</message>
<message>
<source>Clear console</source>
<translation>Malplenigi konzolon</translation>
</message>
<message>
<source>Welcome to the Emercoin RPC console.</source>
<translation>Bonvenon al la RPC-konzolo de Emermono.</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Uzu la sagojn supran kaj malsupran por esplori la historion, kaj <b>stir-L</b> por malplenigi la ekranon.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Tajpu <b>help</b> por superrigardo de la disponeblaj komandoj.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Kvanto:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etikedo:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Mesaĝo:</translation>
</message>
<message>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
<translation>Reuzi unu el la jam uzitaj ricevaj adresoj. Reuzo de adresoj povas krei problemojn pri sekureco kaj privateco. Ne uzu tiun ĉi funkcion krom por rekrei antaŭe faritan pagopeton.</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>R&euzi ekzistantan ricevan adreson (malrekomendinda)</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Malplenigi ĉiujn kampojn de la formularo.</translation>
</message>
<message>
<source>Clear</source>
<translation>Forigi</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Peti pagon</translation>
</message>
<message>
<source>Show</source>
<translation>Vidigi</translation>
</message>
<message>
<source>Remove</source>
<translation>Forigi</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopii etikedon</translation>
</message>
<message>
<source>Copy message</source>
<translation>Kopii mesaĝon</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR-kodo</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Kopii &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Kopii &Adreson</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Konservi Bildon...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Peti pagon al %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Paginformoj</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Message</source>
<translation>Mesaĝo</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>La rezultanta URI estas tro longa. Provu malplilongigi la tekston de la etikedo / mesaĝo.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Eraro de kodigo de URI en la QR-kodon.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Message</source>
<translation>Mesaĝo</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(neniu mesaĝo)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Sendi Monon</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Monregaj Opcioj</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Enigoj...</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Nesufiĉa mono!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Kvanto:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bajtoj:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Sumo:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioritato:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Krompago:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Post krompago:</translation>
</message>
<message>
<source>Change:</source>
<translation>Restmono:</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Sendi samtempe al pluraj ricevantoj</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Aldoni &Ricevonton</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Malplenigi ĉiujn kampojn de la formularo.</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Forigi ĉion</translation>
</message>
<message>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Konfirmi la sendon</translation>
</message>
<message>
<source>S&end</source>
<translation>S&endi</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Konfirmi sendon de emermono</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 al %2</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopii kvanton</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopii krompagon</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopii post krompago</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopii bajtojn</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopii prioritaton</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopii restmonon</translation>
</message>
<message>
<source>Total Amount %1 (= %2)</source>
<translation>Totala Sumo %1 (= %2)</translation>
</message>
<message>
<source>or</source>
<translation>aŭ</translation>
</message>
<message>
<source>The recipient address is not valid, please recheck.</source>
<translation>La adreso de la ricevonto ne validas. Bonvolu kontroli.</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>La pagenda sumo devas esti pli ol 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>La sumo estas pli granda ol via saldo.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>La sumo kun la %1 krompago estas pli granda ol via saldo.</translation>
</message>
<message>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Iu adreso estas ripetita. Vi povas sendi al ĉiu adreso po unufoje en iu send-operacio.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Kreo de transakcio fiaskis!</translation>
</message>
<message>
<source>Warning: Invalid Emercoin address</source>
<translation>Averto: Nevalida Emermon-adreso</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Ĉu vi certas, ke vi volas sendi?</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>aldonita kiel krompago</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>&Sumo:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>&Ricevonto:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Tajpu etikedon por tiu ĉi adreso por aldoni ĝin al via adresaro</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etikedo:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Elektu la jam uzitan adreson</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Tio estas normala pago.</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Alglui adreson el tondejo</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Forigi ĉi tiun enskribon</translation>
</message>
<message>
<source>Message:</source>
<translation>Mesaĝo:</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Tajpu etikedon por tiu ĉi adreso por aldoni ĝin al la listo de uzitaj adresoj</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Pagi Al:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Memorando:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Ne sistemfermu ĝis ĉi tiu fenestro malaperas.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Subskriboj - Subskribi / Kontroli mesaĝon</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Subskribi Mesaĝon</translation>
</message>
<message>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Vi povas subskribi mesaĝon per viaj adresoj, por pravigi ke vi estas la posedanto de tiuj adresoj. Atentu, ke vi ne subskribu ion neprecizan, ĉar trompisto povus ruzi kontraŭ vi kaj ŝteli vian identecon. Subskribu nur plene detaligitajn deklarojn, pri kiuj vi konsentas.</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Elektu la jam uzitan adreson</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Alglui adreson el tondejo</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Tajpu ĉi tie la mesaĝon, kiun vi volas subskribi</translation>
</message>
<message>
<source>Signature</source>
<translation>Subskribo</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopii la aktualan subskribon al la tondejo</translation>
</message>
<message>
<source>Sign the message to prove you own this Emercoin address</source>
<translation>Subskribi la mesaĝon por pravigi, ke vi estas la posedanto de tiu Emermon-adreso</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Subskribi &Mesaĝon</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Reagordigi ĉiujn prisubskribajn kampojn</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Forigi Ĉion</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Kontroli Mesaĝon</translation>
</message>
<message>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Enmeti la subskriban adreson, la mesaĝon (kune kun ĉiuj linisaltoj, spacetoj, taboj ktp., precize) kaj la subskribon ĉi sube por kontroli la mesaĝon. Atentu, ke vi ne komprenu per la subskribo pli ol la enhavo de la mesaĝo mem, por eviti homo-en-la-mezo-atakon.</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Emercoin address</source>
<translation>Kontroli la mesaĝon por pravigi, ke ĝi ja estas subskribita per la specifa Emermon-adreso</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Kontroli &Mesaĝon</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Reagordigi ĉiujn prikontrolajn kampojn</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Klaku "Subskribi Mesaĝon" por krei subskribon</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>La adreso, kiun vi enmetis, estas nevalida.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Bonvolu kontroli la adreson kaj reprovi.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>La adreso, kiun vi enmetis, referencas neniun ŝlosilon.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Malŝloso de monujo estas nuligita.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>La privata ŝlosilo por la enigita adreso ne disponeblas.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Subskribo de mesaĝo fiaskis.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Mesaĝo estas subskribita.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>Ne eblis malĉifri la subskribon.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Bonvolu kontroli la subskribon kaj reprovi.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>La subskribo ne kongruis kun la haketaĵo de la mesaĝo.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Kontrolo de mesaĝo malsukcesis.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Mesaĝo sukcese kontrolita.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Emercoin Core</source>
<translation>Kerno de Emermono</translation>
</message>
<message>
<source>The Emercoin Core developers</source>
<translation>La programistoj de Emermona Kerno</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Malferma ĝis %1</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/senkonekte</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/nekonfirmite</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 konfirmoj</translation>
</message>
<message>
<source>Status</source>
<translation>Stato</translation>
</message>
<message numerus="yes">
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, elsendita(j) tra %n nodo</numerusform><numerusform>, elsendita(j) tra %n nodoj</numerusform></translation>
</message>
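<!-- Note: messages marked numerus="yes" carry one <numerusform> per plural form of the target language; Esperanto distinguishes only singular (n == 1) and plural. -->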
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Source</source>
<translation>Fonto</translation>
</message>
<message>
<source>Generated</source>
<translation>Kreita</translation>
</message>
<message>
<source>From</source>
<translation>De</translation>
</message>
<message>
<source>To</source>
<translation>Al</translation>
</message>
<message>
<source>own address</source>
<translation>propra adreso</translation>
</message>
<message>
<source>label</source>
<translation>etikedo</translation>
</message>
<message>
<source>Credit</source>
<translation>Kredito</translation>
</message>
<message numerus="yes">
<source>matures in %n more block(s)</source>
<translation><numerusform>maturiĝos post %n bloko</numerusform><numerusform>maturiĝos post %n blokoj</numerusform></translation>
</message>
<message>
<source>not accepted</source>
<translation>ne akceptita</translation>
</message>
<message>
<source>Debit</source>
<translation>Debeto</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Krompago</translation>
</message>
<message>
<source>Net amount</source>
<translation>Neta sumo</translation>
</message>
<message>
<source>Message</source>
<translation>Mesaĝo</translation>
</message>
<message>
<source>Comment</source>
<translation>Komento</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>Transakcia ID</translation>
</message>
<message>
<source>Merchant</source>
<translation>Vendisto</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Kreitaj moneroj devas esti maturaj je %1 blokoj antaŭ ol eblas elspezi ilin. Kiam vi generis tiun ĉi blokon, ĝi estis elsendita al la reto por aldono al la blokĉeno. Se tiu aldono malsukcesas, ĝia stato ŝanĝiĝos al "neakceptita" kaj ne eblos elspezi ĝin. Tio estas malofta, sed povas okazi se alia bloko estas kreita je preskaŭ la sama momento kiel la via.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Sencimigaj informoj</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transakcio</translation>
</message>
<message>
<source>Inputs</source>
<translation>Enigoj</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>true</source>
<translation>vera</translation>
</message>
<message>
<source>false</source>
<translation>malvera</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, ankoraŭ ne elsendita sukcese</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Malferma dum ankoraŭ %n bloko</numerusform><numerusform>Malferma dum ankoraŭ %n blokoj</numerusform></translation>
</message>
<message>
<source>unknown</source>
<translation>nekonata</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Transakciaj detaloj</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Tiu ĉi panelo montras detalan priskribon de la transakcio</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Malferma dum ankoraŭ %n bloko</numerusform><numerusform>Malferma dum ankoraŭ %n blokoj</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>Malferma ĝis %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Konfirmita (%1 konfirmoj)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Tiun ĉi blokon ne ricevis ajna alia nodo, kaj ĝi verŝajne ne akceptiĝos!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Kreita sed ne akceptita</translation>
</message>
<message>
<source>Offline</source>
<translation>Senkonekte</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Nekonfirmita</translation>
</message>
<message>
<source>Received with</source>
<translation>Ricevita kun</translation>
</message>
<message>
<source>Received from</source>
<translation>Ricevita de</translation>
</message>
<message>
<source>Sent to</source>
<translation>Sendita al</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Pago al vi mem</translation>
</message>
<message>
<source>Mined</source>
<translation>Minita</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(neaplikebla)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transakcia stato. Ŝvebi super tiu ĉi kampo por montri la nombron de konfirmoj.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Dato kaj horo kiam la transakcio alvenis.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Tipo de transakcio.</translation>
</message>
<message>
<source>Destination address of transaction.</source>
<translation>Celadreso de la transakcio.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Sumo elprenita de aŭ aldonita al la saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Ĉiuj</translation>
</message>
<message>
<source>Today</source>
<translation>Hodiaŭ</translation>
</message>
<message>
<source>This week</source>
<translation>Ĉi-semajne</translation>
</message>
<message>
<source>This month</source>
<translation>Ĉi-monate</translation>
</message>
<message>
<source>Last month</source>
<translation>Pasintmonate</translation>
</message>
<message>
<source>This year</source>
<translation>Ĉi-jare</translation>
</message>
<message>
<source>Range...</source>
<translation>Intervalo...</translation>
</message>
<message>
<source>Received with</source>
<translation>Ricevita kun</translation>
</message>
<message>
<source>Sent to</source>
<translation>Sendita al</translation>
</message>
<message>
<source>To yourself</source>
<translation>Al vi mem</translation>
</message>
<message>
<source>Mined</source>
<translation>Minita</translation>
</message>
<message>
<source>Other</source>
<translation>Aliaj</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Tajpu adreson aŭ etikedon por serĉi</translation>
</message>
<message>
<source>Min amount</source>
<translation>Minimuma sumo</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopii adreson</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopii etikedon</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopii transakcian ID-on</translation>
</message>
<message>
<source>Edit label</source>
<translation>Redakti etikedon</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Montri detalojn de transakcio</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Eksportado malsukcesis</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Perkome disigita dosiero (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Konfirmita</translation>
</message>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Range:</source>
<translation>Intervalo:</translation>
</message>
<message>
<source>to</source>
<translation>al</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Sendi Emermonon</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Eksporti</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporti la datumojn el la aktuala langeto al dosiero</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Krei sekurkopion de monujo</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Monuj-datumoj (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Malsukcesis sekurkopio</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Sukcesis krei sekurkopion</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Agordoj:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Specifi dosierujon por datumoj</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Konekti al nodo por ricevi adresojn de samtavolanoj, kaj malkonekti</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Specifi vian propran publikan adreson</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Akcepti komandojn JSON-RPC kaj el komandlinio</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Ruli fone kiel demono kaj akcepti komandojn</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Uzi la test-reton</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Akcepti konektojn el ekstere (defaŭlte: 1 se ne estas -proxy nek -connect)</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Bindi al donita adreso kaj ĉiam aŭskulti per ĝi. Uzu la formaton [gastigo]:pordo por IPv6</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Plenumi komandon kiam monuja transakcio ŝanĝiĝas (%s en cmd anstataŭiĝas per TxID)</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Tiu ĉi estas antaŭeldona testa versio - uzu laŭ via propra risko - ne uzu por minado aŭ por aplikaĵoj por vendistoj</translation>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Averto: -paytxfee estas agordita per tre alta valoro! Tio estas la krompago, kiun vi pagos se vi sendos transakcion.</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Averto: La reto ne tute konsentas! Kelkaj minantoj ŝajne spertas problemojn aktuale.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Averto: ŝajne ni ne tute konsentas kun niaj samtavolanoj! Eble vi devas ĝisdatigi vian klienton, aŭ eble aliaj nodoj faru same.</translation>
</message>
<message>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Averto: okazis eraro dum lego de wallet.dat! Ĉiuj ŝlosiloj sukcese legiĝis, sed la transakciaj datumoj aŭ la adresaro eble mankas aŭ malĝustas.</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Averto: via wallet.dat estas difektita, sed la datumoj sukcese saviĝis! La originala wallet.dat estas nun konservita kiel wallet.{timestamp}.bak en %s; se via saldo aŭ transakcioj estas malĝustaj vi devus restaŭri per alia sekurkopio.</translation>
</message>
<message>
<source><category> can be:</source>
<translation><category> povas esti:</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Provi restaŭri privatajn ŝlosilojn el difektita wallet.dat</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Blok-kreaj agordaĵoj:</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Konekti nur al specifita(j) nodo(j)</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Difektita blokdatumbazo trovita</translation>
</message>
<message>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Malkovri la propran IP-adreson (defaŭlte: 1 dum aŭskultado sen -externalip)</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Ĉu vi volas rekonstrui la blokdatumbazon nun?</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Eraro dum pravalorizado de blokdatumbazo</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Eraro dum pravalorizado de monuj-datumbaza ĉirkaŭaĵo %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Eraro dum ŝargado de blokdatumbazo</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Eraro dum malfermado de blokdatumbazo</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Eraro: restas malmulte da diskospaco!</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Ne sukcesis aŭskulti ajnan pordon. Uzu -listen=0 se tion vi volas.</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Geneza bloko aŭ netrovita aŭ neĝusta. Ĉu eble la datadir de la reto malĝustas?</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Nevalida -onion-adreso: '%s'</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Nesufiĉa nombro de dosierpriskribiloj disponeblas.</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Rekonstrui blokĉenan indekson el la aktualaj blk000??.dat dosieroj</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Specifi monujan dosieron (ene de dosierujo por datumoj)</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Kontrolado de blokoj...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Kontrolado de monujo...</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Monujo %s troviĝas ekster la dosierujo por datumoj %s</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Monujaj opcioj:</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Vi devas rekonstrui la datumbazon kun -reindex por ŝanĝi -txindex</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importas blokojn el ekstera dosiero blk000??.dat</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Plenumi komandon kiam rilata alerto riceviĝas, aŭ kiam ni vidas tre longan forkon (%s en cmd anstataŭiĝas per mesaĝo)</translation>
</message>
<message>
<source>Information</source>
<translation>Informoj</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Nevalida sumo por -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Nevalida sumo por -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Sendi spurajn/sencimigajn informojn al la konzolo anstataŭ al dosiero debug.log</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Malpligrandigi la sencimigan protokol-dosieron kiam kliento lanĉiĝas (defaŭlte: 1 kiam mankas -debug)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Subskribado de transakcio fiaskis</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Tiu ĉi estas eksperimenta programo.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Transakcia sumo tro malgranda</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Transakciaj sumoj devas esti pozitivaj</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transakcio estas tro granda</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Uzi UPnP por mapi la aŭskultan pordon (defaŭlte: 1 dum aŭskultado)</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Salutnomo por konektoj JSON-RPC</translation>
</message>
<message>
<source>Warning</source>
<translation>Averto</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Averto: tiu ĉi versio estas eksdata. Vi bezonas ĝisdatigon!</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat estas difektita, riparo malsukcesis</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Pasvorto por konektoj JSON-RPC</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Plenumi komandon kiam plej bona bloko ŝanĝiĝas (%s en cmd anstataŭiĝas per bloka haketaĵo)</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Ĝisdatigi monujon al plej lasta formato</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Reskani la blokĉenon por mankantaj monujaj transakcioj</translation>
</message>
<message>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Uzi OpenSSL (https) por konektoj JSON-RPC</translation>
</message>
<message>
<source>This help message</source>
<translation>Tiu ĉi helpmesaĝo</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permesi DNS-elserĉojn por -addnode, -seednode kaj -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Ŝargante adresojn...</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Eraro dum ŝargado de wallet.dat: monujo difektita</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Eraro dum ŝargado de wallet.dat</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Nevalida adreso -proxy: '%s'</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Nekonata reto specifita en -onlynet: '%s'</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Ne eblas trovi la adreson -bind: '%s'</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Ne eblas trovi la adreson -externalip: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Nevalida sumo por -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Nesufiĉa mono</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Ŝargante blok-indekson...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Aldoni nodon por alkonekti kaj provi teni la konekton malferma</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Ŝargado de monujo...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Ne eblas malpromocii monujon</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Ne eblas skribi defaŭltan adreson</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Reskanado...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Ŝargado finiĝis</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|><|fim▁end|>
|
# -*- coding: utf-8 -*-
|
<|file_name|>home.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: Will<|fim▁hole|>
class ImportFrom(forms.Form):
HOST_TYPE = ((1, "001"), (2, "002"))  # replace with values loaded from a file
host_type = forms.IntegerField(
widget=forms.Select(choices=HOST_TYPE)
)
hostname = forms.CharField()
def __init__(self,*args,**kwargs):
super(ImportFrom,self).__init__(*args,**kwargs)
HOST_TYPE = ((1, "001"), (2, "002"))  # replace with values loaded from a file
self.fields['host_type'].widget.choices = models.userInfo.objects.all().values_list("id","name")
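# Re-assigning the choices in __init__ re-reads the database on every form
# instantiation, so newly added userInfo rows appear without a server restart.
# The bare get()/filter() calls below look like leftover API reminders rather
# than functional code.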
models.userInfo.objects.get()
models.userInfo.objects.filter()<|fim▁end|>
|
"""
from django import forms
from app01 import models
|
<|file_name|>elixir.js<|end_file_name|><|fim▁begin|>module.exports = function(hljs) {
var ELIXIR_IDENT_RE = '[a-zA-Z_][a-zA-Z0-9_.]*(\\!|\\?)?';
var ELIXIR_METHOD_RE = '[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?';
var ELIXIR_KEYWORDS =
'and false then defined module in return redo retry end for true self when ' +
'next until do begin unless nil break not case cond alias while ensure or ' +
'include use alias fn quote require import with|0';
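  // SUBST matches #{...} interpolation, which Elixir allows inside
  // double-quoted strings and lowercase sigils.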
var SUBST = {
className: 'subst',
begin: '#\\{', end: '}',
lexemes: ELIXIR_IDENT_RE,
keywords: ELIXIR_KEYWORDS
};
  var SIGIL_DELIMITERS = '[/|([{<"\']';
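  // Lowercase sigils (e.g. ~s, ~r) allow escapes and #{...} interpolation,
  // so they include BACKSLASH_ESCAPE and SUBST; the uppercase variants
  // below are matched literally.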
var LOWERCASE_SIGIL = {
className: 'string',
begin: '~[a-z]' + '(?=' + SIGIL_DELIMITERS + ')',
contains: [
{
endsParent:true,
contains: [{
contains: [hljs.BACKSLASH_ESCAPE, SUBST],
variants: [
{ begin: /"/, end: /"/ },
{ begin: /'/, end: /'/ },
{ begin: /\//, end: /\// },
{ begin: /\|/, end: /\|/ },
{ begin: /\(/, end: /\)/ },
{ begin: /\[/, end: /\]/ },
{ begin: /\{/, end: /\}/ },
{ begin: /</, end: />/ }
]
}]
},
],
};
var UPCASE_SIGIL = {
className: 'string',
begin: '~[A-Z]' + '(?=' + SIGIL_DELIMITERS + ')',
contains: [
{ begin: /"/, end: /"/ },
{ begin: /'/, end: /'/ },
{ begin: /\//, end: /\// },
{ begin: /\|/, end: /\|/ },
{ begin: /\(/, end: /\)/ },
{ begin: /\[/, end: /\]/ },
{ begin: /\{/, end: /\}/ },
{ begin: /\</, end: /\>/ }
]<|fim▁hole|> className: 'string',
contains: [hljs.BACKSLASH_ESCAPE, SUBST],
variants: [
{
begin: /"""/, end: /"""/,
},
{
begin: /'''/, end: /'''/,
},
{
begin: /~S"""/, end: /"""/,
contains: []
},
{
begin: /~S"/, end: /"/,
contains: []
},
{
begin: /~S'''/, end: /'''/,
contains: []
},
{
begin: /~S'/, end: /'/,
contains: []
},
{
begin: /'/, end: /'/
},
{
begin: /"/, end: /"/
},
]
};
var FUNCTION = {
className: 'function',
beginKeywords: 'def defp defmacro', end: /\B\b/, // the mode is ended by the title
contains: [
hljs.inherit(hljs.TITLE_MODE, {
begin: ELIXIR_IDENT_RE,
endsParent: true
})
]
};
var CLASS = hljs.inherit(FUNCTION, {
className: 'class',
beginKeywords: 'defimpl defmodule defprotocol defrecord', end: /\bdo\b|$|;/
});
var ELIXIR_DEFAULT_CONTAINS = [
STRING,
UPCASE_SIGIL,
LOWERCASE_SIGIL,
hljs.HASH_COMMENT_MODE,
CLASS,
FUNCTION,
{
begin: '::'
},
{
className: 'symbol',
begin: ':(?![\\s:])',
contains: [STRING, {begin: ELIXIR_METHOD_RE}],
relevance: 0
},
{
className: 'symbol',
begin: ELIXIR_IDENT_RE + ':(?!:)',
relevance: 0
},
{
className: 'number',
begin: '(\\b0o[0-7_]+)|(\\b0b[01_]+)|(\\b0x[0-9a-fA-F_]+)|(-?\\b[1-9][0-9_]*(.[0-9_]+([eE][-+]?[0-9]+)?)?)',
relevance: 0
},
{
className: 'variable',
begin: '(\\$\\W)|((\\$|\\@\\@?)(\\w+))'
},
{
begin: '->'
},
{ // regexp container
begin: '(' + hljs.RE_STARTERS_RE + ')\\s*',
contains: [
hljs.HASH_COMMENT_MODE,
{
className: 'regexp',
illegal: '\\n',
contains: [hljs.BACKSLASH_ESCAPE, SUBST],
variants: [
{
begin: '/', end: '/[a-z]*'
},
{
begin: '%r\\[', end: '\\][a-z]*'
}
]
}
],
relevance: 0
}
];
SUBST.contains = ELIXIR_DEFAULT_CONTAINS;
return {
lexemes: ELIXIR_IDENT_RE,
keywords: ELIXIR_KEYWORDS,
contains: ELIXIR_DEFAULT_CONTAINS
};
};<|fim▁end|>
|
};
var STRING = {
|
<|file_name|>SearchOrbView.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v17.leanback.widget;
import android.animation.ArgbEvaluator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.GradientDrawable;
import android.support.annotation.ColorInt;
import android.support.v17.leanback.R;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
/**
* <p>A widget that draws a search affordance, represented by a round background and an icon.</p>
*
* The background color and icon can be customized.
*/
public class SearchOrbView extends FrameLayout implements View.OnClickListener {
private OnClickListener mListener;
private View mRootView;
private View mSearchOrbView;
private ImageView mIcon;
private Drawable mIconDrawable;
private Colors mColors;
private final float mFocusedZoom;
private final int mPulseDurationMs;
private final int mScaleDurationMs;
private final float mUnfocusedZ;
private final float mFocusedZ;
private ValueAnimator mColorAnimator;
/**
* A set of colors used to display the search orb.
*/
public static class Colors {
private static final float sBrightnessAlpha = 0.15f;
/**
* Constructs a color set using the given color for the search orb.
* Other colors are provided by the framework.
*
* @param color The main search orb color.
*/
public Colors(@ColorInt int color) {
this(color, color);
}
/**
* Constructs a color set using the given colors for the search orb.
* Other colors are provided by the framework.
*
* @param color The main search orb color.
* @param brightColor A brighter version of the search orb used for animation.
*/
public Colors(@ColorInt int color, @ColorInt int brightColor) {
this(color, brightColor, Color.TRANSPARENT);
}
/**
* Constructs a color set using the given colors.
*
* @param color The main search orb color.
* @param brightColor A brighter version of the search orb used for animation.
* @param iconColor A color used to tint the search orb icon.
*/
public Colors(@ColorInt int color, @ColorInt int brightColor, @ColorInt int iconColor) {
this.color = color;
this.brightColor = brightColor == color ? getBrightColor(color) : brightColor;
this.iconColor = iconColor;
}
/**
* The main color of the search orb.
*/
@ColorInt
public int color;
/**
* A brighter version of the search orb used for animation.
*/
@ColorInt
public int brightColor;
/**
* A color used to tint the search orb icon.
*/
@ColorInt
public int iconColor;
/**
* Computes a default brighter version of the given color.
*/
public static int getBrightColor(int color) {
final float brightnessValue = 0xff * sBrightnessAlpha;
int red = (int)(Color.red(color) * (1 - sBrightnessAlpha) + brightnessValue);
int green = (int)(Color.green(color) * (1 - sBrightnessAlpha) + brightnessValue);
int blue = (int)(Color.blue(color) * (1 - sBrightnessAlpha) + brightnessValue);
int alpha = (int)(Color.alpha(color) * (1 - sBrightnessAlpha) + brightnessValue);
return Color.argb(alpha, red, green, blue);
}
}
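// Worked example (editorial, not part of the framework API): with
// sBrightnessAlpha = 0.15f, getBrightColor blends each ARGB channel 15% of the
// way toward full intensity. For a channel value of 0x20:
//   0x20 * 0.85 + 0xff * 0.15 = 27.2 + 38.25, truncated to 65 (0x41)
// so dark colors brighten noticeably while near-white channels barely move.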
private final ArgbEvaluator mColorEvaluator = new ArgbEvaluator();
private final ValueAnimator.AnimatorUpdateListener mUpdateListener =
new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animator) {
Integer color = (Integer) animator.getAnimatedValue();
setOrbViewColor(color.intValue());
}
};
private ValueAnimator mShadowFocusAnimator;
private final ValueAnimator.AnimatorUpdateListener mFocusUpdateListener =
new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
setSearchOrbZ(animation.getAnimatedFraction());
}
};
private void setSearchOrbZ(float fraction) {
ShadowHelper.getInstance().setZ(mSearchOrbView,
mUnfocusedZ + fraction * (mFocusedZ - mUnfocusedZ));
}
public SearchOrbView(Context context) {
this(context, null);
}
public SearchOrbView(Context context, AttributeSet attrs) {
this(context, attrs, R.attr.searchOrbViewStyle);
}
public SearchOrbView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
final Resources res = context.getResources();
LayoutInflater inflater = (LayoutInflater) context
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
mRootView = inflater.inflate(getLayoutResourceId(), this, true);
mSearchOrbView = mRootView.findViewById(R.id.search_orb);
mIcon = (ImageView) mRootView.findViewById(R.id.icon);
mFocusedZoom = context.getResources().getFraction(
R.fraction.lb_search_orb_focused_zoom, 1, 1);
mPulseDurationMs = context.getResources().getInteger(
R.integer.lb_search_orb_pulse_duration_ms);
mScaleDurationMs = context.getResources().getInteger(
R.integer.lb_search_orb_scale_duration_ms);
mFocusedZ = context.getResources().getDimensionPixelSize(
R.dimen.lb_search_orb_focused_z);
mUnfocusedZ = context.getResources().getDimensionPixelSize(
R.dimen.lb_search_orb_unfocused_z);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.lbSearchOrbView,
defStyleAttr, 0);
Drawable img = a.getDrawable(R.styleable.lbSearchOrbView_searchOrbIcon);
if (img == null) {
img = res.getDrawable(R.drawable.lb_ic_in_app_search);
}
setOrbIcon(img);
int defColor = res.getColor(R.color.lb_default_search_color);
int color = a.getColor(R.styleable.lbSearchOrbView_searchOrbColor, defColor);
int brightColor = a.getColor(
R.styleable.lbSearchOrbView_searchOrbBrightColor, color);
int iconColor = a.getColor(R.styleable.lbSearchOrbView_searchOrbIconColor, Color.TRANSPARENT);
setOrbColors(new Colors(color, brightColor, iconColor));
a.recycle();
setFocusable(true);
setClipChildren(false);
setOnClickListener(this);
setSoundEffectsEnabled(false);
setSearchOrbZ(0);
// Icon has no background, but must be on top of the search orb view
ShadowHelper.getInstance().setZ(mIcon, mFocusedZ);
}
int getLayoutResourceId() {
return R.layout.lb_search_orb;
}
void scaleOrbViewOnly(float scale) {
mSearchOrbView.setScaleX(scale);
mSearchOrbView.setScaleY(scale);
}
float getFocusedZoom() {
return mFocusedZoom;
}
@Override
public void onClick(View view) {
if (null != mListener) {
mListener.onClick(view);
}
}
private void startShadowFocusAnimation(boolean gainFocus, int duration) {
if (mShadowFocusAnimator == null) {
mShadowFocusAnimator = ValueAnimator.ofFloat(0f, 1f);
mShadowFocusAnimator.addUpdateListener(mFocusUpdateListener);
}
if (gainFocus) {
mShadowFocusAnimator.start();
} else {
mShadowFocusAnimator.reverse();
}
mShadowFocusAnimator.setDuration(duration);
}
@Override
protected void onFocusChanged(boolean gainFocus, int direction, Rect previouslyFocusedRect) {
super.onFocusChanged(gainFocus, direction, previouslyFocusedRect);
animateOnFocus(gainFocus);
}<|fim▁hole|> void animateOnFocus(boolean hasFocus) {
final float zoom = hasFocus ? mFocusedZoom : 1f;
mRootView.animate().scaleX(zoom).scaleY(zoom).setDuration(mScaleDurationMs).start();
startShadowFocusAnimation(hasFocus, mScaleDurationMs);
enableOrbColorAnimation(hasFocus);
}
/**
* Sets the orb icon.
* @param icon the drawable to be used as the icon
*/
public void setOrbIcon(Drawable icon) {
mIconDrawable = icon;
mIcon.setImageDrawable(mIconDrawable);
}
/**
* Returns the orb icon
* @return the drawable used as the icon
*/
public Drawable getOrbIcon() {
return mIconDrawable;
}
/**
* Sets the on click listener for the orb.
* @param listener The listener.
*/
public void setOnOrbClickedListener(OnClickListener listener) {
mListener = listener;
if (null != listener) {
setVisibility(View.VISIBLE);
} else {
setVisibility(View.INVISIBLE);
}
}
/**
* Sets the background color of the search orb.
* Other colors will be provided by the framework.
*
* @param color the ARGB color
*/
public void setOrbColor(int color) {
setOrbColors(new Colors(color, color, Color.TRANSPARENT));
}
/**
* Sets the search orb colors.
* Other colors are provided by the framework.
* @deprecated Use {@link #setOrbColors(Colors)} instead.
*/
@Deprecated
public void setOrbColor(@ColorInt int color, @ColorInt int brightColor) {
setOrbColors(new Colors(color, brightColor, Color.TRANSPARENT));
}
/**
* Returns the orb color
* @return the ARGB color
*/
@ColorInt
public int getOrbColor() {
return mColors.color;
}
/**
* Sets the {@link Colors} used to display the search orb.
*/
public void setOrbColors(Colors colors) {
mColors = colors;
mIcon.setColorFilter(mColors.iconColor);
if (mColorAnimator == null) {
setOrbViewColor(mColors.color);
} else {
enableOrbColorAnimation(true);
}
}
/**
* Returns the {@link Colors} used to display the search orb.
*/
public Colors getOrbColors() {
return mColors;
}
/**
* Enables or disables the orb color animation.
*
* <p>
* Orb color animation is handled automatically when the orb is focused or unfocused;
* however, an app may choose to override the current animation state, for example
* when an activity is paused.
* </p>
*/
public void enableOrbColorAnimation(boolean enable) {
if (mColorAnimator != null) {
mColorAnimator.end();
mColorAnimator = null;
}
if (enable) {
// TODO: set interpolator (material if available)
mColorAnimator = ValueAnimator.ofObject(mColorEvaluator,
mColors.color, mColors.brightColor, mColors.color);
mColorAnimator.setRepeatCount(ValueAnimator.INFINITE);
mColorAnimator.setDuration(mPulseDurationMs * 2);
mColorAnimator.addUpdateListener(mUpdateListener);
mColorAnimator.start();
}
}
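// Hypothetical usage sketch (editorial; mOrb is an assumed field referencing
// this widget): an Activity can pause the pulse explicitly:
//   @Override protected void onPause() {
//       mOrb.enableOrbColorAnimation(false);  // stop the infinite animator
//       super.onPause();
//   }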
private void setOrbViewColor(int color) {
if (mSearchOrbView.getBackground() instanceof GradientDrawable) {
((GradientDrawable) mSearchOrbView.getBackground()).setColor(color);
}
}
@Override
protected void onDetachedFromWindow() {
// Must stop infinite animation to prevent activity leak
enableOrbColorAnimation(false);
super.onDetachedFromWindow();
}
}<|fim▁end|>
| |
<|file_name|>OWS.d.ts<|end_file_name|><|fim▁begin|>import XML from './XML';
<|fim▁hole|> readFromNode(node: Element): any;
}<|fim▁end|>
|
export default class OWS extends XML {
constructor();
readFromDocument(doc: Document): any;
|
<|file_name|>_public_ip_addresses_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PublicIPAddressesOperations:
"""PublicIPAddressesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
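# Hypothetical usage sketch (editorial; the client construction below is an
# assumption, not defined in this file):
#   client = NetworkManagementClient(credential, subscription_id)
#   poller = await client.public_ip_addresses.begin_delete("my-rg", "my-ip")
#   await poller.result()  # returns once the long-running delete completes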
async def get(
self,
resource_group_name: str,
public_ip_address_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.PublicIPAddress":
"""Gets the specified public IP address in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.PublicIPAddress",
**kwargs: Any
) -> "_models.PublicIPAddress":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'PublicIPAddress')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.PublicIPAddress",
**kwargs: Any
) -> AsyncLROPoller["_models.PublicIPAddress"]:
"""Creates or updates a static or dynamic public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param parameters: Parameters supplied to the create or update public IP address operation.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.PublicIPAddress
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PublicIPAddress or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_12_01.models.PublicIPAddress]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
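# Hypothetical usage sketch (editorial; the model fields shown are assumptions
# about _models.PublicIPAddress, not guaranteed by this file):
#   params = _models.PublicIPAddress(location="westus")
#   poller = await client.public_ip_addresses.begin_create_or_update(
#       "my-rg", "my-ip", params)
#   result = await poller.result()  # deserialized PublicIPAddress on success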
async def update_tags(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.PublicIPAddress":
"""Updates public IP address tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param parameters: Parameters supplied to update public IP address tags.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets all the public IP addresses in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_12_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPAddresses'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets all public IP addresses in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_12_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses'} # type: ignore
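# Hypothetical usage sketch (editorial): AsyncItemPaged hides the next_link
# plumbing implemented above, so callers iterate as if over a flat sequence:
#   async for ip in client.public_ip_addresses.list("my-rg"):
#       print(ip.name)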
def list_virtual_machine_scale_set_public_ip_addresses(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
**kwargs: Any
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets information about all public IP addresses on a virtual machine scale set level.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_12_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_public_ip_addresses.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/publicipaddresses'} # type: ignore
def list_virtual_machine_scale_set_vm_public_ip_addresses(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
**kwargs: Any
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets information about all public IP addresses in a virtual machine IP configuration in a
virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The network interface name.
:type network_interface_name: str
:param ip_configuration_name: The IP configuration name.
:type ip_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_12_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_public_ip_addresses.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses'} # type: ignore
async def get_virtual_machine_scale_set_public_ip_address(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
public_ip_address_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.PublicIPAddress":
"""Get the specified public IP address in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the IP configuration.
:type ip_configuration_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_public_ip_address.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response<|fim▁hole|>
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_public_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}'} # type: ignore<|fim▁end|>
| |
<|file_name|>theme.module.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Akveo. All Rights Reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*/
import { ModuleWithProviders, NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { CommonModule } from '@angular/common';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import {
NbLayoutModule,
NbMenuModule,
NbTabsetModule,
NbSidebarModule,
NbCardModule,
NbCheckboxModule,
NbIconModule,
NbSelectModule,
NbInputModule, NbButtonModule,
} from '@nebular/theme';
import { NbEvaIconsModule } from '@nebular/eva-icons';
import {
NgdHeaderComponent,
NgdHeroComponent,
NgdIconCardComponent,
NgdTextCardComponent,
NgdFooterComponent,
NgdFragmentTargetDirective,
NgdPageTocComponent,
NgdPageTabsComponent,
NgdColorSwatchDirective,
NgdDescriptionDirective,
NgdSearchComponent,
} from './components/';
import {
NgdHighlightService,
NgdTextService,
NgdTabbedService,
NgdStructureService,
NgdCodeLoaderService,
NgdIframeCommunicatorService,
NgdStylesService,
NgdVersionService,
NgdVisibilityService,
NgdPaginationService,
NgdAnalytics,
NgdMenuService,
NgdMetadataService,
} from './services';
import { AkveoServicesBanner } from './components/hubspot-cta/akveo-services-banner.component';
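// Hypothetical usage sketch (editorial): forRoot() below is meant to be
// imported exactly once, by the root module, so the services it provides stay
// singletons; feature modules import NgdThemeModule itself:
//   @NgModule({ imports: [NgdThemeModule.forRoot() /* root module only */] })
//   export class AppModule {}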
@NgModule({
imports: [
CommonModule,
NbLayoutModule,
NbSidebarModule,
NbCardModule,
NbMenuModule,
NbTabsetModule,
NbIconModule,
NbButtonModule,
NbSelectModule,
NbInputModule,
NbEvaIconsModule,
RouterModule,
],
declarations: [
NgdHeaderComponent,
NgdHeroComponent,
NgdIconCardComponent,
NgdTextCardComponent,
NgdFooterComponent,
NgdFragmentTargetDirective,
NgdPageTocComponent,
NgdPageTabsComponent,
NgdColorSwatchDirective,
NgdDescriptionDirective,
NgdSearchComponent,
AkveoServicesBanner,
],
exports: [
CommonModule,
RouterModule,
ReactiveFormsModule,
FormsModule,
NbIconModule,
NbLayoutModule,
NbSidebarModule,
NbCardModule,
NbMenuModule,
NbTabsetModule,
NbCheckboxModule,
NgdHeaderComponent,
NgdHeroComponent,
NgdIconCardComponent,
NgdTextCardComponent,
NgdFooterComponent,
NgdFragmentTargetDirective,
NgdPageTocComponent,<|fim▁hole|> AkveoServicesBanner,
],
})
export class NgdThemeModule {
static forRoot(): ModuleWithProviders {
return <ModuleWithProviders>{
ngModule: NgdThemeModule,
providers: [
NgdHighlightService,
NgdTextService,
NgdTabbedService,
NgdStructureService,
NgdCodeLoaderService,
NgdIframeCommunicatorService,
NgdStylesService,
NgdVersionService,
NgdPaginationService,
NgdAnalytics,
NgdMenuService,
NgdVisibilityService,
NgdMetadataService,
],
};
}
}<|fim▁end|>
|
NgdPageTabsComponent,
NgdColorSwatchDirective,
NgdDescriptionDirective,
|
<|file_name|>AssertXml.java<|end_file_name|><|fim▁begin|>/* Copyright (c) 2018 lib4j
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* You should have received a copy of The MIT License (MIT) along with this
* program. If not, see <http://opensource.org/licenses/MIT/>.
*/
package org.lib4j.test;
import java.util.HashMap;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.transform.Source;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.junit.Assert;
import org.junit.ComparisonFailure;
import org.lib4j.xml.dom.DOMStyle;
import org.lib4j.xml.dom.DOMs;
import org.w3c.dom.Attr;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xmlunit.builder.Input;
import org.xmlunit.diff.Comparison;
import org.xmlunit.diff.ComparisonListener;
import org.xmlunit.diff.ComparisonResult;
import org.xmlunit.diff.DOMDifferenceEngine;
import org.xmlunit.diff.DifferenceEngine;
public class AssertXml {
private XPath newXPath() {
final XPath xPath = XPathFactory.newInstance().newXPath();
xPath.setNamespaceContext(new SimpleNamespaceContext(prefixToNamespaceURI));
return xPath;
}
public static AssertXml compare(final Element controlElement, final Element testElement) {
final Map<String,String> prefixToNamespaceURI = new HashMap<>();
prefixToNamespaceURI.put("xsi", "http://www.w3.org/2001/XMLSchema-instance");
final NamedNodeMap attributes = controlElement.getAttributes();
for (int i = 0; i < attributes.getLength(); i++) {
final Attr attribute = (Attr)attributes.item(i);
if (XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(attribute.getNamespaceURI()) && "xmlns".equals(attribute.getPrefix()))
prefixToNamespaceURI.put(attribute.getLocalName(), attribute.getNodeValue());
}
return new AssertXml(prefixToNamespaceURI, controlElement, testElement);
}
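// Hypothetical usage sketch (editorial; controlRoot, testRoot, and the XPath
// are illustrative placeholders):
//   AssertXml ax = AssertXml.compare(controlRoot, testRoot);
//   ax.remove(testRoot, "//ns:timestamp");  // drop volatile nodes first
//   ax.assertEqual();                       // then diff the two trees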
private final Map<String,String> prefixToNamespaceURI;
private final Element controlElement;
private final Element testElement;
private AssertXml(final Map<String,String> prefixToNamespaceURI, final Element controlElement, final Element testElement) {
if (!controlElement.getPrefix().equals(testElement.getPrefix()))
throw new IllegalArgumentException("Prefixes of control and test elements must be the same: " + controlElement.getPrefix() + " != " + testElement.getPrefix());
this.prefixToNamespaceURI = prefixToNamespaceURI;
this.controlElement = controlElement;
this.testElement = testElement;
}
public void addAttribute(final Element element, final String xpath, final String name, final String value) throws XPathExpressionException {
final XPathExpression expression = newXPath().compile(xpath);
final NodeList nodes = (NodeList)expression.evaluate(element, XPathConstants.NODESET);
for (int i = 0; i < nodes.getLength(); ++i) {
final Node node = nodes.item(i);
if (!(node instanceof Element))
throw new UnsupportedOperationException("Only support addition of attributes to elements");
final Element target = (Element)node;
final int colon = name.indexOf(':');
final String namespaceURI = colon == -1 ? node.getNamespaceURI() : node.getOwnerDocument().lookupNamespaceURI(name.substring(0, colon));
target.setAttributeNS(namespaceURI, name, value);
}
}
public void remove(final Element element, final String ... xpaths) throws XPathExpressionException {
for (final String xpath : xpaths) {
final XPathExpression expression = newXPath().compile(xpath);
final NodeList nodes = (NodeList)expression.evaluate(element, XPathConstants.NODESET);
for (int i = 0; i < nodes.getLength(); ++i) {
final Node node = nodes.item(i);
if (node instanceof Attr) {
final Attr attribute = (Attr)node;
attribute.getOwnerElement().removeAttributeNode(attribute);
}
else {
node.getParentNode().removeChild(node);
}
}
}
}
public void replace(final Element element, final String xpath, final String name, final String value) throws XPathExpressionException {
final XPathExpression expression = newXPath().compile(xpath);
final NodeList nodes = (NodeList)expression.evaluate(element, XPathConstants.NODESET);
for (int i = 0; i < nodes.getLength(); ++i) {
final Node node = nodes.item(i);
if (node instanceof Attr) {
final Attr attribute = (Attr)node;
if (name == null) {
attribute.setValue(value);
}
else {
final int colon = name.indexOf(':');
final String namespaceURI = colon == -1 ? attribute.getNamespaceURI() : attribute.getOwnerDocument().lookupNamespaceURI(name.substring(0, colon));
final Element owner = attribute.getOwnerElement();
owner.removeAttributeNode(attribute);
owner.setAttributeNS(namespaceURI, name, value);
}
}
else {<|fim▁hole|> }
}
public void replace(final Element element, final String xpath, final String value) throws XPathExpressionException {
replace(element, xpath, null, value);
}
public void assertEqual() {
final String prefix = controlElement.getPrefix();
final String controlXml = DOMs.domToString(controlElement, DOMStyle.INDENT, DOMStyle.INDENT_ATTRS);
final String testXml = DOMs.domToString(testElement, DOMStyle.INDENT, DOMStyle.INDENT_ATTRS);
final Source controlSource = Input.fromString(controlXml).build();
final Source testSource = Input.fromString(testXml).build();
final DifferenceEngine diffEngine = new DOMDifferenceEngine();
diffEngine.addDifferenceListener(new ComparisonListener() {
@Override
public void comparisonPerformed(final Comparison comparison, final ComparisonResult result) {
final String controlXPath = comparison.getControlDetails().getXPath() == null ? null : comparison.getControlDetails().getXPath().replaceAll("/([^@])", "/" + prefix + ":$1");
if (controlXPath == null || controlXPath.matches("^.*\\/@[:a-z]+$") || controlXPath.contains("text()"))
return;
try {
Assert.assertEquals(controlXml, testXml);
}
catch (final ComparisonFailure e) {
final StackTraceElement[] stackTrace = e.getStackTrace();
int i;
for (i = 3; i < stackTrace.length; i++)
if (!stackTrace[i].getClassName().startsWith("org.xmlunit.diff"))
break;
final StackTraceElement[] filtered = new StackTraceElement[stackTrace.length - ++i];
System.arraycopy(stackTrace, i, filtered, 0, stackTrace.length - i);
e.setStackTrace(filtered);
throw e;
}
Assert.fail(comparison.toString());
}
});
diffEngine.compare(controlSource, testSource);
}
}<|fim▁end|>
|
throw new UnsupportedOperationException("Only support replacement of attribute values");
}
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# MAVProxy documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 19 05:17:36 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [<|fim▁hole|> 'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
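# Editorial note: the elided entries above are unknown; since this file sets
# ``todo_include_todos = True`` below, a typical list here might look like:
#   extensions = ['sphinx.ext.todo', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']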
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'last_letter'
copyright = u'2014, George Zogopoulos - Papaliakos'
author = u'George Zogopoulos - Papaliakos'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_static/themes", ]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'last_letter_doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
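# A filled-in variant might look like this (values are illustrative):
#latex_elements = {
#    'papersize': 'a4paper',
#    'pointsize': '11pt',
#}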
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'last_letter.tex', u'last_letter Documentation',
u'George Zogopoulos - Papaliakos', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'last_letter', u'last_letter Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'last_letter', u'last_letter Documentation',
author, 'last_letter', 'A collection of ROS packages for UAV simulation and autopilot development.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True<|fim▁end|>
|
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.pngmath',
|
<|file_name|>custom_transforms.py<|end_file_name|><|fim▁begin|>from sklearn.base import BaseEstimator, TransformerMixin
import numpy as np
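# PositionalSelector keeps only the columns at the given integer positions,
# which is handy for routing a feature subset into one branch of a Pipeline.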
class PositionalSelector(BaseEstimator, TransformerMixin):
def __init__(self, positions):
self.positions = positions
def fit(self, X, y=None):<|fim▁hole|> return np.array(X)[:, self.positions]
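# StripString strips surrounding whitespace from every string cell via a
# vectorized str.strip.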
class StripString(BaseEstimator, TransformerMixin):
def fit(self, X, y=None):
return self
def transform(self, X):
strip = np.vectorize(str.strip)
return strip(np.array(X))
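# SimpleOneHotEncoder (below) learns the distinct values per column in fit()
# and emits one indicator column per learned value in transform(); values
# not seen during fit map to an all-zero block. A minimal usage sketch (the
# sample data and column choices are illustrative, not from this project):
#
#   from sklearn.pipeline import Pipeline
#   pipe = Pipeline([
#       ('select', PositionalSelector([0, 2])),
#       ('strip', StripString()),
#       ('onehot', SimpleOneHotEncoder()),
#   ])
#   encoded = pipe.fit_transform([[' a ', 1, 'x'], [' b ', 2, 'y']])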
class SimpleOneHotEncoder(BaseEstimator, TransformerMixin):
def fit(self, X, y=None):
        X = np.array(X)  # accept any array-like, mirroring transform()
        self.values = []
for c in range(X.shape[1]):
Y = X[:, c]
values = {v: i for i, v in enumerate(np.unique(Y))}
self.values.append(values)
return self
def transform(self, X):
X = np.array(X)
matrices = []
for c in range(X.shape[1]):
Y = X[:, c]
matrix = np.zeros(shape=(len(Y), len(self.values[c])), dtype=np.int8)
for i, x in enumerate(Y):
if x in self.values[c]:
matrix[i][self.values[c][x]] = 1
matrices.append(matrix)
res = np.concatenate(matrices, axis=1)
return res<|fim▁end|>
|
return self
def transform(self, X):
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>/* $Id$ */
/** @file
*
* VBox frontends: Qt GUI ("VirtualBox"):
* The main() function
*/
/*
* Copyright (C) 2006-2009 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*/
#ifdef VBOX_WITH_PRECOMPILED_HEADERS
#include "precomp.h"
#ifdef Q_WS_MAC
# include "UICocoaApplication.h"
#endif /* Q_WS_MAC */
#else /* !VBOX_WITH_PRECOMPILED_HEADERS */
#include "VBoxGlobal.h"
#include "UIMessageCenter.h"
#include "UISelectorWindow.h"
#include "VBoxUtils.h"
#ifdef Q_WS_MAC
# include "UICocoaApplication.h"
#endif
#ifdef Q_WS_X11
#include <QFontDatabase>
#include <iprt/env.h>
#endif
#include <QCleanlooksStyle>
#include <QPlastiqueStyle>
#include <QMessageBox>
#include <QLocale>
#include <QTranslator>
#ifdef Q_WS_X11
# include <X11/Xlib.h>
#endif
#include <iprt/buildconfig.h>
#include <iprt/err.h>
#include <iprt/initterm.h>
#include <iprt/process.h>
#include <iprt/stream.h>
#include <VBox/err.h>
#include <VBox/version.h>
#endif /* !VBOX_WITH_PRECOMPILED_HEADERS */
#ifdef VBOX_WITH_HARDENING
# include <VBox/sup.h>
#endif
#ifdef RT_OS_LINUX
# include <unistd.h>
#endif
#include <cstdio>
/* XXX Temporarily. Don't rely on the user to hack the Makefile himself! */
QString g_QStrHintLinuxNoMemory = QApplication::tr(
"This error means that the kernel driver was either not able to "
"allocate enough memory or that some mapping operation failed."
);
QString g_QStrHintLinuxNoDriver = QApplication::tr(
"The VirtualBox Linux kernel driver (vboxdrv) is either not loaded or "
"there is a permission problem with /dev/vboxdrv. Please reinstall the kernel "
"module by executing<br/><br/>"
" <font color=blue>'/etc/init.d/vboxdrv setup'</font><br/><br/>"
"as root. If it is available in your distribution, you should install the "
"DKMS package first. This package keeps track of Linux kernel changes and "
"recompiles the vboxdrv kernel module if necessary."
);
QString g_QStrHintOtherWrongDriverVersion = QApplication::tr(
"The VirtualBox kernel modules do not match this version of "
"VirtualBox. The installation of VirtualBox was apparently not "
"successful. Please try completely uninstalling and reinstalling "
"VirtualBox."
);
QString g_QStrHintLinuxWrongDriverVersion = QApplication::tr(
"The VirtualBox kernel modules do not match this version of "
"VirtualBox. The installation of VirtualBox was apparently not "
"successful. Executing<br/><br/>"
" <font color=blue>'/etc/init.d/vboxdrv setup'</font><br/><br/>"
"may correct this. Make sure that you do not mix the "
"OSE version and the PUEL version of VirtualBox."
);
QString g_QStrHintOtherNoDriver = QApplication::tr(
"Make sure the kernel module has been loaded successfully."
);
/* I hope this isn't (C), (TM) or (R) Microsoft support ;-) */
QString g_QStrHintReinstall = QApplication::tr(
"Please try reinstalling VirtualBox."
);
#if defined(DEBUG) && defined(Q_WS_X11) && defined(RT_OS_LINUX)
#include <signal.h>
#include <execinfo.h>
/* get REG_EIP from ucontext.h */
#ifndef __USE_GNU
#define __USE_GNU
#endif
#include <ucontext.h>
#ifdef RT_ARCH_AMD64
# define REG_PC REG_RIP
#else
# define REG_PC REG_EIP
#endif
/**
 * The signal handler that prints out a backtrace of the call stack.
 * The code is taken from http://www.linuxjournal.com/article/6391.
*/
void bt_sighandler (int sig, siginfo_t *info, void *secret) {
void *trace[16];
char **messages = (char **)NULL;
int i, trace_size = 0;
ucontext_t *uc = (ucontext_t *)secret;
/* Do something useful with siginfo_t */
if (sig == SIGSEGV)
Log (("GUI: Got signal %d, faulty address is %p, from %p\n",
sig, info->si_addr, uc->uc_mcontext.gregs[REG_PC]));
else
Log (("GUI: Got signal %d\n", sig));
trace_size = backtrace (trace, 16);
/* overwrite sigaction with caller's address */
trace[1] = (void *) uc->uc_mcontext.gregs [REG_PC];
messages = backtrace_symbols (trace, trace_size);
/* skip first stack frame (points here) */
Log (("GUI: [bt] Execution path:\n"));
for (i = 1; i < trace_size; ++i)
Log (("GUI: [bt] %s\n", messages[i]));
exit (0);
}
#endif /* DEBUG && X11 && LINUX */
#if defined(RT_OS_DARWIN)
# include <dlfcn.h>
# include <sys/mman.h>
# include <iprt/asm.h>
# include <iprt/system.h>
/** Really ugly hack to shut up a silly check in AppKit. */
static void ShutUpAppKit(void)
{
/* Check for Snow Leopard or higher */
char szInfo[64];
int rc = RTSystemQueryOSInfo (RTSYSOSINFO_RELEASE, szInfo, sizeof(szInfo));
if ( RT_SUCCESS (rc)
&& szInfo[0] == '1') /* higher than 1x.x.x */
{
/*
         * Find issetugid() and make it always return 0 by modifying the code.
*/
void *addr = dlsym(RTLD_DEFAULT, "issetugid");
int rc = mprotect((void *)((uintptr_t)addr & ~(uintptr_t)0xfff), 0x2000, PROT_WRITE|PROT_READ|PROT_EXEC);
if (!rc)
ASMAtomicWriteU32((volatile uint32_t *)addr, 0xccc3c031); /* xor eax, eax; ret; int3 */
}
}
#endif /* DARWIN */
static void QtMessageOutput (QtMsgType type, const char *msg)
{
#ifndef Q_WS_X11
NOREF(msg);
#endif
switch (type)
{
case QtDebugMsg:
Log (("Qt DEBUG: %s\n", msg));
break;
case QtWarningMsg:
Log (("Qt WARNING: %s\n", msg));
#ifdef Q_WS_X11
/* Needed for instance for the message ``cannot connect to X server'' */
RTStrmPrintf(g_pStdErr, "Qt WARNING: %s\n", msg);
#endif
break;
case QtCriticalMsg:
Log (("Qt CRITICAL: %s\n", msg));
#ifdef Q_WS_X11
/* Needed for instance for the message ``cannot connect to X server'' */
RTStrmPrintf(g_pStdErr, "Qt CRITICAL: %s\n", msg);
#endif
break;
case QtFatalMsg:
Log (("Qt FATAL: %s\n", msg));
#ifdef Q_WS_X11
RTStrmPrintf(g_pStdErr, "Qt FATAL: %s\n", msg);
#endif
}
}
/**
* Show all available command line parameters.
*/
static void showHelp()
{
QString mode = "", dflt = "";
#ifdef VBOX_GUI_USE_SDL
mode += "sdl";
#endif
#ifdef VBOX_GUI_USE_QIMAGE
if (!mode.isEmpty())
mode += "|";
mode += "image";
#endif
#ifdef VBOX_GUI_USE_DDRAW
if (!mode.isEmpty())
mode += "|";
mode += "ddraw";
#endif
#ifdef VBOX_GUI_USE_QUARTZ2D
if (!mode.isEmpty())
mode += "|";
mode += "quartz2d";
#endif
#if defined (Q_WS_MAC) && defined (VBOX_GUI_USE_QUARTZ2D)
dflt = "quartz2d";
#elif (defined (Q_WS_WIN32) || defined (Q_WS_PM)) && defined (VBOX_GUI_USE_QIMAGE)
dflt = "image";
#elif defined (Q_WS_X11) && defined (VBOX_GUI_USE_SDL)
dflt = "sdl";
#else
dflt = "image";
#endif
RTPrintf(VBOX_PRODUCT " Manager %s\n"
"(C) 2005-" VBOX_C_YEAR " " VBOX_VENDOR "\n"
"All rights reserved.\n"
"\n"
"Usage:\n"
" --startvm <vmname|UUID> start a VM by specifying its UUID or name\n"
" --seamless switch to seamless mode during startup\n"
" --fullscreen switch to fullscreen mode during startup\n"
" --rmode %-18s select different render mode (default is %s)\n"
" --no-startvm-errormsgbox do not show a message box for VM start errors\n"
# ifdef VBOX_GUI_WITH_PIDFILE
" --pidfile <file> create a pidfile file when a VM is up and running\n"
# endif
# ifdef VBOX_WITH_DEBUGGER_GUI
" --dbg enable the GUI debug menu\n"
" --debug like --dbg and show debug windows at VM startup\n"
" --debug-command-line like --dbg and show command line window at VM startup\n"
" --debug-statistics like --dbg and show statistics window at VM startup\n"
" --no-debug disable the GUI debug menu and debug windows\n"
" --start-paused start the VM in the paused state\n"
" --start-running start the VM running (for overriding --debug*)\n"
"\n"
# endif
"Expert options:\n"
" --disable-patm disable code patching (ignored by AMD-V/VT-x)\n"
" --disable-csam disable code scanning (ignored by AMD-V/VT-x)\n"
" --recompile-supervisor recompiled execution of supervisor code (*)\n"
" --recompile-user recompiled execution of user code (*)\n"
" --recompile-all recompiled execution of all code, with disabled\n"
" code patching and scanning\n"
" (*) For AMD-V/VT-x setups the effect is --recompile-all.\n"
"\n"
# ifdef VBOX_WITH_DEBUGGER_GUI
"The following environment (and extra data) variables are evaluated:\n"
" VBOX_GUI_DBG_ENABLED (GUI/Dbg/Enabled)\n"
" enable the GUI debug menu if set\n"
" VBOX_GUI_DBG_AUTO_SHOW (GUI/Dbg/AutoShow)\n"
" show debug windows at VM startup\n"
" VBOX_GUI_NO_DEBUGGER disable the GUI debug menu and debug windows\n"
# endif
"\n",
RTBldCfgVersion(),
mode.toLatin1().constData(),
dflt.toLatin1().constData());
/** @todo Show this as a dialog on windows. */
}
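/**
 * The real GUI entry point. Without VBOX_WITH_HARDENING the plain main()
 * below initializes the runtime and forwards here; with hardening enabled
 * the hardened stub performs its checks first and then calls this function.
 */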
extern "C" DECLEXPORT(int) TrustedMain (int argc, char **argv, char ** /*envp*/)
{
LogFlowFuncEnter();
# if defined(RT_OS_DARWIN)
ShutUpAppKit();
# endif
for (int i=0; i<argc; i++)
if ( !strcmp(argv[i], "-h")
|| !strcmp(argv[i], "-?")
|| !strcmp(argv[i], "-help")
|| !strcmp(argv[i], "--help"))
{
showHelp();
return 0;
}
#if defined(DEBUG) && defined(Q_WS_X11) && defined(RT_OS_LINUX)
/* install our signal handler to backtrace the call stack */
struct sigaction sa;
sa.sa_sigaction = bt_sighandler;
sigemptyset (&sa.sa_mask);
sa.sa_flags = SA_RESTART | SA_SIGINFO;
sigaction (SIGSEGV, &sa, NULL);
sigaction (SIGBUS, &sa, NULL);
sigaction (SIGUSR1, &sa, NULL);
#endif
#ifdef QT_MAC_USE_COCOA
/* Instantiate our NSApplication derivative before QApplication
* forces NSApplication to be instantiated. */
UICocoaApplication::instance();
#endif
qInstallMsgHandler (QtMessageOutput);
int rc = 1; /* failure */
/* scope the QApplication variable */
{
#ifdef Q_WS_X11
/* Qt has a complex algorithm for selecting the right visual which
* doesn't always seem to work. So we naively choose a visual - the
* default one - ourselves and pass that to Qt. This means that we
* also have to open the display ourselves.
* We check the Qt parameter list and handle Qt's -display argument
         * ourselves, since we open the display connection. We also check
         * to see if the user has passed Qt's -visual parameter, and if so we
* assume that the user wants Qt to handle visual selection after all,
* and don't supply a visual. */
char *pszDisplay = NULL;
bool useDefaultVisual = true;
for (int i = 0; i < argc; ++i)
{
if (!::strcmp(argv[i], "-display") && (i + 1 < argc))
/* What if it isn't? Rely on QApplication to complain? */
{
pszDisplay = argv[i + 1];
++i;
}
else if (!::strcmp(argv[i], "-visual"))
useDefaultVisual = false;
}
Display *pDisplay = XOpenDisplay(pszDisplay);
if (!pDisplay)
{
RTPrintf(pszDisplay ? "Failed to open the X11 display \"%s\"!\n"
: "Failed to open the X11 display!\n",
pszDisplay);
return 0;
}
Visual *pVisual = useDefaultVisual
? DefaultVisual(pDisplay, DefaultScreen(pDisplay))
: NULL;
/* Now create the application object */
QApplication a (pDisplay, argc, argv, (Qt::HANDLE) pVisual);
#else /* Q_WS_X11 */
QApplication a (argc, argv);
#endif /* Q_WS_X11 */
        /* The Qt 4.3 series has a QProcess bug which freezes the application
         * for 30 seconds. The affected code is hit during initialization of
         * the Cleanlooks style, so we have to switch to another style.
         * See http://trolltech.com/developer/task-tracker/index_html?id=179200&method=entry
         * for details. */
if (VBoxGlobal::qtRTVersionString().startsWith ("4.3") &&
qobject_cast <QCleanlooksStyle*> (QApplication::style()))
QApplication::setStyle (new QPlastiqueStyle);
#ifdef Q_OS_SOLARIS
/* Use plastique look 'n feel for Solaris instead of the default motif (Qt 4.7.x) */
QApplication::setStyle (new QPlastiqueStyle);
#endif
#ifdef Q_WS_X11
    /* This patch is not used for now on Solaris & OpenSolaris because
     * anti-aliasing is not enabled there by default; Qt4 would have to be rebuilt. */
#ifndef Q_OS_SOLARIS
    /* Qt4 can conflict with the fontconfig configuration and as a result
     * sometimes substitutes fonts with non-scalable, non-anti-aliased
     * bitmap fonts. We therefore remove the substitute for the current
     * application font family if a scalable family would be replaced by
     * a non-scalable one. */
QFontDatabase fontDataBase;
QString currentFamily (QApplication::font().family());
bool isCurrentScaleable = fontDataBase.isScalable (currentFamily);
/*
LogFlowFunc (("Font: Current family is '%s'. It is %s.\n",
currentFamily.toLatin1().constData(),
isCurrentScaleable ? "scalable" : "not scalable"));
QStringList subFamilies (QFont::substitutes (currentFamily));
foreach (QString sub, subFamilies)
{
bool isSubScalable = fontDataBase.isScalable (sub);<|fim▁hole|> isSubScalable ? "scalable" : "not scalable"));
}
*/
QString subFamily (QFont::substitute (currentFamily));
bool isSubScaleable = fontDataBase.isScalable (subFamily);
if (isCurrentScaleable && !isSubScaleable)
QFont::removeSubstitution (currentFamily);
#endif /* Q_OS_SOLARIS */
#endif
#ifdef Q_WS_WIN
/* Drag in the sound drivers and DLLs early to get rid of the delay taking
* place when the main menu bar (or any action from that menu bar) is
* activated for the first time. This delay is especially annoying if it
* happens when the VM is executing in real mode (which gives 100% CPU
* load and slows down the load process that happens on the main GUI
* thread to several seconds). */
PlaySound (NULL, NULL, 0);
#endif
#ifdef Q_WS_MAC
::darwinDisableIconsInMenus();
#endif /* Q_WS_MAC */
#ifdef Q_WS_X11
/* version check (major.minor are sensitive, fix number is ignored) */
if (VBoxGlobal::qtRTVersion() < (VBoxGlobal::qtCTVersion() & 0xFFFF00))
{
QString msg =
QApplication::tr ("Executable <b>%1</b> requires Qt %2.x, found Qt %3.")
.arg (qAppName())
.arg (VBoxGlobal::qtCTVersionString().section ('.', 0, 1))
.arg (VBoxGlobal::qtRTVersionString());
QMessageBox::critical (
0, QApplication::tr ("Incompatible Qt Library Error"),
msg, QMessageBox::Abort, 0);
qFatal ("%s", msg.toAscii().constData());
}
#endif
/* load a translation based on the current locale */
VBoxGlobal::loadLanguage();
do
{
if (!vboxGlobal().isValid())
break;
if (vboxGlobal().processArgs())
return 0;
msgCenter().checkForMountedWrongUSB();
VBoxGlobalSettings settings = vboxGlobal().settings();
/* Process known keys */
bool noSelector = settings.isFeatureActive ("noSelector");
if (vboxGlobal().isVMConsoleProcess())
{
#ifdef VBOX_GUI_WITH_SYSTRAY
if (vboxGlobal().trayIconInstall())
{
/* Nothing to do here yet. */
}
#endif
if (vboxGlobal().startMachine (vboxGlobal().managedVMUuid()))
{
vboxGlobal().setMainWindow (vboxGlobal().vmWindow());
rc = a.exec();
}
}
else if (noSelector)
{
msgCenter().cannotRunInSelectorMode();
}
else
{
#ifdef VBOX_BLEEDING_EDGE
msgCenter().showBEBWarning();
#else
# ifndef DEBUG
/* Check for BETA version */
QString vboxVersion (vboxGlobal().virtualBox().GetVersion());
if (vboxVersion.contains ("BETA"))
{
/* Allow to prevent this message */
QString str = vboxGlobal().virtualBox().
GetExtraData(GUI_PreventBetaWarning);
if (str != vboxVersion)
msgCenter().showBETAWarning();
}
# endif
#endif
vboxGlobal().setMainWindow (&vboxGlobal().selectorWnd());
#ifdef VBOX_GUI_WITH_SYSTRAY
if (vboxGlobal().trayIconInstall())
{
/* Nothing to do here yet. */
}
if (false == vboxGlobal().isTrayMenu())
{
#endif
vboxGlobal().selectorWnd().show();
#ifdef VBOX_WITH_REGISTRATION_REQUEST
vboxGlobal().showRegistrationDialog (false /* aForce */);
#endif
#ifdef VBOX_GUI_WITH_SYSTRAY
}
do
{
#endif
rc = a.exec();
#ifdef VBOX_GUI_WITH_SYSTRAY
} while (vboxGlobal().isTrayMenu());
#endif
}
}
while (0);
}
LogFlowFunc (("rc=%d\n", rc));
LogFlowFuncLeave();
return rc;
}
#ifndef VBOX_WITH_HARDENING
int main (int argc, char **argv, char **envp)
{
/* Initialize VBox Runtime. Initialize the SUPLib as well only if we
* are really about to start a VM. Don't do this if we are only starting
* the selector window. */
bool fInitSUPLib = false;
for (int i = 1; i < argc; i++)
{
/* NOTE: the check here must match the corresponding check for the
* options to start a VM in hardenedmain.cpp and VBoxGlobal.cpp exactly,
* otherwise there will be weird error messages. */
if ( !::strcmp(argv[i], "--startvm")
|| !::strcmp(argv[i], "-startvm"))
{
fInitSUPLib = true;
break;
}
}
int rc = RTR3InitExe(argc, &argv, fInitSUPLib ? RTR3INIT_FLAGS_SUPLIB : 0);
if (RT_FAILURE(rc))
{
QApplication a (argc, &argv[0]);
#ifdef Q_OS_SOLARIS
/* Use plastique look 'n feel for Solaris instead of the default motif (Qt 4.7.x) */
QApplication::setStyle (new QPlastiqueStyle);
#endif
QString msgTitle = QApplication::tr ("VirtualBox - Runtime Error");
QString msgText = "<html>";
switch (rc)
{
case VERR_VM_DRIVER_NOT_INSTALLED:
case VERR_VM_DRIVER_LOAD_ERROR:
msgText += QApplication::tr (
"<b>Cannot access the kernel driver!</b><br/><br/>");
# ifdef RT_OS_LINUX
msgText += g_QStrHintLinuxNoDriver;
# else
msgText += g_QStrHintOtherNoDriver;
# endif
break;
# ifdef RT_OS_LINUX
case VERR_NO_MEMORY:
msgText += g_QStrHintLinuxNoMemory;
break;
# endif
case VERR_VM_DRIVER_NOT_ACCESSIBLE:
msgText += QApplication::tr ("Kernel driver not accessible");
break;
case VERR_VM_DRIVER_VERSION_MISMATCH:
# ifdef RT_OS_LINUX
msgText += g_QStrHintLinuxWrongDriverVersion;
# else
msgText += g_QStrHintOtherWrongDriverVersion;
# endif
break;
default:
msgText += QApplication::tr (
"Unknown error %2 during initialization of the Runtime"
).arg (rc);
break;
}
msgText += "</html>";
QMessageBox::critical (
0, /* parent */
msgTitle,
msgText,
QMessageBox::Abort, /* button0 */
0); /* button1 */
return 1;
}
return TrustedMain (argc, argv, envp);
}
#else /* VBOX_WITH_HARDENING */
/**
* Hardened main failed, report the error without any unnecessary fuzz.
*
* @remarks Do not call IPRT here unless really required, it might not be
* initialized.
*/
extern "C" DECLEXPORT(void) TrustedError (const char *pszWhere, SUPINITOP enmWhat, int rc, const char *pszMsgFmt, va_list va)
{
# if defined(RT_OS_DARWIN)
ShutUpAppKit();
# endif
/*
* Init the Qt application object. This is a bit hackish as we
* don't have the argument vector handy.
*/
int argc = 0;
char *argv[2] = { NULL, NULL };
QApplication a (argc, &argv[0]);
/*
* Compose and show the error message.
*/
QString msgTitle = QApplication::tr ("VirtualBox - Error In %1").arg (pszWhere);
char msgBuf[1024];
    vsnprintf (msgBuf, sizeof (msgBuf), pszMsgFmt, va); /* bounded, unlike vsprintf, to avoid overflowing msgBuf */
QString msgText = QApplication::tr (
"<html><b>%1 (rc=%2)</b><br/><br/>").arg (msgBuf).arg (rc);
switch (enmWhat)
{
case kSupInitOp_Driver:
# ifdef RT_OS_LINUX
msgText += g_QStrHintLinuxNoDriver;
# else
msgText += g_QStrHintOtherNoDriver;
# endif
break;
# ifdef RT_OS_LINUX
case kSupInitOp_IPRT:
if (rc == VERR_NO_MEMORY)
msgText += g_QStrHintLinuxNoMemory;
else
# endif
if (rc == VERR_VM_DRIVER_VERSION_MISMATCH)
# ifdef RT_OS_LINUX
msgText += g_QStrHintLinuxWrongDriverVersion;
# else
msgText += g_QStrHintOtherWrongDriverVersion;
# endif
else
msgText += g_QStrHintReinstall;
break;
case kSupInitOp_Integrity:
case kSupInitOp_RootCheck:
msgText += g_QStrHintReinstall;
break;
default:
/* no hints here */
break;
}
msgText += "</html>";
# ifdef RT_OS_LINUX
sleep(2);
# endif
QMessageBox::critical (
0, /* parent */
msgTitle, /* title */
msgText, /* text */
QMessageBox::Abort, /* button0 */
0); /* button1 */
qFatal ("%s", msgText.toAscii().constData());
}
#endif /* VBOX_WITH_HARDENING */<|fim▁end|>
|
LogFlowFunc (("Font: Substitute family is '%s'. It is %s.\n",
sub.toLatin1().constData(),
|
<|file_name|>intra_mixed_exchange.cpp<|end_file_name|><|fim▁begin|>/*
This file is part of VROOM.
Copyright (c) 2015-2021, Julien Coupey.
All rights reserved (see LICENSE).
*/
#include "problems/vrptw/operators/intra_mixed_exchange.h"
namespace vroom {
namespace vrptw {
IntraMixedExchange::IntraMixedExchange(const Input& input,
const utils::SolutionState& sol_state,
TWRoute& tw_s_route,
Index s_vehicle,
Index s_rank,
Index t_rank,
bool check_t_reverse)
: cvrp::IntraMixedExchange(input,
sol_state,
static_cast<RawRoute&>(tw_s_route),
s_vehicle,
s_rank,
t_rank,
check_t_reverse),
_tw_s_route(tw_s_route) {
}<|fim▁hole|>
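// is_valid() first runs the CVRP (capacity) checks, then additionally
// verifies time-window feasibility of the replacement span, both for the
// normal orientation and -- when requested -- for the reversed t-edge.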
bool IntraMixedExchange::is_valid() {
bool valid = cvrp::IntraMixedExchange::is_valid();
if (valid) {
s_is_normal_valid =
s_is_normal_valid &&
_tw_s_route.is_valid_addition_for_tw(_input,
_moved_jobs.begin(),
_moved_jobs.end(),
_first_rank,
_last_rank);
if (check_t_reverse) {
std::swap(_moved_jobs[_t_edge_first], _moved_jobs[_t_edge_last]);
s_is_reverse_valid =
s_is_reverse_valid &&
_tw_s_route.is_valid_addition_for_tw(_input,
_moved_jobs.begin(),
_moved_jobs.end(),
_first_rank,
_last_rank);
// Reset to initial situation before potential application.
std::swap(_moved_jobs[_t_edge_first], _moved_jobs[_t_edge_last]);
}
valid = s_is_normal_valid or s_is_reverse_valid;
}
return valid;
}
void IntraMixedExchange::apply() {
assert(!reverse_t_edge or
(_input.jobs[t_route[t_rank]].type == JOB_TYPE::SINGLE and
_input.jobs[t_route[t_rank + 1]].type == JOB_TYPE::SINGLE));
if (reverse_t_edge) {
std::swap(_moved_jobs[_t_edge_first], _moved_jobs[_t_edge_last]);
}
_tw_s_route.replace(_input,
_moved_jobs.begin(),
_moved_jobs.end(),
_first_rank,
_last_rank);
}
std::vector<Index> IntraMixedExchange::addition_candidates() const {
return {s_vehicle};
}
} // namespace vrptw
} // namespace vroom<|fim▁end|>
| |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
from django.db.models.lookups import Lookup
from django.db.models.query import QuerySet
from django.db.models.sql.where import SubqueryConstraint, WhereNode
from django.utils.six import text_type
class FilterError(Exception):
pass
class FieldError(Exception):
pass
class BaseSearchQuery(object):
DEFAULT_OPERATOR = 'or'
def __init__(self, queryset, query_string, fields=None, operator=None, order_by_relevance=True):
self.queryset = queryset
self.query_string = query_string
self.fields = fields
self.operator = operator or self.DEFAULT_OPERATOR
self.order_by_relevance = order_by_relevance
def _get_filterable_field(self, field_attname):
# Get field
field = dict(
(field.get_attname(self.queryset.model), field)
for field in self.queryset.model.get_filterable_search_fields()
).get(field_attname, None)
return field
def _process_lookup(self, field, lookup, value):
raise NotImplementedError
def _connect_filters(self, filters, connector, negated):
raise NotImplementedError
def _process_filter(self, field_attname, lookup, value):
# Get the field
field = self._get_filterable_field(field_attname)
if field is None:
raise FieldError(
'Cannot filter search results with field "' + field_attname + '". Please add index.FilterField(\'' +
field_attname + '\') to ' + self.queryset.model.__name__ + '.search_fields.'
)
# Process the lookup
result = self._process_lookup(field, lookup, value)
if result is None:
raise FilterError(
'Could not apply filter on search results: "' + field_attname + '__' +
                lookup + ' = ' + text_type(value) + '". Lookup "' + lookup + '" not recognised.'
)
return result
def _get_filters_from_where_node(self, where_node):
# Check if this is a leaf node
if isinstance(where_node, Lookup):
field_attname = where_node.lhs.target.attname
lookup = where_node.lookup_name
value = where_node.rhs
# Ignore pointer fields that show up in specific page type queries
if field_attname.endswith('_ptr_id'):
return
# Process the filter
return self._process_filter(field_attname, lookup, value)
elif isinstance(where_node, SubqueryConstraint):
raise FilterError('Could not apply filter on search results: Subqueries are not allowed.')
elif isinstance(where_node, WhereNode):
# Get child filters
connector = where_node.connector
child_filters = [self._get_filters_from_where_node(child) for child in where_node.children]
child_filters = [child_filter for child_filter in child_filters if child_filter]
return self._connect_filters(child_filters, connector, where_node.negated)
else:
raise FilterError('Could not apply filter on search results: Unknown where node: ' + str(type(where_node)))
def _get_filters_from_queryset(self):
return self._get_filters_from_where_node(self.queryset.query.where)
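# BaseSearchResults evaluates lazily: slicing via __getitem__ only narrows
# start/stop, and the backend is not hit until results() or count() runs.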
class BaseSearchResults(object):
def __init__(self, backend, query, prefetch_related=None):
self.backend = backend
self.query = query
self.prefetch_related = prefetch_related
self.start = 0
self.stop = None
self._results_cache = None
self._count_cache = None
self._score_field = None
def _set_limits(self, start=None, stop=None):
if stop is not None:
if self.stop is not None:
self.stop = min(self.stop, self.start + stop)
else:
self.stop = self.start + stop
if start is not None:
if self.stop is not None:
self.start = min(self.stop, self.start + start)
else:
self.start = self.start + start
def _clone(self):
klass = self.__class__
new = klass(self.backend, self.query, prefetch_related=self.prefetch_related)
new.start = self.start
new.stop = self.stop
return new
def _do_search(self):
raise NotImplementedError
def _do_count(self):
raise NotImplementedError
def results(self):
if self._results_cache is None:
self._results_cache = self._do_search()
return self._results_cache
def count(self):
if self._count_cache is None:
if self._results_cache is not None:
self._count_cache = len(self._results_cache)
else:
self._count_cache = self._do_count()
return self._count_cache
def __getitem__(self, key):
new = self._clone()
if isinstance(key, slice):
# Set limits
start = int(key.start) if key.start else None
stop = int(key.stop) if key.stop else None
new._set_limits(start, stop)
# Copy results cache
if self._results_cache is not None:
new._results_cache = self._results_cache[key]
return new
else:
if self._results_cache is not None:
return self._results_cache[key]
new.start = self.start + key
new.stop = self.start + key + 1
return list(new)[0]
def __iter__(self):
return iter(self.results())
def __len__(self):
return len(self.results())
def __repr__(self):
data = list(self[:21])
if len(data) > 20:
data[-1] = "...(remaining elements truncated)..."
return '<SearchResults %r>' % data
def annotate_score(self, field_name):
clone = self._clone()
clone._score_field = field_name
return clone
class BaseSearchBackend(object):
query_class = None
results_class = None
rebuilder_class = None
def __init__(self, params):
pass
def get_index_for_model(self, model):
return None<|fim▁hole|>
def get_rebuilder(self):
return None
def reset_index(self):
raise NotImplementedError
def add_type(self, model):
raise NotImplementedError
def refresh_index(self):
raise NotImplementedError
def add(self, obj):
raise NotImplementedError
def add_bulk(self, model, obj_list):
raise NotImplementedError
def delete(self, obj):
raise NotImplementedError
def search(self, query_string, model_or_queryset, fields=None, filters=None,
prefetch_related=None, operator=None, order_by_relevance=True):
# Find model/queryset
if isinstance(model_or_queryset, QuerySet):
model = model_or_queryset.model
queryset = model_or_queryset
else:
model = model_or_queryset
queryset = model_or_queryset.objects.all()
# # Model must be a class that is in the index
# if not class_is_indexed(model):
# return []
        # Check that there's still a query string after the clean-up
if query_string == "":
return []
# Only fields that are indexed as a SearchField can be passed in fields
if fields:
allowed_fields = {field.field_name for field in model.get_searchable_search_fields()}
for field_name in fields:
if field_name not in allowed_fields:
raise FieldError(
'Cannot search with field "' + field_name + '". Please add index.SearchField(\'' +
field_name + '\') to ' + model.__name__ + '.search_fields.'
)
# Apply filters to queryset
if filters:
queryset = queryset.filter(**filters)
# Prefetch related
if prefetch_related:
for prefetch in prefetch_related:
queryset = queryset.prefetch_related(prefetch)
# Check operator
if operator is not None:
operator = operator.lower()
if operator not in ['or', 'and']:
raise ValueError("operator must be either 'or' or 'and'")
# Search
search_query = self.query_class(
queryset, query_string, fields=fields, operator=operator, order_by_relevance=order_by_relevance
)
return self.results_class(self, search_query)<|fim▁end|>
| |
<|file_name|>ChromeTraceBuildListenerTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.event.listener;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.artifact_cache.ArtifactCacheConnectEvent;
import com.facebook.buck.artifact_cache.CacheResult;
import com.facebook.buck.artifact_cache.HttpArtifactCacheEvent;
import com.facebook.buck.event.CommandEvent;
import com.facebook.buck.artifact_cache.HttpArtifactCacheEventFetchData;
import com.facebook.buck.event.ArtifactCompressionEvent;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.event.ChromeTraceEvent;
import com.facebook.buck.event.CompilerPluginDurationEvent;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.java.AnnotationProcessingEvent;
import com.facebook.buck.jvm.java.tracing.JavacPhaseEvent;
import com.facebook.buck.log.InvocationInfo;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.BuildRuleEvent;
import com.facebook.buck.rules.BuildRuleKeys;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleStatus;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.step.StepEvent;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.timing.FakeClock;
import com.facebook.buck.timing.IncrementingFakeClock;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.perf.PerfStatsTracking;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.gson.Gson;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
public class ChromeTraceBuildListenerTest {
private static final long TIMESTAMP_NANOS = 1409702151000000000L;
private static final String EXPECTED_DIR =
"buck-out/log/2014-09-02_23h55m51s_no_sub_command_BUILD_ID/";
@Rule
public TemporaryFolder tmpDir = new TemporaryFolder();
private InvocationInfo invocationInfo;
@Before
public void setUp() throws IOException {
invocationInfo = InvocationInfo.builder()
.setTimestampMillis(TimeUnit.NANOSECONDS.toMillis(TIMESTAMP_NANOS))
.setBuckLogDir(tmpDir.getRoot().toPath().resolve("buck-out/log"))
.setBuildId(new BuildId("BUILD_ID"))
.setSubCommand("no_sub_command")
.setIsDaemon(false)
.setSuperConsoleEnabled(false)
.build();
}
@Test
public void testDeleteFiles() throws IOException {
ProjectFilesystem projectFilesystem = new ProjectFilesystem(tmpDir.getRoot().toPath());
String tracePath = invocationInfo.getLogDirectoryPath().resolve("build.trace").toString();
File traceFile = new File(tracePath);
projectFilesystem.createParentDirs(tracePath);
traceFile.createNewFile();
traceFile.setLastModified(0);
for (int i = 0; i < 10; ++i) {
File oldResult = new File(
String.format("%s/build.100%d.trace", invocationInfo.getLogDirectoryPath(), i));
oldResult.createNewFile();
oldResult.setLastModified(TimeUnit.SECONDS.toMillis(i));
}
ChromeTraceBuildListener listener = new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
new FakeClock(TIMESTAMP_NANOS),
ObjectMappers.newDefaultInstance(),
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
/* tracesToKeep */ 3,
false);
listener.outputTrace(invocationInfo.getBuildId());
ImmutableList<String> files = FluentIterable.
from(Arrays.asList(projectFilesystem.listFiles(invocationInfo.getLogDirectoryPath()))).
filter(input -> input.toString().endsWith(".trace")).
transform(File::getName).
toList();
assertEquals(4, files.size());
assertEquals(
ImmutableSortedSet.of(
"build.trace",
"build.1009.trace",
"build.1008.trace",
"build.2014-09-02.16-55-51.BUILD_ID.trace"),
ImmutableSortedSet.copyOf(files));
}
@Test
public void testBuildJson() throws IOException {
ProjectFilesystem projectFilesystem = new ProjectFilesystem(tmpDir.getRoot().toPath());
ObjectMapper mapper = ObjectMappers.newDefaultInstance();
BuildId buildId = new BuildId("ChromeTraceBuildListenerTestBuildId");
ChromeTraceBuildListener listener = new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
new FakeClock(TIMESTAMP_NANOS),
mapper,
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
/* tracesToKeep */ 42,
false);
BuildTarget target = BuildTargetFactory.newInstance("//fake:rule");
FakeBuildRule rule = new FakeBuildRule(
target,
new SourcePathResolver(
new BuildRuleResolver(
TargetGraph.EMPTY,
new DefaultTargetNodeToBuildRuleTransformer())
),
ImmutableSortedSet.of());
RuleKey ruleKey = new RuleKey("abc123");
String stepShortName = "fakeStep";
String stepDescription = "I'm a Fake Step!";
UUID stepUuid = UUID.randomUUID();
ImmutableSet<BuildTarget> buildTargets = ImmutableSet.of(target);
Iterable<String> buildArgs = Iterables.transform(buildTargets, Object::toString);
Clock fakeClock = new IncrementingFakeClock(TimeUnit.MILLISECONDS.toNanos(1));
BuckEventBus eventBus = BuckEventBusFactory.newInstance(fakeClock, buildId);
eventBus.register(listener);
CommandEvent.Started commandEventStarted = CommandEvent.started(
"party",
ImmutableList.of("arg1", "arg2"),
/* isDaemon */ true);
eventBus.post(commandEventStarted);
eventBus.post(new PerfStatsTracking.MemoryPerfStatsEvent(
/* freeMemoryBytes */ 1024 * 1024L,
/* totalMemoryBytes */ 3 * 1024 * 1024L,
/* timeSpentInGcMs */ -1,
/* currentMemoryBytesUsageByPool */ ImmutableMap.of("flower", 42L * 1024 * 1024)));
ArtifactCacheConnectEvent.Started artifactCacheConnectEventStarted =
ArtifactCacheConnectEvent.started();
eventBus.post(artifactCacheConnectEventStarted);
eventBus.post(ArtifactCacheConnectEvent.finished(artifactCacheConnectEventStarted));
BuildEvent.Started buildEventStarted = BuildEvent.started(buildArgs);
eventBus.post(buildEventStarted);
<|fim▁hole|> HttpArtifactCacheEvent.newFetchStartedEvent(ruleKey);
eventBus.post(artifactCacheEventStarted);
eventBus.post(
HttpArtifactCacheEvent.newFinishedEventBuilder(artifactCacheEventStarted)
.setFetchDataBuilder(
HttpArtifactCacheEventFetchData.builder()
.setFetchResult(CacheResult.hit("http")))
.build());
ArtifactCompressionEvent.Started artifactCompressionStartedEvent =
ArtifactCompressionEvent.started(
ArtifactCompressionEvent.Operation.COMPRESS, ImmutableSet.of(ruleKey));
eventBus.post(artifactCompressionStartedEvent);
eventBus.post(ArtifactCompressionEvent.finished(artifactCompressionStartedEvent));
eventBus.post(BuildRuleEvent.started(rule));
eventBus.post(StepEvent.started(stepShortName, stepDescription, stepUuid));
JavacPhaseEvent.Started runProcessorsStartedEvent = JavacPhaseEvent.started(
target,
JavacPhaseEvent.Phase.RUN_ANNOTATION_PROCESSORS,
ImmutableMap.of());
eventBus.post(runProcessorsStartedEvent);
String annotationProcessorName = "com.facebook.FakeProcessor";
AnnotationProcessingEvent.Operation operation = AnnotationProcessingEvent.Operation.PROCESS;
int annotationRound = 1;
boolean isLastRound = false;
AnnotationProcessingEvent.Started annotationProcessingEventStarted =
AnnotationProcessingEvent.started(
target,
annotationProcessorName,
operation,
annotationRound,
isLastRound);
eventBus.post(annotationProcessingEventStarted);
HttpArtifactCacheEvent.Scheduled httpScheduled = HttpArtifactCacheEvent.newStoreScheduledEvent(
Optional.of("TARGET_ONE"), ImmutableSet.of(ruleKey));
HttpArtifactCacheEvent.Started httpStarted =
HttpArtifactCacheEvent.newStoreStartedEvent(httpScheduled);
eventBus.post(httpStarted);
HttpArtifactCacheEvent.Finished httpFinished =
HttpArtifactCacheEvent.newFinishedEventBuilder(httpStarted).build();
eventBus.post(httpFinished);
final CompilerPluginDurationEvent.Started processingPartOneStarted =
CompilerPluginDurationEvent.started(
target,
annotationProcessorName,
"processingPartOne",
ImmutableMap.of());
eventBus.post(processingPartOneStarted);
eventBus.post(
CompilerPluginDurationEvent.finished(
processingPartOneStarted,
ImmutableMap.of()));
eventBus.post(AnnotationProcessingEvent.finished(annotationProcessingEventStarted));
eventBus.post(
JavacPhaseEvent.finished(runProcessorsStartedEvent, ImmutableMap.of()));
eventBus.post(StepEvent.finished(
StepEvent.started(stepShortName, stepDescription, stepUuid),
0));
eventBus.post(
BuildRuleEvent.finished(
rule,
BuildRuleKeys.of(ruleKey),
BuildRuleStatus.SUCCESS,
CacheResult.miss(),
Optional.of(BuildRuleSuccessType.BUILT_LOCALLY),
Optional.empty(),
Optional.empty(),
Optional.empty(),
Optional.empty()));
try (final SimplePerfEvent.Scope scope1 = SimplePerfEvent.scope(
eventBus,
PerfEventId.of("planning"),
ImmutableMap.<String, Object>of("nefarious", "true"))) {
try (final SimplePerfEvent.Scope scope2 = SimplePerfEvent.scope(
eventBus,
PerfEventId.of("scheming"))) {
scope2.appendFinishedInfo("success", "false");
}
}
eventBus.post(BuildEvent.finished(buildEventStarted, 0));
eventBus.post(CommandEvent.finished(commandEventStarted, /* exitCode */ 0));
listener.outputTrace(new BuildId("BUILD_ID"));
File resultFile = new File(tmpDir.getRoot(), "buck-out/log/build.trace");
List<ChromeTraceEvent> originalResultList = mapper.readValue(
resultFile,
new TypeReference<List<ChromeTraceEvent>>() {});
List<ChromeTraceEvent> resultListCopy = new ArrayList<>();
resultListCopy.addAll(originalResultList);
ImmutableMap<String, String> emptyArgs = ImmutableMap.of();
assertNextResult(
resultListCopy,
"process_name",
ChromeTraceEvent.Phase.METADATA,
ImmutableMap.of("name", "buck"));
assertNextResult(
resultListCopy,
"party",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("command_args", "arg1 arg2"));
assertNextResult(
resultListCopy,
"memory",
ChromeTraceEvent.Phase.COUNTER,
ImmutableMap.of(
"used_memory_mb", "2",
"free_memory_mb", "1",
"total_memory_mb", "3",
"time_spent_in_gc_sec", "0",
"pool_flower_mb", "42"));
assertNextResult(
resultListCopy,
"artifact_connect",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"artifact_connect",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy,
"build",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"http_artifact_fetch",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("rule_key", "abc123"));
assertNextResult(
resultListCopy,
"http_artifact_fetch",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"rule_key", "abc123",
"success", "true",
"cache_result", "HTTP_HIT"));
assertNextResult(
resultListCopy,
"artifact_compress",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("rule_key", "abc123"));
assertNextResult(
resultListCopy,
"artifact_compress",
ChromeTraceEvent.Phase.END,
ImmutableMap.of("rule_key", "abc123"));
// BuildRuleEvent.Started
assertNextResult(
resultListCopy,
"//fake:rule",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of());
assertNextResult(
resultListCopy,
"fakeStep",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"run annotation processors",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"com.facebook.FakeProcessor.process",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"http_artifact_store",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of(
"rule_key", "abc123"));
assertNextResult(
resultListCopy,
"http_artifact_store",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"success", "true",
"rule_key", "abc123"));
assertNextResult(
resultListCopy,
"processingPartOne",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"processingPartOne",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy,
"com.facebook.FakeProcessor.process",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy,
"run annotation processors",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy,
"fakeStep",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"description", "I'm a Fake Step!",
"exit_code", "0"));
assertNextResult(
resultListCopy,
"//fake:rule",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"cache_result", "miss",
"success_type", "BUILT_LOCALLY"));
assertNextResult(
resultListCopy,
"planning",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("nefarious", "true"));
assertNextResult(
resultListCopy,
"scheming",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"scheming",
ChromeTraceEvent.Phase.END,
ImmutableMap.of("success", "false"));
assertNextResult(
resultListCopy,
"planning",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy,
"build",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy,
"party",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"command_args", "arg1 arg2",
"daemon", "true"));
assertEquals(0, resultListCopy.size());
}
private static void assertNextResult(
List<ChromeTraceEvent> resultList,
String expectedName,
ChromeTraceEvent.Phase expectedPhase,
ImmutableMap<String, String> expectedArgs) {
assertTrue(resultList.size() > 0);
assertEquals(expectedName, resultList.get(0).getName());
assertEquals(expectedPhase, resultList.get(0).getPhase());
assertEquals(expectedArgs, resultList.get(0).getArgs());
resultList.remove(0);
}
@Test
public void testOutputFailed() throws IOException {
ProjectFilesystem projectFilesystem = new ProjectFilesystem(tmpDir.getRoot().toPath());
assumeTrue("Can make the root directory read-only", tmpDir.getRoot().setReadOnly());
try {
ChromeTraceBuildListener listener = new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
new FakeClock(TIMESTAMP_NANOS),
ObjectMappers.newDefaultInstance(),
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
/* tracesToKeep */ 3,
false);
listener.outputTrace(invocationInfo.getBuildId());
fail("Expected an exception.");
} catch (HumanReadableException e) {
assertEquals(
"Unable to write trace file: java.nio.file.AccessDeniedException: " +
projectFilesystem.resolve(projectFilesystem.getBuckPaths().getBuckOut()),
e.getMessage());
} finally {
tmpDir.getRoot().setWritable(true);
}
}
@Test
public void outputFileUsesCurrentTime() throws IOException {
ProjectFilesystem projectFilesystem = new ProjectFilesystem(tmpDir.getRoot().toPath());
ChromeTraceBuildListener listener = new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
new FakeClock(TIMESTAMP_NANOS),
ObjectMappers.newDefaultInstance(),
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
/* tracesToKeep */ 1,
false);
listener.outputTrace(invocationInfo.getBuildId());
assertTrue(
projectFilesystem.exists(
Paths.get(EXPECTED_DIR + "build.2014-09-02.16-55-51.BUILD_ID.trace")));
}
@Test
public void canCompressTraces() throws IOException {
ProjectFilesystem projectFilesystem = new ProjectFilesystem(tmpDir.getRoot().toPath());
ChromeTraceBuildListener listener = new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
new FakeClock(TIMESTAMP_NANOS),
ObjectMappers.newDefaultInstance(),
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
/* tracesToKeep */ 1,
true);
listener.outputTrace(invocationInfo.getBuildId());
Path tracePath = Paths.get(EXPECTED_DIR + "build.2014-09-02.16-55-51.BUILD_ID.trace.gz");
assertTrue(projectFilesystem.exists(tracePath));
BufferedReader reader = new BufferedReader(
new InputStreamReader(
new GZIPInputStream(projectFilesystem.newFileInputStream(tracePath))));
List<?> elements = new Gson().fromJson(reader, List.class);
assertThat(elements, notNullValue());
}
}<|fim▁end|>
|
HttpArtifactCacheEvent.Started artifactCacheEventStarted =
|
<|file_name|>HReconnectMatchElementsRHS.py<|end_file_name|><|fim▁begin|>from core.himesis import Himesis, HimesisPostConditionPattern
import cPickle as pickle
from uuid import UUID
class HReconnectMatchElementsRHS(HimesisPostConditionPattern):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HReconnectMatchElementsRHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HReconnectMatchElementsRHS, self).__init__(name='HReconnectMatchElementsRHS', num_nodes=3, edges=[])
# Add the edges
self.add_edges([(2, 0), (0, 1)])
# Set the graph attributes
self["mm__"] = pickle.loads("""(lp1
S'MT_post__GM2AUTOSAR_MM'
p2
aS'MoTifRule'
p3
a.""")
self["MT_action__"] = """#===============================================================================
# This code is executed after the rule has been applied.
# You can access a node labelled n matched by this rule by: PostNode('n').
# To access attribute x of node n, use: PostNode('n')['x'].
#===============================================================================
pass
"""
self["name"] = """"""
self["GUID__"] = UUID('ce9c5429-6e4c-4782-a83a-17e240381cb6')
# Set the node attributes
self.vs[0]["mm__"] = """MT_post__match_contains"""
self.vs[0]["MT_label__"] = """3"""
self.vs[0]["GUID__"] = UUID('789662d8-ab7d-4640-a710-abbc847de320')
self.vs[1]["mm__"] = """MT_post__MetaModelElement_S"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["MT_post__classtype"] = """
#===============================================================================
# You can access the current node's attribute value via: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[1]["MT_post__name"] = """
#===============================================================================
# You can access the current node's attribute value via: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[1]["GUID__"] = UUID('7e5e306f-cb65-40df-9e60-63b9fe83b79b')
self.vs[2]["mm__"] = """MT_post__MatchModel"""
self.vs[2]["MT_label__"] = """1"""
self.vs[2]["GUID__"] = UUID('3c85bf70-be4a-40d8-9bcb-c138195ad20e')
from HReconnectMatchElementsLHS import HReconnectMatchElementsLHS
self.pre = HReconnectMatchElementsLHS()
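        # The LHS pattern (self.pre) is matched first; execute() below then
        # rewrites a match by creating a match_contains node (label 3) and
        # wiring MatchModel (label 1) -> 3 -> MetaModelElement_S (label 2).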
def action(self, PostNode, graph):
"""<|fim▁hole|> """
#===============================================================================
# This code is executed after the rule has been applied.
# You can access a node labelled n matched by this rule by: PostNode('n').
# To access attribute x of node n, use: PostNode('n')['x'].
#===============================================================================
pass
def execute(self, packet, match):
"""
        Transforms the current match of the packet according to this rule.
Pivots are also assigned, if any.
@param packet: The input packet.
@param match: The match to rewrite.
"""
graph = packet.graph
# Build a dictionary {label: node index} mapping each label of the pattern to a node in the graph to rewrite.
# Because of the uniqueness property of labels in a rule, we can store all LHS labels
# and subsequently add the labels corresponding to the nodes to be created.
labels = match.copy()
#===============================================================================
# Update attribute values
#===============================================================================
#===============================================================================
# Create new nodes
#===============================================================================
# match_contains3
new_node = graph.add_node()
labels['3'] = new_node
graph.vs[new_node][Himesis.Constants.META_MODEL] = 'match_contains'
#===============================================================================
# Create new edges
#===============================================================================
# MatchModel1 -> match_contains3
graph.add_edges([(labels['1'], labels['3'])])
# match_contains3 -> MetaModelElement_S2
graph.add_edges([(labels['3'], labels['2'])])
#===============================================================================
# Set the output pivots
#===============================================================================
#===============================================================================
# Perform the post-action
#===============================================================================
try:
self.action(lambda i: graph.vs[labels[i]], graph)
except Exception, e:
raise Exception('An error has occurred while applying the post-action', e)
#===============================================================================
# Finally, delete nodes (this will automatically delete the adjacent edges)
#===============================================================================<|fim▁end|>
|
Executable constraint code.
@param PostNode: Function taking an integer as parameter
and returning the node corresponding to that label.
|
<|file_name|>ReadRecInd.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2007 Esmertec AG.
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.com.google.android.mms.pdu;
import ws.com.google.android.mms.InvalidHeaderValueException;
public class ReadRecInd extends GenericPdu {
/**
* Constructor, used when composing a M-ReadRec.ind pdu.
*
* @param from the from value
* @param messageId the message ID value
* @param mmsVersion the current version of MMS
* @param readStatus the read status value
* @param to the to value
* @throws InvalidHeaderValueException if parameters are invalid.
* NullPointerException if messageId or to is null.
*/
public ReadRecInd(EncodedStringValue from,
byte[] messageId,
int mmsVersion,
int readStatus,
EncodedStringValue[] to) throws InvalidHeaderValueException {
super();
setMessageType(PduHeaders.MESSAGE_TYPE_READ_REC_IND);
setFrom(from);
setMessageId(messageId);
setMmsVersion(mmsVersion);
setTo(to);
setReadStatus(readStatus);
}
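// Hedged usage sketch: all argument values below are made-up examples,
// not values taken from this codebase.
// ReadRecInd ind = new ReadRecInd(
//         new EncodedStringValue("+15551234567/TYPE=PLMN"),
//         "msg-id-0001".getBytes(),
//         PduHeaders.CURRENT_MMS_VERSION,
//         PduHeaders.READ_STATUS_READ,
//         new EncodedStringValue[] { new EncodedStringValue("+15557654321/TYPE=PLMN") });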
/**
* Constructor with given headers.
*
* @param headers Headers for this PDU.
*/
ReadRecInd(PduHeaders headers) {
super(headers);
}
/**
* Get Date value.
*
* @return the value
*/
public long getDate() {
return mPduHeaders.getLongInteger(PduHeaders.DATE);
}
/**
* Set Date value.
*
* @param value the value
*/
public void setDate(long value) {
mPduHeaders.setLongInteger(value, PduHeaders.DATE);
}
/**
* Get Message-ID value.
*
* @return the value
*/
public byte[] getMessageId() {
return mPduHeaders.getTextString(PduHeaders.MESSAGE_ID);
}
/**
* Set Message-ID value.
*
* @param value the value
* @throws NullPointerException if the value is null.
*/
public void setMessageId(byte[] value) {
mPduHeaders.setTextString(value, PduHeaders.MESSAGE_ID);
}
/**
* Get To value.
*
* @return the value
*/
public EncodedStringValue[] getTo() {
return mPduHeaders.getEncodedStringValues(PduHeaders.TO);
}
/**
* Set To value.
*
* @param value the value
* @throws NullPointerException if the value is null.<|fim▁hole|> }
/**
* Get X-MMS-Read-status value.
*
* @return the value
*/
public int getReadStatus() {
return mPduHeaders.getOctet(PduHeaders.READ_STATUS);
}
/**
* Set X-MMS-Read-status value.
*
* @param value the value
* @throws InvalidHeaderValueException if the value is invalid.
*/
public void setReadStatus(int value) throws InvalidHeaderValueException {
mPduHeaders.setOctet(value, PduHeaders.READ_STATUS);
}
/*
* Optional, not supported header fields:
*
* public byte[] getApplicId() {return null;}
* public void setApplicId(byte[] value) {}
*
* public byte[] getAuxApplicId() {return null;}
* public void getAuxApplicId(byte[] value) {}
*
* public byte[] getReplyApplicId() {return 0x00;}
* public void setReplyApplicId(byte[] value) {}
*/
}<|fim▁end|>
|
*/
public void setTo(EncodedStringValue[] value) {
mPduHeaders.setEncodedStringValues(value, PduHeaders.TO);
|
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|># Author: Zhang Huangbin <[email protected]>
import web
session = web.config.get('_session')
def require_login(func):
def proxyfunc(self, *args, **kw):
if session.get('logged') is True:
return func(self, *args, **kw)
else:
session.kill()
raise web.seeother('/login?msg=loginRequired')
return proxyfunc<|fim▁hole|>def require_global_admin(func):
def proxyfunc(self, *args, **kw):
if session.get('domainGlobalAdmin') is True:
return func(self, *args, **kw)
else:
if session.get('logged'):
raise web.seeother('/domains?msg=PERMISSION_DENIED')
else:
raise web.seeother('/login?msg=PERMISSION_DENIED')
return proxyfunc
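# Hedged usage sketch: the handler classes below are illustrative only and
# not part of this module.
#
# class Domains:
#     @require_global_admin
#     def GET(self):
#         ...
#
# class Profile:
#     @require_login
#     @csrf_protected
#     def POST(self):
#         ...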
def csrf_protected(f):
def decorated(*args, **kw):
inp = web.input()
if not ('csrf_token' in inp and \
inp.csrf_token == session.pop('csrf_token', None)):
return web.render('error_csrf.html')
return f(*args, **kw)
return decorated<|fim▁end|>
| |
<|file_name|>media.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
eve.io.media
~~~~~~~~~~~~
Media storage for Eve-powered APIs.
:copyright: (c) 2014 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
class MediaStorage(object):
""" The MediaStorage class provides a standardized API for storing files,
along with a set of default behaviors that all other storage systems can
inherit or override as necessary.
.. versionadded:: 0.3
"""
def __init__(self, app=None):
"""
:param app: the flask application (eve itself). This can be used by
the class to access, amongst other things, the app.config object to
retrieve class-specific settings.
"""
self.app = app
def get(self, id_or_filename):
""" Opens the file given by name or unique id. Note that although the
returned file is guaranteed to be a File object, it might actually be
some subclass. Returns None if no file was found.
"""
raise NotImplementedError
def put(self, content, filename=None, content_type=None):
""" Saves a new file using the storage system, preferably with the name<|fim▁hole|> specified. If there already exists a file with this name name, the
storage system may modify the filename as necessary to get a unique
name. Depending on the storage system, a unique id or the actual name
of the stored file will be returned. The content type argument is used
to appropriately identify the file when it is retrieved.
.. versionchanged:: 0.5
Allow filename to be optional (#414).
"""
raise NotImplementedError
def delete(self, id_or_filename):
""" Deletes the file referenced by name or unique id. If deletion is
not supported on the target storage system this will raise
NotImplementedError instead
"""
raise NotImplementedError
def exists(self, id_or_filename):
""" Returns True if a file referenced by the given name or unique id
already exists in the storage system, or False if the name is available
for a new file.
"""
raise NotImplementedError<|fim▁end|>
| |
<|file_name|>TestWarDeployment.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2011, 2012 camunda services GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.integrationtest.deployment.war;
import org.camunda.bpm.engine.RepositoryService;
import org.camunda.bpm.integrationtest.util.AbstractFoxPlatformIntegrationTest;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
<|fim▁hole|>@RunWith(Arquillian.class)
public class TestWarDeployment extends AbstractFoxPlatformIntegrationTest {
@Deployment
public static WebArchive processArchive() {
return initWebArchiveDeployment()
.addAsResource("org/camunda/bpm/integrationtest/testDeployProcessArchive.bpmn20.xml");
}
@Test
public void testDeployProcessArchive() {
Assert.assertNotNull(processEngine);
RepositoryService repositoryService = processEngine.getRepositoryService();
long count = repositoryService.createProcessDefinitionQuery()
.processDefinitionKey("testDeployProcessArchive")
.count();
Assert.assertEquals(1, count);
}
}<|fim▁end|>
| |
<|file_name|>tokens_controller.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package serviceaccount
import (
"bytes"
"fmt"
"time"
"github.com/golang/glog"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/util/workqueue"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/client/clientset_generated/clientset"
clientretry "k8s.io/kubernetes/pkg/client/retry"
"k8s.io/kubernetes/pkg/registry/core/secret"
"k8s.io/kubernetes/pkg/serviceaccount"
"k8s.io/kubernetes/pkg/util/metrics"
)
// RemoveTokenBackoff is the recommended (empirical) retry interval for removing
// a secret reference from a service account when the secret is deleted. It is
// exported for use by custom secret controllers.
var RemoveTokenBackoff = wait.Backoff{
Steps: 10,
Duration: 100 * time.Millisecond,
Jitter: 1.0,
}
// TokensControllerOptions contains options for the TokensController
type TokensControllerOptions struct {
// TokenGenerator is the generator to use to create new tokens
TokenGenerator serviceaccount.TokenGenerator
// ServiceAccountResync is the time.Duration at which to fully re-list service accounts.
// If zero, re-list will be delayed as long as possible
ServiceAccountResync time.Duration
// SecretResync is the time.Duration at which to fully re-list secrets.
// If zero, re-list will be delayed as long as possible
SecretResync time.Duration
// This CA will be added in the secrets of service accounts
RootCA []byte
// MaxRetries controls the maximum number of times a particular key is retried before giving up
// If zero, a default max is used
MaxRetries int
}
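// Hedged wiring sketch; the client, generator, rootCA and stop channel below
// are assumed to exist elsewhere, and the values are examples, not defaults:
//
//	tc := NewTokensController(client, TokensControllerOptions{
//		TokenGenerator: generator,
//		RootCA:         rootCA,
//		MaxRetries:     0, // zero falls back to the built-in default of 10
//	})
//	go tc.Run(2, stopCh)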
// NewTokensController returns a new *TokensController.
func NewTokensController(cl clientset.Interface, options TokensControllerOptions) *TokensController {
maxRetries := options.MaxRetries
if maxRetries == 0 {
maxRetries = 10
}
e := &TokensController{
client: cl,
token: options.TokenGenerator,
rootCA: options.RootCA,
syncServiceAccountQueue: workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "serviceaccount_tokens_service"),
syncSecretQueue: workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "serviceaccount_tokens_secret"),
maxRetries: maxRetries,
}
if cl != nil && cl.Core().RESTClient().GetRateLimiter() != nil {
metrics.RegisterMetricAndTrackRateLimiterUsage("serviceaccount_controller", cl.Core().RESTClient().GetRateLimiter())
}
e.serviceAccounts, e.serviceAccountController = cache.NewInformer(
&cache.ListWatch{
ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
return e.client.Core().ServiceAccounts(metav1.NamespaceAll).List(options)
},
WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
return e.client.Core().ServiceAccounts(metav1.NamespaceAll).Watch(options)
},
},
&v1.ServiceAccount{},
options.ServiceAccountResync,
cache.ResourceEventHandlerFuncs{
AddFunc: e.queueServiceAccountSync,
UpdateFunc: e.queueServiceAccountUpdateSync,
DeleteFunc: e.queueServiceAccountSync,
},
)
tokenSelector := fields.SelectorFromSet(map[string]string{api.SecretTypeField: string(v1.SecretTypeServiceAccountToken)})
e.secrets, e.secretController = cache.NewIndexerInformer(
&cache.ListWatch{
ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
options.FieldSelector = tokenSelector.String()
return e.client.Core().Secrets(metav1.NamespaceAll).List(options)
},
WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
options.FieldSelector = tokenSelector.String()
return e.client.Core().Secrets(metav1.NamespaceAll).Watch(options)
},
},
&v1.Secret{},
options.SecretResync,
cache.ResourceEventHandlerFuncs{
AddFunc: e.queueSecretSync,
UpdateFunc: e.queueSecretUpdateSync,
DeleteFunc: e.queueSecretSync,
},
cache.Indexers{"namespace": cache.MetaNamespaceIndexFunc},
)
return e
}
// TokensController manages ServiceAccountToken secrets for ServiceAccount objects
type TokensController struct {
client clientset.Interface
token serviceaccount.TokenGenerator
rootCA []byte
serviceAccounts cache.Store
secrets cache.Indexer
// Since we join two objects, we'll watch both of them with controllers.
serviceAccountController cache.Controller
secretController cache.Controller
// syncServiceAccountQueue handles service account events:
// * ensures a referenced token exists for service accounts which still exist
// * ensures tokens are removed for service accounts which no longer exist
// key is "<namespace>/<name>/<uid>"
syncServiceAccountQueue workqueue.RateLimitingInterface
// syncSecretQueue handles secret events:
// * deletes tokens whose service account no longer exists
// * updates tokens with missing token or namespace data, or mismatched ca data
// * ensures service account secret references are removed for tokens which are deleted
// key is a secretQueueKey{}
syncSecretQueue workqueue.RateLimitingInterface
maxRetries int
}
// Runs controller blocks until stopCh is closed
func (e *TokensController) Run(workers int, stopCh <-chan struct{}) {
defer utilruntime.HandleCrash()
// Start controllers (to fill stores, call informers, fill work queues)
go e.serviceAccountController.Run(stopCh)
go e.secretController.Run(stopCh)
// Wait for stores to fill
for !e.serviceAccountController.HasSynced() || !e.secretController.HasSynced() {
time.Sleep(100 * time.Millisecond)
}
// Spawn workers to process work queues
for i := 0; i < workers; i++ {
go wait.Until(e.syncServiceAccount, 0, stopCh)
go wait.Until(e.syncSecret, 0, stopCh)
}
// Block until stop channel is closed
<-stopCh
// Shut down queues
e.syncServiceAccountQueue.ShutDown()
e.syncSecretQueue.ShutDown()
}
func (e *TokensController) queueServiceAccountSync(obj interface{}) {
if serviceAccount, ok := obj.(*v1.ServiceAccount); ok {
e.syncServiceAccountQueue.Add(makeServiceAccountKey(serviceAccount))
}
}
func (e *TokensController) queueServiceAccountUpdateSync(oldObj interface{}, newObj interface{}) {
if serviceAccount, ok := newObj.(*v1.ServiceAccount); ok {
e.syncServiceAccountQueue.Add(makeServiceAccountKey(serviceAccount))
}
}
// retryOrForget requeues key (rate-limited) while requeue is true and the retry budget allows; otherwise it forgets the key
func (e *TokensController) retryOrForget(queue workqueue.RateLimitingInterface, key interface{}, requeue bool) {
if !requeue {
queue.Forget(key)
return
}
requeueCount := queue.NumRequeues(key)
if requeueCount < e.maxRetries {
queue.AddRateLimited(key)
return
}
glog.V(4).Infof("retried %d times: %#v", requeueCount, key)
queue.Forget(key)
}
func (e *TokensController) queueSecretSync(obj interface{}) {
if secret, ok := obj.(*v1.Secret); ok {
e.syncSecretQueue.Add(makeSecretQueueKey(secret))
}
}
func (e *TokensController) queueSecretUpdateSync(oldObj interface{}, newObj interface{}) {
if secret, ok := newObj.(*v1.Secret); ok {
e.syncSecretQueue.Add(makeSecretQueueKey(secret))
}
}
func (e *TokensController) syncServiceAccount() {
key, quit := e.syncServiceAccountQueue.Get()
if quit {
return
}
defer e.syncServiceAccountQueue.Done(key)
retry := false
defer func() {
e.retryOrForget(e.syncServiceAccountQueue, key, retry)
}()
saInfo, err := parseServiceAccountKey(key)
if err != nil {
glog.Error(err)
return
}
sa, err := e.getServiceAccount(saInfo.namespace, saInfo.name, saInfo.uid, false)
switch {
case err != nil:
glog.Error(err)
retry = true
case sa == nil:
// service account no longer exists, so delete related tokens
glog.V(4).Infof("syncServiceAccount(%s/%s), service account deleted, removing tokens", saInfo.namespace, saInfo.name)
sa = &v1.ServiceAccount{ObjectMeta: metav1.ObjectMeta{Namespace: saInfo.namespace, Name: saInfo.name, UID: saInfo.uid}}
retry, err = e.deleteTokens(sa)
if err != nil {
glog.Errorf("error deleting serviceaccount tokens for %s/%s: %v", saInfo.namespace, saInfo.name, err)
}
default:
// ensure a token exists and is referenced by this service account
retry, err = e.ensureReferencedToken(sa)
if err != nil {
glog.Errorf("error synchronizing serviceaccount %s/%s: %v", saInfo.namespace, saInfo.name, err)
}
}
}
func (e *TokensController) syncSecret() {
key, quit := e.syncSecretQueue.Get()
if quit {
return
}
defer e.syncSecretQueue.Done(key)
// Track whether or not we should retry this sync
retry := false
defer func() {
e.retryOrForget(e.syncSecretQueue, key, retry)
}()
secretInfo, err := parseSecretQueueKey(key)
if err != nil {
glog.Error(err)
return
}
secret, err := e.getSecret(secretInfo.namespace, secretInfo.name, secretInfo.uid, false)
switch {
case err != nil:
glog.Error(err)
retry = true
case secret == nil:
// If the service account exists
if sa, saErr := e.getServiceAccount(secretInfo.namespace, secretInfo.saName, secretInfo.saUID, false); saErr == nil && sa != nil {
// secret no longer exists, so delete references to this secret from the service account
if err := clientretry.RetryOnConflict(RemoveTokenBackoff, func() error {
return e.removeSecretReference(secretInfo.namespace, secretInfo.saName, secretInfo.saUID, secretInfo.name)
}); err != nil {
glog.Error(err)
}
}
default:
// Ensure service account exists
sa, saErr := e.getServiceAccount(secretInfo.namespace, secretInfo.saName, secretInfo.saUID, true)
switch {
case saErr != nil:
glog.Error(saErr)
retry = true
case sa == nil:
// Delete token
glog.V(4).Infof("syncSecret(%s/%s), service account does not exist, deleting token", secretInfo.namespace, secretInfo.name)
if retriable, err := e.deleteToken(secretInfo.namespace, secretInfo.name, secretInfo.uid); err != nil {
glog.Errorf("error deleting serviceaccount token %s/%s for service account %s: %v", secretInfo.namespace, secretInfo.name, secretInfo.saName, err)
retry = retriable
}
default:
// Update token if needed
if retriable, err := e.generateTokenIfNeeded(sa, secret); err != nil {
glog.Errorf("error populating serviceaccount token %s/%s for service account %s: %v", secretInfo.namespace, secretInfo.name, secretInfo.saName, err)
retry = retriable
}
}
}
}
func (e *TokensController) deleteTokens(serviceAccount *v1.ServiceAccount) ( /*retry*/ bool, error) {
tokens, err := e.listTokenSecrets(serviceAccount)
if err != nil {
// don't retry on cache lookup errors
return false, err
}
retry := false
errs := []error{}
for _, token := range tokens {
r, err := e.deleteToken(token.Namespace, token.Name, token.UID)
if err != nil {
errs = append(errs, err)
}
if r {
retry = true
}
}
return retry, utilerrors.NewAggregate(errs)
}
func (e *TokensController) deleteToken(ns, name string, uid types.UID) ( /*retry*/ bool, error) {
var opts *metav1.DeleteOptions
if len(uid) > 0 {
opts = &metav1.DeleteOptions{Preconditions: &metav1.Preconditions{UID: &uid}}
}
err := e.client.Core().Secrets(ns).Delete(name, opts)
// NotFound doesn't need a retry (it's already been deleted)
// Conflict doesn't need a retry (the UID precondition failed)
if err == nil || apierrors.IsNotFound(err) || apierrors.IsConflict(err) {
return false, nil
}
// Retry for any other error
return true, err
}
// ensureReferencedToken makes sure at least one ServiceAccountToken secret exists, and is included in the serviceAccount's Secrets list
func (e *TokensController) ensureReferencedToken(serviceAccount *v1.ServiceAccount) ( /* retry */ bool, error) {
if hasToken, err := e.hasReferencedToken(serviceAccount); err != nil {
// Don't retry cache lookup errors
return false, err
} else if hasToken {
// A service account token already exists, and is referenced, short-circuit
return false, nil
}
// We don't want to update the cache's copy of the service account
// so add the secret to a freshly retrieved copy of the service account
serviceAccounts := e.client.Core().ServiceAccounts(serviceAccount.Namespace)
liveServiceAccount, err := serviceAccounts.Get(serviceAccount.Name, metav1.GetOptions{})
if err != nil {
// Retry if we cannot fetch the live service account (for a NotFound error, either the live lookup or our cache are stale)
return true, err
}
if liveServiceAccount.ResourceVersion != serviceAccount.ResourceVersion {
// Retry if our liveServiceAccount doesn't match our cache's resourceVersion (either the live lookup or our cache are stale)
glog.V(4).Infof("liveServiceAccount.ResourceVersion (%s) does not match cache (%s), retrying", liveServiceAccount.ResourceVersion, serviceAccount.ResourceVersion)
return true, nil
}
// Build the secret
secret := &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: secret.Strategy.GenerateName(fmt.Sprintf("%s-token-", serviceAccount.Name)),
Namespace: serviceAccount.Namespace,
Annotations: map[string]string{
v1.ServiceAccountNameKey: serviceAccount.Name,
v1.ServiceAccountUIDKey: string(serviceAccount.UID),
},
},
Type: v1.SecretTypeServiceAccountToken,
Data: map[string][]byte{},
}
// Generate the token
token, err := e.token.GenerateToken(*serviceAccount, *secret)
if err != nil {
// retriable error
return true, err
}
secret.Data[v1.ServiceAccountTokenKey] = []byte(token)
secret.Data[v1.ServiceAccountNamespaceKey] = []byte(serviceAccount.Namespace)
if e.rootCA != nil && len(e.rootCA) > 0 {
secret.Data[v1.ServiceAccountRootCAKey] = e.rootCA
}
// Save the secret
createdToken, err := e.client.Core().Secrets(serviceAccount.Namespace).Create(secret)
if err != nil {
// retriable error
return true, err
}
// Manually add the new token to the cache store.
// This prevents the service account update (below) triggering another token creation, if the referenced token couldn't be found in the store
e.secrets.Add(createdToken)
// Try to add a reference to the newly created token to the service account
addedReference := false
err = clientretry.RetryOnConflict(clientretry.DefaultRetry, func() error {
// refresh liveServiceAccount on every retry
defer func() { liveServiceAccount = nil }()
// fetch the live service account if needed, and verify the UID matches and that we still need a token
if liveServiceAccount == nil {
liveServiceAccount, err = serviceAccounts.Get(serviceAccount.Name, metav1.GetOptions{})
if err != nil {
return err
}<|fim▁hole|>
if liveServiceAccount.UID != serviceAccount.UID {
// If we don't have the same service account, stop trying to add a reference to the token made for the old service account.
return nil
}
if hasToken, err := e.hasReferencedToken(liveServiceAccount); err != nil {
// Don't retry cache lookup errors
return nil
} else if hasToken {
// A service account token already exists, and is referenced, short-circuit
return nil
}
}
// Try to add a reference to the token
liveServiceAccount.Secrets = append(liveServiceAccount.Secrets, v1.ObjectReference{Name: secret.Name})
if _, err := serviceAccounts.Update(liveServiceAccount); err != nil {
return err
}
addedReference = true
return nil
})
if !addedReference {
// we weren't able to use the token, try to clean it up.
glog.V(2).Infof("deleting secret %s/%s because reference couldn't be added (%v)", secret.Namespace, secret.Name, err)
deleteOpts := &metav1.DeleteOptions{Preconditions: &metav1.Preconditions{UID: &createdToken.UID}}
if deleteErr := e.client.Core().Secrets(createdToken.Namespace).Delete(createdToken.Name, deleteOpts); deleteErr != nil {
glog.Error(deleteErr) // if we fail, just log it
}
}
if err != nil {
if apierrors.IsConflict(err) || apierrors.IsNotFound(err) {
// if we got a Conflict error, the service account was updated by someone else, and we'll get an update notification later
// if we got a NotFound error, the service account no longer exists, and we don't need to create a token for it
return false, nil
}
// retry in all other cases
return true, err
}
// success!
return false, nil
}
// hasReferencedToken returns true if the serviceAccount references a service account token secret
func (e *TokensController) hasReferencedToken(serviceAccount *v1.ServiceAccount) (bool, error) {
if len(serviceAccount.Secrets) == 0 {
return false, nil
}
allSecrets, err := e.listTokenSecrets(serviceAccount)
if err != nil {
return false, err
}
referencedSecrets := getSecretReferences(serviceAccount)
for _, secret := range allSecrets {
if referencedSecrets.Has(secret.Name) {
return true, nil
}
}
return false, nil
}
func (e *TokensController) secretUpdateNeeded(secret *v1.Secret) (bool, bool, bool) {
caData := secret.Data[v1.ServiceAccountRootCAKey]
needsCA := len(e.rootCA) > 0 && bytes.Compare(caData, e.rootCA) != 0
needsNamespace := len(secret.Data[v1.ServiceAccountNamespaceKey]) == 0
tokenData := secret.Data[v1.ServiceAccountTokenKey]
needsToken := len(tokenData) == 0
return needsCA, needsNamespace, needsToken
}
// generateTokenIfNeeded populates the token data for the given Secret if not already set
func (e *TokensController) generateTokenIfNeeded(serviceAccount *v1.ServiceAccount, cachedSecret *v1.Secret) ( /* retry */ bool, error) {
// Check the cached secret to see if changes are needed
if needsCA, needsNamespace, needsToken := e.secretUpdateNeeded(cachedSecret); !needsCA && !needsToken && !needsNamespace {
return false, nil
}
// We don't want to update the cache's copy of the secret
// so add the token to a freshly retrieved copy of the secret
secrets := e.client.Core().Secrets(cachedSecret.Namespace)
liveSecret, err := secrets.Get(cachedSecret.Name, metav1.GetOptions{})
if err != nil {
// Retry for any error other than a NotFound
return !apierrors.IsNotFound(err), err
}
if liveSecret.ResourceVersion != cachedSecret.ResourceVersion {
// our view of the secret is not up to date
// we'll get notified of an update event later and get to try again
glog.V(2).Infof("secret %s/%s is not up to date, skipping token population", liveSecret.Namespace, liveSecret.Name)
return false, nil
}
needsCA, needsNamespace, needsToken := e.secretUpdateNeeded(liveSecret)
if !needsCA && !needsToken && !needsNamespace {
return false, nil
}
if liveSecret.Annotations == nil {
liveSecret.Annotations = map[string]string{}
}
if liveSecret.Data == nil {
liveSecret.Data = map[string][]byte{}
}
// Set the CA
if needsCA {
liveSecret.Data[v1.ServiceAccountRootCAKey] = e.rootCA
}
// Set the namespace
if needsNamespace {
liveSecret.Data[v1.ServiceAccountNamespaceKey] = []byte(liveSecret.Namespace)
}
// Generate the token
if needsToken {
token, err := e.token.GenerateToken(*serviceAccount, *liveSecret)
if err != nil {
return false, err
}
liveSecret.Data[v1.ServiceAccountTokenKey] = []byte(token)
}
// Set annotations
liveSecret.Annotations[v1.ServiceAccountNameKey] = serviceAccount.Name
liveSecret.Annotations[v1.ServiceAccountUIDKey] = string(serviceAccount.UID)
// Save the secret
_, err = secrets.Update(liveSecret)
if apierrors.IsConflict(err) || apierrors.IsNotFound(err) {
// if we got a Conflict error, the secret was updated by someone else, and we'll get an update notification later
// if we got a NotFound error, the secret no longer exists, and we don't need to populate a token
return false, nil
}
if err != nil {
return true, err
}
return false, nil
}
// removeSecretReference updates the given ServiceAccount to remove a reference to the given secretName if needed.
func (e *TokensController) removeSecretReference(saNamespace string, saName string, saUID types.UID, secretName string) error {
// We don't want to update the cache's copy of the service account
// so remove the secret from a freshly retrieved copy of the service account
serviceAccounts := e.client.Core().ServiceAccounts(saNamespace)
serviceAccount, err := serviceAccounts.Get(saName, metav1.GetOptions{})
// Ignore NotFound errors when attempting to remove a reference
if apierrors.IsNotFound(err) {
return nil
}
if err != nil {
return err
}
// Short-circuit if the UID doesn't match
if len(saUID) > 0 && saUID != serviceAccount.UID {
return nil
}
// Short-circuit if the secret is no longer referenced
if !getSecretReferences(serviceAccount).Has(secretName) {
return nil
}
// Remove the secret
secrets := []v1.ObjectReference{}
for _, s := range serviceAccount.Secrets {
if s.Name != secretName {
secrets = append(secrets, s)
}
}
serviceAccount.Secrets = secrets
_, err = serviceAccounts.Update(serviceAccount)
// Ignore NotFound errors when attempting to remove a reference
if apierrors.IsNotFound(err) {
return nil
}
return err
}
func (e *TokensController) getServiceAccount(ns string, name string, uid types.UID, fetchOnCacheMiss bool) (*v1.ServiceAccount, error) {
// Look up in cache
obj, exists, err := e.serviceAccounts.GetByKey(makeCacheKey(ns, name))
if err != nil {
return nil, err
}
if exists {
sa, ok := obj.(*v1.ServiceAccount)
if !ok {
return nil, fmt.Errorf("expected *v1.ServiceAccount, got %#v", sa)
}
// Ensure UID matches if given
if len(uid) == 0 || uid == sa.UID {
return sa, nil
}
}
if !fetchOnCacheMiss {
return nil, nil
}
// Live lookup
sa, err := e.client.Core().ServiceAccounts(ns).Get(name, metav1.GetOptions{})
if apierrors.IsNotFound(err) {
return nil, nil
}
if err != nil {
return nil, err
}
// Ensure UID matches if given
if len(uid) == 0 || uid == sa.UID {
return sa, nil
}
return nil, nil
}
func (e *TokensController) getSecret(ns string, name string, uid types.UID, fetchOnCacheMiss bool) (*v1.Secret, error) {
// Look up in cache
obj, exists, err := e.secrets.GetByKey(makeCacheKey(ns, name))
if err != nil {
return nil, err
}
if exists {
secret, ok := obj.(*v1.Secret)
if !ok {
return nil, fmt.Errorf("expected *v1.Secret, got %#v", secret)
}
// Ensure UID matches if given
if len(uid) == 0 || uid == secret.UID {
return secret, nil
}
}
if !fetchOnCacheMiss {
return nil, nil
}
// Live lookup
secret, err := e.client.Core().Secrets(ns).Get(name, metav1.GetOptions{})
if apierrors.IsNotFound(err) {
return nil, nil
}
if err != nil {
return nil, err
}
// Ensure UID matches if given
if len(uid) == 0 || uid == secret.UID {
return secret, nil
}
return nil, nil
}
// listTokenSecrets returns a list of all of the ServiceAccountToken secrets that
// reference the given service account's name and uid
func (e *TokensController) listTokenSecrets(serviceAccount *v1.ServiceAccount) ([]*v1.Secret, error) {
namespaceSecrets, err := e.secrets.ByIndex("namespace", serviceAccount.Namespace)
if err != nil {
return nil, err
}
items := []*v1.Secret{}
for _, obj := range namespaceSecrets {
secret := obj.(*v1.Secret)
if serviceaccount.IsServiceAccountToken(secret, serviceAccount) {
items = append(items, secret)
}
}
return items, nil
}
// serviceAccountNameAndUID is a helper method to get the ServiceAccount Name and UID from the given secret
// Returns "","" if the secret is not a ServiceAccountToken secret
// If the name or uid annotation is missing, "" is returned instead
func serviceAccountNameAndUID(secret *v1.Secret) (string, string) {
if secret.Type != v1.SecretTypeServiceAccountToken {
return "", ""
}
return secret.Annotations[v1.ServiceAccountNameKey], secret.Annotations[v1.ServiceAccountUIDKey]
}
func getSecretReferences(serviceAccount *v1.ServiceAccount) sets.String {
references := sets.NewString()
for _, secret := range serviceAccount.Secrets {
references.Insert(secret.Name)
}
return references
}
// serviceAccountQueueKey holds information we need to sync a service account.
// It contains enough information to look up the cached service account,
// or delete owned tokens if the service account no longer exists.
type serviceAccountQueueKey struct {
namespace string
name string
uid types.UID
}
func makeServiceAccountKey(sa *v1.ServiceAccount) interface{} {
return serviceAccountQueueKey{
namespace: sa.Namespace,
name: sa.Name,
uid: sa.UID,
}
}
func parseServiceAccountKey(key interface{}) (serviceAccountQueueKey, error) {
queueKey, ok := key.(serviceAccountQueueKey)
if !ok || len(queueKey.namespace) == 0 || len(queueKey.name) == 0 || len(queueKey.uid) == 0 {
return serviceAccountQueueKey{}, fmt.Errorf("invalid serviceaccount key: %#v", key)
}
return queueKey, nil
}
// secretQueueKey holds information we need to sync a service account token secret.
// It contains enough information to look up the cached service account,
// or delete the secret reference if the secret no longer exists.
type secretQueueKey struct {
namespace string
name string
uid types.UID
saName string
// optional, will be blank when syncing tokens missing the service account uid annotation
saUID types.UID
}
func makeSecretQueueKey(secret *v1.Secret) interface{} {
return secretQueueKey{
namespace: secret.Namespace,
name: secret.Name,
uid: secret.UID,
saName: secret.Annotations[v1.ServiceAccountNameKey],
saUID: types.UID(secret.Annotations[v1.ServiceAccountUIDKey]),
}
}
func parseSecretQueueKey(key interface{}) (secretQueueKey, error) {
queueKey, ok := key.(secretQueueKey)
if !ok || len(queueKey.namespace) == 0 || len(queueKey.name) == 0 || len(queueKey.uid) == 0 || len(queueKey.saName) == 0 {
return secretQueueKey{}, fmt.Errorf("invalid secret key: %#v", key)
}
return queueKey, nil
}
// produce the same key format as cache.MetaNamespaceKeyFunc
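// e.g. makeCacheKey("kube-system", "default") yields "kube-system/default"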
func makeCacheKey(namespace, name string) string {
return namespace + "/" + name
}<|fim▁end|>
| |
<|file_name|>tomate2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
from __future__ import division
import os
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import GObject as gobject
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import Notify
from gi.repository import AppIndicator3 as appindicator
from time import time
from math import floor
Gdk.threads_init()
class TomateConfig(object):
#Parameters
MIN_WORK_TIME = 60 * 10 # min work time in seconds
DONE_WORK_TIME = 60 * 25 # finished work time in seconds
POLL_INTERVAL = 5 # polling interval in seconds
class States(object):
IDLE = 1
WORKING = 2
OK = 3
DONE = 4
STATE_MESSAGES = {
States.IDLE : 'Idle',
States.WORKING : 'Working',
States.OK : 'Ok',
States.DONE : 'Done'
}
STATE_ICONS = {
States.IDLE : 'idle',
States.WORKING : 'working',
States.OK : 'ok',
States.DONE : 'done'
}
class Pomodoro:
def __init__(self):
# we start with an idle state
self.state = States.IDLE
self.tick_interval = TomateConfig.POLL_INTERVAL
self.start_working_time = 0
def init_ui(self):
Notify.init("Tomate")
self.ind = self.build_indicator()
menu = self.build_menu()
self.ind.set_menu(menu)
def build_indicator(self):
ind = appindicator.Indicator.new(
"Tomate",
self.get_icon(self.state),
appindicator.IndicatorCategory.APPLICATION_STATUS)
ind.set_status(appindicator.IndicatorStatus.ACTIVE)
return ind
def build_menu(self):
menu = Gtk.Menu()
self.st_menu = Gtk.MenuItem("Start")
self.st_menu.connect('activate',self.icon_click)
menu.append(self.st_menu)
mi = Gtk.ImageMenuItem("Quit")
img = Gtk.Image.new_from_stock(Gtk.STOCK_QUIT, Gtk.IconSize.MENU)
mi.set_image(img)
mi.connect('activate',Gtk.main_quit)
menu.append(mi)
menu.show_all()
return menu
def get_icon(self, state):
return self.icon_directory() + "/img/" + STATE_ICONS[state] + ".png"
def format_time(self,seconds):
if seconds < 60:
return "%d seconds" % seconds
minutes = floor( seconds / 60 )
hours = floor( minutes / 60 )
days = floor( hours / 24 )
d_string = ''
h_string = ''<|fim▁hole|> m_string = ''
if minutes < 60:
if minutes > 1: return "%d minutes" % minutes
else: return "1 minute"
if days > 0:
hours = hours - ( days * 24 )
minutes = minutes - ( days * 24 * 60 )
if days == 1: d_string = "1 day "
else: d_string = "%d day%s " % (days, 's')
if hours > 0:
minutes = minutes - (hours * 60)
if hours == 1: h_string = '1 hour '
else: h_string = "%d hours " % hours
if minutes > 0 :
if minutes == 1: m_string = 'and 1 minute'
else: m_string = "and %d minutes" % minutes
return d_string + h_string + m_string
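# Illustrative expected outputs, derived from the branches above:
# format_time(45) -> "45 seconds"
# format_time(120) -> "2 minutes"
# format_time(3725) -> "1 hour and 2 minutes"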
def set_state(self, state, time):
old_state=self.state
if self.state == state:
return
if state == States.IDLE:
delta = time - self.start_working_time
self.st_menu.set_label("Start")
if old_state == States.OK:
self.tooltip = "Good, you worked for " + self.format_time(delta) + "!"
elif old_state == States.WORKING:
self.tooltip = "Not good: worked for only " + self.format_time(delta)
elif old_state == States.DONE:
self.tooltip = "Good, you worked for " + self.format_time(delta) + "! \
Time for a break!"
elif state == States.WORKING:
self.start_working_time = time
delta = time - self.start_working_time
self.tooltip = "Working for " + self.format_time(delta) + "..."
self.st_menu.set_label("Working for %s... stop" % self.format_time(delta))
elif state == States.OK:
delta = time - self.start_working_time
self.tooltip = "Good, you worked for " + self.format_time(delta) + "!"
elif state == States.DONE:
self.tooltip = "Worked enough, take a break!"
self.state=state
self.ind.set_icon(self.get_icon(state))
self.show_notification(self.state, self.tooltip)
def show_notification(self, state, notification):
try:
nw = Notify.Notification.new("Tomate state changed to " +
STATE_MESSAGES[state],
notification, self.get_icon(state))
nw.show()
except:
pass
def icon_directory(self):
return os.path.dirname(os.path.realpath(__file__)) + os.path.sep
def icon_click(self, dummy):
if self.state == States.IDLE:
self.set_state(States.WORKING, time())
else:
self.set_state(States.IDLE, time())
def update(self, time):
"""This method is called everytime a tick interval occurs"""
delta = time - self.start_working_time
if self.state == States.IDLE:
pass
else:
self.st_menu.set_label("Working for %s... stop" % self.format_time(delta))
if self.state == States.WORKING:
if delta > TomateConfig.MIN_WORK_TIME:
self.set_state(States.OK, time)
elif self.state == States.OK:
if delta > TomateConfig.DONE_WORK_TIME:
self.set_state(States.DONE, time)
def tick(self):
self.update(time())
source_id = gobject.timeout_add(self.tick_interval*1000, self.tick)
def main(self):
# All PyGTK applications must have a gtk.main(). Control ends here
# and waits for an event to occur (like a key press or mouse event).
source_id = gobject.timeout_add(self.tick_interval*1000, self.tick)
self.init_ui()
Gtk.main()
# If the program is run directly or passed as an argument to the python
# interpreter then create a Pomodoro instance and show it
if __name__ == "__main__":
app = Pomodoro()
app.main()<|fim▁end|>
| |
<|file_name|>global.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstractions for global scopes.
//!
//! This module contains smart pointers to global scopes, to simplify writing
//! code that works in workers as well as window scopes.
use dom::bindings::conversions::native_from_reflector_jsmanaged;
use dom::bindings::js::{JS, JSRef, Root, Unrooted};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::workerglobalscope::{WorkerGlobalScope, WorkerGlobalScopeHelpers};
use dom::window::{self, WindowHelpers};
use devtools_traits::DevtoolsControlChan;
use script_task::{ScriptChan, ScriptPort, ScriptMsg, ScriptTask};
use msg::constellation_msg::{PipelineId, WorkerId};
use net_traits::ResourceTask;
use js::{JSCLASS_IS_GLOBAL, JSCLASS_IS_DOMJSCLASS};
use js::glue::{GetGlobalForObjectCrossCompartment};
use js::jsapi::{JSContext, JSObject};
use js::jsapi::{JS_GetClass};
use url::Url;
/// A freely-copyable reference to a rooted global object.
#[derive(Copy)]
pub enum GlobalRef<'a> {
/// A reference to a `Window` object.
Window(JSRef<'a, window::Window>),
/// A reference to a `WorkerGlobalScope` object.
Worker(JSRef<'a, WorkerGlobalScope>),
}
/// A stack-based rooted reference to a global object.
pub enum GlobalRoot {
/// A root for a `Window` object.
Window(Root<window::Window>),
/// A root for a `WorkerGlobalScope` object.
Worker(Root<WorkerGlobalScope>),
}
/// A traced reference to a global object, for use in fields of traced Rust
/// structures.
#[jstraceable]
#[must_root]
pub enum GlobalField {
/// A field for a `Window` object.
Window(JS<window::Window>),
/// A field for a `WorkerGlobalScope` object.
Worker(JS<WorkerGlobalScope>),
}
/// An unrooted reference to a global object.
#[must_root]
pub enum GlobalUnrooted {
/// An unrooted reference to a `Window` object.
Window(Unrooted<window::Window>),
/// An unrooted reference to a `WorkerGlobalScope` object.
Worker(Unrooted<WorkerGlobalScope>),
}
impl<'a> GlobalRef<'a> {
/// Get the `JSContext` for the `JSRuntime` associated with the thread
/// this global object is on.
pub fn get_cx(&self) -> *mut JSContext {
match *self {
GlobalRef::Window(ref window) => window.get_cx(),
GlobalRef::Worker(ref worker) => worker.get_cx(),
}
}
/// Extract a `Window`, causing task failure if the global object is not
/// a `Window`.
pub fn as_window<'b>(&'b self) -> JSRef<'b, window::Window> {
match *self {
GlobalRef::Window(window) => window,
GlobalRef::Worker(_) => panic!("expected a Window scope"),
}
}
/// Get the `PipelineId` for this global scope.
pub fn pipeline(&self) -> PipelineId {
match *self {
GlobalRef::Window(window) => window.pipeline(),
GlobalRef::Worker(worker) => worker.pipeline(),
}
}
/// Get `DevtoolsControlChan` to send messages to Devtools
/// task when available.
pub fn devtools_chan(&self) -> Option<DevtoolsControlChan> {
match *self {
GlobalRef::Window(window) => window.devtools_chan(),
GlobalRef::Worker(worker) => worker.devtools_chan(),
}
}
/// Get the `ResourceTask` for this global scope.
pub fn resource_task(&self) -> ResourceTask {
match *self {
GlobalRef::Window(ref window) => window.resource_task().clone(),
GlobalRef::Worker(ref worker) => worker.resource_task().clone(),
}
}
/// Get next worker id.
pub fn get_next_worker_id(&self) -> WorkerId {
match *self {
GlobalRef::Window(ref window) => window.get_next_worker_id(),
GlobalRef::Worker(ref worker) => worker.get_next_worker_id()
}
}
/// Get the URL for this global scope.
pub fn get_url(&self) -> Url {
match *self {
GlobalRef::Window(ref window) => window.get_url(),
GlobalRef::Worker(ref worker) => worker.get_url().clone(),
}
}
/// `ScriptChan` used to send messages to the event loop of this global's
/// thread.
pub fn script_chan(&self) -> Box<ScriptChan+Send> {
match *self {
GlobalRef::Window(ref window) => window.script_chan(),
GlobalRef::Worker(ref worker) => worker.script_chan(),
}
}
/// Create a new sender/receiver pair that can be used to implement an on-demand
/// event loop. Used for implementing web APIs that require blocking semantics
/// without resorting to nested event loops.
pub fn new_script_pair(&self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>) {
match *self {
GlobalRef::Window(ref window) => window.new_script_pair(),
GlobalRef::Worker(ref worker) => worker.new_script_pair(),
}
}
/// Process a single event as if it were the next event in the task queue for
/// this global.
pub fn process_event(&self, msg: ScriptMsg) {
match *self {
GlobalRef::Window(_) => ScriptTask::process_event(msg),
GlobalRef::Worker(ref worker) => worker.process_event(msg),
}
}
}
impl<'a> Reflectable for GlobalRef<'a> {
fn reflector<'b>(&'b self) -> &'b Reflector {
match *self {
GlobalRef::Window(ref window) => window.reflector(),
GlobalRef::Worker(ref worker) => worker.reflector(),
}
}
}
impl GlobalRoot {
/// Obtain a safe reference to the global object that cannot outlive the
/// lifetime of this root.
pub fn r<'c>(&'c self) -> GlobalRef<'c> {
match *self {
GlobalRoot::Window(ref window) => GlobalRef::Window(window.r()),
GlobalRoot::Worker(ref worker) => GlobalRef::Worker(worker.r()),
}<|fim▁hole|> }
}
impl GlobalField {
/// Create a new `GlobalField` from a rooted reference.
pub fn from_rooted(global: &GlobalRef) -> GlobalField {
match *global {
GlobalRef::Window(window) => GlobalField::Window(JS::from_rooted(window)),
GlobalRef::Worker(worker) => GlobalField::Worker(JS::from_rooted(worker)),
}
}
/// Create a stack-bounded root for this reference.
pub fn root(&self) -> GlobalRoot {
match *self {
GlobalField::Window(ref window) => GlobalRoot::Window(window.root()),
GlobalField::Worker(ref worker) => GlobalRoot::Worker(worker.root()),
}
}
}
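// Hedged sketch of the intended rooting pattern; `self.global` is a
// hypothetical `GlobalField` stored on some traced structure:
//
//     let root = self.global.root();  // GlobalField -> GlobalRoot
//     let global = root.r();          // GlobalRoot -> GlobalRef, bounded by `root`
//     let cx = global.get_cx();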
impl GlobalUnrooted {
/// Create a stack-bounded root for this reference.
pub fn root(&self) -> GlobalRoot {
match *self {
GlobalUnrooted::Window(ref window) => GlobalRoot::Window(window.root()),
GlobalUnrooted::Worker(ref worker) => GlobalRoot::Worker(worker.root()),
}
}
}
/// Returns the global object of the realm that the given JS object was created in.
#[allow(unrooted_must_root)]
pub fn global_object_for_js_object(obj: *mut JSObject) -> GlobalUnrooted {
unsafe {
let global = GetGlobalForObjectCrossCompartment(obj);
let clasp = JS_GetClass(global);
assert!(((*clasp).flags & (JSCLASS_IS_DOMJSCLASS | JSCLASS_IS_GLOBAL)) != 0);
match native_from_reflector_jsmanaged(global) {
Ok(window) => return GlobalUnrooted::Window(window),
Err(_) => (),
}
match native_from_reflector_jsmanaged(global) {
Ok(worker) => return GlobalUnrooted::Worker(worker),
Err(_) => (),
}
panic!("found DOM global that doesn't unwrap to Window or WorkerGlobalScope")
}
}<|fim▁end|>
| |
<|file_name|>reader.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A wrapper around any Reader to treat it as an RNG.
use io::Reader;
use rand::Rng;
use result::{Ok, Err};
use slice::SlicePrelude;
/// An RNG that reads random bytes straight from a `Reader`. This will
/// work best with an infinite reader, but this is not required.
///
/// # Panics
///
/// It will panic if there is insufficient data to fulfill a request.
///
/// # Example
///
/// ```rust
/// use std::rand::{reader, Rng};
/// use std::io::MemReader;
///
/// let mut rng = reader::ReaderRng::new(MemReader::new(vec!(1,2,3,4,5,6,7,8)));
/// println!("{:x}", rng.gen::<uint>());
/// ```
pub struct ReaderRng<R> {
reader: R
}
impl<R: Reader> ReaderRng<R> {
/// Create a new `ReaderRng` from a `Reader`.
pub fn new(r: R) -> ReaderRng<R> {
ReaderRng {
reader: r
}
}
}
impl<R: Reader> Rng for ReaderRng<R> {
fn next_u32(&mut self) -> u32 {
// This is designed for speed: reading a LE integer on a LE
// platform just involves blitting the bytes into the memory
// of the u32, similarly for BE on BE; avoiding byteswapping.
if cfg!(target_endian="little") {
self.reader.read_le_u32().unwrap()<|fim▁hole|> fn next_u64(&mut self) -> u64 {
// see above for explanation.
if cfg!(target_endian="little") {
self.reader.read_le_u64().unwrap()
} else {
self.reader.read_be_u64().unwrap()
}
}
fn fill_bytes(&mut self, v: &mut [u8]) {
if v.len() == 0 { return }
match self.reader.read_at_least(v.len(), v) {
Ok(_) => {}
Err(e) => panic!("ReaderRng.fill_bytes error: {}", e)
}
}
}
#[cfg(test)]
mod test {
use prelude::*;
use super::ReaderRng;
use io::MemReader;
use num::Int;
use rand::Rng;
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
let v = vec![0u8, 0, 0, 0, 0, 0, 0, 1,
0 , 0, 0, 0, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u64(), 1_u64.to_be());
assert_eq!(rng.next_u64(), 2_u64.to_be());
assert_eq!(rng.next_u64(), 3_u64.to_be());
}
#[test]
fn test_reader_rng_u32() {
let v = vec![0u8, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u32(), 1_u32.to_be());
assert_eq!(rng.next_u32(), 2_u32.to_be());
assert_eq!(rng.next_u32(), 3_u32.to_be());
}
#[test]
fn test_reader_rng_fill_bytes() {
let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
let mut w = [0u8, .. 8];
let mut rng = ReaderRng::new(MemReader::new(v.as_slice().to_vec()));
rng.fill_bytes(&mut w);
assert!(v == w);
}
#[test]
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(vec!()));
let mut v = [0u8, .. 3];
rng.fill_bytes(&mut v);
}
}<|fim▁end|>
|
} else {
self.reader.read_be_u32().unwrap()
}
}
|
<|file_name|>CollectionEntity.tsx<|end_file_name|><|fim▁begin|>import { Box, Link, Sans, Serif, color } from "@artsy/palette"
import { CollectionEntity_collection } from "v2/__generated__/CollectionEntity_collection.graphql"
import { track } from "v2/System/Analytics"
import * as Schema from "v2/System/Analytics/Schema"
import currency from "currency.js"
import React from "react"
import { createFragmentContainer, graphql } from "react-relay"
import { data as sd } from "sharify"
import styled from "styled-components"
import { resize } from "v2/Utils/resizer"
export interface CollectionProps {
collection: CollectionEntity_collection
}
export const Background = styled(Box)<{ collectionImage: string }>`
height: 175px;
width: 100%;
background: ${color("black30")};
background-image: url(${props => props.collectionImage});
display: inline-flex;
position: relative;
background-size: cover;
background-position: center;
`
const CollectionTitle = styled(Serif)`
width: max-content;
`
export const StyledLink = styled(Link)`
text-decoration: none;
&:hover {
text-decoration: none;
}
`
@track()
export class CollectionEntity extends React.Component<CollectionProps> {
@track<CollectionProps>(({ collection }) => ({
action_type: Schema.ActionType.Click,
context_module: Schema.ContextModule.CollectionsRail,
context_page_owner_type: Schema.OwnerType.Article,
destination_path: `${sd.APP_URL}/collection/${collection.slug}`,
type: Schema.Type.Thumbnail,
}))
onLinkClick() {
// noop
}
render() {
const { collection } = this.props
return (<|fim▁hole|> href={`${sd.APP_URL}/collection/${collection.slug}`}
onClick={this.onLinkClick.bind(this)}
>
<Background
// @ts-expect-error STRICT_NULL_CHECK
collectionImage={resize(collection.headerImage, {
width: 645,
height: 275,
quality: 80,
})}
/>
<CollectionTitle size="4">{collection.title}</CollectionTitle>
<Sans size="2">
Works from ${/* @ts-expect-error STRICT_NULL_CHECK */}
{currency(collection.price_guidance, {
separator: ",",
precision: 0,
}).format()}
</Sans>
</StyledLink>
</Box>
)
}
}
export const CollectionEntityFragmentContainer = createFragmentContainer(
CollectionEntity,
{
collection: graphql`
fragment CollectionEntity_collection on MarketingCollection {
slug
headerImage
title
price_guidance: priceGuidance
show_on_editorial: showOnEditorial
}
`,
}
)<|fim▁end|>
|
<Box mb={3} width="100%">
<StyledLink
|
<|file_name|>publications.client.routes.js<|end_file_name|><|fim▁begin|>'use strict';
// Setting up route
angular.module('publications').config(['$stateProvider',
function ($stateProvider) {
// publications state routing
$stateProvider
.state('publications', {
abstract: true,
url: '/publications',
template: '<ui-view/>'
})
.state('publications.list', {
url: '',
templateUrl: 'modules/publications/client/views/list-publications.client.view.html',
data: {
roles: ['user', 'admin']
}
})
.state('publications.search', {
url: '/search',
templateUrl: 'modules/publications/client/views/pagination-publications.client.view.html',
data: {
roles: ['user', 'admin']
}
})
.state('publications.create', {
url: '/create',
templateUrl: 'modules/publications/client/views/create-publication.client.view.html',
data: {
roles: ['user', 'admin']
}
})
.state('publications.view', {
url: '/:publicationId',<|fim▁hole|> })
.state('publications.edit', {
url: '/:publicationId/edit',
templateUrl: 'modules/publications/client/views/edit-publication.client.view.html',
data: {
roles: ['user', 'admin']
}
});
}
]);<|fim▁end|>
|
templateUrl: 'modules/publications/client/views/view-publication.client.view.html',
data: {
roles: ['user', 'admin']
}
|
<|file_name|>agent.rs<|end_file_name|><|fim▁begin|>use actix::prelude::*;
use actors::{AddA2ARoute, HandleA2AMsg, RouteA2AMsg, RouteA2ConnMsg};
use actors::agent_connection::{AgentConnection, AgentConnectionConfig};
use actors::router::Router;
use domain::a2a::*;
use domain::a2connection::*;
use domain::config::WalletStorageConfig;
use domain::invite::ForwardAgentDetail;
use failure::{err_msg, Error, Fail};
use futures::*;
use indy::{did, pairwise, wallet, pairwise::Pairwise, ErrorCode, IndyError};
use std::convert::Into;
use std::collections::HashMap;
use utils::futures::*;
use utils::rand;
use serde_json;
#[allow(unused)] //FIXME:
pub struct Agent {
wallet_handle: i32,
owner_did: String,
owner_verkey: String,
did: String,
verkey: String,
forward_agent_detail: ForwardAgentDetail,
router: Addr<Router>,
configs: HashMap<String, String>
}
impl Agent {
pub fn create(owner_did: &str,
owner_verkey: &str,
router: Addr<Router>,
forward_agent_detail: ForwardAgentDetail,
wallet_storage_config: WalletStorageConfig) -> BoxedFuture<(String, String, String, String), Error> {
trace!("Agent::create >> {:?}, {:?}, {:?}, {:?}",
owner_did, owner_verkey, forward_agent_detail, wallet_storage_config);<|fim▁hole|> let wallet_key = rand::rand_string(10);
let wallet_config = json!({
"id": wallet_id.clone(),
"storage_type": wallet_storage_config.xtype,
"storage_config": wallet_storage_config.config,
}).to_string();
let wallet_credentials = json!({
"key": wallet_key.clone(),
"storage_credentials": wallet_storage_config.credentials,
}).to_string();
let owner_did = owner_did.to_string();
let owner_verkey = owner_verkey.to_string();
future::ok(())
.and_then(move |_|
wallet::create_wallet(&wallet_config, &wallet_credentials)
.map(|_| (wallet_config, wallet_credentials))
.map_err(|err| err.context("Can't create Agent wallet.").into())
)
.and_then(move |(wallet_config, wallet_credentials)| {
wallet::open_wallet(wallet_config.as_ref(), wallet_credentials.as_ref())
.map_err(|err| err.context("Can't open Agent wallet.`").into())
})
.and_then(|wallet_handle| {
did::create_and_store_my_did(wallet_handle, "{}")
.map(move |(did, verkey)| (wallet_handle, did, verkey))
.map_err(|err| err.context("Can't get Agent did key").into())
})
.and_then(move |(wallet_handle, did, verkey)| {
let agent = Agent {
wallet_handle,
verkey: verkey.clone(),
did: did.clone(),
owner_did,
owner_verkey,
router: router.clone(),
forward_agent_detail,
configs: HashMap::new()
};
let agent = agent.start();
router
.send(AddA2ARoute(did.clone(), agent.clone().recipient()))
.from_err()
.map(move |_| (wallet_id, wallet_key, did, verkey))
.map_err(|err: Error| err.context("Can't add route for Agent").into())
})
.into_box()
}
pub fn restore(wallet_id: &str,
wallet_key: &str,
did: &str,
owner_did: &str,
owner_verkey: &str,
router: Addr<Router>,
forward_agent_detail: ForwardAgentDetail,
wallet_storage_config: WalletStorageConfig) -> BoxedFuture<(), Error> {
trace!("Agent::restore >> {:?}, {:?}, {:?}, {:?}, {:?}, {:?}",
wallet_id, did, owner_did, owner_verkey, forward_agent_detail, wallet_storage_config);
let wallet_config = json!({
"id": wallet_id.clone(),
"storage_type": wallet_storage_config.xtype,
"storage_config": wallet_storage_config.config,
}).to_string();
let wallet_credentials = json!({
"key": wallet_key.clone(),
"storage_credentials": wallet_storage_config.credentials,
}).to_string();
let did = did.to_string();
let owner_did = owner_did.to_string();
let owner_verkey = owner_verkey.to_string();
future::ok(())
.and_then(move |_| {
wallet::open_wallet(wallet_config.as_ref(), wallet_credentials.as_ref())
.map_err(|err| err.context("Can't open Agent wallet.").into())
})
.and_then(move |wallet_handle| {
did::key_for_local_did(wallet_handle, &did)
.map(move |verkey| (wallet_handle, did, verkey))
.map_err(|err| err.context("Can't get Agent did verkey.").into())
})
.and_then(move |(wallet_handle, did, verkey)| {
did::get_did_metadata(wallet_handle, &did)
.then(|res| match res {
Err(IndyError { error_code: ErrorCode::WalletItemNotFound, .. }) => Ok("{}".to_string()),
r => r
})
.map(move |metadata| (wallet_handle, did, verkey, metadata))
.map_err(|err| err.context("Can't get Agent DID Metadata.").into())
})
.and_then(move |(wallet_handle, did, verkey, metadata)| {
// Resolve information about existing connections from the wallet
// and start an Agent Connection actor for each existing connection
let configs: HashMap<String, String> = serde_json::from_str(&metadata).expect("Can't restore Agent config.");
Agent::_restore_connections(wallet_handle,
&owner_did,
&owner_verkey,
&forward_agent_detail,
router.clone(),
configs.clone())
.map(move |_| (wallet_handle, did, verkey, owner_did, owner_verkey, forward_agent_detail, router, configs))
})
.and_then(move |(wallet_handle, did, verkey, owner_did, owner_verkey, forward_agent_detail, router, configs)| {
let agent = Agent {
wallet_handle,
verkey: verkey.clone(),
did: did.clone(),
owner_did,
owner_verkey,
router: router.clone(),
forward_agent_detail,
configs
};
let agent = agent.start();
router
.send(AddA2ARoute(did.clone(), agent.clone().recipient()))
.from_err()
.map_err(|err: Error| err.context("Can't add route for Agent.").into())
})
.into_box()
}
fn _restore_connections(wallet_handle: i32,
owner_did: &str,
owner_verkey: &str,
forward_agent_detail: &ForwardAgentDetail,
router: Addr<Router>,
agent_configs: HashMap<String, String>) -> ResponseFuture<(), Error> {
trace!("Agent::_restore_connections >> {:?}, {:?}, {:?}, {:?}",
wallet_handle, owner_did, owner_verkey, forward_agent_detail);
let owner_did = owner_did.to_string();
let owner_verkey = owner_verkey.to_string();
let forward_agent_detail = forward_agent_detail.clone();
future::ok(())
.and_then(move |_| Self::get_pairwise_list(wallet_handle).into_box())
.and_then(move |pairwise_list| {
let futures: Vec<_> = pairwise_list
.iter()
.map(move |pairwise| {
AgentConnection::restore(wallet_handle,
&owner_did,
&owner_verkey,
&pairwise.my_did,
&pairwise.their_did,
&pairwise.metadata,
&forward_agent_detail,
router.clone(),
agent_configs.clone())
})
.collect();
future::join_all(futures)
.map(|_| ())
.map_err(|err| err.context("Can't restore Agent connections.").into())
})
.into_box()
}
fn handle_a2a_msg(&mut self,
msg: Vec<u8>) -> ResponseActFuture<Self, Vec<u8>, Error> {
trace!("Agent::handle_a2a_msg >> {:?}", msg);
future::ok(())
.into_actor(self)
.and_then(move |_, slf, _| {
A2AMessage::parse_authcrypted(slf.wallet_handle, &slf.verkey, &msg)
.map_err(|err| err.context("Can't unbundle message.").into())
.into_actor(slf)
})
.and_then(|(sender_vk, mut msgs), slf, _| {
match msgs.pop() {
Some(A2AMessage::Version1(A2AMessageV1::Forward(msg))) => {
slf.router
.send(RouteA2AMsg(msg.fwd, msg.msg))
.from_err()
.and_then(|res| res)
.into_actor(slf)
.into_box()
}
Some(A2AMessage::Version2(A2AMessageV2::Forward(msg))) => {
let msg_ = ftry_act!(slf, serde_json::to_vec(&msg.msg));
slf.router
.send(RouteA2AMsg(msg.fwd, msg_))
.from_err()
.and_then(|res| res)
.into_actor(slf)
.into_box()
}
Some(msg) => slf.handle_agent_msg(sender_vk, msg),
_ => err_act!(slf, err_msg("Unsupported message"))
}
})
.into_box()
}
fn handle_agent_msg(&mut self,
sender_vk: String,
msg: A2AMessage) -> ResponseActFuture<Self, Vec<u8>, Error> {
trace!("Agent::handle_agent_msg >> {:?}, {:?}", sender_vk, msg);
match msg {
A2AMessage::Version1(msg) => self.handle_agent_msg_v1(sender_vk, msg),
A2AMessage::Version2(msg) => self.handle_agent_msg_v2(sender_vk, msg)
}
}
fn handle_agent_msg_v1(&mut self,
sender_vk: String,
msg: A2AMessageV1) -> ResponseActFuture<Self, Vec<u8>, Error> {
trace!("Agent::handle_agent_msg_v1 >> {:?}, {:?}", sender_vk, msg);
match msg {
A2AMessageV1::CreateKey(msg) => self.handle_create_key_v1(msg),
A2AMessageV1::GetMessagesByConnections(msg) => self.handle_get_messages_by_connections_v1(msg),
A2AMessageV1::UpdateMessageStatusByConnections(msg) => self.handle_update_messages_by_connections_v1(msg),
A2AMessageV1::UpdateConfigs(msg) => self.handle_update_configs_v1(msg),
A2AMessageV1::GetConfigs(msg) => self.handle_get_configs_v1(msg),
A2AMessageV1::RemoveConfigs(msg) => self.handle_remove_configs_v1(msg),
A2AMessageV1::UpdateComMethod(msg) => self.handle_update_com_method_v1(msg),
_ => err_act!(self, err_msg("Unsupported message"))
}
.and_then(move |msgs, slf, _|
A2AMessage::bundle_authcrypted(slf.wallet_handle, &slf.verkey, &sender_vk, &msgs)
.map_err(|err| err.context("Can't bundle and authcrypt message.").into())
.into_actor(slf)
.into_box()
)
.into_box()
}
fn handle_agent_msg_v2(&mut self,
sender_vk: String,
msg: A2AMessageV2) -> ResponseActFuture<Self, Vec<u8>, Error> {
trace!("Agent::handle_agent_msg_v2 >> {:?}, {:?}", sender_vk, msg);
match msg {
A2AMessageV2::CreateKey(msg) => self.handle_create_key_v2(msg),
A2AMessageV2::GetMessagesByConnections(msg) => self.handle_get_messages_by_connections_v2(msg),
A2AMessageV2::UpdateMessageStatusByConnections(msg) => self.handle_update_messages_by_connections_v2(msg),
A2AMessageV2::UpdateConfigs(msg) => self.handle_update_configs_v2(msg),
A2AMessageV2::GetConfigs(msg) => self.handle_get_configs_v2(msg),
A2AMessageV2::RemoveConfigs(msg) => self.handle_remove_configs_v2(msg),
_ => err_act!(self, err_msg("Unsupported message"))
}
.and_then(move |msg, slf, _|
A2AMessage::pack_v2(slf.wallet_handle, Some(&slf.verkey), &sender_vk, &msg)
.map_err(|err| err.context("Can't pack message.").into())
.into_actor(slf)
.into_box()
)
.into_box()
}
fn handle_get_messages_by_connections_v1(&mut self,
msg: GetMessagesByConnections) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
trace!("Agent::handle_get_messages_by_connections_v1 >> {:?}", msg);
self.handle_get_messages_by_connections(msg)
.map(|msgs: Vec<A2ConnMessage>| {
vec![A2AMessage::Version1(A2AMessageV1::MessagesByConnections(
MessagesByConnections {
msgs: msgs.into_iter().map(|msg| msg.into()).collect()
}))]
})
.into_actor(self)
.into_box()
}
fn handle_get_messages_by_connections_v2(&mut self,
msg: GetMessagesByConnections) -> ResponseActFuture<Self, A2AMessageV2, Error> {
trace!("Agent::handle_get_messages_by_connections_v2 >> {:?}", msg);
self.handle_get_messages_by_connections(msg)
.map(|msgs: Vec<A2ConnMessage>| {
A2AMessageV2::MessagesByConnections(
MessagesByConnections {
msgs: msgs.into_iter().map(|msg| msg.into()).collect()
})
})
.into_actor(self)
.into_box()
}
fn handle_get_messages_by_connections(&mut self,
msg: GetMessagesByConnections) -> ResponseFuture<Vec<A2ConnMessage>, Error> {
trace!("Agent::handle_get_messages_by_connections >> {:?}", msg);
let GetMessagesByConnections { exclude_payload, uids, status_codes, pairwise_dids } = msg;
let router = self.router.clone();
let wallet_handle = self.wallet_handle.clone();
let msg = GetMessages { exclude_payload, uids, status_codes };
future::ok(())
.and_then(move |_| Self::get_pairwise_list(wallet_handle).into_box())
.and_then(move |pairwise_list| {
let pairwises: Vec<_> = pairwise_list
.into_iter()
.filter(|pairwise| pairwise_dids.is_empty() || pairwise_dids.contains(&pairwise.their_did))
.collect();
if !pairwise_dids.is_empty() && pairwises.is_empty() {
return err!(err_msg("Pairwise DID not found.")).into_box();
}
let futures: Vec<_> = pairwises
.iter()
.map(move |pairwise| {
router
.send(RouteA2ConnMsg(pairwise.my_did.clone(), A2ConnMessage::GetMessages(msg.clone())))
.from_err()
.and_then(|res| res)
.into_box()
})
.collect();
future::join_all(futures)
.map_err(|err| err.context("Can't get Agent Connection messages").into())
.into_box()
})
.into_box()
}
fn handle_update_messages_by_connections_v1(&mut self,
msg: UpdateMessageStatusByConnections) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
trace!("Agent::handle_update_messages_by_connections_v1 >> {:?}", msg);
self.handle_update_messages_by_connections(msg)
.map(|uids_by_conn: Vec<A2ConnMessage>| {
vec![A2AMessage::Version1(A2AMessageV1::MessageStatusUpdatedByConnections(
MessageStatusUpdatedByConnections {
updated_uids_by_conn: uids_by_conn.into_iter().map(|msg| msg.into()).collect(),
failed: Vec::new(),
}))]
})
.into_actor(self)
.into_box()
}
fn handle_update_messages_by_connections_v2(&mut self,
msg: UpdateMessageStatusByConnections) -> ResponseActFuture<Self, A2AMessageV2, Error> {
trace!("Agent::handle_update_messages_by_connections_v2 >> {:?}", msg);
self.handle_update_messages_by_connections(msg)
.map(|uids_by_conn: Vec<A2ConnMessage>| {
A2AMessageV2::MessageStatusUpdatedByConnections(
MessageStatusUpdatedByConnections {
updated_uids_by_conn: uids_by_conn.into_iter().map(|msg| msg.into()).collect(),
failed: Vec::new(),
})
})
.into_actor(self)
.into_box()
}
fn handle_update_messages_by_connections(&mut self,
msg: UpdateMessageStatusByConnections) -> ResponseFuture<Vec<A2ConnMessage>, Error> {
trace!("Agent::handle_update_messages_by_connections >> {:?}", msg);
let UpdateMessageStatusByConnections { uids_by_conn, status_code } = msg;
let router = self.router.clone();
let wallet_handle = self.wallet_handle.clone();
future::ok(())
.and_then(move |_| Self::get_pairwise_list(wallet_handle).into_box())
.and_then(move |pairwise_list| {
let futures: Vec<_> = pairwise_list
.iter()
.filter_map(|pairwise| uids_by_conn
.iter()
.find(|uid_by_conn| uid_by_conn.pairwise_did == pairwise.their_did)
.map(|uid_by_conn| (uid_by_conn, pairwise))
)
.map(move |(uid_by_conn, pairwise)|
router
.send(RouteA2ConnMsg(pairwise.my_did.clone(), A2ConnMessage::UpdateMessages(
UpdateMessageStatus { uids: uid_by_conn.uids.clone(), status_code: status_code.clone() }
)))
.from_err()
.and_then(|res| res)
.into_box()
)
.collect();
future::join_all(futures)
.map_err(|err| err.context("Can't get Agent Connection messages").into())
})
.into_box()
}
fn get_pairwise_list(wallet_handle: i32) -> ResponseFuture<Vec<Pairwise>, Error> {
future::ok(())
.and_then(move |_| {
pairwise::list_pairwise(wallet_handle)
.map_err(|err| err.context("Can't get Agent pairwise list").into())
.into_box()
})
.and_then(move |pairwise_list| {
let pairwise_list = ftry!(serde_json::from_str::<Vec<String>>(&pairwise_list));
pairwise_list
.iter()
.map(|pairwise| serde_json::from_str::<Pairwise>(&pairwise))
.collect::<Result<Vec<_>, _>>()
.map_err(|err| err.context("Can't deserialize Agent pairwise").into())
.into_future()
.into_box()
})
.into_box()
}
fn handle_create_key_v1(&mut self,
msg: CreateKey) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
trace!("Agent::handle_create_key_v1 >> {:?}", msg);
let CreateKey { for_did, for_did_verkey } = msg;
self.handle_create_key(&for_did, &for_did_verkey)
.map(|(pairwise_did, pairwise_did_verkey), _, _| {
vec![A2AMessage::Version1(A2AMessageV1::KeyCreated(KeyCreated {
with_pairwise_did: pairwise_did,
with_pairwise_did_verkey: pairwise_did_verkey,
}))]
})
.into_box()
}
fn handle_create_key_v2(&mut self,
msg: CreateKey) -> ResponseActFuture<Self, A2AMessageV2, Error> {
trace!("Agent::handle_create_key_v2 >> {:?}", msg);
let CreateKey { for_did, for_did_verkey, .. } = msg;
self.handle_create_key(&for_did, &for_did_verkey)
.map(|(pairwise_did, pairwise_did_verkey), _, _| {
A2AMessageV2::KeyCreated(KeyCreated {
with_pairwise_did: pairwise_did,
with_pairwise_did_verkey: pairwise_did_verkey,
})
})
.into_box()
}
fn handle_create_key(&mut self,
for_did: &str,
for_did_verkey: &str) -> ResponseActFuture<Self, (String, String), Error> {
trace!("Agent::handle_create_key >> {:?}, {:?}", for_did, for_did_verkey);
let for_did = for_did.to_string();
let for_did_verkey = for_did_verkey.to_string();
let their_did_info = json!({
"did": for_did,
"verkey": for_did_verkey,
}).to_string();
future::ok(())
.into_actor(self)
.and_then(move |_, slf, _|
slf.check_no_pairwise_exists(&for_did)
.map(|_| for_did)
.into_actor(slf)
)
.and_then(move |for_did, slf, _|
did::store_their_did(slf.wallet_handle, &their_did_info)
.map_err(|err| err.context("Can't store their DID for Forward Agent Connection pairwise.").into())
.map(|_| for_did)
.into_actor(slf)
)
.and_then(|for_did, slf, _| {
did::create_and_store_my_did(slf.wallet_handle, "{}")
.map_err(|err| err.context("Can't create DID for agent pairwise connection.").into())
.map(|(pairwise_did, pairwise_did_verkey)| (for_did, pairwise_did, pairwise_did_verkey))
.into_actor(slf)
})
.and_then(|(for_did, pairwise_did, pairwise_did_verkey), slf, _| {
pairwise::create_pairwise(slf.wallet_handle, &for_did, &pairwise_did, None)
.map_err(|err| err.context("Can't store agent pairwise connection.").into())
.map(|_| (for_did, pairwise_did, pairwise_did_verkey))
.into_actor(slf)
})
.and_then(move |(for_did, pairwise_did, pairwise_did_verkey), slf, _| {
let config = AgentConnectionConfig {
wallet_handle: slf.wallet_handle,
owner_did: slf.owner_did.to_string(),
owner_verkey: slf.owner_verkey.to_string(),
user_pairwise_did: for_did.to_string(),
user_pairwise_verkey: for_did_verkey.to_string(),
agent_pairwise_did: pairwise_did.to_string(),
agent_pairwise_verkey: pairwise_did_verkey.to_string(),
agent_configs: slf.configs.clone(),
forward_agent_detail: slf.forward_agent_detail.clone(),
};
AgentConnection::create(config, slf.router.clone())
.map(|_| (pairwise_did, pairwise_did_verkey))
.into_actor(slf)
})
.into_box()
}
fn handle_update_com_method_v1(&mut self, _msg: UpdateComMethod) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
trace!("UpdateComMethod: {:?}", _msg);
let messages = vec![A2AMessage::Version1(A2AMessageV1::ComMethodUpdated(ComMethodUpdated {id: "123".to_string()}))];
ok_act!(self, messages)
}
fn handle_update_configs_v1(&mut self, msg: UpdateConfigs) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
self.handle_update_configs(msg)
.map(|_, _, _| {
vec![A2AMessage::Version1(A2AMessageV1::ConfigsUpdated(ConfigsUpdated {}))]
})
.into_box()
}
fn handle_update_configs_v2(&mut self, msg: UpdateConfigs) -> ResponseActFuture<Self, A2AMessageV2, Error> {
self.handle_update_configs(msg)
.map(|_, _, _| {
A2AMessageV2::ConfigsUpdated(ConfigsUpdated {})
})
.into_box()
}
fn handle_update_configs(&mut self, msg: UpdateConfigs) -> ResponseActFuture<Self, (), Error> {
for config_option in msg.configs {
match config_option.name.as_str() {
"name" | "logo_url" => self.configs.insert(config_option.name, config_option.value),
_ => continue
};
}
let config_metadata = ftry_act!(self, serde_json::to_string(&self.configs));
future::ok(())
.into_actor(self)
.and_then(move |_, slf, _| {
did::set_did_metadata(slf.wallet_handle, &slf.did, config_metadata.to_string().as_str())
.map_err(|err| err.context("Can't store config data as DID metadata.").into())
.into_actor(slf)
})
.into_box()
}
fn handle_get_configs_v1(&mut self, msg: GetConfigs) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
let configs: Vec<ConfigOption> = self.handle_get_configs(msg);
let messages = vec![A2AMessage::Version1(A2AMessageV1::Configs(Configs { configs }))];
ok_act!(self, messages)
}
fn handle_get_configs_v2(&mut self, msg: GetConfigs) -> ResponseActFuture<Self, A2AMessageV2, Error> {
let configs: Vec<ConfigOption> = self.handle_get_configs(msg);
let messages = A2AMessageV2::Configs(Configs { configs });
ok_act!(self, messages)
}
fn handle_get_configs(&mut self, msg: GetConfigs) -> Vec<ConfigOption> {
self.configs.iter()
.filter(|(k, _)| msg.configs.contains(k))
.map(|(k, v)| ConfigOption { name: k.clone(), value: v.clone() })
.collect()
}
fn handle_remove_configs_v1(&mut self, msg: RemoveConfigs) -> ResponseActFuture<Self, Vec<A2AMessage>, Error> {
self.handle_remove_configs(msg)
.map(|_, _, _| {
vec![A2AMessage::Version1(A2AMessageV1::ConfigsRemoved(ConfigsRemoved {}))]
})
.into_box()
}
fn handle_remove_configs_v2(&mut self, msg: RemoveConfigs) -> ResponseActFuture<Self, A2AMessageV2, Error> {
self.handle_remove_configs(msg)
.map(|_, _, _| {
A2AMessageV2::ConfigsRemoved(ConfigsRemoved {})
})
.into_box()
}
fn handle_remove_configs(&mut self, msg: RemoveConfigs) -> ResponseActFuture<Self, (), Error> {
self.configs.retain(|k, _| !msg.configs.contains(k));
let config_metadata = ftry_act!(self, serde_json::to_string(&self.configs));
future::ok(())
.into_actor(self)
.and_then(move |_, slf, _| {
did::set_did_metadata(slf.wallet_handle, &slf.did, config_metadata.to_string().as_str())
.map_err(|err| err.context("Can't store config data as DID metadata.").into())
.into_actor(slf)
})
.into_box()
}
fn check_no_pairwise_exists(&mut self,
did: &str) -> ResponseFuture<(), Error> {
pairwise::is_pairwise_exists(self.wallet_handle, did)
.map_err(|err| err.context("Can't check if agent pairwise connection exists.").into())
.and_then(|is_exist|
if is_exist {
err!(err_msg("Agent pairwise connection already exists.")).into()
} else {
future::ok(()).into_box()
}
)
.into_box()
}
}
impl Actor for Agent {
type Context = Context<Self>;
}
impl Handler<HandleA2AMsg> for Agent {
type Result = ResponseActFuture<Self, Vec<u8>, Error>;
fn handle(&mut self, msg: HandleA2AMsg, _: &mut Self::Context) -> Self::Result {
trace!("Handler<AgentMsgsBundle>::handle >> {:?}", msg);
self.handle_a2a_msg(msg.0)
}
}
#[cfg(test)]
mod tests {
use actors::ForwardA2AMsg;
use super::*;
use utils::to_i8;
use utils::tests::*;
use domain::status::MessageStatusCode;
#[test]
fn agent_create_key_works() {
run_test(|forward_agent| {
future::ok(())
.and_then(|()| {
setup_agent(forward_agent)
})
.and_then(move |(e_wallet_handle, agent_did, agent_verkey, _, _, forward_agent)| {
let (user_pw_did, user_pw_verkey) = did::create_and_store_my_did(e_wallet_handle, "{}").wait().unwrap();
let create_key_msg = compose_create_key(e_wallet_handle, &agent_did, &agent_verkey, &user_pw_did, &user_pw_verkey).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(create_key_msg))
.from_err()
.and_then(|res| res)
.map(move |key_created_msg| (e_wallet_handle, key_created_msg, agent_verkey))
})
.map(|(e_wallet_handle, key_created_msg, agent_verkey)| {
let (sender_vk, key) = decompose_key_created(e_wallet_handle, &key_created_msg).wait().unwrap();
assert_eq!(sender_vk, agent_verkey);
assert!(!key.with_pairwise_did.is_empty());
assert!(!key.with_pairwise_did_verkey.is_empty());
wallet::close_wallet(e_wallet_handle).wait().unwrap();
})
});
}
#[test]
fn agent_get_messages_by_connection_works() {
run_agent_test(|(e_wallet_handle, agent_did, agent_verkey, agent_pw_did, agent_pw_vk, forward_agent)| {
future::ok(())
.and_then(move |_| {
let msg = compose_create_general_message(e_wallet_handle,
&agent_did,
&agent_verkey,
&agent_pw_did,
&agent_pw_vk,
RemoteMessageType::CredOffer).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(msg))
.from_err()
.and_then(|res| res)
.map(move |resp| (e_wallet_handle, resp, agent_did, agent_verkey, forward_agent, agent_pw_did, agent_pw_vk))
})
.and_then(move |(e_wallet_handle, resp, agent_did, agent_verkey, forward_agent, agent_pw_did, agent_pw_vk)| {
let (_, msg_uid) = decompose_general_message_created(e_wallet_handle, &resp).wait().unwrap();
let msg = compose_get_messages_by_connection(e_wallet_handle,
&agent_did,
&agent_verkey,
&agent_pw_did,
&agent_pw_vk).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(msg))
.from_err()
.and_then(|res| res)
.map(move |resp| (e_wallet_handle, resp, agent_verkey, msg_uid))
})
.map(|(e_wallet_handle, resp, agent_verkey, msg_uid)| {
let (sender_vk, messages) = decompose_get_messages_by_connection(e_wallet_handle, &resp).wait().unwrap();
assert_eq!(sender_vk, agent_verkey);
assert_eq!(1, messages.len());
let expected_message = MessagesByConnection {
did: EDGE_PAIRWISE_DID.to_string(),
msgs: vec![GetMessagesDetailResponse {
uid: msg_uid,
status_code: MessageStatusCode::Created,
sender_did: EDGE_PAIRWISE_DID.to_string(),
type_: RemoteMessageType::CredOffer,
payload: Some(MessageDetailPayload::V1(to_i8(&PAYLOAD.to_vec()))),
ref_msg_id: None,
}]
};
assert_eq!(expected_message, messages[0]);
e_wallet_handle
})
});
}
#[test]
fn agent_configs_happy_path() {
run_test(|forward_agent| {
future::ok(())
.and_then(|()| {
setup_agent(forward_agent)
})
.and_then(move |(e_wallet_handle, agent_did, agent_verkey, _, _, forward_agent)| {
let msg = compose_update_configs(e_wallet_handle,
&agent_did,
&agent_verkey).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(msg))
.from_err()
.and_then(|res| res)
.map(move |_| (e_wallet_handle, agent_did, agent_verkey, forward_agent))
})
.and_then(move |(e_wallet_handle, agent_did, agent_verkey, forward_agent)| {
let msg = compose_get_configs(e_wallet_handle,
&agent_did,
&agent_verkey).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(msg))
.from_err()
.and_then(|res| res)
.map(move |resp| (e_wallet_handle, resp, agent_did, agent_verkey, forward_agent))
})
.and_then(move |(e_wallet_handle, resp, agent_did, agent_verkey, forward_agent)| {
let configs = decompose_configs(e_wallet_handle, &resp).wait().unwrap();
assert_eq!(configs.len(), 2);
assert!(configs.contains(&ConfigOption { name: "name".to_string(), value: "super agent".to_string() }));
assert!(configs.contains(&ConfigOption { name: "logo_url".to_string(), value: "http://logo.url".to_string() }));
let msg = compose_remove_configs(e_wallet_handle,
&agent_did,
&agent_verkey).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(msg))
.from_err()
.and_then(|res| res)
.map(move |_| (e_wallet_handle, agent_did, agent_verkey, forward_agent))
})
.and_then(move |(e_wallet_handle, agent_did, agent_verkey, forward_agent)| {
let msg = compose_get_configs(e_wallet_handle,
&agent_did,
&agent_verkey).wait().unwrap();
forward_agent
.send(ForwardA2AMsg(msg))
.from_err()
.and_then(|res| res)
.map(move |resp| (e_wallet_handle, resp))
})
.map(|(e_wallet_handle, resp)| {
let configs = decompose_configs(e_wallet_handle, &resp).wait().unwrap();
assert_eq!(configs.len(), 1);
assert!(!configs.contains(&ConfigOption { name: "name".to_string(), value: "super agent".to_string() }));
assert!(configs.contains(&ConfigOption { name: "logo_url".to_string(), value: "http://logo.url".to_string() }));
wallet::close_wallet(e_wallet_handle).wait().unwrap();
})
});
}
}<|fim▁end|>
|
let wallet_id = format!("dummy_{}_{}", owner_did, rand::rand_string(10));
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(stmt_expr_attributes)]
extern crate winerunner;
use winerunner::core::Core;
use winerunner::cmd::ListVersionsCommand;
use winerunner::cmd::WinebootCommand;
use winerunner::cmd::WinecfgCommand;
use winerunner::cmd::WineserverCommand;
use winerunner::cmd::WineconsoleCommand;
use winerunner::cmd::InstallCommand;
#[cfg(feature = "devel")]
use winerunner::cmd::TestCommand;
fn main() {
let mut app = Core::new();
app.add_command(ListVersionsCommand::new());
app.add_command(InstallCommand::new());
app.add_command(WinecfgCommand::new());
app.add_command(WineconsoleCommand::new());
app.add_command(WineserverCommand::new());
app.add_command(WinebootCommand::new());<|fim▁hole|>
#[cfg(feature = "devel")]
app.add_command(TestCommand::new());
app.run();
}<|fim▁end|>
| |
<|file_name|>command.rs<|end_file_name|><|fim▁begin|>use self::SmtpInput::*;
use bytes::Bytes;
use std::fmt;
use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr};
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpInput {
Command(usize, usize, SmtpCommand),
Invalid(usize, usize, Bytes),
Incomplete(usize, usize, Bytes),
None(usize, usize, String),
Connect(SmtpConnection),
Disconnect,
StreamStart(usize),
StreamEnd(usize),
StreamData(usize, usize, Bytes),
}
impl SmtpInput {
pub fn len(&self) -> usize {
match self {
&Command(_, l, _) => l,
&Invalid(_, l, _) => l,
&Incomplete(_, l, _) => l,
&None(_, l, _) => l,
&Connect(_) => 0,
&Disconnect => 0,
&StreamStart(_) => 0,
&StreamEnd(_) => 0,
&StreamData(_, l, _) => l,
}
}
pub fn pos(self, pos: usize) -> Self {
match self {
Command(_, l, c) => SmtpInput::Command(pos, l, c),
Invalid(_, l, d) => SmtpInput::Invalid(pos, l, d),
Incomplete(_, l, d) => SmtpInput::Incomplete(pos, l, d),
None(_, l, s) => SmtpInput::None(pos, l, s),
Connect(c) => SmtpInput::Connect(c),
Disconnect => SmtpInput::Disconnect,
StreamStart(_) => SmtpInput::StreamStart(pos),
StreamEnd(_) => SmtpInput::StreamEnd(pos),
StreamData(_, l, d) => SmtpInput::StreamData(pos, l, d),
}
}
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpCommand {
Helo(SmtpHelo),
Mail(SmtpMail),<|fim▁hole|> Rcpt(SmtpPath),
Expn(String),
Vrfy(String),
Help(Vec<String>),
Noop(Vec<String>),
Quit,
Rset,
Data,
Turn,
Unknown(String),
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpHost {
Domain(String),
Ipv4(Ipv4Addr),
Ipv6(Ipv6Addr),
Invalid { label: String, literal: String },
Other { label: String, literal: String },
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpPath {
Direct(SmtpAddress),
Relay(Vec<SmtpHost>, SmtpAddress),
Postmaster,
Null,
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpAddress {
Mailbox(String, SmtpHost),
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpHelo {
Helo(SmtpHost),
Ehlo(SmtpHost),
}
impl SmtpHelo {
pub fn host<'a>(&'a self) -> &'a SmtpHost {
use self::SmtpHelo::*;
match self {
&Helo(ref host) => host,
&Ehlo(ref host) => host,
}
}
pub fn name(&self) -> String {
format!("{}", self.host())
}
}
impl fmt::Display for SmtpPath {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
&SmtpPath::Direct(ref addr) => match addr {
&SmtpAddress::Mailbox(ref name, ref host) => write!(f, "<{}@{}>", name, host),
},
&SmtpPath::Null => write!(f, "<>"),
&SmtpPath::Postmaster => write!(f, "<POSTMASTER>"),
&SmtpPath::Relay(_, ref addr) => match addr {
&SmtpAddress::Mailbox(ref name, ref host) => write!(f, "<{}@{}>", name, host),
},
}
}
}
impl fmt::Display for SmtpHost {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
use self::SmtpHost::*;
match self {
&Domain(ref h) => f.write_str(h),
&Ipv4(ref h) => write!(f, "{}", h),
&Ipv6(ref h) => write!(f, "{}", h),
&Invalid {
ref label,
ref literal,
} => write!(f, "{}:{}", label, literal),
&Other {
ref label,
ref literal,
} => write!(f, "{}:{}", label, literal),
}
}
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct SmtpConnection {
pub local_name: String,
pub local_addr: Option<SocketAddr>,
pub peer_addr: Option<SocketAddr>,
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum SmtpMail {
Mail(SmtpPath),
Send(SmtpPath),
Saml(SmtpPath),
Soml(SmtpPath),
}
impl SmtpMail {
pub fn from(&self) -> &SmtpPath {
match self {
SmtpMail::Mail(p) => &p,
SmtpMail::Send(p) => &p,
SmtpMail::Saml(p) => &p,
SmtpMail::Soml(p) => &p,
}
}
}<|fim▁end|>
| |
<|file_name|>sceneParams_0.js<|end_file_name|><|fim▁begin|>var HLSP = {
/*
set squareness to 0 for a flat land
*/
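// (illustrative note) e.g. the firefly preset below uses squareness: 0 for
// fully flat terrain, while most chapters keep a small non-zero value.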
// lower the intensity of the audio-reactive land color
mizu: {
cameraPositionY: 10,
seaLevel: 0,
displayText: '<b>CHAPTER ONE, MIZU</b><br/><i>TO BE TRAPPED INTO THE MORNING UNDERTOW</i>',
speed: 10,
modelsParams: ['stones', function(){return 1+Math.random()*40}, 3, true, true, 0],
tiles: 62,
repeatUV: 1,
bFactor: 0.5,
cFactor: 0.07594379703811609,
buildFreq: 10,
natural: 0.6834941733430447,
rainbow: 0.5641539208545766,
squareness: 0.022450016948639295,
map: 'white',
landRGB: 1966335,
horizonRGB: 0,
skyMap: 'sky4',
},
// fft1 plus a moveSpeed speedup
solar_valley: {
cameraPositionY: -180,
seaLevel: -450,
fogDensity: 0.00054,
displayText: '<b>CHAPTER TWO, SOLAR VALLEY</b><br><i>FIRE EXECUTION STOPPED BY CLOUDS</i>',
speed: 10,
modelsParams: ['stones', function(){return 1+Math.random()*5}, 40, true, false, -750],
tiles: 200,
repeatUV: 7,
bFactor: 0.6617959456178687,
cFactor: 0.3471716436028164,
buildFreq: 10,
natural: 0.18443493566399619,
rainbow: 0.03254734158776403,
squareness: 0.00001,
map: 'land3',
landRGB: 9675935,
horizonRGB: 3231404,
skyMap: 'sky4',
},
// camera underwater
escher_surfers: {
cameraPositionY: 40,
seaLevel: 50,
displayText: '<b>CHAPTER THREE, ESCHER SURFERS</b><br><i>TAKING REST ON K 11</i>',
speed: 15,
modelsParams: ['cube', 3, 1, true, true, 0 ],
tiles: 73,
repeatUV: 112,
bFactor: 1.001,
cFactor: 0,
buildFreq: 10,
natural: 0,
rainbow: 0.16273670793882017,
squareness: 0.08945796327125173,
map: 'pattern1',
landRGB: 16727705,
horizonRGB: 7935,
skyMap: 'sky1',
},
// lower sea level
// models: cubes
currybox: {
cameraPositionY: 100,//HLE.WORLD_HEIGHT*.5,
seaLevel: -100,
displayText: '<b>CHAPTER FOUR, CURRYBOX</b><br><i>A FLAKE ON THE ROAD AND A KING AND HIS BONES</i>',
speed: 5,
modelsParams: [['cube'], function(){return 1+Math.random()*5}, 1, true, false,-100],
tiles: 145,
repeatUV: 1,
bFactor: 0.751,
cFactor: 0.054245569312940056,
buildFreq: 10,
natural: 0.176420247632921,
rainbow: 0.21934025248846812,
squareness: 0.01,
map: 'white',
landRGB: 13766158,
horizonRGB: 2665099,
skyMap: 'sky1',
},
// low sea level
galaxy_glacier: {
cameraPositionY: 50,
seaLevel: -100,
displayText: '<b>CHAPTER FIVE, GALAXY GLACIER</b><br><i>HITTING ICEBERGS BLAMES</i>',
speed: 2,
modelsParams: [null, 1, true, true],
tiles: 160,
repeatUV: 1,
bFactor: 0.287989180087759,
cFactor: 0.6148319562024518,
buildFreq: 61.5837970429,
natural: 0.4861551769529205,
rainbow: 0.099628324585666777,
squareness: 0.01198280149135716,
map: 'pattern5', //%
landRGB: 11187452,
horizonRGB: 6705,
skyMap: 'sky1',
},
firefly: {
cameraPositionY: 50,
displayText: '<b>CHAPTER SIX, FIREFLY</b>',
speed: 10,
modelsParams: ['sea', 1, true, true],
tiles: 100,
repeatUV: 1,
bFactor: 1,
cFactor: 1,
buildFreq: 1,
natural: 1,
rainbow: 0,
squareness: 0,
map: 'white',
landRGB: 2763306,
horizonRGB: 0,
skyMap: 'sky1',
},
//camera position.y -400
// start above the water, then gradually end up underwater
//G
drift: {
cameraPositionY: -450,
seaLevel: 0,
displayText: '<b>CHAPTER SEVEN, DRIFT</b><br><i>LEAVING THE BOAT</i>',
speed: 3,
modelsParams: [['ducky'], function(){return 1+Math.random()*2}, 2, true, true, 0],
tiles: 128,
repeatUV: 0,
bFactor: 0.24952961883952426,
cFactor: 0.31,
buildFreq: 15.188759407623216,
natural: 0.3471716436028164,<|fim▁hole|> rainbow: 1.001,
squareness: 0.00001,
map: 'land1',
landRGB: 16777215,
horizonRGB: 6039170,
skyMap: 'sky2',
},
//H
hyperocean: {
cameraPositionY: 50,
displayText: '<b>CHAPTER EIGHT, HYPEROCEAN</b><br><i>CRAVING FOR LOVE LASTS FOR LIFE</i>',
speed: 8,//18,
modelsParams: ['space', 2, 40, true, false, 200],
tiles: 200,
repeatUV: 12,
bFactor: 1.001,
cFactor: 0.21934025248846812,
buildFreq: 15.188759407623216,
natural: 0.7051924010682208,
rainbow: 0.1952840495265842,
squareness: 0.00001,
map: 'land5',
landRGB: 14798516,
horizonRGB: 7173242,
skyMap: 'sky2',
},
// whales
// flip the whale models accordingly
//I
twin_horizon: {
cameraPositionY: 100,
displayText: '<b>CHAPTER NINE, TWIN HORIZON</b><br><i>ON THE RIGHT VISION TO THE RIGHT SEASON</i>',
speed: 10,
modelsParams: ['sea', function(){return 20+Math.random()*20}, 20, false, false, 550],
tiles: 99,
repeatUV: 1,
bFactor: 0.20445411338494512,
cFactor: 0.33632252974022836,
buildFreq: 45.50809304437684,
natural: 0.4448136683661085,
rainbow: 0,
squareness: 0.0013619887944460984,
map: 'white',
landRGB: 0x000fff,
horizonRGB: 16728899,
skyMap: 'sky1',
},
// from a certain point on, random colors (as the track builds up)
// from the same point on, increase the speed
// low sea level
// models: home appliances / electronics
//J
else: {
cameraPositionY: 50,
displayText: '<b>CHAPTER TEN, ELSE</b><br><i>DIE LIKE AN ELECTRIC MACHINE</i>',
speed: 10,
modelsParams: [['ducky'], function(){return 2+Math.random()*20}, 3, true, true, 0],
tiles: 104,
repeatUV: 128,
bFactor: 0.5641539208545766,
cFactor: 0,
buildFreq: 30.804098302357595,
natural: 0.0,
rainbow: 0.6458797021572349,
squareness: 0.013562721707765414,
map: 'pattern2',
landRGB: 65399,
horizonRGB: 0x000000,
skyMap: 'sky3',
},
// when the kicks come in, randomize the landscape
// space odyssey
// starry sky (milky way)
//K
roger_water: {
cameraPositionY: 50,
displayText: '<b>CHAPTER ELEVEN, ROGER WATER</b><br><i>PROTECT WATER</i>',
speed: 10,
modelsParams: ['stones', function(){return 1+Math.random()*40}, 3, true, true, 0],
tiles: 80,
repeatUV: 1,
bFactor: 0,
cFactor: 0.20613316338917223,
buildFreq: 10,
natural: 1.001,
rainbow: 0.1735858218014082,
squareness: 0.00001,
map: 'white',
landRGB: 2105376,
horizonRGB: 0,
skyMap: 'sky1',
},
//L
alpha_11: {
cameraPositionY: 50,
displayText: '<b>CHAPTER TWELVE, ALPHA 11</b><br><i>A MASSIVE WAVE IS DRIVING ME HOME</i>',
speed: 1,
modelsParams: ['stones', function(){return 1+Math.random()*40}, 3, true, true, 0],
tiles: 6,
repeatUV: 1,
bFactor: 0,
cFactor: 0,
buildFreq: 44.48136683661085,
natural: 0,
rainbow: 0,
squareness: 0.00001,
map: 'white',
landRGB: 0,
horizonRGB: 3980219,
skyMap: 'sky1',
},
//M
blackpool: {
displayText: 'BLACKPOOL',
speed: -10,
modelsParams: ['space', 2, 400, true, false, 200],
cameraPositionY: 110,
seaLevel: 10,
// speed: 4,
// modelsParams: ['sea', 1, true, true],
tiles: 182,
repeatUV: 16.555478741450983,
bFactor: 0.6048772396441062,
cFactor: 0.016358953883098624,
buildFreq: 73.3797815423632,
natural: 0.9833741906510363,
rainbow: 0.10821609644148733,
squareness: 0.00599663055740593,
map: 'land3',
landRGB: 12105440,
horizonRGB: 2571781,
skyMap: 'sky1',
},
intro: {
cameraPositionY: 650,
seaLevel: 0,
displayText: 'INTRO',
speed: 0,
modelsParams: ['sea', 1, true, true],
tiles: 100,
repeatUV: 1,
bFactor: 0,
cFactor: 0,
buildFreq: 10,
natural: 1,
rainbow: 0,
squareness: 0,
map: 'sky1',
landRGB: 0x111111,
horizonRGB: 0x6f6f6f,
skyMap: 'sky3'
}
}<|fim▁end|>
| |
<|file_name|>format.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import print_function
# pylint: disable=W0141
import sys
from pandas.core.base import PandasObject
from pandas.core.common import adjoin, notnull
from pandas.core.index import Index, MultiIndex, _ensure_index
from pandas import compat
from pandas.compat import (StringIO, lzip, range, map, zip, reduce, u,
OrderedDict)
from pandas.util.terminal import get_terminal_size
from pandas.core.config import get_option, set_option
import pandas.core.common as com
import pandas.lib as lib
from pandas.tslib import iNaT, Timestamp, Timedelta, format_array_from_datetime
from pandas.tseries.index import DatetimeIndex
from pandas.tseries.period import PeriodIndex
import numpy as np
import itertools
import csv
docstring_to_string = """
Parameters
----------
frame : DataFrame
object to render
buf : StringIO-like, optional
buffer to write to
columns : sequence, optional
the subset of columns to write; default None writes all columns
col_space : int, optional
the minimum width of each column
header : bool, optional
whether to print column labels, default True
index : bool, optional
whether to print index (row) labels, default True
na_rep : string, optional
string representation of NAN to use, default 'NaN'
formatters : list or dict of one-parameter functions, optional
formatter functions to apply to columns' elements by position or name,
default None. The result of each function must be a unicode string.
List must be of length equal to the number of columns.
float_format : one-parameter function, optional
formatter function to apply to columns' elements if they are floats,
default None. The result of this function must be a unicode string.
sparsify : bool, optional
Set to False for a DataFrame with a hierarchical index to print every
multiindex key at each row, default True
justify : {'left', 'right'}, default None
Left or right-justify the column labels. If None uses the option from
the print configuration (controlled by set_option), 'right' out
of the box.
index_names : bool, optional
Prints the names of the indexes, default True
force_unicode : bool, default False
Always return a unicode result. Deprecated in v0.10.0 as string
formatting is now rendered to unicode by default.
Returns
-------
formatted : string (or unicode, depending on data and options)"""
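# Illustrative example of the parameters documented above; they are forwarded
# into the formatters below via DataFrame.to_string and friends (hypothetical
# frame `df`):
#   df.to_string(columns=['a', 'b'], col_space=12, na_rep='-', justify='left')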
class CategoricalFormatter(object):
def __init__(self, categorical, buf=None, length=True,
na_rep='NaN', name=False, footer=True):
self.categorical = categorical
self.buf = buf if buf is not None else StringIO(u(""))
self.name = name
self.na_rep = na_rep
self.length = length
self.footer = footer
def _get_footer(self):
footer = ''
if self.name:
name = com.pprint_thing(self.categorical.name,
escape_chars=('\t', '\r', '\n'))
footer += ('Name: %s' % name if self.categorical.name is not None
else '')
if self.length:
if footer:
footer += ', '
footer += "Length: %d" % len(self.categorical)
level_info = self.categorical._repr_categories_info()
# Levels are added on a new line
if footer:
footer += '\n'
footer += level_info
return compat.text_type(footer)
def _get_formatted_values(self):
return format_array(self.categorical.get_values(), None,
float_format=None,
na_rep=self.na_rep)
def to_string(self):
categorical = self.categorical
if len(categorical) == 0:
if self.footer:
return self._get_footer()
else:
return u('')
fmt_values = self._get_formatted_values()
result = ['%s' % i for i in fmt_values]
result = [i.strip() for i in result]
result = u(', ').join(result)
result = [u('[')+result+u(']')]
if self.footer:
footer = self._get_footer()
if footer:
result.append(footer)
return compat.text_type(u('\n').join(result))
class SeriesFormatter(object):
def __init__(self, series, buf=None, length=True, header=True,
na_rep='NaN', name=False, float_format=None, dtype=True,
max_rows=None):
self.series = series
self.buf = buf if buf is not None else StringIO()
self.name = name
self.na_rep = na_rep
self.header = header
self.length = length
self.max_rows = max_rows
if float_format is None:
float_format = get_option("display.float_format")
self.float_format = float_format
self.dtype = dtype
self._chk_truncate()
def _chk_truncate(self):
from pandas.tools.merge import concat
max_rows = self.max_rows
truncate_v = max_rows and (len(self.series) > max_rows)
series = self.series
if truncate_v:
if max_rows == 1:
row_num = max_rows
series = series.iloc[:max_rows]
else:
row_num = max_rows // 2
series = concat((series.iloc[:row_num], series.iloc[-row_num:]))
self.tr_row_num = row_num
self.tr_series = series
self.truncate_v = truncate_v
def _get_footer(self):
name = self.series.name
footer = u('')
if getattr(self.series.index, 'freq', None) is not None:
footer += 'Freq: %s' % self.series.index.freqstr
if self.name is not False and name is not None:
if footer:
footer += ', '
series_name = com.pprint_thing(name,
escape_chars=('\t', '\r', '\n'))
footer += ("Name: %s" %
series_name) if name is not None else ""
if self.length:
if footer:
footer += ', '
footer += 'Length: %d' % len(self.series)
if self.dtype is not False and self.dtype is not None:
name = getattr(self.tr_series.dtype, 'name', None)
if name:
if footer:
footer += ', '
footer += 'dtype: %s' % com.pprint_thing(name)
# Level info is added at the end on a new line, as is done for Categoricals
# Only added when we request a name
if name and com.is_categorical_dtype(self.tr_series.dtype):
level_info = self.tr_series.values._repr_categories_info()
if footer:
footer += "\n"
footer += level_info
return compat.text_type(footer)
def _get_formatted_index(self):
index = self.tr_series.index
is_multi = isinstance(index, MultiIndex)
if is_multi:
have_header = any(name for name in index.names)
fmt_index = index.format(names=True)
else:
have_header = index.name is not None
fmt_index = index.format(name=True)
return fmt_index, have_header
def _get_formatted_values(self):
return format_array(self.tr_series.get_values(), None,
float_format=self.float_format,
na_rep=self.na_rep)
def to_string(self):
series = self.tr_series
footer = self._get_footer()
if len(series) == 0:
return 'Series([], ' + footer + ')'
fmt_index, have_header = self._get_formatted_index()
fmt_values = self._get_formatted_values()
maxlen = max(len(x) for x in fmt_index) # max index len
pad_space = min(maxlen, 60)
if self.truncate_v:
n_header_rows = 0
row_num = self.tr_row_num
width = len(fmt_values[row_num-1])
if width > 3:
dot_str = '...'
else:
dot_str = '..'
dot_str = dot_str.center(width)
fmt_values.insert(row_num + n_header_rows, dot_str)
fmt_index.insert(row_num + 1, '')
result = adjoin(3, *[fmt_index[1:], fmt_values])
if self.header and have_header:
result = fmt_index[0] + '\n' + result
if footer:
result += '\n' + footer
return compat.text_type(u('').join(result))
def _strlen_func():
if compat.PY3: # pragma: no cover
_strlen = len
else:
encoding = get_option("display.encoding")
def _strlen(x):
try:
return len(x.decode(encoding))
except UnicodeError:
return len(x)
return _strlen
class TableFormatter(object):
is_truncated = False
show_dimensions = None
@property
def should_show_dimensions(self):
return self.show_dimensions is True or (self.show_dimensions == 'truncate' and
self.is_truncated)
def _get_formatter(self, i):
if isinstance(self.formatters, (list, tuple)):
if com.is_integer(i):
return self.formatters[i]
else:
return None
else:
if com.is_integer(i) and i not in self.columns:
i = self.columns[i]
return self.formatters.get(i, None)
class DataFrameFormatter(TableFormatter):
"""
Render a DataFrame
self.to_string() : console-friendly tabular output
self.to_html() : html table
self.to_latex() : LaTeX tabular environment table
"""
__doc__ = __doc__ if __doc__ else ''
__doc__ += docstring_to_string
def __init__(self, frame, buf=None, columns=None, col_space=None,
header=True, index=True, na_rep='NaN', formatters=None,
justify=None, float_format=None, sparsify=None,
index_names=True, line_width=None, max_rows=None,
max_cols=None, show_dimensions=False, **kwds):
self.frame = frame
self.buf = buf if buf is not None else StringIO()
self.show_index_names = index_names
if sparsify is None:
sparsify = get_option("display.multi_sparse")
self.sparsify = sparsify
self.float_format = float_format
self.formatters = formatters if formatters is not None else {}
self.na_rep = na_rep
self.col_space = col_space
self.header = header
self.index = index
self.line_width = line_width
self.max_rows = max_rows
self.max_cols = max_cols
self.max_rows_displayed = min(max_rows or len(self.frame),
len(self.frame))
self.show_dimensions = show_dimensions
if justify is None:
self.justify = get_option("display.colheader_justify")
else:
self.justify = justify
self.kwds = kwds
if columns is not None:
self.columns = _ensure_index(columns)
self.frame = self.frame[self.columns]
else:
self.columns = frame.columns
self._chk_truncate()
def _chk_truncate(self):
'''
Checks whether the frame should be truncated. If so, slices
the frame up.
'''
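# Example of the slicing below (sketch): with max_rows=10 against a 100-row
# frame, max_rows_adj stays 10, row_num becomes 5, and the frame is rebuilt
# as concat(first 5 rows, last 5 rows) with truncate_v set to True.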
from pandas.tools.merge import concat
# Column of which first element is used to determine width of a dot col
self.tr_size_col = -1
# Cut the data to the information actually printed
max_cols = self.max_cols
max_rows = self.max_rows
if max_cols == 0 or max_rows == 0: # assume we are in the terminal (why else = 0)
(w, h) = get_terminal_size()
self.w = w
self.h = h
if self.max_rows == 0:
dot_row = 1
prompt_row = 1
if self.show_dimensions:
show_dimension_rows = 3
n_add_rows = self.header + dot_row + show_dimension_rows + prompt_row
max_rows_adj = self.h - n_add_rows # rows available to fill with actual data
self.max_rows_adj = max_rows_adj
# Format only rows and columns that could potentially fit the screen
if max_cols == 0 and len(self.frame.columns) > w:
max_cols = w
if max_rows == 0 and len(self.frame) > h:
max_rows = h
if not hasattr(self, 'max_rows_adj'):
self.max_rows_adj = max_rows
if not hasattr(self, 'max_cols_adj'):
self.max_cols_adj = max_cols
max_cols_adj = self.max_cols_adj
max_rows_adj = self.max_rows_adj
truncate_h = max_cols_adj and (len(self.columns) > max_cols_adj)
truncate_v = max_rows_adj and (len(self.frame) > max_rows_adj)
frame = self.frame
if truncate_h:
if max_cols_adj == 0:
col_num = len(frame.columns)
elif max_cols_adj == 1:
frame = frame.iloc[:, :max_cols]
col_num = max_cols
else:
col_num = (max_cols_adj // 2)
frame = concat((frame.iloc[:, :col_num], frame.iloc[:, -col_num:]), axis=1)
self.tr_col_num = col_num
if truncate_v:
if max_rows_adj == 0:
row_num = len(frame)
if max_rows_adj == 1:
row_num = max_rows
frame = frame.iloc[:max_rows, :]
else:
row_num = max_rows_adj // 2
frame = concat((frame.iloc[:row_num, :], frame.iloc[-row_num:, :]))
self.tr_row_num = row_num
self.tr_frame = frame
self.truncate_h = truncate_h
self.truncate_v = truncate_v
self.is_truncated = self.truncate_h or self.truncate_v
def _to_str_columns(self):
"""
Render a DataFrame to a list of columns (as lists of strings).
"""
_strlen = _strlen_func()
frame = self.tr_frame
# may include levels names also
str_index = self._get_formatted_index(frame)
str_columns = self._get_formatted_column_labels(frame)
if self.header:
stringified = []
for i, c in enumerate(frame):
cheader = str_columns[i]
max_colwidth = max(self.col_space or 0,
*(_strlen(x) for x in cheader))
fmt_values = self._format_col(i)
fmt_values = _make_fixed_width(fmt_values, self.justify,
minimum=max_colwidth)
max_len = max(np.max([_strlen(x) for x in fmt_values]),
max_colwidth)
if self.justify == 'left':
cheader = [x.ljust(max_len) for x in cheader]
else:
cheader = [x.rjust(max_len) for x in cheader]
stringified.append(cheader + fmt_values)
else:
stringified = []
for i, c in enumerate(frame):
fmt_values = self._format_col(i)
fmt_values = _make_fixed_width(fmt_values, self.justify,
minimum=(self.col_space or 0))
stringified.append(fmt_values)
strcols = stringified
if self.index:
strcols.insert(0, str_index)
# Add ... to signal truncated
truncate_h = self.truncate_h
truncate_v = self.truncate_v
if truncate_h:
col_num = self.tr_col_num
col_width = len(strcols[self.tr_size_col][0]) # infer from column header
strcols.insert(self.tr_col_num + 1, ['...'.center(col_width)] * (len(str_index)))
if truncate_v:
n_header_rows = len(str_index) - len(frame)
row_num = self.tr_row_num
for ix, col in enumerate(strcols):
cwidth = len(strcols[ix][row_num]) # infer from above row
is_dot_col = False
if truncate_h:
is_dot_col = ix == col_num + 1
if cwidth > 3 or is_dot_col:
my_str = '...'
else:
my_str = '..'
if ix == 0:
dot_str = my_str.ljust(cwidth)
elif is_dot_col:
cwidth = len(strcols[self.tr_size_col][0])
dot_str = my_str.center(cwidth)
else:
dot_str = my_str.rjust(cwidth)
strcols[ix].insert(row_num + n_header_rows, dot_str)
return strcols
def to_string(self):
"""
Render a DataFrame to a console-friendly tabular output.
"""
from pandas import Series
frame = self.frame
if len(frame.columns) == 0 or len(frame.index) == 0:
info_line = (u('Empty %s\nColumns: %s\nIndex: %s')
% (type(self.frame).__name__,
com.pprint_thing(frame.columns),
com.pprint_thing(frame.index)))
text = info_line
else:
strcols = self._to_str_columns()
if self.line_width is None: # no need to wrap around just print the whole frame
text = adjoin(1, *strcols)
elif not isinstance(self.max_cols, int) or self.max_cols > 0: # need to wrap around
text = self._join_multiline(*strcols)
else: # max_cols == 0. Try to fit frame to terminal
text = adjoin(1, *strcols).split('\n')
row_lens = Series(text).apply(len)
max_len_col_ix = np.argmax(row_lens)
max_len = row_lens[max_len_col_ix]
headers = [ele[0] for ele in strcols]
# Size of last col determines dot col size. See `self._to_str_columns`.
size_tr_col = len(headers[self.tr_size_col])
max_len += size_tr_col # Need to make space for largest row plus truncate dot col
dif = max_len - self.w
adj_dif = dif
col_lens = Series([Series(ele).apply(len).max() for ele in strcols])
n_cols = len(col_lens)
counter = 0
while adj_dif > 0 and n_cols > 1:
counter += 1
mid = int(round(n_cols / 2.))
mid_ix = col_lens.index[mid]
col_len = col_lens[mid_ix]
adj_dif -= (col_len + 1) # adjoin adds one
col_lens = col_lens.drop(mid_ix)
n_cols = len(col_lens)
max_cols_adj = n_cols - self.index # subtract index column
self.max_cols_adj = max_cols_adj
# Call again _chk_truncate to cut frame appropriately
# and then generate string representation
self._chk_truncate()
strcols = self._to_str_columns()
text = adjoin(1, *strcols)
self.buf.writelines(text)
if self.should_show_dimensions:
self.buf.write("\n\n[%d rows x %d columns]"
% (len(frame), len(frame.columns)))
def _join_multiline(self, *strcols):
lwidth = self.line_width
adjoin_width = 1
strcols = list(strcols)
if self.index:
idx = strcols.pop(0)
lwidth -= np.array([len(x) for x in idx]).max() + adjoin_width
col_widths = [np.array([len(x) for x in col]).max()
if len(col) > 0 else 0
for col in strcols]
col_bins = _binify(col_widths, lwidth)
nbins = len(col_bins)
if self.truncate_v:
nrows = self.max_rows_adj + 1
else:
nrows = len(self.frame)
str_lst = []
st = 0
for i, ed in enumerate(col_bins):
row = strcols[st:ed]
row.insert(0, idx)
if nbins > 1:
if ed <= len(strcols) and i < nbins - 1:
row.append([' \\'] + [' '] * (nrows - 1))
else:
row.append([' '] * nrows)
str_lst.append(adjoin(adjoin_width, *row))
st = ed
return '\n\n'.join(str_lst)
def to_latex(self, column_format=None, longtable=False):
"""
Render a DataFrame to a LaTeX tabular/longtable environment output.
"""
self.escape = self.kwds.get('escape', True)
# TODO: column_format is not settable in df.to_latex
def get_col_type(dtype):
if issubclass(dtype.type, np.number):
return 'r'
else:
return 'l'
frame = self.frame
if len(frame.columns) == 0 or len(frame.index) == 0:
info_line = (u('Empty %s\nColumns: %s\nIndex: %s')
% (type(self.frame).__name__,
frame.columns, frame.index))
strcols = [[info_line]]
else:
strcols = self._to_str_columns()
if self.index and isinstance(self.frame.index, MultiIndex):
clevels = self.frame.columns.nlevels
strcols.pop(0)
name = any(self.frame.columns.names)
for i, lev in enumerate(self.frame.index.levels):
lev2 = lev.format(name=name)
blank = ' ' * len(lev2[0])
lev3 = [blank] * clevels
for level_idx, group in itertools.groupby(
self.frame.index.labels[i]):
count = len(list(group))
lev3.extend([lev2[level_idx]] + [blank] * (count - 1))
strcols.insert(i, lev3)
if column_format is None:
dtypes = self.frame.dtypes.values
column_format = ''.join(map(get_col_type, dtypes))
if self.index:
index_format = 'l' * self.frame.index.nlevels
column_format = index_format + column_format
elif not isinstance(column_format,
compat.string_types): # pragma: no cover
raise AssertionError('column_format must be str or unicode, not %s'
% type(column_format))
def write(buf, frame, column_format, strcols, longtable=False):
if not longtable:
buf.write('\\begin{tabular}{%s}\n' % column_format)
buf.write('\\toprule\n')
else:
buf.write('\\begin{longtable}{%s}\n' % column_format)
buf.write('\\toprule\n')
nlevels = frame.columns.nlevels
for i, row in enumerate(zip(*strcols)):
if i == nlevels:
buf.write('\\midrule\n') # End of header
if longtable:
buf.write('\\endhead\n')
buf.write('\\midrule\n')
buf.write('\\multicolumn{3}{r}{{Continued on next '
'page}} \\\\\n')
buf.write('\\midrule\n')
buf.write('\\endfoot\n\n')
buf.write('\\bottomrule\n')
buf.write('\\endlastfoot\n')
if self.escape:
crow = [(x.replace('\\', '\\textbackslash') # escape backslashes first
.replace('_', '\\_')
.replace('%', '\\%')
.replace('$', '\\$')
.replace('#', '\\#')
.replace('{', '\\{')
.replace('}', '\\}')
.replace('~', '\\textasciitilde')
.replace('^', '\\textasciicircum')
.replace('&', '\\&') if x else '{}') for x in row]
else:
crow = [x if x else '{}' for x in row]
buf.write(' & '.join(crow))
buf.write(' \\\\\n')
if not longtable:
buf.write('\\bottomrule\n')
buf.write('\\end{tabular}\n')
else:
buf.write('\\end{longtable}\n')
if hasattr(self.buf, 'write'):
write(self.buf, frame, column_format, strcols, longtable)
elif isinstance(self.buf, compat.string_types):
with open(self.buf, 'w') as f:
write(f, frame, column_format, strcols, longtable)
else:
raise TypeError('buf is not a file name and it has no write '
'method')
def _format_col(self, i):
frame = self.tr_frame
formatter = self._get_formatter(i)
return format_array(
(frame.iloc[:, i]).get_values(),
formatter, float_format=self.float_format, na_rep=self.na_rep,
space=self.col_space
)
def to_html(self, classes=None):
"""
Render a DataFrame to a html table.
"""
html_renderer = HTMLFormatter(self, classes=classes,
max_rows=self.max_rows,
max_cols=self.max_cols)
if hasattr(self.buf, 'write'):
html_renderer.write_result(self.buf)
elif isinstance(self.buf, compat.string_types):
with open(self.buf, 'w') as f:
html_renderer.write_result(f)
else:
raise TypeError('buf is not a file name and it has no write '
' method')
def _get_formatted_column_labels(self, frame):
from pandas.core.index import _sparsify
def is_numeric_dtype(dtype):
return issubclass(dtype.type, np.number)
columns = frame.columns
if isinstance(columns, MultiIndex):
fmt_columns = columns.format(sparsify=False, adjoin=False)
fmt_columns = lzip(*fmt_columns)
dtypes = self.frame.dtypes.values
# if we have a Float level, they don't use leading space at all
restrict_formatting = any([l.is_floating for l in columns.levels])
need_leadsp = dict(zip(fmt_columns, map(is_numeric_dtype, dtypes)))
def space_format(x, y):
if y not in self.formatters and need_leadsp[x] and not restrict_formatting:
return ' ' + y
return y
str_columns = list(zip(*[[space_format(x, y) for y in x] for x in fmt_columns]))
if self.sparsify:
str_columns = _sparsify(str_columns)
str_columns = [list(x) for x in zip(*str_columns)]
else:
fmt_columns = columns.format()
dtypes = self.frame.dtypes
need_leadsp = dict(zip(fmt_columns, map(is_numeric_dtype, dtypes)))
str_columns = [[' ' + x
if not self._get_formatter(i) and need_leadsp[x]
else x]
for i, (col, x) in
enumerate(zip(columns, fmt_columns))]
if self.show_index_names and self.has_index_names:
for x in str_columns:
x.append('')
# self.str_columns = str_columns
return str_columns
@property
def has_index_names(self):
return _has_names(self.frame.index)
@property
def has_column_names(self):
return _has_names(self.frame.columns)
def _get_formatted_index(self, frame):
# Note: this is only used by to_string() and to_latex(), not by to_html().
index = frame.index
columns = frame.columns
show_index_names = self.show_index_names and self.has_index_names
show_col_names = (self.show_index_names and self.has_column_names)
fmt = self._get_formatter('__index__')
if isinstance(index, MultiIndex):
fmt_index = index.format(sparsify=self.sparsify, adjoin=False,
names=show_index_names,
formatter=fmt)
else:
fmt_index = [index.format(name=show_index_names, formatter=fmt)]
fmt_index = [tuple(_make_fixed_width(
list(x), justify='left', minimum=(self.col_space or 0)))
for x in fmt_index]
adjoined = adjoin(1, *fmt_index).split('\n')
# empty space for columns
if show_col_names:
col_header = ['%s' % x for x in self._get_column_name_list()]
else:
col_header = [''] * columns.nlevels
if self.header:
return col_header + adjoined
else:
return adjoined
def _get_column_name_list(self):
names = []
columns = self.frame.columns
if isinstance(columns, MultiIndex):
names.extend('' if name is None else name
for name in columns.names)
else:
names.append('' if columns.name is None else columns.name)
return names
class HTMLFormatter(TableFormatter):
indent_delta = 2
def __init__(self, formatter, classes=None, max_rows=None, max_cols=None):
self.fmt = formatter
self.classes = classes
self.frame = self.fmt.frame
self.columns = self.fmt.tr_frame.columns
self.elements = []
self.bold_rows = self.fmt.kwds.get('bold_rows', False)
self.escape = self.fmt.kwds.get('escape', True)
self.max_rows = max_rows or len(self.fmt.frame)
self.max_cols = max_cols or len(self.fmt.columns)
self.show_dimensions = self.fmt.show_dimensions
self.is_truncated = (self.max_rows < len(self.fmt.frame) or
self.max_cols < len(self.fmt.columns))
def write(self, s, indent=0):
rs = com.pprint_thing(s)
self.elements.append(' ' * indent + rs)
def write_th(self, s, indent=0, tags=None):
if (self.fmt.col_space is not None
and self.fmt.col_space > 0):
tags = (tags or "")
tags += 'style="min-width: %s;"' % self.fmt.col_space
return self._write_cell(s, kind='th', indent=indent, tags=tags)
def write_td(self, s, indent=0, tags=None):
return self._write_cell(s, kind='td', indent=indent, tags=tags)
def _write_cell(self, s, kind='td', indent=0, tags=None):
if tags is not None:
start_tag = '<%s %s>' % (kind, tags)
else:
start_tag = '<%s>' % kind
if self.escape:
# escape & first to prevent double escaping of &
esc = OrderedDict(
[('&', r'&'), ('<', r'<'), ('>', r'>')]
)
else:
esc = {}
rs = com.pprint_thing(s, escape_chars=esc).strip()
self.write(
'%s%s</%s>' % (start_tag, rs, kind), indent)
def write_tr(self, line, indent=0, indent_delta=4, header=False,
align=None, tags=None, nindex_levels=0):
if tags is None:
tags = {}
if align is None:
self.write('<tr>', indent)
else:
self.write('<tr style="text-align: %s;">' % align, indent)
indent += indent_delta
for i, s in enumerate(line):
val_tag = tags.get(i, None)
if header or (self.bold_rows and i < nindex_levels):
self.write_th(s, indent, tags=val_tag)
else:
self.write_td(s, indent, tags=val_tag)
indent -= indent_delta
self.write('</tr>', indent)
def write_result(self, buf):
indent = 0
frame = self.frame
_classes = ['dataframe'] # Default class.
if self.classes is not None:
if isinstance(self.classes, str):
self.classes = self.classes.split()
if not isinstance(self.classes, (list, tuple)):
raise AssertionError(('classes must be list or tuple, '
'not %s') % type(self.classes))
_classes.extend(self.classes)
self.write('<table border="1" class="%s">' % ' '.join(_classes),
indent)
indent += self.indent_delta
indent = self._write_header(indent)
indent = self._write_body(indent)
self.write('</table>', indent)
if self.should_show_dimensions:
by = chr(215) if compat.PY3 else unichr(215) # ×
self.write(u('<p>%d rows %s %d columns</p>') %
(len(frame), by, len(frame.columns)))
_put_lines(buf, self.elements)
def _write_header(self, indent):
truncate_h = self.fmt.truncate_h
row_levels = self.frame.index.nlevels
if not self.fmt.header:
# write nothing
return indent
def _column_header():
if self.fmt.index:
row = [''] * (self.frame.index.nlevels - 1)
else:
row = []
if isinstance(self.columns, MultiIndex):
if self.fmt.has_column_names and self.fmt.index:
row.append(single_column_table(self.columns.names))
else:
row.append('')
style = "text-align: %s;" % self.fmt.justify
row.extend([single_column_table(c, self.fmt.justify, style) for
c in self.columns])
else:
if self.fmt.index:
row.append(self.columns.name or '')
row.extend(self.columns)
return row
self.write('<thead>', indent)
row = []
indent += self.indent_delta
if isinstance(self.columns, MultiIndex):
template = 'colspan="%d" halign="left"'
if self.fmt.sparsify:
# GH3547
sentinel = com.sentinel_factory()
else:
sentinel = None
levels = self.columns.format(sparsify=sentinel,
adjoin=False, names=False)
level_lengths = _get_level_lengths(levels, sentinel)
inner_lvl = len(level_lengths) - 1
for lnum, (records, values) in enumerate(zip(level_lengths,
levels)):
if truncate_h:
# modify the header lines
ins_col = self.fmt.tr_col_num
if self.fmt.sparsify:
recs_new = {}
# Increment tags after ... col.
for tag, span in list(records.items()):
if tag >= ins_col:
recs_new[tag + 1] = span
elif tag + span > ins_col:
recs_new[tag] = span + 1
if lnum == inner_lvl:
values = values[:ins_col] + (u('...'),) + \
values[ins_col:]
else: # sparse col headers do not receive a ...
values = (values[:ins_col] + (values[ins_col - 1],) +
values[ins_col:])
else:
recs_new[tag] = span
# if ins_col lies between tags, all col headers get ...
if tag + span == ins_col:
recs_new[ins_col] = 1
values = values[:ins_col] + (u('...'),) + \
values[ins_col:]
records = recs_new
inner_lvl = len(level_lengths) - 1
if lnum == inner_lvl:
records[ins_col] = 1
else:
recs_new = {}
for tag, span in list(records.items()):
if tag >= ins_col:<|fim▁hole|> recs_new[tag] = span
recs_new[ins_col] = 1
records = recs_new
values = values[:ins_col] + [u('...')] + values[ins_col:]
name = self.columns.names[lnum]
row = [''] * (row_levels - 1) + ['' if name is None
else com.pprint_thing(name)]
if row == [""] and self.fmt.index is False:
row = []
tags = {}
j = len(row)
for i, v in enumerate(values):
if i in records:
if records[i] > 1:
tags[j] = template % records[i]
else:
continue
j += 1
row.append(v)
self.write_tr(row, indent, self.indent_delta, tags=tags,
header=True)
else:
col_row = _column_header()
align = self.fmt.justify
if truncate_h:
ins_col = row_levels + self.fmt.tr_col_num
col_row.insert(ins_col, '...')
self.write_tr(col_row, indent, self.indent_delta, header=True,
align=align)
if self.fmt.has_index_names:
row = [
x if x is not None else '' for x in self.frame.index.names
] + [''] * min(len(self.columns), self.max_cols)
if truncate_h:
ins_col = row_levels + self.fmt.tr_col_num
row.insert(ins_col, '')
self.write_tr(row, indent, self.indent_delta, header=True)
indent -= self.indent_delta
self.write('</thead>', indent)
return indent
def _write_body(self, indent):
self.write('<tbody>', indent)
indent += self.indent_delta
fmt_values = {}
for i in range(min(len(self.columns), self.max_cols)):
fmt_values[i] = self.fmt._format_col(i)
# write values
if self.fmt.index:
if isinstance(self.frame.index, MultiIndex):
self._write_hierarchical_rows(fmt_values, indent)
else:
self._write_regular_rows(fmt_values, indent)
else:
for i in range(len(self.frame)):
row = [fmt_values[j][i] for j in range(len(self.columns))]
self.write_tr(row, indent, self.indent_delta, tags=None)
indent -= self.indent_delta
self.write('</tbody>', indent)
indent -= self.indent_delta
return indent
def _write_regular_rows(self, fmt_values, indent):
truncate_h = self.fmt.truncate_h
truncate_v = self.fmt.truncate_v
ncols = len(self.fmt.tr_frame.columns)
nrows = len(self.fmt.tr_frame)
fmt = self.fmt._get_formatter('__index__')
if fmt is not None:
index_values = self.fmt.tr_frame.index.map(fmt)
else:
index_values = self.fmt.tr_frame.index.format()
row = []
for i in range(nrows):
if truncate_v and i == (self.fmt.tr_row_num):
str_sep_row = ['...' for ele in row]
self.write_tr(str_sep_row, indent, self.indent_delta, tags=None,
nindex_levels=1)
row = []
row.append(index_values[i])
row.extend(fmt_values[j][i] for j in range(ncols))
if truncate_h:
dot_col_ix = self.fmt.tr_col_num + 1
row.insert(dot_col_ix, '...')
self.write_tr(row, indent, self.indent_delta, tags=None,
nindex_levels=1)
def _write_hierarchical_rows(self, fmt_values, indent):
template = 'rowspan="%d" valign="top"'
truncate_h = self.fmt.truncate_h
truncate_v = self.fmt.truncate_v
frame = self.fmt.tr_frame
ncols = len(frame.columns)
nrows = len(frame)
row_levels = self.frame.index.nlevels
idx_values = frame.index.format(sparsify=False, adjoin=False, names=False)
idx_values = lzip(*idx_values)
if self.fmt.sparsify:
# GH3547
sentinel = com.sentinel_factory()
levels = frame.index.format(sparsify=sentinel, adjoin=False, names=False)
level_lengths = _get_level_lengths(levels, sentinel)
inner_lvl = len(level_lengths) - 1
if truncate_v:
# Insert ... row and adjust idx_values and
# level_lengths to take this into account.
ins_row = self.fmt.tr_row_num
for lnum, records in enumerate(level_lengths):
rec_new = {}
for tag, span in list(records.items()):
if tag >= ins_row:
rec_new[tag + 1] = span
elif tag + span > ins_row:
rec_new[tag] = span + 1
dot_row = list(idx_values[ins_row - 1])
dot_row[-1] = u('...')
idx_values.insert(ins_row, tuple(dot_row))
else:
rec_new[tag] = span
# If ins_row lies between tags, all cols idx cols receive ...
if tag + span == ins_row:
rec_new[ins_row] = 1
if lnum == 0:
idx_values.insert(ins_row, tuple([u('...')]*len(level_lengths)))
level_lengths[lnum] = rec_new
level_lengths[inner_lvl][ins_row] = 1
for ix_col in range(len(fmt_values)):
fmt_values[ix_col].insert(ins_row, '...')
nrows += 1
for i in range(nrows):
row = []
tags = {}
sparse_offset = 0
j = 0
for records, v in zip(level_lengths, idx_values[i]):
if i in records:
if records[i] > 1:
tags[j] = template % records[i]
else:
sparse_offset += 1
continue
j += 1
row.append(v)
row.extend(fmt_values[j][i] for j in range(ncols))
if truncate_h:
row.insert(row_levels - sparse_offset + self.fmt.tr_col_num, '...')
self.write_tr(row, indent, self.indent_delta, tags=tags,
nindex_levels=len(levels) - sparse_offset)
else:
for i in range(len(frame)):
idx_values = list(zip(*frame.index.format(sparsify=False,
adjoin=False,
names=False)))
row = []
row.extend(idx_values[i])
row.extend(fmt_values[j][i] for j in range(ncols))
if truncate_h:
row.insert(row_levels + self.fmt.tr_col_num, '...')
self.write_tr(row, indent, self.indent_delta, tags=None,
nindex_levels=frame.index.nlevels)
def _get_level_lengths(levels, sentinel=''):
from itertools import groupby
def _make_grouper():
record = {'count': 0}
def grouper(x):
if x != sentinel:
record['count'] += 1
return record['count']
return grouper
result = []
for lev in levels:
i = 0
f = _make_grouper()
recs = {}
for key, gpr in groupby(lev, f):
values = list(gpr)
recs[i] = len(values)
i += len(values)
result.append(recs)
return result
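# Worked example (assumed, not in the original source): for a single sparsified
# level ['a', SENTINEL, 'b'] with that sentinel, the grouper keeps 'a' and the
# following sentinel in one group and starts a new group at 'b', so
# _get_level_lengths returns [{0: 2, 2: 1}] -- a span of 2 starting at
# position 0, then a span of 1 starting at position 2.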
class CSVFormatter(object):
def __init__(self, obj, path_or_buf=None, sep=",", na_rep='', float_format=None,
cols=None, header=True, index=True, index_label=None,
mode='w', nanRep=None, encoding=None, quoting=None,
line_terminator='\n', chunksize=None, engine=None,
tupleize_cols=False, quotechar='"', date_format=None,
doublequote=True, escapechar=None, decimal='.'):
self.engine = engine # remove for 0.13
self.obj = obj
if path_or_buf is None:
path_or_buf = StringIO()
self.path_or_buf = path_or_buf
self.sep = sep
self.na_rep = na_rep
self.float_format = float_format
self.decimal = decimal
self.header = header
self.index = index
self.index_label = index_label
self.mode = mode
self.encoding = encoding
if quoting is None:
quoting = csv.QUOTE_MINIMAL
self.quoting = quoting
if quoting == csv.QUOTE_NONE:
# prevents crash in _csv
quotechar = None
self.quotechar = quotechar
self.doublequote = doublequote
self.escapechar = escapechar
self.line_terminator = line_terminator
self.date_format = date_format
# GH3457
if not self.obj.columns.is_unique and engine == 'python':
raise NotImplementedError("columns.is_unique == False not "
"supported with engine='python'")
self.tupleize_cols = tupleize_cols
self.has_mi_columns = isinstance(obj.columns, MultiIndex
) and not self.tupleize_cols
# validate mi options
if self.has_mi_columns:
if cols is not None:
raise TypeError("cannot specify cols with a MultiIndex on the "
"columns")
if cols is not None:
if isinstance(cols, Index):
cols = cols.to_native_types(na_rep=na_rep,
float_format=float_format,
date_format=date_format,
quoting=self.quoting)
else:
cols = np.asarray(list(cols))
self.obj = self.obj.loc[:, cols]
# update columns to include possible multiplicity of dupes
# and make sure cols is just a list of labels
cols = self.obj.columns
if isinstance(cols, Index):
cols = cols.to_native_types(na_rep=na_rep,
float_format=float_format,
date_format=date_format,
quoting=self.quoting)
else:
cols = np.asarray(list(cols))
# save it
self.cols = cols
# preallocate data 2d list
self.blocks = self.obj._data.blocks
ncols = sum(b.shape[0] for b in self.blocks)
self.data = [None] * ncols
if chunksize is None:
chunksize = (100000 // (len(self.cols) or 1)) or 1
self.chunksize = int(chunksize)
self.data_index = obj.index
if isinstance(obj.index, PeriodIndex):
self.data_index = obj.index.to_timestamp()
if (isinstance(self.data_index, DatetimeIndex) and
date_format is not None):
self.data_index = Index([x.strftime(date_format)
if notnull(x) else ''
for x in self.data_index])
self.nlevels = getattr(self.data_index, 'nlevels', 1)
if not index:
self.nlevels = 0
# original Python implementation of df.to_csv
# invoked by df.to_csv(engine='python')
def _helper_csv(self, writer, na_rep=None, cols=None,
header=True, index=True,
index_label=None, float_format=None, date_format=None):
if cols is None:
cols = self.columns
has_aliases = isinstance(header, (tuple, list, np.ndarray, Index))
if has_aliases or header:
if index:
# should write something for index label
if index_label is not False:
if index_label is None:
if isinstance(self.obj.index, MultiIndex):
index_label = []
for i, name in enumerate(self.obj.index.names):
if name is None:
name = ''
index_label.append(name)
else:
index_label = self.obj.index.name
if index_label is None:
index_label = ['']
else:
index_label = [index_label]
elif not isinstance(index_label,
(list, tuple, np.ndarray, Index)):
# given a string for a DF with Index
index_label = [index_label]
encoded_labels = list(index_label)
else:
encoded_labels = []
if has_aliases:
if len(header) != len(cols):
raise ValueError(('Writing %d cols but got %d aliases'
% (len(cols), len(header))))
else:
write_cols = header
else:
write_cols = cols
encoded_cols = list(write_cols)
writer.writerow(encoded_labels + encoded_cols)
else:
encoded_cols = list(cols)
writer.writerow(encoded_cols)
if date_format is None:
date_formatter = lambda x: Timestamp(x)._repr_base
else:
def strftime_with_nulls(x):
x = Timestamp(x)
if notnull(x):
return x.strftime(date_format)
date_formatter = lambda x: strftime_with_nulls(x)
data_index = self.obj.index
if isinstance(self.obj.index, PeriodIndex):
data_index = self.obj.index.to_timestamp()
if isinstance(data_index, DatetimeIndex) and date_format is not None:
data_index = Index([date_formatter(x) for x in data_index])
values = self.obj.copy()
values.index = data_index
values.columns = values.columns.to_native_types(
na_rep=na_rep,
float_format=float_format,
date_format=date_format,
quoting=self.quoting)
values = values[cols]
series = {}
for k, v in compat.iteritems(values._series):
series[k] = v.values
nlevels = getattr(data_index, 'nlevels', 1)
for j, idx in enumerate(data_index):
row_fields = []
if index:
if nlevels == 1:
row_fields = [idx]
else: # handle MultiIndex
row_fields = list(idx)
for i, col in enumerate(cols):
val = series[col][j]
if lib.checknull(val):
val = na_rep
if float_format is not None and com.is_float(val):
val = float_format % val
elif isinstance(val, (np.datetime64, Timestamp)):
val = date_formatter(val)
row_fields.append(val)
writer.writerow(row_fields)
def save(self):
# create the writer & save
if hasattr(self.path_or_buf, 'write'):
f = self.path_or_buf
close = False
else:
f = com._get_handle(self.path_or_buf, self.mode,
encoding=self.encoding)
close = True
try:
writer_kwargs = dict(lineterminator=self.line_terminator,
delimiter=self.sep, quoting=self.quoting,
doublequote=self.doublequote,
escapechar=self.escapechar,
quotechar=self.quotechar)
if self.encoding is not None:
writer_kwargs['encoding'] = self.encoding
self.writer = com.UnicodeWriter(f, **writer_kwargs)
else:
self.writer = csv.writer(f, **writer_kwargs)
if self.engine == 'python':
# to be removed in 0.13
self._helper_csv(self.writer, na_rep=self.na_rep,
float_format=self.float_format,
cols=self.cols, header=self.header,
index=self.index,
index_label=self.index_label,
date_format=self.date_format)
else:
self._save()
finally:
if close:
f.close()
def _save_header(self):
writer = self.writer
obj = self.obj
index_label = self.index_label
cols = self.cols
has_mi_columns = self.has_mi_columns
header = self.header
encoded_labels = []
has_aliases = isinstance(header, (tuple, list, np.ndarray, Index))
if not (has_aliases or self.header):
return
if has_aliases:
if len(header) != len(cols):
raise ValueError(('Writing %d cols but got %d aliases'
% (len(cols), len(header))))
else:
write_cols = header
else:
write_cols = cols
if self.index:
# should write something for index label
if index_label is not False:
if index_label is None:
if isinstance(obj.index, MultiIndex):
index_label = []
for i, name in enumerate(obj.index.names):
if name is None:
name = ''
index_label.append(name)
else:
index_label = obj.index.name
if index_label is None:
index_label = ['']
else:
index_label = [index_label]
elif not isinstance(index_label, (list, tuple, np.ndarray, Index)):
# given a string for a DF with Index
index_label = [index_label]
encoded_labels = list(index_label)
else:
encoded_labels = []
if not has_mi_columns:
encoded_labels += list(write_cols)
# write out the mi
if has_mi_columns:
columns = obj.columns
# write out the names for each level, then ALL of the values for
# each level
for i in range(columns.nlevels):
# we need at least 1 index column to write our col names
col_line = []
if self.index:
# name is the first column
col_line.append(columns.names[i])
if isinstance(index_label, list) and len(index_label) > 1:
col_line.extend([''] * (len(index_label) - 1))
col_line.extend(columns.get_level_values(i))
writer.writerow(col_line)
# add blanks for the columns, so that we
# have consistent seps
encoded_labels.extend([''] * len(columns))
# write out the index label line
writer.writerow(encoded_labels)
def _save(self):
self._save_header()
nrows = len(self.data_index)
# write in chunksize bites
chunksize = self.chunksize
chunks = int(nrows / chunksize) + 1
for i in range(chunks):
start_i = i * chunksize
end_i = min((i + 1) * chunksize, nrows)
if start_i >= end_i:
break
self._save_chunk(start_i, end_i)
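# Sketch of the chunking above (hypothetical numbers): with 250000 rows and a
# derived chunksize of 20000, chunks = int(250000 / 20000) + 1 = 13, so twelve
# full 20000-row chunks are written followed by one final 10000-row chunk.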
def _save_chunk(self, start_i, end_i):
data_index = self.data_index
# create the data for a chunk
slicer = slice(start_i, end_i)
for i in range(len(self.blocks)):
b = self.blocks[i]
d = b.to_native_types(slicer=slicer,
na_rep=self.na_rep,
float_format=self.float_format,
decimal=self.decimal,
date_format=self.date_format,
quoting=self.quoting)
for col_loc, col in zip(b.mgr_locs, d):
# self.data is a preallocated list
self.data[col_loc] = col
ix = data_index.to_native_types(slicer=slicer,
na_rep=self.na_rep,
float_format=self.float_format,
date_format=self.date_format,
quoting=self.quoting)
lib.write_csv_rows(self.data, ix, self.nlevels, self.cols, self.writer)
# from collections import namedtuple
# ExcelCell = namedtuple("ExcelCell",
# 'row, col, val, style, mergestart, mergeend')
class ExcelCell(object):
__fields__ = ('row', 'col', 'val', 'style', 'mergestart', 'mergeend')
__slots__ = __fields__
def __init__(self, row, col, val,
style=None, mergestart=None, mergeend=None):
self.row = row
self.col = col
self.val = val
self.style = style
self.mergestart = mergestart
self.mergeend = mergeend
header_style = {"font": {"bold": True},
"borders": {"top": "thin",
"right": "thin",
"bottom": "thin",
"left": "thin"},
"alignment": {"horizontal": "center", "vertical": "top"}}
class ExcelFormatter(object):
"""
Class for formatting a DataFrame to a list of ExcelCells,
Parameters
----------
df : dataframe
na_rep : string, default ''
    Missing value (NA) representation
float_format : string, default None
Format string for floating point numbers
cols : sequence, optional
Columns to write
header : boolean or list of string, default True
Write out column names. If a list of string is given it is
assumed to be aliases for the column names
index : boolean, default True
output row names (index)
index_label : string or sequence, default None
Column label for index column(s) if desired. If None is given, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the DataFrame uses MultiIndex.
merge_cells : boolean, default False
Format MultiIndex and Hierarchical Rows as merged cells.
inf_rep : string, default `'inf'`
representation for np.inf values (which aren't representable in Excel)
A `'-'` sign will be added in front of -inf.
"""
def __init__(self, df, na_rep='', float_format=None, cols=None,
header=True, index=True, index_label=None, merge_cells=False,
inf_rep='inf'):
self.df = df
self.rowcounter = 0
self.na_rep = na_rep
self.columns = cols
if cols is None:
self.columns = df.columns
self.float_format = float_format
self.index = index
self.index_label = index_label
self.header = header
self.merge_cells = merge_cells
self.inf_rep = inf_rep
def _format_value(self, val):
if lib.checknull(val):
val = self.na_rep
elif com.is_float(val):
if np.isposinf(val):
val = self.inf_rep
elif np.isneginf(val):
val = '-%s' % self.inf_rep
elif self.float_format is not None:
val = float(self.float_format % val)
return val
def _format_header_mi(self):
has_aliases = isinstance(self.header, (tuple, list, np.ndarray, Index))
if not(has_aliases or self.header):
return
columns = self.columns
level_strs = columns.format(sparsify=True, adjoin=False, names=False)
level_lengths = _get_level_lengths(level_strs)
coloffset = 0
lnum = 0
if self.index and isinstance(self.df.index, MultiIndex):
coloffset = len(self.df.index[0]) - 1
if self.merge_cells:
# Format multi-index as a merged cells.
for lnum in range(len(level_lengths)):
name = columns.names[lnum]
yield ExcelCell(lnum, coloffset, name, header_style)
for lnum, (spans, levels, labels) in enumerate(zip(level_lengths,
columns.levels,
columns.labels)
):
values = levels.take(labels)
for i in spans:
if spans[i] > 1:
yield ExcelCell(lnum,
coloffset + i + 1,
values[i],
header_style,
lnum,
coloffset + i + spans[i])
else:
yield ExcelCell(lnum,
coloffset + i + 1,
values[i],
header_style)
else:
# Format in legacy format with dots to indicate levels.
for i, values in enumerate(zip(*level_strs)):
v = ".".join(map(com.pprint_thing, values))
yield ExcelCell(lnum, coloffset + i + 1, v, header_style)
self.rowcounter = lnum
def _format_header_regular(self):
has_aliases = isinstance(self.header, (tuple, list, np.ndarray, Index))
if has_aliases or self.header:
coloffset = 0
if self.index:
coloffset = 1
if isinstance(self.df.index, MultiIndex):
coloffset = len(self.df.index[0])
colnames = self.columns
if has_aliases:
if len(self.header) != len(self.columns):
raise ValueError(('Writing %d cols but got %d aliases'
% (len(self.columns), len(self.header))))
else:
colnames = self.header
for colindex, colname in enumerate(colnames):
yield ExcelCell(self.rowcounter, colindex + coloffset, colname,
header_style)
def _format_header(self):
if isinstance(self.columns, MultiIndex):
gen = self._format_header_mi()
else:
gen = self._format_header_regular()
gen2 = ()
if self.df.index.names:
row = [x if x is not None else ''
for x in self.df.index.names] + [''] * len(self.columns)
if reduce(lambda x, y: x and y, map(lambda x: x != '', row)):
gen2 = (ExcelCell(self.rowcounter, colindex, val, header_style)
for colindex, val in enumerate(row))
self.rowcounter += 1
return itertools.chain(gen, gen2)
def _format_body(self):
if isinstance(self.df.index, MultiIndex):
return self._format_hierarchical_rows()
else:
return self._format_regular_rows()
def _format_regular_rows(self):
has_aliases = isinstance(self.header, (tuple, list, np.ndarray, Index))
if has_aliases or self.header:
self.rowcounter += 1
coloffset = 0
# output index and index_label?
if self.index:
# check aliases
# if list only take first as this is not a MultiIndex
if self.index_label and isinstance(self.index_label,
(list, tuple, np.ndarray, Index)):
index_label = self.index_label[0]
# if string good to go
elif self.index_label and isinstance(self.index_label, str):
index_label = self.index_label
else:
index_label = self.df.index.names[0]
if index_label and self.header is not False:
if self.merge_cells:
yield ExcelCell(self.rowcounter,
0,
index_label,
header_style)
self.rowcounter += 1
else:
yield ExcelCell(self.rowcounter - 1,
0,
index_label,
header_style)
# write index_values
index_values = self.df.index
if isinstance(self.df.index, PeriodIndex):
index_values = self.df.index.to_timestamp()
coloffset = 1
for idx, idxval in enumerate(index_values):
yield ExcelCell(self.rowcounter + idx, 0, idxval, header_style)
# Get a frame that will account for any duplicates in the column names.
col_mapped_frame = self.df.loc[:, self.columns]
# Write the body of the frame data series by series.
for colidx in range(len(self.columns)):
series = col_mapped_frame.iloc[:, colidx]
for i, val in enumerate(series):
yield ExcelCell(self.rowcounter + i, colidx + coloffset, val)
def _format_hierarchical_rows(self):
has_aliases = isinstance(self.header, (tuple, list, np.ndarray, Index))
if has_aliases or self.header:
self.rowcounter += 1
gcolidx = 0
if self.index:
index_labels = self.df.index.names
# check for aliases
if self.index_label and isinstance(self.index_label,
(list, tuple, np.ndarray, Index)):
index_labels = self.index_label
# if index labels are not empty go ahead and dump
if (any(x is not None for x in index_labels)
and self.header is not False):
if not self.merge_cells:
self.rowcounter -= 1
for cidx, name in enumerate(index_labels):
yield ExcelCell(self.rowcounter,
cidx,
name,
header_style)
self.rowcounter += 1
if self.merge_cells:
# Format hierarchical rows as merged cells.
level_strs = self.df.index.format(sparsify=True, adjoin=False,
names=False)
level_lengths = _get_level_lengths(level_strs)
for spans, levels, labels in zip(level_lengths,
self.df.index.levels,
self.df.index.labels):
values = levels.take(labels)
for i in spans:
if spans[i] > 1:
yield ExcelCell(self.rowcounter + i,
gcolidx,
values[i],
header_style,
self.rowcounter + i + spans[i] - 1,
gcolidx)
else:
yield ExcelCell(self.rowcounter + i,
gcolidx,
values[i],
header_style)
gcolidx += 1
else:
# Format hierarchical rows with non-merged values.
for indexcolvals in zip(*self.df.index):
for idx, indexcolval in enumerate(indexcolvals):
yield ExcelCell(self.rowcounter + idx,
gcolidx,
indexcolval,
header_style)
gcolidx += 1
# Get a frame that will account for any duplicates in the column names.
col_mapped_frame = self.df.loc[:, self.columns]
# Write the body of the frame data series by series.
for colidx in range(len(self.columns)):
series = col_mapped_frame.iloc[:, colidx]
for i, val in enumerate(series):
yield ExcelCell(self.rowcounter + i, gcolidx + colidx, val)
def get_formatted_cells(self):
for cell in itertools.chain(self._format_header(),
self._format_body()):
cell.val = self._format_value(cell.val)
yield cell
# ----------------------------------------------------------------------
# Array formatters
def format_array(values, formatter, float_format=None, na_rep='NaN',
digits=None, space=None, justify='right'):
if com.is_float_dtype(values.dtype):
fmt_klass = FloatArrayFormatter
elif com.is_integer_dtype(values.dtype):
fmt_klass = IntArrayFormatter
elif com.is_datetime64_dtype(values.dtype):
fmt_klass = Datetime64Formatter
elif com.is_timedelta64_dtype(values.dtype):
fmt_klass = Timedelta64Formatter
else:
fmt_klass = GenericArrayFormatter
if space is None:
space = get_option("display.column_space")
if float_format is None:
float_format = get_option("display.float_format")
if digits is None:
digits = get_option("display.precision")
fmt_obj = fmt_klass(values, digits=digits, na_rep=na_rep,
float_format=float_format,
formatter=formatter, space=space,
justify=justify)
return fmt_obj.get_result()
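# Usage sketch (assumed defaults): with the stock display options, e.g.
# format_array(np.array([1.0, 2.5]), None) dispatches to FloatArrayFormatter
# and yields right-justified strings such as [' 1.0', ' 2.5'] after zero
# trimming and fixed-width padding.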
class GenericArrayFormatter(object):
def __init__(self, values, digits=7, formatter=None, na_rep='NaN',
space=12, float_format=None, justify='right'):
self.values = values
self.digits = digits
self.na_rep = na_rep
self.space = space
self.formatter = formatter
self.float_format = float_format
self.justify = justify
def get_result(self):
fmt_values = self._format_strings()
return _make_fixed_width(fmt_values, self.justify)
def _format_strings(self):
if self.float_format is None:
float_format = get_option("display.float_format")
if float_format is None:
fmt_str = '%% .%dg' % get_option("display.precision")
float_format = lambda x: fmt_str % x
else:
float_format = self.float_format
formatter = self.formatter if self.formatter is not None else \
(lambda x: com.pprint_thing(x, escape_chars=('\t', '\r', '\n')))
def _format(x):
if self.na_rep is not None and lib.checknull(x):
if x is None:
return 'None'
return self.na_rep
elif isinstance(x, PandasObject):
return '%s' % x
else:
# object dtype
return '%s' % formatter(x)
vals = self.values
is_float = lib.map_infer(vals, com.is_float) & notnull(vals)
leading_space = is_float.any()
fmt_values = []
for i, v in enumerate(vals):
if not is_float[i] and leading_space:
fmt_values.append(' %s' % _format(v))
elif is_float[i]:
fmt_values.append(float_format(v))
else:
fmt_values.append(' %s' % _format(v))
return fmt_values
class FloatArrayFormatter(GenericArrayFormatter):
"""
"""
def __init__(self, *args, **kwargs):
GenericArrayFormatter.__init__(self, *args, **kwargs)
if self.float_format is not None and self.formatter is None:
self.formatter = self.float_format
def _format_with(self, fmt_str):
def _val(x, threshold):
if notnull(x):
if (threshold is None or
abs(x) > get_option("display.chop_threshold")):
return fmt_str % x
else:
if fmt_str.endswith("e"): # engineering format
return "0"
else:
return fmt_str % 0
else:
return self.na_rep
threshold = get_option("display.chop_threshold")
fmt_values = [_val(x, threshold) for x in self.values]
return _trim_zeros(fmt_values, self.na_rep)
def _format_strings(self):
if self.formatter is not None:
fmt_values = [self.formatter(x) for x in self.values]
else:
fmt_str = '%% .%df' % (self.digits - 1)
fmt_values = self._format_with(fmt_str)
if len(fmt_values) > 0:
maxlen = max(len(x) for x in fmt_values)
else:
maxlen = 0
too_long = maxlen > self.digits + 5
abs_vals = np.abs(self.values)
# this is pretty arbitrary for now
has_large_values = (abs_vals > 1e8).any()
has_small_values = ((abs_vals < 10 ** (-self.digits+1)) &
(abs_vals > 0)).any()
if too_long and has_large_values:
fmt_str = '%% .%de' % (self.digits - 1)
fmt_values = self._format_with(fmt_str)
elif has_small_values:
fmt_str = '%% .%de' % (self.digits - 1)
fmt_values = self._format_with(fmt_str)
return fmt_values
class IntArrayFormatter(GenericArrayFormatter):
def _format_strings(self):
formatter = self.formatter or (lambda x: '% d' % x)
fmt_values = [formatter(x) for x in self.values]
return fmt_values
class Datetime64Formatter(GenericArrayFormatter):
def __init__(self, values, nat_rep='NaT', date_format=None, **kwargs):
super(Datetime64Formatter, self).__init__(values, **kwargs)
self.nat_rep = nat_rep
self.date_format = date_format
def _format_strings(self):
# we may have a tz, if so, then need to process element-by-element
# when DatetimeBlockWithTimezones is a reality this could be fixed
values = self.values
if not isinstance(values, DatetimeIndex):
values = DatetimeIndex(values)
if values.tz is None:
fmt_values = format_array_from_datetime(values.asi8.ravel(),
format=_get_format_datetime64_from_values(values, self.date_format),
na_rep=self.nat_rep).reshape(values.shape)
fmt_values = fmt_values.tolist()
else:
values = values.asobject
is_dates_only = _is_dates_only(values)
formatter = (self.formatter or _get_format_datetime64(is_dates_only, values, date_format=self.date_format))
fmt_values = [ formatter(x) for x in self.values ]
return fmt_values
def _is_dates_only(values):
# return a boolean if we are only dates (and don't have a timezone)
values = DatetimeIndex(values)
if values.tz is not None:
return False
values_int = values.asi8
consider_values = values_int != iNaT
one_day_nanos = (86400 * 1e9)
even_days = np.logical_and(consider_values, values_int % one_day_nanos != 0).sum() == 0
if even_days:
return True
return False
def _format_datetime64(x, tz=None, nat_rep='NaT'):
if x is None or lib.checknull(x):
return nat_rep
if tz is not None or not isinstance(x, Timestamp):
x = Timestamp(x, tz=tz)
return str(x)
def _format_datetime64_dateonly(x, nat_rep='NaT', date_format=None):
if x is None or lib.checknull(x):
return nat_rep
if not isinstance(x, Timestamp):
x = Timestamp(x)
if date_format:
return x.strftime(date_format)
else:
return x._date_repr
def _get_format_datetime64(is_dates_only, nat_rep='NaT', date_format=None):
if is_dates_only:
return lambda x, tz=None: _format_datetime64_dateonly(x,
nat_rep=nat_rep,
date_format=date_format)
else:
return lambda x, tz=None: _format_datetime64(x, tz=tz, nat_rep=nat_rep)
def _get_format_datetime64_from_values(values, date_format):
""" given values and a date_format, return a string format """
is_dates_only = _is_dates_only(values)
if is_dates_only:
return date_format or "%Y-%m-%d"
return None
class Timedelta64Formatter(GenericArrayFormatter):
def __init__(self, values, nat_rep='NaT', box=False, **kwargs):
super(Timedelta64Formatter, self).__init__(values, **kwargs)
self.nat_rep = nat_rep
self.box = box
def _format_strings(self):
formatter = self.formatter or _get_format_timedelta64(self.values, nat_rep=self.nat_rep,
box=self.box)
fmt_values = [formatter(x) for x in self.values]
return fmt_values
def _get_format_timedelta64(values, nat_rep='NaT', box=False):
"""
Return a formatter function for a range of timedeltas.
These will all have the same format argument
If box, then show the return in quotes
"""
values_int = values.astype(np.int64)
consider_values = values_int != iNaT
one_day_nanos = (86400 * 1e9)
even_days = np.logical_and(consider_values, values_int % one_day_nanos != 0).sum() == 0
all_sub_day = np.logical_and(consider_values, np.abs(values_int) >= one_day_nanos).sum() == 0
if even_days:
format = 'even_day'
elif all_sub_day:
format = 'sub_day'
else:
format = 'long'
def _formatter(x):
if x is None or lib.checknull(x):
return nat_rep
if not isinstance(x, Timedelta):
x = Timedelta(x)
result = x._repr_base(format=format)
if box:
result = "'{0}'".format(result)
return result
return _formatter
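# Example (assumed behavior): when every non-NaT value is a whole number of
# days, format is 'even_day' and a value like Timedelta('2 days') renders
# roughly as '2 days'; with box=True the result is wrapped as "'2 days'".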
def _make_fixed_width(strings, justify='right', minimum=None):
if len(strings) == 0 or justify == 'all':
return strings
_strlen = _strlen_func()
max_len = np.max([_strlen(x) for x in strings])
if minimum is not None:
max_len = max(minimum, max_len)
conf_max = get_option("display.max_colwidth")
if conf_max is not None and max_len > conf_max:
max_len = conf_max
if justify == 'left':
justfunc = lambda self, x: self.ljust(x)
else:
justfunc = lambda self, x: self.rjust(x)
def just(x):
eff_len = max_len
if conf_max is not None:
if (conf_max > 3) & (_strlen(x) > max_len):
x = x[:eff_len - 3] + '...'
return justfunc(x, eff_len)
result = [just(x) for x in strings]
return result
def _trim_zeros(str_floats, na_rep='NaN'):
"""
Trims zeros and decimal points.
"""
trimmed = str_floats
def _cond(values):
non_na = [x for x in values if x != na_rep]
return (len(non_na) > 0 and all([x.endswith('0') for x in non_na]) and
not(any([('e' in x) or ('E' in x) for x in non_na])))
while _cond(trimmed):
trimmed = [x[:-1] if x != na_rep else x for x in trimmed]
# trim decimal points
return [x[:-1] if x.endswith('.') and x != na_rep else x for x in trimmed]
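# Example (assumed): _trim_zeros([' 1.500', ' 2.000']) strips trailing zeros
# column-wise while every non-NA value still ends in '0', giving
# [' 1.5', ' 2.0']; a value left ending in a bare '.' would lose that too.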
def single_column_table(column, align=None, style=None):
table = '<table'
if align is not None:
table += (' align="%s"' % align)
if style is not None:
table += (' style="%s"' % style)
table += '><tbody>'
for i in column:
table += ('<tr><td>%s</td></tr>' % str(i))
table += '</tbody></table>'
return table
def single_row_table(row): # pragma: no cover
table = '<table><tbody><tr>'
for i in row:
table += ('<td>%s</td>' % str(i))
table += '</tr></tbody></table>'
return table
def _has_names(index):
if isinstance(index, MultiIndex):
return any([x is not None for x in index.names])
else:
return index.name is not None
# ------------------------------------------------------------------------------
# Global formatting options
_initial_defencoding = None
def detect_console_encoding():
"""
Try to find the most capable encoding supported by the console.
Slightly modified from the way IPython handles the same issue.
"""
import locale
global _initial_defencoding
encoding = None
try:
encoding = sys.stdout.encoding or sys.stdin.encoding
except AttributeError:
pass
# try again for something better
if not encoding or 'ascii' in encoding.lower():
try:
encoding = locale.getpreferredencoding()
except Exception:
pass
# when all else fails. this will usually be "ascii"
if not encoding or 'ascii' in encoding.lower():
encoding = sys.getdefaultencoding()
# GH3360, save the reported defencoding at import time
# MPL backends may change it. Make available for debugging.
if not _initial_defencoding:
_initial_defencoding = sys.getdefaultencoding()
return encoding
def get_console_size():
"""Return console size as tuple = (width, height).
Returns (None,None) in non-interactive session.
"""
display_width = get_option('display.width')
# deprecated.
display_height = get_option('display.height', silent=True)
# Consider
# interactive shell terminal, can detect term size
# interactive non-shell terminal (ipnb/ipqtconsole), cannot detect term
# size non-interactive script, should disregard term size
# in addition
# width,height have default values, but setting to 'None' signals
# should use Auto-Detection, But only in interactive shell-terminal.
# Simple. yeah.
if com.in_interactive_session():
if com.in_ipython_frontend():
# sane defaults for interactive non-shell terminal
# match default for width,height in config_init
from pandas.core.config import get_default_val
terminal_width = get_default_val('display.width')
terminal_height = get_default_val('display.height')
else:
# pure terminal
terminal_width, terminal_height = get_terminal_size()
else:
terminal_width, terminal_height = None, None
# Note if the User sets width/Height to None (auto-detection)
# and we're in a script (non-inter), this will return (None,None)
# caller needs to deal.
return (display_width or terminal_width, display_height or terminal_height)
class EngFormatter(object):
"""
Formats float values according to engineering format.
Based on matplotlib.ticker.EngFormatter
"""
# The SI engineering prefixes
ENG_PREFIXES = {
-24: "y",
-21: "z",
-18: "a",
-15: "f",
-12: "p",
-9: "n",
-6: "u",
-3: "m",
0: "",
3: "k",
6: "M",
9: "G",
12: "T",
15: "P",
18: "E",
21: "Z",
24: "Y"
}
def __init__(self, accuracy=None, use_eng_prefix=False):
self.accuracy = accuracy
self.use_eng_prefix = use_eng_prefix
def __call__(self, num):
""" Formats a number in engineering notation, appending a letter
representing the power of 1000 of the original number. Some examples:
>>> format_eng(0) # for self.accuracy = 0
' 0'
>>> format_eng(1000000) # for self.accuracy = 1,
# self.use_eng_prefix = True
' 1.0M'
>>> format_eng("-1e-6") # for self.accuracy = 2
# self.use_eng_prefix = False
'-1.00E-06'
@param num: the value to represent
@type num: either a numeric value or a string that can be converted to
a numeric value (as per decimal.Decimal constructor)
@return: engineering formatted string
"""
import decimal
import math
dnum = decimal.Decimal(str(num))
sign = 1
if dnum < 0: # pragma: no cover
sign = -1
dnum = -dnum
if dnum != 0:
pow10 = decimal.Decimal(int(math.floor(dnum.log10() / 3) * 3))
else:
pow10 = decimal.Decimal(0)
pow10 = pow10.min(max(self.ENG_PREFIXES.keys()))
pow10 = pow10.max(min(self.ENG_PREFIXES.keys()))
int_pow10 = int(pow10)
if self.use_eng_prefix:
prefix = self.ENG_PREFIXES[int_pow10]
else:
if int_pow10 < 0:
prefix = 'E-%02d' % (-int_pow10)
else:
prefix = 'E+%02d' % int_pow10
mant = sign * dnum / (10 ** pow10)
if self.accuracy is None: # pragma: no cover
format_str = u("% g%s")
else:
format_str = (u("%% .%if%%s") % self.accuracy)
formatted = format_str % (mant, prefix)
return formatted # .strip()
def set_eng_float_format(accuracy=3, use_eng_prefix=False):
"""
Alter default behavior on how float is formatted in DataFrame.
Format float in engineering format. By accuracy, we mean the number of
decimal digits after the floating point.
See also EngFormatter.
"""
set_option("display.float_format", EngFormatter(accuracy, use_eng_prefix))
set_option("display.column_space", max(12, accuracy + 9))
def _put_lines(buf, lines):
if any(isinstance(x, compat.text_type) for x in lines):
lines = [compat.text_type(x) for x in lines]
buf.write('\n'.join(lines))
def _binify(cols, line_width):
adjoin_width = 1
bins = []
curr_width = 0
i_last_column = len(cols) - 1
for i, w in enumerate(cols):
w_adjoined = w + adjoin_width
curr_width += w_adjoined
if i_last_column == i:
wrap = curr_width + 1 > line_width and i > 0
else:
wrap = curr_width + 2 > line_width and i > 0
if wrap:
bins.append(i)
curr_width = w_adjoined
bins.append(len(cols))
return bins
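# Worked example (assumed numbers): _binify([10, 10, 10], 25) accumulates
# adjoined widths 11, 22, 33; the last column forces a wrap (33 + 1 > 25), so
# the result is [2, 3]: columns 0-1 on the first line, column 2 on the next.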
if __name__ == '__main__':
arr = np.array([746.03, 0.00, 5620.00, 1592.36])
# arr = np.array([11111111.1, 1.55])
# arr = [314200.0034, 1.4125678]
arr = np.array([327763.3119, 345040.9076, 364460.9915, 398226.8688,
383800.5172, 433442.9262, 539415.0568, 568590.4108,
599502.4276, 620921.8593, 620898.5294, 552427.1093,
555221.2193, 519639.7059, 388175.7, 379199.5854,
614898.25, 504833.3333, 560600., 941214.2857,
1134250., 1219550., 855736.85, 1042615.4286,
722621.3043, 698167.1818, 803750.])
fmt = FloatArrayFormatter(arr, digits=7)
print(fmt.get_result())<|fim▁end|>
|
recs_new[tag + 1] = span
else:
|
<|file_name|>Wave.java<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2009-2013 Bengt Martensson.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see http://www.gnu.org/licenses/.
*/
package org.harctoolbox.IrpMaster;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
/**
* This class generates (or analyzes) a wave audio file that can be played
* on standard audio equipment and fed to a pair of anti-parallel double IR sending diodes,
* which can thus control IR equipment.
*
*
* @see <a href="http://www.lirc.org/html/audio.html">http://www.lirc.org/html/audio.html</a>
*/
public class Wave {
private static int debug = 0; // presently not used
private static int epsilon8Bit = 2;
private static int epsilon16Bit = 257;
private static JCommander argumentParser;
private static CommandLineArgs commandLineArgs = new CommandLineArgs();
/**
* Returns a line to the audio mixer on the local machine, suitable for sound with
* the parameter values given. When not needed, the user should close the line with its close()-function.
*
* @param audioFormat
* @return open audio line
* @throws LineUnavailableException
*/
public static SourceDataLine getLine(AudioFormat audioFormat) throws LineUnavailableException {
SourceDataLine line = AudioSystem.getSourceDataLine(audioFormat);
line.open(audioFormat);
return line;
}
private static void usage(int exitcode) {
StringBuilder str = new StringBuilder(256);
argumentParser.usage(str);
str.append("\n"
+ "parameters: <protocol> <deviceno> [<subdevice_no>] commandno [<toggle>]\n"
+ " or <Pronto code>\n"
+ " or <importfile>");
(exitcode == IrpUtils.exitSuccess ? System.out : System.err).println(str);
IrpUtils.exit(exitcode);
}
/**
* Provides a command line interface to the export/import functions.
*
* <pre>
* Usage: Wave [options] [parameters]
* Options:
* -c, --config Path to IrpProtocols.ini
* Default: data/IrpProtocols.ini
* -h, --help, -? Display help message
* Default: false
* -m, --macrofile Macro filename
* -1, --nodivide Do not divide modulation frequency
* Default: false
* -t, --omittail Skip silence at end
* Default: false
* -o, --outfile Output filename
* Default: irpmaster.wav
* -p, --play Send the generated wave to the audio device of the
* local machine
* Default: false
* -r, --repeats Number of times to include the repeat sequence
* Default: 0
* -f, --samplefrequency Sample frequency in Hz
* Default: 44100
* -q, --square Modulate with square wave instead of sine
* Default: false
* -S, --stereo Generate two channels in anti-phase
* Default: false
* -v, --version Display version information
* Default: false
* -s, samplesize Sample size in bits
* Default: 8
*
* parameters: <em>protocol</em> <em>deviceno</em> [<em>subdevice_no</em>] <em>commandno</em> [<em>toggle</em>]
* or <em>ProntoCode</em>
* or <em>importfile</em>
* </pre>
* @param args
*/
public static void main(String[] args) {
argumentParser = new JCommander(commandLineArgs);
argumentParser.setProgramName("Wave");
try {
argumentParser.parse(args);
} catch (ParameterException ex) {
System.err.println(ex.getMessage());
usage(IrpUtils.exitUsageError);
}
if (commandLineArgs.helpRequested)
usage(IrpUtils.exitSuccess);
if (commandLineArgs.versionRequested) {
System.out.println(Version.versionString);
System.out.println("JVM: " + System.getProperty("java.vendor") + " " + System.getProperty("java.version") + " " + System.getProperty("os.name") + "-" + System.getProperty("os.arch"));
System.out.println();
System.out.println(Version.licenseString);
System.exit(IrpUtils.exitSuccess);
}
if (commandLineArgs.macrofile == null && commandLineArgs.parameters.isEmpty()) {
System.err.println("Parameters missing");
usage(IrpUtils.exitUsageError);
}
try {
if (commandLineArgs.parameters.size() == 1) {
// Exactly one argument left -> input wave file
String inputfile = commandLineArgs.parameters.get(0);
Wave wave = new Wave(new File(inputfile));
ModulatedIrSequence seq = wave.analyze(!commandLineArgs.dontDivide);
//IrSignal irSignal = new
DecodeIR.invoke(seq);
wave.dump(new File(inputfile + ".tsv"));
if (commandLineArgs.play)
wave.play();
} else {
//if (commandLineArgs.macrofile != null) {
//IrSequence irSequence = IrSequence.parseMacro(commandLineArgs.irprotocolsIniFilename, commandLineArgs.macrofile);
//Wave wave = new Wave()
//}
IrSignal irSignal = new IrSignal(commandLineArgs.irprotocolsIniFilename, 0, commandLineArgs.parameters.toArray(new String[commandLineArgs.parameters.size()]));
File file = new File(commandLineArgs.outputfile);
Wave wave = new Wave(irSignal.toModulatedIrSequence(true, commandLineArgs.noRepeats, true), commandLineArgs.sampleFrequency, commandLineArgs.sampleSize,
commandLineArgs.stereo ? 2 : 1, false /* bigEndian */,
commandLineArgs.omitTail, commandLineArgs.square, !commandLineArgs.dontDivide);
wave.export(file);
if (commandLineArgs.play)
wave.play();
}
} catch (IOException | UnsupportedAudioFileException | LineUnavailableException | IrpMasterException ex) {
System.err.println(ex.getMessage());
System.exit(IrpUtils.exitFatalProgramFailure);
}
}
private int noFrames = -1;
private AudioFormat audioFormat;
private byte[] buf;
private Wave() {
}
/**
* Reads a wave file into a Wave object.
*
* @param file Wave file as input.
* @throws UnsupportedAudioFileException
* @throws IOException
*/
public Wave(File file) throws UnsupportedAudioFileException, IOException {
AudioInputStream af = AudioSystem.getAudioInputStream(file);
audioFormat = af.getFormat();
noFrames = (int) af.getFrameLength();
buf = new byte[noFrames*audioFormat.getFrameSize()];
int n = af.read(buf, 0, buf.length);
if (n != buf.length)
System.err.println("Too few bytes read: " + n + " < " + buf.length);
}
/**
* Generates a wave audio file from its arguments.
*
* @param freq Carrier frequency in Hz.
* @param data double array of durations in micro seconds.
* @param sampleFrequency Sample frequency of the generated wave file.
* @param sampleSize Sample size (8 or 16) in bits of the samples.
* @param channels If == 2, generates two channels in perfect anti-phase.
* @param bigEndian if true, use bigendian byte order for 16 bit samples.
* @param omitTail If true, the last trailing gap will be omitted.
* @param square if true, use a square wave for modulation, otherwise a sine.
* @param divide If true, divides the carrier frequency by 2, to be used with full-wave rectifiers, e.g. a pair of IR LEDs in anti-parallel.
* @throws IncompatibleArgumentException
*/
@SuppressWarnings("ValueOfIncrementOrDecrementUsed")
public Wave(double freq, double[] data,
int sampleFrequency, int sampleSize, int channels, boolean bigEndian,
boolean omitTail, boolean square, boolean divide)
throws IncompatibleArgumentException {
if (data == null || data.length == 0)
throw new IncompatibleArgumentException("Cannot create wave file from zero array.");
double sf = sampleFrequency/1000000.0;
int[] durationsInSamplePeriods = new int[omitTail ? data.length-1 : data.length];
int length = 0;
for (int i = 0; i < durationsInSamplePeriods.length; i++) {
durationsInSamplePeriods[i] = (int) Math.round(Math.abs(sf*data[i]));
length += durationsInSamplePeriods[i];
}
double c = sampleFrequency/freq;
buf = new byte[length*sampleSize/8*channels];
int index = 0;
for (int i = 0; i < data.length-1; i += 2) {
// Handle pulse, even index
for (int j = 0; j < durationsInSamplePeriods[i]; j++) {
double t = j/(divide ? 2*c : c);
double fraq = t - (int)t;
double s = square
? (fraq < 0.5 ? -1.0 : 1.0)
: Math.sin(2*Math.PI*(fraq));
if (sampleSize == 8) {
int val = (int) Math.round(Byte.MAX_VALUE*s);
buf[index++] = (byte) val;
if (channels == 2)
buf[index++] = (byte)-val;
} else {
int val = (int) Math.round(Short.MAX_VALUE*s);
byte low = (byte) (val & 0xFF);
byte high = (byte) (val >> 8);
buf[index++] = bigEndian ? high : low;
buf[index++] = bigEndian ? low : high;
if (channels == 2) {
val = -val;
low = (byte) (val & 0xFF);
high = (byte) (val >> 8);
buf[index++] = bigEndian ? high : low;
buf[index++] = bigEndian ? low : high;
}
}
}
// Gap, odd index
if (!omitTail || i < data.length - 2) {
for (int j = 0; j < durationsInSamplePeriods[i + 1]; j++) {
for (int ch = 0; ch < channels; ch++) {
buf[index++] = 0;
if (sampleSize == 16)
buf[index++] = 0;
}
}
}
}
audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sampleFrequency, sampleSize, channels, sampleSize/8*channels, sampleFrequency, bigEndian);
}
/**
* Generates a wave audio file from its arguments.
*
* @param irSequence ModulatedIrSequence to be used.
* @param sampleFrequency Sample frequency of the generated wave file.
* @param sampleSize Sample size (8 or 16) in bits of the samples.
* @param channels If == 2, generates two channels in perfect anti-phase.
* @param bigEndian if true, use bigendian byte order for 16 bit samples.
* @param omitTail If true, the last trailing gap will be omitted.
* @param square if true, use a square wave for modulation, otherwise a sine.
* @param divide If true, divides the carrier frequency by 2, to be used with full-wave rectifiers, e.g. a pair of IR LEDs in anti-parallel.
* @throws IncompatibleArgumentException
*/
public Wave(ModulatedIrSequence irSequence,
int sampleFrequency, int sampleSize, int channels, boolean bigEndian,
boolean omitTail, boolean square, boolean divide)
throws IncompatibleArgumentException {
this(irSequence.getFrequency(), irSequence.toDoubles(),
sampleFrequency, sampleSize, channels, bigEndian,
omitTail, square, divide);
}
/**
* Generates a wave audio file from its arguments.
*
* @param irSequence ModulatedIrSequence to be used.
* @param audioFormat AudioFormat bundling sampleFrequency, sample size, channels, and bigEndian together.
* @param omitTail If true, the last trailing gap will be omitted.
* @param square if true, use a square wave for modulation, otherwise a sine.
* @param divide If true, divides the carrier frequency by 2, to be used with full-wave rectifiers, e.g. a pair of IR LEDs in anti-parallel.
* @throws IncompatibleArgumentException
*/
public Wave(ModulatedIrSequence irSequence,
AudioFormat audioFormat,
boolean omitTail,
boolean square, boolean divide)
throws IncompatibleArgumentException {
this(irSequence,
(int) audioFormat.getSampleRate(), audioFormat.getSampleSizeInBits(),
audioFormat.getChannels(),
audioFormat.isBigEndian(),
omitTail, square, divide);
}
// set up integer data (left and right channel) from the byte array.
private int[][] computeData() {
int channels = audioFormat.getChannels();
int sampleSize = audioFormat.getSampleSizeInBits();
AudioFormat.Encoding encoding = audioFormat.getEncoding();
boolean bigEndian = audioFormat.isBigEndian();
int[][] data = new int[noFrames][channels];
if (encoding == AudioFormat.Encoding.PCM_UNSIGNED && sampleSize != 8) {
System.err.println("Case not yet implemented");
return null;
}
for (int frame = 0; frame < noFrames; frame++) {
if (sampleSize == 8) {
for (int ch = 0; ch < channels; ch++) {
int val = buf[channels*frame + ch];
if (encoding == AudioFormat.Encoding.PCM_UNSIGNED)
val += (val < 0) ? 128 : -128;
data[frame][ch] = val;
}
} else {
// sampleSize == 16
for (int ch = 0; ch < channels; ch++) {
int baseIndex = 2*(channels*frame + ch);
int high = buf[bigEndian ? baseIndex : baseIndex+1]; // may be negative
int low = buf[bigEndian ? baseIndex+1 : baseIndex]; // consider as unsigned
if (low < 0)
low += 256;
int value = 256*high + low;
data[frame][ch] = value;
}
}
}
return data;
}
/**
* Analyzes the data and computes a ModulatedIrSequence. Generates some messages on stderr.
*
* @param divide consider the carrier as having its frequency halved or not?
* @return ModulatedIrSequence computed from the data.
*/
public ModulatedIrSequence analyze(boolean divide) {
double sampleFrequency = audioFormat.getSampleRate();
int channels = audioFormat.getChannels();
System.err.println("Format is: " + audioFormat.toString() + ".");
System.err.println(String.format("%d frames = %7.6f seconds.", noFrames, noFrames/sampleFrequency));
int[][] data = computeData();
if (channels == 2) {
int noDiffPhase = 0;
int noDiffAntiphase = 0;
int noNonNulls = 0;
for (int i = 0; i < noFrames; i++) {
if (data[i][0] != 0 || data[i][1] != 0) { // do not count nulls
noNonNulls++;
if (data[i][0] != data[i][1])
noDiffPhase++;
if (data[i][0] != -data[i][1])
noDiffAntiphase++;
}
}
System.err.println("This is a 2-channel file. Left and right channel are "
+ (noDiffPhase == 0 ? "perfectly in phase."
: noDiffAntiphase == 0 ? "perfectly in antiphase."
: "neither completely in nor out of phase. Pairs in-phase:"
+ (noNonNulls - noDiffPhase) + ", pairs anti-phase: " + (noNonNulls - noDiffAntiphase)
+ " (out of " + noNonNulls + ")."));
System.err.println("Subsequent analysis will be base on the left channel exclusively.");
}
// Search the largest block of oscillations
ArrayList<Integer> durations = new ArrayList<>(noFrames);
int bestLength = -1; // length of longest block this far
int bestStart = -1;
boolean isInInterestingBlock = true;
int last = -1111111;
int epsilon = audioFormat.getSampleSizeInBits() == 8 ? epsilon8Bit : epsilon16Bit;
int firstNonNullIndex = 0; // Ignore leading silence, it is silly.
while (data[firstNonNullIndex][0] == 0)
firstNonNullIndex++;
if (firstNonNullIndex > 0)
System.err.println("The first " + firstNonNullIndex + " sample(s) are 0, ignored.");
int beg = firstNonNullIndex; // start of current block
for (int i = firstNonNullIndex; i < noFrames; i++) {
int value = data[i][0];
// two consecutive zeros -> interesting block ends
if (((Math.abs(value) <= epsilon && Math.abs(last) <= epsilon) || (i == noFrames - 1)) && isInInterestingBlock) {
isInInterestingBlock = false;
// evaluate just ended block
int currentLength = i - 1 - beg;
if (currentLength > bestLength) {
// longest this far
bestLength = currentLength;
bestStart = beg;
}
durations.add((int)Math.round(currentLength/sampleFrequency*1000000.0));
beg = i;
} else if (Math.abs(value) > epsilon && !isInInterestingBlock) {
// Interesting block starts
isInInterestingBlock = true;
int currentLength = i - 1 - beg;
durations.add((int) Math.round(currentLength/sampleFrequency*1000000.0));
beg = i;
}
last = value;
}
if (!isInInterestingBlock && noFrames - beg > 1)
durations.add((int)Math.round((noFrames - beg)/sampleFrequency*1000000.0));
if (durations.size() % 2 == 1)
durations.add(0);
// Found the longest interesting block, now evaluate frequency
int signchanges = 0;
last = 0;
for (int i = 0; i < bestLength; i++) {
int indx = i + bestStart;
int value = data[indx][0];
if (value != 0) {
if (value*last < 0)
signchanges++;
last = value;
}
}
double carrierFrequency = (divide ? 2 : 1)*sampleFrequency * signchanges/(2*bestLength);
System.err.println("Carrier frequency estimated to " + Math.round(carrierFrequency) + " Hz.");
int arr[] = new int[durations.size()];
int ind = 0;
for (Integer val : durations) {
arr[ind] = val;
ind++;
if (debug > 0)
System.err.print(val + " ");
}
if (debug > 0)
System.err.println();
try {
//return new IrSignal(arr, arr.length/2, 0, (int) Math.round(carrierFrequency));
return new ModulatedIrSequence(arr, carrierFrequency);
} catch (IncompatibleArgumentException ex) {
// cannot happen, we have insured that the data has even size.
return null;
}
}
/**
* Print the channels to a tab separated text file, for example for debugging purposes.
* This file can be imported in a spreadsheet.
*
* @param dumpfile Output file.
* @throws FileNotFoundException
*/
public void dump(File dumpfile) throws FileNotFoundException {
int data[][] = computeData();
double sampleRate = audioFormat.getSampleRate();
int channels = audioFormat.getChannels();
try (PrintStream stream = new PrintStream(dumpfile, IrpUtils.dumbCharsetName)) {
for (int i = 0; i < noFrames; i++) {
stream.print(String.format("%d\t%8.6f\t", i, i / sampleRate));
for (int ch = 0; ch < channels; ch++)
stream.print(data[i][ch] + (ch < channels - 1 ? "\t" : "\n"));
}
} catch (UnsupportedEncodingException ex) {
throw new InternalError();
}
}
/**
* Write the signal to the file given as argument.
* @param file Output File.
*/
public void export(File file) {
ByteArrayInputStream bs = new ByteArrayInputStream(buf);
bs.reset();
AudioInputStream ais = new AudioInputStream(bs, audioFormat, (long) buf.length/audioFormat.getFrameSize());
try {
int result = AudioSystem.write(ais, AudioFileFormat.Type.WAVE, file);
if (result <= buf.length)
System.err.println("Wrong number of bytes written: " + result + " < " + buf.length);
} catch (IOException e) {
System.err.println(e.getMessage());
}
}
/**
* Sends the generated wave to the line in argument, if possible.
* @param line Line to be used. Should be open, and remains open. The user must make sure the AudioFormat is compatible.
* @throws LineUnavailableException
* @throws IOException
*/
public void play(SourceDataLine line) throws LineUnavailableException, IOException {
line.start();
int bytesWritten = line.write(buf, 0, buf.length);
if (bytesWritten != buf.length)
throw new IOException("Not all bytes written");
line.drain();
}
/**
* Sends the generated wave to the local machine's audio system, if possible.
* @throws LineUnavailableException
* @throws IOException<|fim▁hole|> */
public void play() throws LineUnavailableException, IOException {
try (SourceDataLine line = AudioSystem.getSourceDataLine(audioFormat)) {
line.open(audioFormat);
play(line);
}
}
private final static class CommandLineArgs {
@Parameter(names = {"-1", "--nodivide"}, description = "Do not divide modulation frequency")
boolean dontDivide = false;
@Parameter(names = {"-c", "--config"}, description = "Path to IrpProtocols.ini")
String irprotocolsIniFilename = "data/IrpProtocols.ini";
@Parameter(names = {"-h", "--help", "-?"}, description = "Display help message")
boolean helpRequensted = false;
@Parameter(names = {"-f", "--samplefrequency"}, description = "Sample frequency in Hz")
int sampleFrequency = 44100;
@Parameter(names = {"-m", "--macrofile"}, description = "Macro filename")
String macrofile = null;
@Parameter(names = {"-o", "--outfile"}, description = "Output filename")
String outputfile = "irpmaster.wav";
@Parameter(names = {"-p", "--play"}, description = "Send the generated wave to the audio device of the local machine")
boolean play = false;
@Parameter(names = {"-q", "--square"}, description = "Modulate with square wave instead of sine")
boolean square = false;
@Parameter(names = {"-r", "--repeats"}, description = "Number of times to include the repeat sequence")
int noRepeats = 0;
@Parameter(names = {"-s", "samplesize"}, description = "Sample size in bits")
int sampleSize = 8;
@Parameter(names = {"-S", "--stereo"}, description = "Generate two channels in anti-phase")
boolean stereo = false;
@Parameter(names = {"-t", "--omittail"}, description = "Skip silence at end")
boolean omitTail = false;
@Parameter(names = {"-v", "--version"}, description = "Display version information")
boolean versionRequested;
@Parameter(description = "[parameters]")
@SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
private ArrayList<String> parameters = new ArrayList<>(64);
}
}<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import ReactDOM from 'react-dom';
import _ from 'underscore';
import babel from 'babel-core/browser';
import esprima from 'esprima';
import escodegen from 'escodegen';
import estraverse from 'estraverse';
import Codemirror from 'react-codemirror';
import classNames from 'classnames';
import { iff, default as globalUtils } from 'app/utils/globalUtils';
import './styles/app.less';
import 'react-codemirror/node_modules/codemirror/lib/codemirror.css';
import 'react-codemirror/node_modules/codemirror/theme/material.css';
import 'app/modules/JsxMode';
const localStorage = window.localStorage;
const TAB_SOURCE = 'SOURCE';
const TAB_TRANSCODE = 'TRANSCODE';
const LiveDemoApp = React.createClass({
getInitialState() {
return {
sourceCode: '',
transCode: '',
transError: '',
tab: TAB_SOURCE,
func: function() { }
};
},
componentWillMount() {
this._setSource(localStorage.getItem('sourceCode') || '');
},
componentDidMount() {
this._renderPreview();
},
componentDidUpdate() {
this._renderPreview();
},
render() {
const {
sourceCode,
transCode,
tab,
transError
} = this.state;
const showSource = (tab === TAB_SOURCE);
<|fim▁hole|> theme: 'material',
tabSize: 2,
smartIndent: true,
indentWithTabs: false
};
const srcTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': showSource
});
const transTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': !showSource
});
console.log(transCode || transError);
return (
<div className='otsLiveDemoApp'>
<div className='otsLiveDemoApp-tabs'>
<button className={srcTabClassName} onClick={this._onSrcClick}>Source</button>
<button className={transTabClassName} onClick={this._onTransClick}>Transcode</button>
</div>
<div className='otsLiveDemoApp-src'>
<Codemirror
value={showSource ? sourceCode : (transCode || transError)}
onChange={this._onChangeEditor}
options={cmOptions}
/>
</div>
</div>
);
},
_onChangeEditor(value) {
const { tab } = this.state;
if (tab === TAB_SOURCE) {
this._setSource(value);
}
},
_onSrcClick() {
this.setState({
tab: TAB_SOURCE
});
},
_onTransClick() {
this.setState({
tab: TAB_TRANSCODE
});
},
_setSource(sourceCode) {
localStorage.setItem('sourceCode', sourceCode);
const dependencies = [];
let transCode;
let transError;
try {
const es5trans = babel.transform(sourceCode);
let uniqueId = 0;
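      // Walk the transpiled AST: swap each require('<dep>') call for a unique
      // placeholder identifier (recorded in `dependencies`), and rewrite
      // `module.exports = ...` into a return statement so the transcode can
      // run as a plain function body.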
estraverse.replace(es5trans.ast.program, {
enter(node, parent) {
if (
node.type === 'CallExpression' &&
node.callee.type === 'Identifier' &&
node.callee.name === 'require' &&
node.arguments.length === 1 &&
node.arguments[0].type === 'Literal'
) {
const dep = {
identifier: '__DEPENDENCY_' + (uniqueId++),
depName: node.arguments[0].value
};
dependencies.push(dep);
return {
name: dep.identifier,
type: 'Identifier'
};
}
else if (
node.type === 'AssignmentExpression' &&
node.left.type === 'MemberExpression' &&
node.left.object.type === 'Identifier' &&
node.left.object.name === 'module' &&
node.left.property.type === 'Identifier' &&
node.left.property.name === 'exports'
) {
return {
type: 'ReturnStatement',
argument: node.right
}
}
}
});
transCode = escodegen.generate(es5trans.ast.program);
}
catch (e) {
const msg = 'Error transpiling source code: ';
transError = msg + e.toString();
globalUtils.error(msg, e);
}
this.setState({
sourceCode,
transCode,
transError
});
if (transCode) {
try {
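        // Build `new Function(dep1, ..., depN, 'exports', body)` via
        // Function.prototype.bind.apply; the leading object literal fills the
        // bound `this` slot required by bind and is otherwise unused.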
const fnConstArgs = [{ what: 'aaa'}].concat(dependencies.map((dep) => {
return dep.identifier;
}));
fnConstArgs.push('exports');
fnConstArgs.push(transCode);
this.setState({
func: new (Function.prototype.bind.apply(Function, fnConstArgs))
});
}
catch(e) {
console.error('Runtime Error', e);
}
}
},
_renderPreview() {
const { func } = this.state;
const { Component, error } = (() => {
try {
return {
Component: func(React, {})
};
}
catch(e) {
return {
error: e
};
}
})();
try {
if (Component) {
ReactDOM.render(<Component />, document.getElementById('preview'));
}
else if (error) {
ReactDOM.render(<div className='otsLiveDemoApp-error'>{error.toString()}</div>, document.getElementById('preview'));
}
}
catch (e) {
globalUtils.error('Fatal error rendering preview: ', e);
}
}
});
ReactDOM.render(<LiveDemoApp />, document.getElementById('editor'));
// const newProgram = {
// type: 'Program',
// body: [
// {
// type: 'CallExpression',
// callee: {
// type: 'FunctionExpression',
// id: null,
// params: dependencies.map((dep) => {
// return {
// type: 'Identifier',
// name: dep.identifier
// }
// }),
// body: {
// type: 'BlockStatement',
// body: es5trans.ast.program.body
// }
// },
// arguments: []
// }
// ]
// };<|fim▁end|>
|
const cmOptions = {
lineNumbers: true,
readOnly: !showSource,
mode: 'jsx',
|
<|file_name|>sim_det_noise.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015-2018 by the parties listed in the AUTHORS file.
# All rights reserved. Use of this source code is governed by
# a BSD-style license that can be found in the LICENSE file.
"""
sim_det_noise.py implements the noise simulation operator, OpSimNoise.
"""
import numpy as np
from ..op import Operator
from ..ctoast import sim_noise_sim_noise_timestream as sim_noise_timestream
from .. import timing as timing
class OpSimNoise(Operator):
"""
Operator which generates noise timestreams.
This passes through each observation and every process generates data
for its assigned samples. The dictionary for each observation should
include a unique 'id' key used in the random number generation. The
observation dictionary can optionally include a 'global_offset' member
that might be useful if you are splitting observations and want to
enforce reproducibility of a given sample, even when using
different-sized observations.
Args:
out (str): accumulate data to the cache with name <out>_<detector>.
If the named cache objects do not exist, then they are created.
realization (int): if simulating multiple realizations, the realization
index.
component (int): the component index to use for this noise simulation.
noise (str): PSD key in the observation dictionary.
"""
def __init__(self, out='noise', realization=0, component=0, noise='noise',
rate=None, altFFT=False):
# We call the parent class constructor, which currently does nothing
super().__init__()
self._out = out
self._oversample = 2
self._realization = realization
self._component = component
self._noisekey = noise
self._rate = rate
self._altfft = altFFT
def exec(self, data):
"""
Generate noise timestreams.
This iterates over all observations and detectors and generates
the noise timestreams based on the noise object for the current
observation.
Args:
data (toast.Data): The distributed data.
Raises:
KeyError: If an observation in data does not have noise
object defined under given key.<|fim▁hole|> for obs in data.obs:
obsindx = 0
if 'id' in obs:
obsindx = obs['id']
else:
print("Warning: observation ID is not set, using zero!")
telescope = 0
if 'telescope' in obs:
telescope = obs['telescope_id']
global_offset = 0
if 'global_offset' in obs:
global_offset = obs['global_offset']
tod = obs['tod']
if self._noisekey in obs:
nse = obs[self._noisekey]
else:
raise KeyError('Observation does not contain noise under '
'"{}"'.format(self._noisekey))
if tod.local_chunks is None:
raise RuntimeError('noise simulation for uniform distributed '
'samples not implemented')
# eventually we'll redistribute, to allow long correlations...
if self._rate is None:
times = tod.local_times()
else:
times = None
# Iterate over each chunk.
chunk_first = tod.local_samples[0]
for curchunk in range(tod.local_chunks[1]):
chunk_first += self.simulate_chunk(
tod=tod, nse=nse,
curchunk=curchunk, chunk_first=chunk_first,
obsindx=obsindx, times=times,
telescope=telescope, global_offset=global_offset)
return
def simulate_chunk(self, *, tod, nse, curchunk, chunk_first,
obsindx, times, telescope, global_offset):
"""
Simulate one chunk of noise for all detectors.
Args:
tod (toast.tod.TOD): TOD object for the observation.
nse (toast.tod.Noise): Noise object for the observation.
curchunk (int): The local index of the chunk to simulate.
chunk_first (int): First global sample index of the chunk.
obsindx (int): Observation index for random number stream.
times (int): Timestamps for effective sample rate.
telescope (int): Telescope index for random number stream.
global_offset (int): Global offset for random number stream.
Returns:
chunk_samp (int): Number of simulated samples
"""
autotimer = timing.auto_timer(type(self).__name__)
chunk_samp = tod.total_chunks[tod.local_chunks[0] + curchunk]
local_offset = chunk_first - tod.local_samples[0]
if self._rate is None:
# compute effective sample rate
rate = 1 / np.median(np.diff(
times[local_offset : local_offset+chunk_samp]))
else:
rate = self._rate
for key in nse.keys:
# Check if noise matching this PSD key is needed
weight = 0.
for det in tod.local_dets:
weight += np.abs(nse.weight(det, key))
if weight == 0:
continue
# Simulate the noise matching this key
#nsedata = sim_noise_timestream(
# self._realization, telescope, self._component, obsindx,
# nse.index(key), rate, chunk_first+global_offset, chunk_samp,
# self._oversample, nse.freq(key), nse.psd(key),
# self._altfft)[0]
nsedata = sim_noise_timestream(
self._realization, telescope, self._component, obsindx,
nse.index(key), rate, chunk_first+global_offset, chunk_samp,
self._oversample, nse.freq(key), nse.psd(key))
# Add the noise to all detectors that have nonzero weights
for det in tod.local_dets:
weight = nse.weight(det, key)
if weight == 0:
continue
cachename = '{}_{}'.format(self._out, det)
if tod.cache.exists(cachename):
ref = tod.cache.reference(cachename)
else:
ref = tod.cache.create(cachename, np.float64,
(tod.local_samples[1], ))
ref[local_offset : local_offset+chunk_samp] += weight*nsedata
del ref
return chunk_samp<|fim▁end|>
|
RuntimeError: If observations are not split into chunks.
"""
autotimer = timing.auto_timer(type(self).__name__)
|
<|file_name|>foreign2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod bar {
#[abi = "cdecl"]
#[nolink]
pub extern {}
}
mod zed {
#[abi = "cdecl"]
#[nolink]
pub extern {}
}
mod libc {
#[abi = "cdecl"]
#[nolink]
pub extern {
pub fn write(fd: int, buf: *u8, count: ::core::libc::size_t)
-> ::core::libc::ssize_t;
}
}
mod baz {
#[abi = "cdecl"]
#[nolink]
pub extern {}
}
pub fn main() { }<|fim▁end|>
| |
<|file_name|>SCfdiSignature.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package erp.cfd;
import java.io.Serializable;
/**
*
* @author Juan Barajas
*/
public final class SCfdiSignature implements Serializable {
private String msUuid;
private String msFechaTimbrado;
private String msSelloCFD;
private String msNoCertificadoSAT;
private String msSelloSAT;
private String msRfcEmisor;
private String msRfcReceptor;
private double mdTotalCy;
public SCfdiSignature() {
msUuid = "";
msFechaTimbrado = "";
msSelloCFD = "";
msNoCertificadoSAT = "";
msSelloSAT = "";<|fim▁hole|> msRfcReceptor = "";
mdTotalCy = 0;
}
public void setUuid(String s) { msUuid = s; }
public void setFechaTimbrado(String s) { msFechaTimbrado = s; }
public void setSelloCFD(String s) { msSelloCFD = s; }
public void setNoCertificadoSAT(String s) { msNoCertificadoSAT = s; }
public void setSelloSAT(String s) { msSelloSAT = s; }
public void setRfcEmisor(String s) { msRfcEmisor = s; }
public void setRfcReceptor(String s) { msRfcReceptor = s; }
public void setTotalCy(double d) { mdTotalCy = d; }
public String getUuid() { return msUuid; }
public String getFechaTimbrado() { return msFechaTimbrado; }
public String getSelloCFD() { return msSelloCFD; }
public String getNoCertificadoSAT() { return msNoCertificadoSAT; }
public String getSelloSAT() { return msSelloSAT; }
public String getRfcEmisor() { return msRfcEmisor; }
public String getRfcReceptor() { return msRfcReceptor; }
public double getTotalCy() { return mdTotalCy; }
}<|fim▁end|>
|
msRfcEmisor = "";
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export * from './baPageTop';
// export * from './baMsgCenter';
export * from './baSidebar';
export * from './baMenu/components/baMenuItem';
export * from './baMenu';
export * from './baContentTop';
export * from './baCard';<|fim▁hole|>export * from './baFullCalendar';
export * from './baPictureUploader';
export * from './baCheckbox';
export * from './baMultiCheckbox';<|fim▁end|>
|
export * from './baAmChart';
export * from './baChartistChart';
export * from './baBackTop';
|
<|file_name|>Test_pt.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS>
<context>
<name>TestGui::UnitTest</name>
<message>
<source>FreeCAD UnitTest</source>
<translation>Teste de unidade do FreeCAD</translation>
</message>
<message>
<source>Failures and errors</source>
<translation>Falhas e erros</translation>
</message>
<message>
<source>Description</source>
<translation>Descrição</translation>
</message>
<message>
<source>&Start</source>
<translation>&Início</translation>
</message>
<message>
<source>Alt+S</source>
<translation>Alt+I</translation>
</message>
<message>
<source>&Help</source>
<translation>A&juda</translation>
</message>
<message>
<source>F1</source>
<translation>F1</translation>
</message>
<message>
<source>&About</source>
<translation>S&obre</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+O</translation>
</message>
<message>
<source>&Close</source>
<translation>&Fechar</translation>
</message>
<message>
<source>Alt+C</source>
<translation>Alt+F</translation>
</message>
<message>
<source>Idle</source>
<translation>Ocioso</translation>
</message>
<message>
<source>Progress</source>
<translation>Progresso</translation>
</message>
<message>
<source><font color="#0000ff">0</font></source>
<translation><font color="#0000ff">0</font></translation>
</message><|fim▁hole|> <message>
<source>Errors:</source>
<translation>Erros:</translation>
</message>
<message>
<source>Failures:</source>
<translation>Falhas:</translation>
</message>
<message>
<source>Run:</source>
<translation>Executar:</translation>
</message>
<message>
<source>Test</source>
<translation>Teste</translation>
</message>
<message>
<source>Select test name:</source>
<translation>Selecione o nome do teste:</translation>
</message>
</context>
<context>
<name>TestGui::UnitTestDialog</name>
<message>
<source>Help</source>
<translation>Ajuda</translation>
</message>
<message>
<source>About FreeCAD UnitTest</source>
<translation>Sobre o teste de unidade do FreeCAD</translation>
</message>
<message>
<source>Copyright (c) Werner Mayer
FreeCAD UnitTest is part of FreeCAD and supports writing Unit Tests for own modules.</source>
<translation>Copyright (c) Werner Mayer
O teste de unidade do FreeCAD é parte do FreeCAD e suporta o desenvolvimento de testes de unidade para módulos próprios.</translation>
</message>
<message>
<source>Enter the name of a callable object which, when called, will return a TestCase.Click 'start', and the test thus produced will be run.
Double click on an error in the tree view to see more information about it,including the stack trace.</source>
<translation>Digite o nome de um objeto que pode ser chamado que, quando chamado, vai retornar um TestCase. Clique "Iniciar", e o teste assim produzido será executado. Dê um duplo clique sobre um erro na árvore para ver mais informações sobre ele, incluindo o rastreamento de pilha.</translation>
</message>
</context>
</TS><|fim▁end|>
|
<message>
<source>Remaining:</source>
<translation>Faltando:</translation>
</message>
|
<|file_name|>section-title.component.ts<|end_file_name|><|fim▁begin|>import { ChangeDetectionStrategy, Component } from '@angular/core';<|fim▁hole|> template: `
<div fxLayout fxLayoutAlign="start center">
<h2>
<i class="fas fa-caret-right" aria-hidden="true"></i>
<fa-icon [icon]="iconCaretRight" size="lg"></fa-icon>
<ng-content></ng-content>
</h2>
</div>
`,
styleUrls: ['section-title.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class SectionTitleComponent {
iconCaretRight = faCaretRight;
}<|fim▁end|>
|
import { faCaretRight } from '@fortawesome/free-solid-svg-icons';
@Component({
selector: 'section-title',
|
<|file_name|>nuavatar.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUAvatar(NURESTObject):
""" Represents a Avatar in the VSD
Notes:
Avatar
"""
__rest_name__ = "avatar"
__resource_name__ = "avatars"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a Avatar instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> avatar = NUAvatar(id=u'xxxx-xxx-xxx-xxx', name=u'Avatar')
>>> avatar = NUAvatar(data=my_dict)
"""
super(NUAvatar, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._last_updated_date = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._owner = None
self._external_id = None
self._type = None
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a limited number of Metadata objects is returned, based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a limited number of Metadata objects is returned, based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:<|fim▁hole|>
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
@property
def type(self):
""" Get type value.
Notes:
The image type
"""
return self._type
@type.setter
def type(self, value):
""" Set type value.
Notes:
The image type
"""
self._type = value<|fim▁end|>
|
External object ID. Used for integration with third party systems
|
<|file_name|>closure.rs<|end_file_name|><|fim▁begin|>// rustfmt-normalize_comments: true
// Closures
fn main() {
let square = ( |i: i32 | i * i );
let commented = |/* first */ a /*argument*/, /* second*/ b: WithType /* argument*/, /* ignored */ _ |
(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb);
let block_body = move |xxxxxxxxxxxxxxxxxxxxxxxxxxxxx, ref yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy| {
xxxxxxxxxxxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
};
let loooooooooooooong_name = |field| {
// format comments.
if field.node.attrs.len() > 0 { field.node.attrs[0].span.lo()
} else {
field.span.lo()
}};
let unblock_me = |trivial| {
closure()
};
let empty = |arg| {};
let simple = |arg| { /* comment formatting */ foo(arg) };
let test = | | { do_something(); do_something_else(); };
let arg_test = |big_argument_name, test123| looooooooooooooooooong_function_naaaaaaaaaaaaaaaaame();
let arg_test = |big_argument_name, test123| {looooooooooooooooooong_function_naaaaaaaaaaaaaaaaame()};
let simple_closure = move || -> () {};
let closure = |input: Ty| -> Option<String> {
foo()
};
let closure_with_return_type = |aaaaaaaaaaaaaaaaaaaaaaarg1, aaaaaaaaaaaaaaaaaaaaaaarg2| -> Strong { "sup".to_owned() };
|arg1, arg2, _, _, arg3, arg4| { let temp = arg4 + arg3;
arg2 * arg1 - temp };
let block_body_with_comment = args.iter()
.map(|a| {
// Emitting only dep-info is possible only for final crate type, as<|fim▁hole|> } else { a }
});
}
fn issue311() {
let func = |x| println!("{}", x);
(func)(0.0);
}
fn issue863() {
let closure = |x| match x {
0 => true,
_ => false,
} == true;
}
fn issue934() {
let hash: &Fn(&&Block) -> u64 = &|block| -> u64 {
let mut h = SpanlessHash::new(cx);
h.hash_block(block);
h.finish()
};
let hash: &Fn(&&Block) -> u64 = &|block| -> u64 {
let mut h = SpanlessHash::new(cx);
h.hash_block(block);
h.finish();
};
}
impl<'a, 'tcx: 'a> SpanlessEq<'a, 'tcx> {
pub fn eq_expr(&self, left: &Expr, right: &Expr) -> bool {
match (&left.node, &right.node) {
(&ExprBinary(l_op, ref ll, ref lr), &ExprBinary(r_op, ref rl, ref rr)) => {
l_op.node == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr) ||
swap_binop(l_op.node, ll, lr).map_or(false, |(l_op, ll, lr)| l_op == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr))
}
}
}
}
fn foo() {
lifetimes_iter___map(|lasdfasfd| {
let hi = if l.bounds.is_empty() {
l.lifetime.span.hi()
};
});
}
fn issue1405() {
open_raw_fd(fd, b'r')
.and_then(|file| Capture::new_raw(None, |_, err| unsafe {
raw::pcap_fopen_offline(file, err)
}));
}
fn issue1466() {
let vertex_buffer = frame.scope(|ctx| {
let buffer =
ctx.create_host_visible_buffer::<VertexBuffer<Vertex>>(&vertices);
ctx.create_device_local_buffer(buffer)
});
}
fn issue470() {
{{{
let explicit_arg_decls =
explicit_arguments.into_iter()
.enumerate()
.map(|(index, (ty, pattern))| {
let lvalue = Lvalue::Arg(index as u32);
block = this.pattern(block,
argument_extent,
hair::PatternRef::Hair(pattern),
&lvalue);
ArgDecl { ty: ty }
});
}}}
}
// #1509
impl Foo {
pub fn bar(&self) {
Some(SomeType {
push_closure_out_to_100_chars: iter(otherwise_it_works_ok.into_iter().map(|f| {
Ok(f)
})),
})
}
}
fn issue1329() {
aaaaaaaaaaaaaaaa.map(|x| {
x += 1;
x
})
.filter
}
fn issue325() {
let f = || unsafe { xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx };
}
fn issue1697() {
Test.func_a(A_VERY_LONG_CONST_VARIABLE_NAME, move |arg1, arg2, arg3, arg4| arg1 + arg2 + arg3 + arg4)
}
fn issue1694() {
foooooo(|_referencefffffffff: _, _target_reference: _, _oid: _, _target_oid: _| format!("refs/pull/{}/merge", pr_id))
}
fn issue1713() {
rayon::join(
|| recurse(left, is_less, pred, limit),
|| recurse(right, is_less, Some(pivot), limit),
);
rayon::join(
1,
|| recurse(left, is_less, pred, limit),
2,
|| recurse(right, is_less, Some(pivot), limit),
);
}
fn issue2063() {
|ctx: Ctx<(String, String)>| -> io::Result<Response> {
Ok(Response::new().with_body(ctx.params.0))
}
}
fn issue1524() {
let f = |x| {{{{x}}}};
let f = |x| {{{x}}};
let f = |x| {{x}};
let f = |x| {x};
let f = |x| x;
}
fn issue2171() {
foo(|| unsafe {
if PERIPHERALS {
loop {}
} else {
PERIPHERALS = true;
}
})
}
fn issue2207() {
a.map(|_| unsafe {
a_very_very_very_very_very_very_very_long_function_name_or_anything_else()
}.to_string())
}
fn issue2262() {
result.init(&mut result.slave.borrow_mut(), &mut (result.strategy)()).map_err(|factory| Error {
factory,
slave: None,
})?;
}<|fim▁end|>
|
// as others may emit required metadata for dependent crate types
if a.starts_with("--emit") && is_final_crate_type && !self.workspace_mode {
"--emit=dep-info"
|
<|file_name|>generic.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import gobject
import dbus.mainloop.glib
dbus.mainloop.glib.DBusGMainLoop(set_as_default = True)
import telepathy
DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties'
def get_registry():
reg = telepathy.client.ManagerRegistry()
reg.LoadManagers()
return reg
def get_connection_manager(reg):
cm = reg.GetManager('bluewire')
return cm
class Action(object):
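    """One link in a chain of asynchronous D-Bus calls: each action queues
    its own call, and its completion handler queues whatever action was
    appended after it, so the sequence runs without blocking the main loop.
    """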
def __init__(self):
self._action = None
def queue_action(self):
pass
def append_action(self, action):
assert self._action is None
self._action = action
def get_next_action(self):
assert self._action is not None
return self._action
def _on_done(self):
if self._action is None:
return
self._action.queue_action()
def _on_error(self, error):
print error
def _on_generic_message(self, *args):
pass
class DummyAction(Action):
def queue_action(self):
gobject.idle_add(self._on_done)
class QuitLoop(Action):
def __init__(self, loop):
super(QuitLoop, self).__init__()
self._loop = loop
def queue_action(self):
self._loop.quit()
class DisplayParams(Action):
def __init__(self, cm):
super(DisplayParams, self).__init__()
self._cm = cm
def queue_action(self):
self._cm[telepathy.interfaces.CONN_MGR_INTERFACE].GetParameters(<|fim▁hole|> )
def _on_done(self, params):
print "Connection Parameters:"
for name, flags, signature, default in params:
print "\t%s (%s)" % (name, signature),
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_SECRET:
print "secret",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_DBUS_PROPERTY:
print "dbus-property",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_HAS_DEFAULT:
print "has-default(%s)" % default,
print ""
super(DisplayParams, self)._on_done()
class RequestConnection(Action):
def __init__(self, cm, username, password, forward):
super(RequestConnection, self).__init__()
self._cm = cm
self._conn = None
self._serviceName = None
self._username = username
self._password = password
self._forward = forward
@property
def conn(self):
return self._conn
@property
def serviceName(self):
return self._serviceName
def queue_action(self):
self._cm[telepathy.server.CONNECTION_MANAGER].RequestConnection(
'bluetooth",
{
'account': self._username,
'password': self._password,
'forward': self._forward,
},
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, busName, objectPath):
self._serviceName = busName
self._conn = telepathy.client.Connection(busName, objectPath)
super(RequestConnection, self)._on_done()
class Connect(Action):
def __init__(self, connAction):
super(Connect, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].connect_to_signal(
'StatusChanged',
self._on_change,
)
self._connAction.conn[telepathy.server.CONNECTION].Connect(
reply_handler = self._on_generic_message,
error_handler = self._on_error,
)
def _on_done(self):
super(Connect, self)._on_done()
def _on_change(self, status, reason):
if status == telepathy.constants.CONNECTION_STATUS_DISCONNECTED:
print "Disconnected!"
self._conn = None
elif status == telepathy.constants.CONNECTION_STATUS_CONNECTED:
print "Connected"
self._on_done()
elif status == telepathy.constants.CONNECTION_STATUS_CONNECTING:
print "Connecting"
else:
print "Status: %r" % status
class SimplePresenceOptions(Action):
def __init__(self, connAction):
super(SimplePresenceOptions, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[DBUS_PROPERTIES].Get(
telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE,
'Statuses',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, statuses):
print "\tAvailable Statuses"
for (key, value) in statuses.iteritems():
print "\t\t - %s" % key
super(SimplePresenceOptions, self)._on_done()
class NullHandle(object):
@property
def handle(self):
return 0
@property
def handles(self):
return []
class UserHandle(Action):
def __init__(self, connAction):
super(UserHandle, self).__init__()
self._connAction = connAction
self._handle = None
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].GetSelfHandle(
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handle):
self._handle = handle
super(UserHandle, self)._on_done()
class RequestHandle(Action):
def __init__(self, connAction, handleType, handleNames):
super(RequestHandle, self).__init__()
self._connAction = connAction
self._handle = None
self._handleType = handleType
self._handleNames = handleNames
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].RequestHandles(
self._handleType,
self._handleNames,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handles):
self._handle = handles[0]
super(RequestHandle, self)._on_done()
class RequestChannel(Action):
def __init__(self, connAction, handleAction, channelType, handleType):
super(RequestChannel, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
self._channel = None
self._channelType = channelType
self._handleType = handleType
@property
def channel(self):
return self._channel
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].RequestChannel(
self._channelType,
self._handleType,
self._handleAction.handle,
True,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, channelObjectPath):
self._channel = telepathy.client.Channel(self._connAction.serviceName, channelObjectPath)
super(RequestChannel, self)._on_done()
class EnsureChannel(Action):
def __init__(self, connAction, channelType, handleType, handleId):
super(EnsureChannel, self).__init__()
self._connAction = connAction
self._channel = None
self._channelType = channelType
self._handleType = handleType
self._handleId = handleId
self._handle = None
@property
def channel(self):
return self._channel
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
properties = {
telepathy.server.CHANNEL_INTERFACE+".ChannelType": self._channelType,
telepathy.server.CHANNEL_INTERFACE+".TargetHandleType": self._handleType,
telepathy.server.CHANNEL_INTERFACE+".TargetID": self._handleId,
}
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_REQUESTS].EnsureChannel(
properties,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, yours, channelObjectPath, properties):
print "Create?", not not yours
print "Path:", channelObjectPath
print "Properties:", properties
self._channel = telepathy.client.Channel(self._connAction.serviceName, channelObjectPath)
self._handle = properties[telepathy.server.CHANNEL_INTERFACE+".TargetHandle"]
super(EnsureChannel, self)._on_done()
class CloseChannel(Action):
def __init__(self, connAction, chanAction):
super(CloseChannel, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handles = []
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL].Close(
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self):
super(CloseChannel, self)._on_done()
class ContactHandles(Action):
def __init__(self, connAction, chanAction):
super(ContactHandles, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handles = []
@property
def handles(self):
return self._handles
def queue_action(self):
self._chanAction.channel[DBUS_PROPERTIES].Get(
telepathy.server.CHANNEL_INTERFACE_GROUP,
'Members',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handles):
self._handles = list(handles)
super(ContactHandles, self)._on_done()
class SimplePresenceStatus(Action):
def __init__(self, connAction, handleAction):
super(SimplePresenceStatus, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE].GetPresences(
self._handleAction.handles,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, aliases):
print "\tPresences:"
for hid, (presenceType, presence, presenceMessage) in aliases.iteritems():
print "\t\t%s:" % hid, presenceType, presence, presenceMessage
super(SimplePresenceStatus, self)._on_done()
class SetSimplePresence(Action):
def __init__(self, connAction, status, message):
super(SetSimplePresence, self).__init__()
self._connAction = connAction
self._status = status
self._message = message
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE].SetPresence(
self._status,
self._message,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self):
super(SetSimplePresence, self)._on_done()
class Aliases(Action):
def __init__(self, connAction, handleAction):
super(Aliases, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_ALIASING].RequestAliases(
self._handleAction.handles,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, aliases):
print "\tAliases:"
for h, alias in zip(self._handleAction.handles, aliases):
print "\t\t", h, alias
super(Aliases, self)._on_done()
class Call(Action):
def __init__(self, connAction, chanAction, handleAction):
super(Call, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handleAction = handleAction
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL_TYPE_STREAMED_MEDIA].RequestStreams(
self._handleAction.handle,
[telepathy.constants.MEDIA_STREAM_TYPE_AUDIO],
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handle):
print "Call started"
super(Call, self)._on_done()
class SendText(Action):
def __init__(self, connAction, chanAction, handleAction, messageType, message):
super(SendText, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handleAction = handleAction
self._messageType = messageType
self._message = message
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL_TYPE_TEXT].Send(
self._messageType,
self._message,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self,):
print "Message sent"
super(SendText, self)._on_done()
class Sleep(Action):
def __init__(self, length):
super(Sleep, self).__init__()
self._length = length
def queue_action(self):
gobject.timeout_add(self._length, self._on_done)
class Block(Action):
def __init__(self):
super(Block, self).__init__()
def queue_action(self):
print "Blocking"
def _on_done(self):
#super(SendText, self)._on_done()
pass
class Disconnect(Action):
def __init__(self, connAction):
super(Disconnect, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].Disconnect(
reply_handler = self._on_done,
error_handler = self._on_error,
)
if __name__ == '__main__':
loop = gobject.MainLoop()
reg = get_registry()
cm = get_connection_manager(reg)
nullHandle = NullHandle()
dummy = DummyAction()
firstAction = dummy
lastAction = dummy
if True:
dp = DisplayParams(cm)
lastAction.append_action(dp)
lastAction = lastAction.get_next_action()
if True:
username = sys.argv[1]
password = sys.argv[2]
forward = sys.argv[3]
reqcon = RequestConnection(cm, username, password, forward)
lastAction.append_action(reqcon)
lastAction = lastAction.get_next_action()
if False:
reqcon = RequestConnection(cm, username, password, forward)
lastAction.append_action(reqcon)
lastAction = lastAction.get_next_action()
con = Connect(reqcon)
lastAction.append_action(con)
lastAction = lastAction.get_next_action()
if True:
spo = SimplePresenceOptions(reqcon)
lastAction.append_action(spo)
lastAction = lastAction.get_next_action()
if True:
uh = UserHandle(reqcon)
lastAction.append_action(uh)
lastAction = lastAction.get_next_action()
ua = Aliases(reqcon, uh)
lastAction.append_action(ua)
lastAction = lastAction.get_next_action()
sps = SimplePresenceStatus(reqcon, uh)
lastAction.append_action(sps)
lastAction = lastAction.get_next_action()
if False:
setdnd = SetSimplePresence(reqcon, "dnd", "")
lastAction.append_action(setdnd)
lastAction = lastAction.get_next_action()
sps = SimplePresenceStatus(reqcon, uh)
lastAction.append_action(sps)
lastAction = lastAction.get_next_action()
setdnd = SetSimplePresence(reqcon, "available", "")
lastAction.append_action(setdnd)
lastAction = lastAction.get_next_action()
sps = SimplePresenceStatus(reqcon, uh)
lastAction.append_action(sps)
lastAction = lastAction.get_next_action()
if False:
sl = Sleep(10 * 1000)
lastAction.append_action(sl)
lastAction = lastAction.get_next_action()
if False:
rclh = RequestHandle(reqcon, telepathy.HANDLE_TYPE_LIST, ["subscribe"])
lastAction.append_action(rclh)
lastAction = lastAction.get_next_action()
rclc = RequestChannel(
reqcon,
rclh,
telepathy.CHANNEL_TYPE_CONTACT_LIST,
telepathy.HANDLE_TYPE_LIST,
)
lastAction.append_action(rclc)
lastAction = lastAction.get_next_action()
ch = ContactHandles(reqcon, rclc)
lastAction.append_action(ch)
lastAction = lastAction.get_next_action()
ca = Aliases(reqcon, ch)
lastAction.append_action(ca)
lastAction = lastAction.get_next_action()
if True:
accountNumber = sys.argv[4]
enChan = EnsureChannel(reqcon, telepathy.CHANNEL_TYPE_TEXT, telepathy.HANDLE_TYPE_CONTACT, accountNumber)
lastAction.append_action(enChan)
lastAction = lastAction.get_next_action()
sendDebugtext = SendText(reqcon, enChan, enChan, telepathy.CHANNEL_TEXT_MESSAGE_TYPE_NORMAL, "Boo!")
lastAction.append_action(sendDebugtext)
lastAction = lastAction.get_next_action()
if False:
rch = RequestHandle(reqcon, telepathy.HANDLE_TYPE_CONTACT, ["18005558355"]) #(1-800-555-TELL)
lastAction.append_action(rch)
lastAction = lastAction.get_next_action()
# making a phone call
if True:
smHandle = rch
smHandleType = telepathy.HANDLE_TYPE_CONTACT
else:
smHandle = nullHandle
smHandleType = telepathy.HANDLE_TYPE_NONE
rsmc = RequestChannel(
reqcon,
smHandle,
telepathy.CHANNEL_TYPE_STREAMED_MEDIA,
smHandleType,
)
lastAction.append_action(rsmc)
lastAction = lastAction.get_next_action()
if False:
call = Call(reqcon, rsmc, rch)
lastAction.append_action(call)
lastAction = lastAction.get_next_action()
# sending a text
rtc = RequestChannel(
reqcon,
rch,
telepathy.CHANNEL_TYPE_TEXT,
smHandleType,
)
lastAction.append_action(rtc)
lastAction = lastAction.get_next_action()
if True:
closechan = CloseChannel(reqcon, rtc)
lastAction.append_action(closechan)
lastAction = lastAction.get_next_action()
rtc = RequestChannel(
reqcon,
rch,
telepathy.CHANNEL_TYPE_TEXT,
smHandleType,
)
lastAction.append_action(rtc)
lastAction = lastAction.get_next_action()
if False:
sendtext = SendText(reqcon, rtc, rch, telepathy.CHANNEL_TEXT_MESSAGE_TYPE_NORMAL, "Boo!")
lastAction.append_action(sendtext)
lastAction = lastAction.get_next_action()
if False:
bl = Block()
lastAction.append_action(bl)
lastAction = lastAction.get_next_action()
if False:
sl = Sleep(30 * 1000)
lastAction.append_action(sl)
lastAction = lastAction.get_next_action()
dis = Disconnect(reqcon)
lastAction.append_action(dis)
lastAction = lastAction.get_next_action()
quitter = QuitLoop(loop)
lastAction.append_action(quitter)
lastAction = lastAction.get_next_action()
firstAction.queue_action()
loop.run()<|fim▁end|>
|
'bluetooth',
reply_handler = self._on_done,
error_handler = self._on_error,
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod decode;<|fim▁hole|>mod mirror;<|fim▁end|>
|
mod encode;
|
<|file_name|>HumidTemp.js<|end_file_name|><|fim▁begin|>const sensor = require('node-dht-sensor');
const logger = require('../logging/Logger');
/**
* Reads GPIO pin 4 of the Raspberry Pi to obtain temperature and humidity information.
* @return {Promise} A promise that will resolve with the results. In the
* case where there was an error reading, will return a zero filled object,
* with an additional error field.
* { temperature: Number,
* humidity: Number,
* error: Error|undefined }
*/
exports.getHumidityTemperature = function() {
return new Promise( (resolve, reject) => {
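    // node-dht-sensor's read(sensorType, pin, callback): type 22 selects a
    // DHT22/AM2302 sensor, here wired to GPIO pin 4.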
sensor.read(22, 4, (err, temperature, humidity) => {<|fim▁hole|> return resolve({
temperature: 0,
humidity: 0,
error: err});
}
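      // The sensor reports Celsius; convert to Fahrenheit before resolving.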
resolve({
temperature: temperature * 1.8 + 32,
humidity: humidity});
});
});
}<|fim▁end|>
|
if(err) {
logger.error("Could not read from the DHT sensor. " + err);
|
<|file_name|>col.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from flask import (Blueprint, redirect, url_for, render_template, flash,
request, abort, send_file, current_app)
from flask_babel import gettext
from sqlalchemy.orm.exc import NoResultFound
import crypto_util
import store
from db import db_session, Submission
from journalist_app.decorators import login_required
from journalist_app.forms import ReplyForm
from journalist_app.utils import (make_star_true, make_star_false, get_source,
delete_collection, col_download_unread,
col_download_all, col_star, col_un_star,
col_delete)
def make_blueprint(config):
view = Blueprint('col', __name__)
@view.route('/add_star/<filesystem_id>', methods=('POST',))
@login_required
def add_star(filesystem_id):
make_star_true(filesystem_id)
db_session.commit()
return redirect(url_for('main.index'))
@view.route("/remove_star/<filesystem_id>", methods=('POST',))
@login_required
def remove_star(filesystem_id):
make_star_false(filesystem_id)
db_session.commit()
return redirect(url_for('main.index'))
@view.route('/<filesystem_id>')
@login_required
def col(filesystem_id):
form = ReplyForm()
source = get_source(filesystem_id)
source.has_key = crypto_util.getkey(filesystem_id)
return render_template("col.html", filesystem_id=filesystem_id,
source=source, form=form)
@view.route('/delete/<filesystem_id>', methods=('POST',))
@login_required
def delete_single(filesystem_id):
"""deleting a single collection from its /col page"""
source = get_source(filesystem_id)
delete_collection(filesystem_id)
flash(gettext("{source_name}'s collection deleted")
.format(source_name=source.journalist_designation),
"notification")
return redirect(url_for('main.index'))
@view.route('/process', methods=('POST',))
@login_required
def process():
actions = {'download-unread': col_download_unread,
'download-all': col_download_all, 'star': col_star,
'un-star': col_un_star, 'delete': col_delete}
if 'cols_selected' not in request.form:
flash(gettext('No collections selected.'), 'error')
return redirect(url_for('main.index'))
# getlist is cgi.FieldStorage.getlist
cols_selected = request.form.getlist('cols_selected')
action = request.form['action']
if action not in actions:
return abort(500)
method = actions[action]
return method(cols_selected)
@view.route('/<filesystem_id>/<fn>')
@login_required
def download_single_submission(filesystem_id, fn):
"""Sends a client the contents of a single submission."""
if '..' in fn or fn.startswith('/'):
abort(404)
try:
Submission.query.filter(
Submission.filename == fn).one().downloaded = True
db_session.commit()
except NoResultFound as e:
current_app.logger.error(
"Could not mark " + fn + " as downloaded: %s" % (e,))
return send_file(store.path(filesystem_id, fn),
mimetype="application/pgp-encrypted")
<|fim▁hole|> return view<|fim▁end|>
| |
<|file_name|>listProcessSuccessfulUsersTop_jsp.java<|end_file_name|><|fim▁begin|>package com.sapienter.jbilling.client.jspc.user;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import com.sapienter.jbilling.client.util.Constants;
public final class listProcessSuccessfulUsersTop_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
private static final JspFactory _jspxFactory = JspFactory.getDefaultFactory();
private static java.util.List _jspx_dependants;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fhtml_005fmessages_0026_005fmessage_005fid;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fbean_005fwrite_0026_005fname_005fnobody;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fjbilling_005fgenericList_0026_005ftype_005fsetup_005fnobody;
private javax.el.ExpressionFactory _el_expressionfactory;
private org.apache.AnnotationProcessor _jsp_annotationprocessor;
public Object getDependants() {
return _jspx_dependants;
}
public void _jspInit() {
_005fjspx_005ftagPool_005fhtml_005fmessages_0026_005fmessage_005fid = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_005fjspx_005ftagPool_005fbean_005fwrite_0026_005fname_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_005fjspx_005ftagPool_005fjbilling_005fgenericList_0026_005ftype_005fsetup_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
_jsp_annotationprocessor = (org.apache.AnnotationProcessor) getServletConfig().getServletContext().getAttribute(org.apache.AnnotationProcessor.class.getName());
}
public void _jspDestroy() {
_005fjspx_005ftagPool_005fhtml_005fmessages_0026_005fmessage_005fid.release();
_005fjspx_005ftagPool_005fbean_005fwrite_0026_005fname_005fnobody.release();
_005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.release();
_005fjspx_005ftagPool_005fjbilling_005fgenericList_0026_005ftype_005fsetup_005fnobody.release();
}
public void _jspService(HttpServletRequest request, HttpServletResponse response)
throws java.io.IOException, ServletException {
PageContext pageContext = null;
HttpSession session = null;
ServletContext application = null;
ServletConfig config = null;
JspWriter out = null;
Object page = this;
JspWriter _jspx_out = null;
PageContext _jspx_page_context = null;
try {
response.setContentType("text/html");
pageContext = _jspxFactory.getPageContext(this, request, response,
null, true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
out.write("\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n");
out.write("\r\n\r\n");
// html:messages
org.apache.struts.taglib.html.MessagesTag _jspx_th_html_005fmessages_005f0 = (org.apache.struts.taglib.html.MessagesTag) _005fjspx_005ftagPool_005fhtml_005fmessages_0026_005fmessage_005fid.get(org.apache.struts.taglib.html.MessagesTag.class);
_jspx_th_html_005fmessages_005f0.setPageContext(_jspx_page_context);
_jspx_th_html_005fmessages_005f0.setParent(null);
// /user/listProcessSuccessfulUsersTop.jsp(30,0) name = message type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_html_005fmessages_005f0.setMessage("true");
// /user/listProcessSuccessfulUsersTop.jsp(30,0) name = id type = java.lang.String reqTime = false required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_html_005fmessages_005f0.setId("myMessage");
<|fim▁hole|> if (_jspx_eval_html_005fmessages_005f0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
java.lang.String myMessage = null;
if (_jspx_eval_html_005fmessages_005f0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.pushBody();
_jspx_th_html_005fmessages_005f0.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
_jspx_th_html_005fmessages_005f0.doInitBody();
}
myMessage = (java.lang.String) _jspx_page_context.findAttribute("myMessage");
do {
out.write("\r\n\t<p>");
if (_jspx_meth_bean_005fwrite_005f0(_jspx_th_html_005fmessages_005f0, _jspx_page_context))
return;
out.write("</p>\r\n");
int evalDoAfterBody = _jspx_th_html_005fmessages_005f0.doAfterBody();
myMessage = (java.lang.String) _jspx_page_context.findAttribute("myMessage");
if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
break;
} while (true);
if (_jspx_eval_html_005fmessages_005f0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.popBody();
}
}
if (_jspx_th_html_005fmessages_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005fhtml_005fmessages_0026_005fmessage_005fid.reuse(_jspx_th_html_005fmessages_005f0);
return;
}
_005fjspx_005ftagPool_005fhtml_005fmessages_0026_005fmessage_005fid.reuse(_jspx_th_html_005fmessages_005f0);
out.write("\r\n\r\n");
out.write('\r');
out.write('\n');
// bean:define
org.apache.struts.taglib.bean.DefineTag _jspx_th_bean_005fdefine_005f0 = (org.apache.struts.taglib.bean.DefineTag) _005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.get(org.apache.struts.taglib.bean.DefineTag.class);
_jspx_th_bean_005fdefine_005f0.setPageContext(_jspx_page_context);
_jspx_th_bean_005fdefine_005f0.setParent(null);
// /user/listProcessSuccessfulUsersTop.jsp(36,0) name = id type = java.lang.String reqTime = false required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fdefine_005f0.setId("forward_from");
// /user/listProcessSuccessfulUsersTop.jsp(36,0) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fdefine_005f0.setValue(Constants.FORWARD_USER_VIEW);
// /user/listProcessSuccessfulUsersTop.jsp(36,0) name = toScope type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fdefine_005f0.setToScope("session");
int _jspx_eval_bean_005fdefine_005f0 = _jspx_th_bean_005fdefine_005f0.doStartTag();
if (_jspx_th_bean_005fdefine_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.reuse(_jspx_th_bean_005fdefine_005f0);
return;
}
_005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.reuse(_jspx_th_bean_005fdefine_005f0);
java.lang.String forward_from = null;
forward_from = (java.lang.String) _jspx_page_context.findAttribute("forward_from");
out.write("\r\n\r\n");
// bean:define
org.apache.struts.taglib.bean.DefineTag _jspx_th_bean_005fdefine_005f1 = (org.apache.struts.taglib.bean.DefineTag) _005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.get(org.apache.struts.taglib.bean.DefineTag.class);
_jspx_th_bean_005fdefine_005f1.setPageContext(_jspx_page_context);
_jspx_th_bean_005fdefine_005f1.setParent(null);
// /user/listProcessSuccessfulUsersTop.jsp(40,0) name = id type = java.lang.String reqTime = false required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fdefine_005f1.setId("forward_to");
// /user/listProcessSuccessfulUsersTop.jsp(40,0) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fdefine_005f1.setValue(Constants.FORWARD_USER_VIEW);
// /user/listProcessSuccessfulUsersTop.jsp(40,0) name = toScope type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fdefine_005f1.setToScope("session");
int _jspx_eval_bean_005fdefine_005f1 = _jspx_th_bean_005fdefine_005f1.doStartTag();
if (_jspx_th_bean_005fdefine_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.reuse(_jspx_th_bean_005fdefine_005f1);
return;
}
_005fjspx_005ftagPool_005fbean_005fdefine_0026_005fvalue_005ftoScope_005fid_005fnobody.reuse(_jspx_th_bean_005fdefine_005f1);
java.lang.String forward_to = null;
forward_to = (java.lang.String) _jspx_page_context.findAttribute("forward_to");
out.write("\r\n\r\n");
// jbilling:genericList
com.sapienter.jbilling.client.list.GenericListTag _jspx_th_jbilling_005fgenericList_005f0 = (com.sapienter.jbilling.client.list.GenericListTag) _005fjspx_005ftagPool_005fjbilling_005fgenericList_0026_005ftype_005fsetup_005fnobody.get(com.sapienter.jbilling.client.list.GenericListTag.class);
_jspx_th_jbilling_005fgenericList_005f0.setPageContext(_jspx_page_context);
_jspx_th_jbilling_005fgenericList_005f0.setParent(null);
// /user/listProcessSuccessfulUsersTop.jsp(44,0) name = setup type = java.lang.Boolean reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_jbilling_005fgenericList_005f0.setSetup(new Boolean(true));
// /user/listProcessSuccessfulUsersTop.jsp(44,0) name = type type = java.lang.String reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_jbilling_005fgenericList_005f0.setType(Constants.LIST_TYPE_PROCESS_RUN_SUCCESSFULL_USERS);
int _jspx_eval_jbilling_005fgenericList_005f0 = _jspx_th_jbilling_005fgenericList_005f0.doStartTag();
if (_jspx_th_jbilling_005fgenericList_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005fjbilling_005fgenericList_0026_005ftype_005fsetup_005fnobody.reuse(_jspx_th_jbilling_005fgenericList_005f0);
return;
}
_005fjspx_005ftagPool_005fjbilling_005fgenericList_0026_005ftype_005fsetup_005fnobody.reuse(_jspx_th_jbilling_005fgenericList_005f0);
out.write(" \r\n \r\n");
} catch (Throwable t) {
if (!(t instanceof SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
try { out.clearBuffer(); } catch (java.io.IOException e) {}
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
}
} finally {
_jspxFactory.releasePageContext(_jspx_page_context);
}
}
private boolean _jspx_meth_bean_005fwrite_005f0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_005fmessages_005f0, PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// bean:write
org.apache.struts.taglib.bean.WriteTag _jspx_th_bean_005fwrite_005f0 = (org.apache.struts.taglib.bean.WriteTag) _005fjspx_005ftagPool_005fbean_005fwrite_0026_005fname_005fnobody.get(org.apache.struts.taglib.bean.WriteTag.class);
_jspx_th_bean_005fwrite_005f0.setPageContext(_jspx_page_context);
_jspx_th_bean_005fwrite_005f0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_005fmessages_005f0);
// /user/listProcessSuccessfulUsersTop.jsp(31,4) name = name type = null reqTime = true required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_bean_005fwrite_005f0.setName("myMessage");
int _jspx_eval_bean_005fwrite_005f0 = _jspx_th_bean_005fwrite_005f0.doStartTag();
if (_jspx_th_bean_005fwrite_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005fbean_005fwrite_0026_005fname_005fnobody.reuse(_jspx_th_bean_005fwrite_005f0);
return true;
}
_005fjspx_005ftagPool_005fbean_005fwrite_0026_005fname_005fnobody.reuse(_jspx_th_bean_005fwrite_005f0);
return false;
}
}<|fim▁end|>
|
int _jspx_eval_html_005fmessages_005f0 = _jspx_th_html_005fmessages_005f0.doStartTag();
|
<|file_name|>yaxis.js<|end_file_name|><|fim▁begin|>const alter = require('../lib/alter.js');
const Chainable = require('../lib/classes/chainable');
module.exports = new Chainable('yaxis', {
args: [
{
name: 'inputSeries',
types: ['seriesList']
},
{
name: 'yaxis',
types: ['number', 'null'],
help: 'The numbered y-axis to plot this series on, eg .yaxis(2) for a 2nd y-axis.'
},
{
name: 'min',
types: ['number', 'null'],
help: 'Min value'
},
{
name: 'max',
types: ['number', 'null'],
help: 'Max value'
},
{
name: 'position',
types: ['string', 'null'],
help: 'left or right'
},
{
name: 'label',
types: ['string', 'null'],
help: 'Label for axis'
},
{
name: 'color',
types: ['string', 'null'],
help: 'Color of axis label'
},
],
help: 'Configures a variety of y-axis options, the most important likely being the ability to add an Nth (eg 2nd) y-axis',
fn: function yaxisFn(args) {
return alter(args, function (eachSeries, yaxis, min, max, position, label, color) {
yaxis = yaxis || 1;
eachSeries.yaxis = yaxis;<|fim▁hole|>
eachSeries._global.yaxes = eachSeries._global.yaxes || [];
eachSeries._global.yaxes[yaxis - 1] = eachSeries._global.yaxes[yaxis - 1] || {};
const myAxis = eachSeries._global.yaxes[yaxis - 1];
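      // Default placement alternates: odd-numbered axes go on the left, even-numbered on the right.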
myAxis.position = position || (yaxis % 2 ? 'left' : 'right');
myAxis.min = min;
myAxis.max = max;
myAxis.axisLabelFontSizePixels = 11;
myAxis.axisLabel = label;
myAxis.axisLabelColour = color;
myAxis.axisLabelUseCanvas = true;
return eachSeries;
});
}
});<|fim▁end|>
|
eachSeries._global = eachSeries._global || {};
|
<|file_name|>express.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var express = require('express'),
consolidate = require('consolidate'),
mongoStore = require('connect-mongo')(express),
flash = require('connect-flash'),
helpers = require('view-helpers'),
assetmanager = require('assetmanager'),
config = require('../config/config');
var console = require('../utils/easy-logger')('express.js');
module.exports = function(app, passport, db) {
app.set('showStackError', true);
// Prettify HTML
app.locals.pretty = true;
// cache=memory or swig dies in NODE_ENV=production
app.locals.cache = 'memory';
// Should be placed before express.static
  // To ensure that all assets and data are compressed (saves bandwidth)
app.use(express.compress({
filter: function(req, res) {
return (/json|text|javascript|css/).test(res.getHeader('Content-Type'));
},
    // Levels are specified in a range of 0 to 9, where 0 is
    // no compression and 9 is best compression, but slowest
level: 9
}));
// Only use logger for development environment
if (process.env.NODE_ENV === 'development') {
app.use(express.logger('dev'));
}
// assign the template engine to .html files
app.engine('html', consolidate[config.templateEngine]);
// set .html as the default extension
app.set('view engine', 'html');
// Set views path, template engine and default layout
app.set('views', config.root + '/odin-front/app/views');
// Enable jsonp
app.enable('jsonp callback');
app.configure(function() {
// The cookieParser should be above session
app.use(express.cookieParser());
// Request body parsing middleware should be above methodOverride
app.use(express.urlencoded());
app.use(express.json());
app.use(express.methodOverride());
// Import your asset file
var assets = require('./../config/assets.json');
assetmanager.init({
js: assets.js,
css: assets.css,
debug: (process.env.NODE_ENV !== 'production'),
webroot: 'odin-front/public'
});
// Add assets to local variables
app.use(function (req, res, next) {
res.locals({
assets: assetmanager.assets
});
next();
});
// Express/Mongo session storage
app.use(express.session({
secret: config.sessionSecret,
store: new mongoStore({
db: db.connection.db,
collection: config.sessionCollection
})
}));
// Dynamic helpers
app.use(helpers(config.app.name));
// Use passport session
app.use(passport.initialize());
app.use(passport.session());
// Connect flash for flash messages
app.use(flash());
// Routes should be at the last
app.use(app.router);
// Setting the fav icon and static folder
app.use(express.favicon());
app.use(express.static(config.root + '/odin-front/public'));
    // Assume "not found" in the error msgs is a 404. This is somewhat
// silly, but valid, you can do whatever you like, set properties,
// use instanceof etc.
app.use(function(err, req, res, next) {
// Treat as 404
if (~err.message.indexOf('not found')) return next();
<|fim▁hole|> // Error page
res.status(500).render('500', {
error: err.stack
});
});
// Assume 404 since no middleware responded
app.use(function(req, res) {
res.status(404).render('404', {
url: req.originalUrl,
error: 'Not found'
});
});
});
};<|fim▁end|>
|
// Log it
console.error(err.stack);
|
<|file_name|>slackbuild.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# slackbuild.py file is part of slpkg.
# Copyright 2014-2015 Dimitris Zlatanidis <[email protected]>
# All rights reserved.
# Slpkg is a user-friendly package manager for Slackware installations
# https://github.com/dslackw/slpkg
# Slpkg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from slpkg.utils import Utils
from slpkg.messages import Msg
from slpkg.toolbar import status
from slpkg.log_deps import write_deps
from slpkg.blacklist import BlackList
from slpkg.downloader import Download
from slpkg.__metadata__ import MetaData as _meta_
from slpkg.pkg.find import find_package
from slpkg.pkg.build import BuildPackage
from slpkg.pkg.manager import PackageManager
from slpkg.pkg.installed import GetFromInstalled
from slpkg.sbo.greps import SBoGrep
from slpkg.sbo.remove import delete
from slpkg.sbo.sbo_arch import SBoArch
from slpkg.sbo.compressed import SBoLink
from slpkg.sbo.dependency import Requires
from slpkg.sbo.search import sbo_search_pkg
from slpkg.sbo.slack_find import slack_package
class SBoInstall(object):
"""Build and install SBo packages with all dependencies
"""
def __init__(self, slackbuilds, flag):
self.slackbuilds = slackbuilds
self.flag = flag
self.meta = _meta_
self.msg = Msg()
self.arch = SBoArch().get()
self.build_folder = self.meta.build_path
for fl in self.flag:
if fl.startswith("--directory-prefix="):
self.build_folder = fl.split("=")[1]
if not self.build_folder.endswith("/"):
self.build_folder += "/"
self.unst = ["UNSUPPORTED", "UNTESTED"]
self.master_packages = []
self.deps = []
self.dependencies = []
self.package_not_found = []
self.package_found = []
self.deps_dict = {}
self.answer = ""
self.match = False
self.count_ins = 0
self.count_upg = 0
self.count_uni = 0
self.msg.reading()
self.data = SBoGrep(name="").names()
self.blacklist = BlackList().packages(pkgs=self.data, repo="sbo")
def start(self, if_upgrade):
"""Start view, build and install SBo packages
"""
tagc = ""
self.if_upgrade = if_upgrade
self.case_insensitive()
for _sbo in self.slackbuilds:
status(0.03)
if _sbo in self.data and _sbo not in self.blacklist:
sbo_deps = Requires(self.flag).sbo(_sbo)
self.deps += sbo_deps
self.deps_dict[_sbo] = self.one_for_all(sbo_deps)
self.package_found.append(_sbo)
else:
self.package_not_found.append(_sbo)
self.update_deps()
if not self.package_found:
self.match = True
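            # Nothing matched exactly; fall back to substring matching.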
self.matching()
self.master_packages, mas_src = self.sbo_version_source(
self.package_found)
self.msg.done()
if (self.meta.rsl_deps in ["on", "ON"] and
self.flag != "--resolve-off" and not self.match):
self.msg.resolving()
self.dependencies, dep_src = self.sbo_version_source(
self.one_for_all(self.deps))
if (self.meta.rsl_deps in ["on", "ON"] and
self.flag != "--resolve-off" and not self.match):
self.msg.done()
self.clear_masters()
if self.package_found:
print("\nThe following packages will be automatically "
"installed or upgraded \nwith new version:\n")
self.top_view()
self.msg.upg_inst(self.if_upgrade)
# view master packages
for sbo, arch in zip(self.master_packages, mas_src):
tagc = self.tag(sbo)
name = "-".join(sbo.split("-")[:-1])
self.view_packages(tagc, name, sbo.split("-")[-1],
self.select_arch(arch))
self.view_installing_for_deps()
# view dependencies
for dep, arch in zip(self.dependencies, dep_src):
tagc = self.tag(dep)
name = "-".join(dep.split("-")[:-1])
self.view_packages(tagc, name, dep.split("-")[-1],
self.select_arch(arch))
count_total = sum([self.count_ins, self.count_upg,
self.count_uni])
print("\nInstalling summary")
print("=" * 79)
print("{0}Total {1} {2}.".format(
self.meta.color["GREY"], count_total,
self.msg.pkg(count_total)))
print("{0} {1} will be installed, {2} already installed and "
"{3} {4}".format(self.count_uni,
self.msg.pkg(self.count_uni),
self.count_ins, self.count_upg,
self.msg.pkg(self.count_upg)))
print("will be upgraded.{0}\n".format(self.meta.color["ENDC"]))
self.continue_to_install()
else:
self.msg.not_found(self.if_upgrade)<|fim▁hole|>
def case_insensitive(self):
"""Matching packages distinguish between uppercase and
lowercase
"""
if "--case-ins" in self.flag:
data_dict = Utils().case_sensitive(self.data)
for name in self.slackbuilds:
index = self.slackbuilds.index(name)
for key, value in data_dict.iteritems():
if key == name.lower():
self.slackbuilds[index] = value
def update_deps(self):
"""Update dependencies dictionary with all package
"""
onelist, dependencies = [], []
onelist = Utils().dimensional_list(self.deps)
dependencies = Utils().remove_dbs(onelist)
for dep in dependencies:
deps = Requires(self.flag).sbo(dep)
self.deps_dict[dep] = self.one_for_all(deps)
def continue_to_install(self):
"""Continue to install ?
"""
if (self.count_uni > 0 or self.count_upg > 0 or
"--download-only" in self.flag):
if self.master_packages and self.msg.answer() in ["y", "Y"]:
installs, upgraded = self.build_install()
if "--download-only" in self.flag:
raise SystemExit()
self.msg.reference(installs, upgraded)
write_deps(self.deps_dict)
delete(self.build_folder)
def view_installing_for_deps(self):
"""View installing message for dependencies
"""
if not self.match and self.dependencies:
print("Installing for dependencies:")
def clear_masters(self):
"""Clear master slackbuilds if already exist in dependencies
or if added to install two or more times
"""
self.master_packages = Utils().remove_dbs(self.master_packages)
        # Iterate over a copy: removing items while iterating the same list skips elements.
        for mas in self.master_packages[:]:
            if mas in self.dependencies:
                self.master_packages.remove(mas)
def matching(self):
"""Return found matching SBo packages
"""
for sbo in self.package_not_found:
for pkg in self.data:
if sbo in pkg and pkg not in self.blacklist:
self.package_found.append(pkg)
def sbo_version_source(self, slackbuilds):
"""Create sbo name with version
"""
sbo_versions, sources = [], []
for sbo in slackbuilds:
status(0.02)
sbo_ver = "{0}-{1}".format(sbo, SBoGrep(sbo).version())
sbo_versions.append(sbo_ver)
sources.append(SBoGrep(sbo).source())
return [sbo_versions, sources]
def one_for_all(self, deps):
"""Because there are dependencies that depend on other
dependencies are created lists into other lists.
Thus creating this loop create one-dimensional list and
remove double packages from dependencies.
"""
requires, dependencies = [], []
deps.reverse()
        # Reversing the list puts the dependencies in installation order.
requires = Utils().dimensional_list(deps)
dependencies = Utils().remove_dbs(requires)
return dependencies
def top_view(self):
"""View top template
"""
self.msg.template(78)
print("{0}{1}{2}{3}{4}{5}{6}{7}{8}{9}{10}".format(
"| Package", " " * 17,
"New version", " " * 8,
"Arch", " " * 4,
"Build", " " * 2,
"Repos", " " * 10,
"Size"))
self.msg.template(78)
def view_packages(self, *args):
""":View slackbuild packages with version and arch
args[0] package color
args[1] package
args[2] version
args[3] arch
"""
ver = GetFromInstalled(args[1]).version()
print(" {0}{1}{2}{3} {4}{5} {6}{7}{8}{9}{10}{11:>11}{12}".format(
args[0], args[1] + ver, self.meta.color["ENDC"],
" " * (23-len(args[1] + ver)), args[2],
" " * (18-len(args[2])), args[3],
" " * (15-len(args[3])), "",
"", "SBo", "", "")).rstrip()
def tag(self, sbo):
"""Tag with color green if package already installed,
color yellow for packages to upgrade and color red
if not installed.
"""
# split sbo name with version and get name
sbo_name = "-".join(sbo.split("-")[:-1])
find = GetFromInstalled(sbo_name).name()
if find_package(sbo, self.meta.pkg_path):
paint = self.meta.color["GREEN"]
self.count_ins += 1
elif sbo_name == find:
paint = self.meta.color["YELLOW"]
self.count_upg += 1
else:
paint = self.meta.color["RED"]
self.count_uni += 1
return paint
def select_arch(self, src):
"""Looks if sources unsupported or untested
from arch else select arch.
"""
arch = self.arch
for item in self.unst:
if item in src:
arch = item
return arch
def filenames(self, sources):
"""Return filenames from sources links
"""
filename = []
for src in sources:
filename.append(src.split("/")[-1])
return filename
def build_install(self):
"""Build and install packages if not already installed
"""
slackbuilds = self.dependencies + self.master_packages
        installs, upgraded = [], []
if not os.path.exists(self.build_folder):
os.makedirs(self.build_folder)
os.chdir(self.build_folder)
for prgnam in slackbuilds:
pkg = "-".join(prgnam.split("-")[:-1])
installed = "".join(find_package(prgnam, self.meta.pkg_path))
src_link = SBoGrep(pkg).source().split()
if installed and "--download-only" not in self.flag:
self.msg.template(78)
self.msg.pkg_found(prgnam)
self.msg.template(78)
elif self.unst[0] in src_link or self.unst[1] in src_link:
self.msg.template(78)
print("| Package {0} {1}{2}{3}".format(
prgnam, self.meta.color["RED"], "".join(src_link),
self.meta.color["ENDC"]))
self.msg.template(78)
else:
sbo_url = sbo_search_pkg(pkg)
sbo_link = SBoLink(sbo_url).tar_gz()
script = sbo_link.split("/")[-1]
dwn_srcs = sbo_link.split() + src_link
Download(self.build_folder, dwn_srcs, repo="sbo").start()
if "--download-only" in self.flag:
continue
sources = self.filenames(src_link)
BuildPackage(script, sources, self.build_folder,
auto=False).build()
binary = slack_package(prgnam)
if GetFromInstalled(pkg).name() == pkg:
print("[ {0}Upgrading{1} ] --> {2}".format(
self.meta.color["YELLOW"],
self.meta.color["ENDC"], prgnam))
upgraded.append(prgnam)
else:
print("[ {0}Installing{1} ] --> {2}".format(
self.meta.color["GREEN"],
self.meta.color["ENDC"], prgnam))
installs.append(prgnam)
PackageManager(binary).upgrade(flag="--install-new")
return installs, upgraded<|fim▁end|>
| |
<|file_name|>InPlaceEdit.js<|end_file_name|><|fim▁begin|>YUI.add("inputex-inplaceedit", function(Y){
var lang = Y.Lang;//, Event = YAHOO.util.Event, Dom = YAHOO.util.Dom, CSS_PREFIX = 'inputEx-InPlaceEdit-';
/**
* Meta field providing in place editing (the editor appears when you click on the formatted value).
* @class inputEx.InPlaceEdit
* @extends inputEx.Field
* @constructor
* @param {Object} options Added options:
* <ul>
* <li>visu</li><|fim▁hole|> * <li>editorField</li>
* <li>animColors</li>
* </ul>
*/
inputEx.InPlaceEdit = function(options) {
inputEx.InPlaceEdit.superclass.constructor.call(this, options);
this.publish('openEditor');
this.publish('closeEditor');
};
lang.extend(inputEx.InPlaceEdit, inputEx.Field, {
/**
* Set the default values of the options
* @param {Object} options Options object as passed to the constructor
*/
setOptions: function(options) {
inputEx.InPlaceEdit.superclass.setOptions.call(this, options);
this.options.visu = options.visu;
this.options.editorField = options.editorField;
//this.options.buttonTypes = options.buttonTypes || {ok:"submit",cancel:"link"};
this.options.buttonConfigs = options.buttonConfigs || [{
type: "submit",
value: inputEx.messages.okEditor,
className: "inputEx-Button "+CSS_PREFIX+'OkButton',
onClick: {fn: this.onOkEditor, scope:this}
},{
type: "link",
value: inputEx.messages.cancelEditor,
className: "inputEx-Button "+CSS_PREFIX+'CancelLink',
onClick: {fn: this.onCancelEditor, scope:this}
}];
this.options.animColors = options.animColors || null;
},
/**
* Override renderComponent to create 2 divs: the visualization one, and the edit in place form
*/
renderComponent: function() {
this.renderVisuDiv();
this.renderEditor();
},
/**
* Render the editor
*/
renderEditor: function() {
this.editorContainer = inputEx.cn('div', {className: CSS_PREFIX+'editor'}, {display: 'none'});
// Render the editor field
this.editorField = inputEx(this.options.editorField,this);
var editorFieldEl = this.editorField.getEl();
this.editorContainer.appendChild( editorFieldEl );
Y.one(editorFieldEl).addClass(CSS_PREFIX+'editorDiv');
this.buttons = [];
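      // Instantiate the Ok/Cancel buttons from the (overridable) buttonConfigs list.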
for (var i = 0; i < this.options.buttonConfigs.length ; i++){
var config = this.options.buttonConfigs[i];
config.parentEl = this.editorContainer;
this.buttons.push(new inputEx.widget.Button(config));
}
      // Line breaker (clears the floated buttons)
this.editorContainer.appendChild( inputEx.cn('div',null, {clear: 'both'}) );
this.fieldContainer.appendChild(this.editorContainer);
},
/**
* Set the color when hovering the field
* @param {Event} e The original mouseover event
*/
onVisuMouseOver: function(e) {
// to totally disable the visual effect on mouse enter, you should change css options inputEx-InPlaceEdit-visu:hover
if(this.disabled) return;
if(this.colorAnim) {
this.colorAnim.stop(true);
}
inputEx.sn(this.formattedContainer, null, {backgroundColor: this.options.animColors.from });
},
/**
* Start the color animation when hovering the field
* @param {Event} e The original mouseout event
*/
onVisuMouseOut: function(e) {
var optionsAnim;
if(this.disabled) return;
// Start animation
if(this.colorAnim) {
this.colorAnim.stop(true);
}
if(!this.options.animColors) return;
      optionsAnim = {
         node: Y.one(this.formattedContainer)
      };
if(this.options.animColors.from){
optionsAnim.from = {
backgroundColor : this.options.animColors.from
}
}
      if(this.options.animColors.to){
optionsAnim.to = {
backgroundColor : this.options.animColors.to
}
}
this.colorAnim = new Y.Anim(optionsAnim);
      this.colorAnim.on("end", function() {
         Y.one(this.formattedContainer).setStyle('background-color', '');
      }, this); // pass the field as context; otherwise `this` is the Anim instance
this.colorAnim.run();
},
/**
* Create the div that will contain the visualization of the value
*/
renderVisuDiv: function() {
this.formattedContainer = inputEx.cn('div', {className: 'inputEx-InPlaceEdit-visu'});
if( lang.isFunction(this.options.formatDom) ) {
this.formattedContainer.appendChild( this.options.formatDom(this.options.value) );
}
else if( lang.isFunction(this.options.formatValue) ) {
this.formattedContainer.innerHTML = this.options.formatValue(this.options.value);
}
else {
this.formattedContainer.innerHTML = lang.isUndefined(this.options.value) ? inputEx.messages.emptyInPlaceEdit: this.options.value;
}
this.fieldContainer.appendChild(this.formattedContainer);
},
/**
* Adds the events for the editor and color animations
*/
initEvents: function() {
Y.one(this.formattedContainer).on("click", this.openEditor, this, true);
// For color animation (if specified)
if (this.options.animColors) {
Y.one(this.formattedContainer).on('mouseover', this.onVisuMouseOver, this);
Y.one(this.formattedContainer).on('mouseout', this.onVisuMouseOut, this);
}
if(this.editorField.el) {
var that = this;
// Register some listeners
Y.on("key", function(){ that.onKeyUp },"#"+Y.one(this.editorField.el).get("id"),"up:");
Y.on("key", function(){ that.onKeyDown },"#"+Y.one(this.editorField.el).get("id"),"down:" );
}
},
/**
* Handle some keys events to close the editor
* @param {Event} e The original keyup event
*/
onKeyUp: function(e) {
// Enter
if( e.keyCode == 13) {
this.onOkEditor(e);
}
// Escape
if( e.keyCode == 27) {
this.onCancelEditor(e);
}
},
/**
* Handle the tabulation key to close the editor
* @param {Event} e The original keydown event
*/
onKeyDown: function(e) {
// Tab
if(e.keyCode == 9) {
this.onOkEditor(e);
}
},
/**
* Validate the editor (ok button, enter key or tabulation key)
*/
onOkEditor: function(e) {
e.halt();
var newValue = this.editorField.getValue();
this.setValue(newValue);
this.closeEditor();
var that = this;
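      // Fire "updated" asynchronously so listeners run after the editor has closed.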
setTimeout(function() {that.fire("updated",newValue);}, 50);
},
/**
* Close the editor on cancel (cancel button, blur event or escape key)
* @param {Event} e The original event (click, blur or keydown)
*/
onCancelEditor: function(e) {
e.halt();
this.closeEditor();
},
/**
* Close the editor on cancel (cancel button, blur event or escape key)
* @param {Event} e The original event (click, blur or keydown)
*/
closeEditor: function() {
this.editorContainer.style.display = 'none';
this.formattedContainer.style.display = '';
this.fire("closeEditor")
},
/**
* Override enable to Enable openEditor
*/
enable: function(){
this.disabled = false;
inputEx.sn(this.formattedContainer, {className: 'inputEx-InPlaceEdit-visu'});
},
/**
* Override disable to Disable openEditor
*/
disable: function(){
this.disabled = true;
inputEx.sn(this.formattedContainer, {className: 'inputEx-InPlaceEdit-visu-disable'});
},
/**
* Display the editor
*/
openEditor: function() {
if(this.disabled) return;
var value = this.getValue();
this.editorContainer.style.display = '';
this.formattedContainer.style.display = 'none';
if(!lang.isUndefined(value)) {
this.editorField.setValue(value);
}
// Set focus in the element !
this.editorField.focus();
// Select the content
if(this.editorField.el && lang.isFunction(this.editorField.el.setSelectionRange) && (!!value && !!value.length)) {
this.editorField.el.setSelectionRange(0,value.length);
}
this.fire("openEditor");
},
/**
* Returned the previously stored value
* @return {Any} The value of the subfield
*/
getValue: function() {
var editorOpened = (this.editorContainer.style.display == '');
return editorOpened ? this.editorField.getValue() : this.value;
},
/**
* Set the value and update the display
* @param {Any} value The value to set
* @param {boolean} [sendUpdatedEvt] (optional) Wether this setValue should fire the updatedEvt or not (default is true, pass false to NOT send the event)
*/
setValue: function(value, sendUpdatedEvt) {
// Store the value
this.value = value;
if(lang.isUndefined(value) || value == "") {
inputEx.renderVisu(this.options.visu, inputEx.messages.emptyInPlaceEdit, this.formattedContainer);
}
else {
inputEx.renderVisu(this.options.visu, this.value, this.formattedContainer);
}
// If the editor is opened, update it
if(this.editorContainer.style.display == '') {
this.editorField.setValue(value);
}
inputEx.InPlaceEdit.superclass.setValue.call(this, value, sendUpdatedEvt);
},
/**
* Close the editor when calling the close function on this field
*/
close: function() {
this.editorContainer.style.display = 'none';
this.formattedContainer.style.display = '';
this.fire("openEditor");
}
});
inputEx.messages.emptyInPlaceEdit = "(click to edit)";
inputEx.messages.cancelEditor = "cancel";
inputEx.messages.okEditor = "Ok";
// Register this class as "inplaceedit" type
inputEx.registerType("inplaceedit", inputEx.InPlaceEdit, [
{ type:'type', label: 'Editor', name: 'editorField'}
]);
}, '0.1.1', {
requires:["anim","inputex-field","inputex-button"]
})<|fim▁end|>
| |
<|file_name|>15.2.3.6-3-29.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 Ecma International. All rights reserved.
// Ecma International makes this code available under the terms and conditions set
// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
// "Use Terms"). Any redistribution of this code must retain the above
// copyright and this notice and otherwise comply with the Use Terms.
/*---
es5id: 15.2.3.6-3-29
description: >
Object.defineProperty - 'enumerable' property in 'Attributes' is
own accessor property that overrides an inherited accessor
property (8.10.5 step 3.a)
includes: [runTestCase.js]
---*/
function testcase() {
var obj = {};
var accessed = false;
var proto = {};
Object.defineProperty(proto, "enumerable", {
get: function () {
return false;<|fim▁hole|> }
});
var ConstructFun = function () { };
ConstructFun.prototype = proto;
var child = new ConstructFun();
Object.defineProperty(child, "enumerable", {
get: function () {
return true;
}
});
Object.defineProperty(obj, "property", child);
for (var prop in obj) {
if (prop === "property") {
accessed = true;
}
}
return accessed;
}
runTestCase(testcase);<|fim▁end|>
| |
<|file_name|>object.js<|end_file_name|><|fim▁begin|>/**
* 在球场
* zaiqiuchang.com
*/
const initialState = {
users: {},
posts: {},
courts: {},
files: {},
userStats: {},
postStats: {},
courtStats: {},<|fim▁hole|> fileStats: {}
}
export default (state = initialState, action) => {
if (action.type === 'CACHE_OBJECTS') {
let newState = Object.assign({}, state)
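    // Shallow-merge every recognised cache section from the action;
    // keys outside the state shape (e.g. `type`) are skipped by the check below.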
for (let [k, v] of Object.entries(action)) {
if (newState[k] === undefined) {
continue
}
newState[k] = Object.assign({}, newState[k], v)
}
return newState
} else if (action.type === 'RESET' || action.type === 'RESET_OBJECT_CACHE') {
return initialState
} else {
return state
}
}<|fim▁end|>
| |
<|file_name|>validation.go<|end_file_name|><|fim▁begin|>package validation
import (
"fmt"
"net"
"net/url"
"os"
"strings"
"github.com/go-ldap/ldap"
"github.com/spf13/pflag"
kvalidation "k8s.io/kubernetes/pkg/api/validation"
"k8s.io/kubernetes/pkg/util/fielderrors"
"k8s.io/kubernetes/pkg/util/sets"
"github.com/openshift/origin/pkg/auth/ldaputil"
"github.com/openshift/origin/pkg/cmd/server/api"
cmdflags "github.com/openshift/origin/pkg/cmd/util/flags"
)
func ValidateHostPort(value string, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
if len(value) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired(field))
} else if _, _, err := net.SplitHostPort(value); err != nil {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, value, "must be a host:port"))
}
return allErrs
}
func ValidateCertInfo(certInfo api.CertInfo, required bool) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
if required || len(certInfo.CertFile) > 0 || len(certInfo.KeyFile) > 0 {
if len(certInfo.CertFile) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired("certFile"))
}
if len(certInfo.KeyFile) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired("keyFile"))
}
}
if len(certInfo.CertFile) > 0 {
allErrs = append(allErrs, ValidateFile(certInfo.CertFile, "certFile")...)
}
if len(certInfo.KeyFile) > 0 {
allErrs = append(allErrs, ValidateFile(certInfo.KeyFile, "keyFile")...)
}
return allErrs
}
func ValidateServingInfo(info api.ServingInfo) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
allErrs = append(allErrs, ValidateHostPort(info.BindAddress, "bindAddress")...)
allErrs = append(allErrs, ValidateCertInfo(info.ServerCert, false)...)
switch info.BindNetwork {
case "tcp", "tcp4", "tcp6":
default:
allErrs = append(allErrs, fielderrors.NewFieldInvalid("bindNetwork", info.BindNetwork, "must be 'tcp', 'tcp4', or 'tcp6'"))
}
if len(info.ServerCert.CertFile) > 0 {
if len(info.ClientCA) > 0 {
allErrs = append(allErrs, ValidateFile(info.ClientCA, "clientCA")...)
}
} else {
if len(info.ClientCA) > 0 {
allErrs = append(allErrs, fielderrors.NewFieldInvalid("clientCA", info.ClientCA, "cannot specify a clientCA without a certFile"))
}
}
return allErrs
}
func ValidateHTTPServingInfo(info api.HTTPServingInfo) fielderrors.ValidationErrorList {
allErrs := ValidateServingInfo(info.ServingInfo)
if info.MaxRequestsInFlight < 0 {
allErrs = append(allErrs, fielderrors.NewFieldInvalid("maxRequestsInFlight", info.MaxRequestsInFlight, "must be zero (no limit) or greater"))
}
if info.RequestTimeoutSeconds < -1 {
allErrs = append(allErrs, fielderrors.NewFieldInvalid("requestTimeoutSeconds", info.RequestTimeoutSeconds, "must be -1 (no timeout), 0 (default timeout), or greater"))
}
return allErrs
}
func ValidateDisabledFeatures(disabledFeatures []string, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
known := sets.NewString()
for _, feature := range api.KnownOpenShiftFeatures {
known.Insert(strings.ToLower(feature))
}
for i, feature := range disabledFeatures {
if !known.Has(strings.ToLower(feature)) {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(fmt.Sprintf("%s[%d]", field, i), disabledFeatures[i], fmt.Sprintf("not one of valid features: %s", strings.Join(api.KnownOpenShiftFeatures, ", "))))
}
}
return allErrs
}
func ValidateKubeConfig(path string, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
allErrs = append(allErrs, ValidateFile(path, field)...)
// TODO: load and parse
return allErrs
}
func ValidateRemoteConnectionInfo(remoteConnectionInfo api.RemoteConnectionInfo) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
if len(remoteConnectionInfo.URL) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired("url"))
} else {
_, urlErrs := ValidateURL(remoteConnectionInfo.URL, "url")
allErrs = append(allErrs, urlErrs...)
}
if len(remoteConnectionInfo.CA) > 0 {
allErrs = append(allErrs, ValidateFile(remoteConnectionInfo.CA, "ca")...)
}
allErrs = append(allErrs, ValidateCertInfo(remoteConnectionInfo.ClientCert, false)...)
return allErrs
}
func ValidatePodManifestConfig(podManifestConfig *api.PodManifestConfig) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
// the Path can be a file or a directory
allErrs = append(allErrs, ValidateFile(podManifestConfig.Path, "path")...)
if podManifestConfig.FileCheckIntervalSeconds < 1 {
allErrs = append(allErrs, fielderrors.NewFieldInvalid("fileCheckIntervalSeconds", podManifestConfig.FileCheckIntervalSeconds, "interval has to be positive"))
}
return allErrs
}
func ValidateSpecifiedIP(ipString string, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
ip := net.ParseIP(ipString)
if ip == nil {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, ipString, "must be a valid IP"))
} else if ip.IsUnspecified() {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, ipString, "cannot be an unspecified IP"))
}
return allErrs
}
func ValidateSecureURL(urlString string, field string) (*url.URL, fielderrors.ValidationErrorList) {
url, urlErrs := ValidateURL(urlString, field)
if len(urlErrs) == 0 && url.Scheme != "https" {
urlErrs = append(urlErrs, fielderrors.NewFieldInvalid(field, urlString, "must use https scheme"))
}
return url, urlErrs
}
func ValidateURL(urlString string, field string) (*url.URL, fielderrors.ValidationErrorList) {
allErrs := fielderrors.ValidationErrorList{}
urlObj, err := url.Parse(urlString)
if err != nil {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, urlString, "must be a valid URL"))
return nil, allErrs
}
if len(urlObj.Scheme) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, urlString, "must contain a scheme (e.g. https://)"))
}
if len(urlObj.Host) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, urlString, "must contain a host"))
}
return urlObj, allErrs
}
func ValidateNamespace(namespace, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
if len(namespace) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired(field))
} else if ok, _ := kvalidation.ValidateNamespaceName(namespace, false); !ok {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, namespace, "must be a valid namespace"))
}
return allErrs
}
func ValidateFile(path string, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
if len(path) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired(field))
} else if _, err := os.Stat(path); err != nil {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, path, "could not read file"))
}
return allErrs
}
func ValidateDir(path string, field string) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
if len(path) == 0 {
allErrs = append(allErrs, fielderrors.NewFieldRequired(field))
} else {
fileInfo, err := os.Stat(path)
if err != nil {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, path, "could not read info"))
} else if !fileInfo.IsDir() {
allErrs = append(allErrs, fielderrors.NewFieldInvalid(field, path, "not a directory"))
}
}
return allErrs
}
func ValidateExtendedArguments(config api.ExtendedArguments, flagFunc func(*pflag.FlagSet)) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
// check extended arguments for errors
for _, err := range cmdflags.Resolve(config, flagFunc) {
switch t := err.(type) {
case *fielderrors.ValidationError:
allErrs = append(allErrs, t)
default:
allErrs = append(allErrs, fielderrors.NewFieldInvalid("????", config, err.Error()))
}
}
return allErrs
}
func ValidateLDAPSyncConfig(config api.LDAPSyncConfig) ValidationResults {
validationResults := ValidateLDAPClientConfig("config",
config.Host,
config.BindDN,
config.BindPassword,
config.CA,
config.Insecure)
var numConfigs int
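	// Exactly one of the three schema-specific configs below may be set.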
if config.RFC2307Config != nil {
configResults := ValidateRFC2307Config(config.RFC2307Config)
validationResults.AddErrors(configResults.Errors...)
validationResults.AddWarnings(configResults.Warnings...)
numConfigs++
}
if config.ActiveDirectoryConfig != nil {
configResults := ValidateActiveDirectoryConfig(config.ActiveDirectoryConfig)
validationResults.AddErrors(configResults.Errors...)
validationResults.AddWarnings(configResults.Warnings...)
numConfigs++
}
if config.AugmentedActiveDirectoryConfig != nil {
configResults := ValidateAugmentedActiveDirectoryConfig(config.AugmentedActiveDirectoryConfig)
validationResults.AddErrors(configResults.Errors...)
validationResults.AddWarnings(configResults.Warnings...)
numConfigs++
}
if numConfigs != 1 {
validationResults.AddErrors(fielderrors.NewFieldInvalid("", config.LDAPSchemaSpecificConfig,
"only one schema-specific config is allowed"))
}
return validationResults
}
func ValidateLDAPClientConfig(parent, url, bindDN, bindPassword, CA string, insecure bool) ValidationResults {
validationResults := ValidationResults{}
if len(url) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired(parent + ".host"))
return validationResults
}
u, err := ldaputil.ParseURL(url)
if err != nil {
validationResults.AddErrors(fielderrors.NewFieldInvalid(parent+".URL", url, err.Error()))
return validationResults
}
// Make sure bindDN and bindPassword are both set, or both unset
// Both unset means an anonymous bind is used for search (https://tools.ietf.org/html/rfc4513#section-5.1.1)
// Both set means the name/password simple bind is used for search (https://tools.ietf.org/html/rfc4513#section-5.1.3)
if (len(bindDN) == 0) != (len(bindPassword) == 0) {
validationResults.AddErrors(fielderrors.NewFieldInvalid(parent+".bindDN", bindDN,
"bindDN and bindPassword must both be specified, or both be empty"))
validationResults.AddErrors(fielderrors.NewFieldInvalid(parent+".bindPassword", "<masked>",
"bindDN and bindPassword must both be specified, or both be empty"))
}
if insecure {
if u.Scheme == ldaputil.SchemeLDAPS {
validationResults.AddErrors(fielderrors.NewFieldInvalid(parent+".url", url,
fmt.Sprintf("Cannot use %s scheme with insecure=true", u.Scheme)))
}
if len(CA) > 0 {
validationResults.AddErrors(fielderrors.NewFieldInvalid(parent+".ca", CA,
"Cannot specify a ca with insecure=true"))
}
} else {
if len(CA) > 0 {
validationResults.AddErrors(ValidateFile(CA, parent+".ca")...)
}
}
// Warn if insecure
if insecure {
validationResults.AddWarnings(fielderrors.NewFieldInvalid(parent+".insecure", insecure,
"validating passwords over an insecure connection could allow them to be intercepted"))
}
return validationResults
}
func ValidateRFC2307Config(config *api.RFC2307Config) ValidationResults {
validationResults := ValidationResults{}
groupQueryResults := ValidateLDAPQuery("groupQuery", config.GroupQuery)
validationResults.AddErrors(groupQueryResults.Errors...)
validationResults.AddWarnings(groupQueryResults.Warnings...)
if len(config.GroupNameAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("groupName"))
}
if len(config.GroupMembershipAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("groupMembership"))
}
userQueryResults := ValidateLDAPQuery("userQuery", config.UserQuery)
validationResults.AddErrors(userQueryResults.Errors...)
validationResults.AddWarnings(userQueryResults.Warnings...)
if len(config.UserNameAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("userName"))
}
return validationResults
}
func ValidateActiveDirectoryConfig(config *api.ActiveDirectoryConfig) ValidationResults {
validationResults := ValidationResults{}
userQueryResults := ValidateLDAPQuery("usersQuery", config.UsersQuery)
validationResults.AddErrors(userQueryResults.Errors...)
validationResults.AddWarnings(userQueryResults.Warnings...)
if len(config.UserNameAttributes) == 0 {<|fim▁hole|> validationResults.AddErrors(fielderrors.NewFieldRequired("userName"))
}
if len(config.GroupMembershipAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("groupMembership"))
}
return validationResults
}
func ValidateAugmentedActiveDirectoryConfig(config *api.AugmentedActiveDirectoryConfig) ValidationResults {
validationResults := ValidationResults{}
groupQueryResults := ValidateLDAPQuery("groupQuery", config.GroupQuery)
validationResults.AddErrors(groupQueryResults.Errors...)
validationResults.AddWarnings(groupQueryResults.Warnings...)
if len(config.GroupNameAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("groupName"))
}
if len(config.GroupMembershipAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("groupMembership"))
}
userQueryResults := ValidateLDAPQuery("usersQuery", config.UsersQuery)
validationResults.AddErrors(userQueryResults.Errors...)
validationResults.AddWarnings(userQueryResults.Warnings...)
if len(config.UserNameAttributes) == 0 {
validationResults.AddErrors(fielderrors.NewFieldRequired("userName"))
}
return validationResults
}
func ValidateLDAPQuery(queryName string, query api.LDAPQuery) ValidationResults {
validationResults := ValidationResults{}
if _, err := ldap.ParseDN(query.BaseDN); err != nil {
validationResults.AddErrors(fielderrors.NewFieldInvalid(queryName+".baseDN", query.BaseDN,
fmt.Sprintf("invalid base DN for search: %v", err)))
}
if len(query.Scope) > 0 {
if _, err := ldaputil.DetermineLDAPScope(query.Scope); err != nil {
validationResults.AddErrors(fielderrors.NewFieldInvalid(queryName+".scope", query.Scope,
"invalid LDAP search scope"))
}
}
if len(query.DerefAliases) > 0 {
if _, err := ldaputil.DetermineDerefAliasesBehavior(query.DerefAliases); err != nil {
validationResults.AddErrors(fielderrors.NewFieldInvalid(queryName+".derefAliases",
query.DerefAliases, "LDAP alias dereferencing instruction invalid"))
}
}
if query.TimeLimit < 0 {
validationResults.AddErrors(fielderrors.NewFieldInvalid(queryName+".timeout", query.TimeLimit,
"timeout must be equal to or greater than zero"))
}
if _, err := ldap.CompileFilter(query.Filter); err != nil {
validationResults.AddErrors(fielderrors.NewFieldInvalid(queryName+".filter", query.Filter,
fmt.Sprintf("invalid query filter: %v", err)))
}
return validationResults
}<|fim▁end|>
| |
<|file_name|>plot_forest.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from os import listdir
<|fim▁hole|>colorarray = np.random.random_sample((10000, 3))
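# Draw each forest as unit squares coloured by cluster id.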
for f in files:
size = int(f.split('-')[0])
x, y, c = np.loadtxt('./data/plots/' + f, unpack=True)
fig = plt.figure(figsize=(5, 5))
ax = fig.add_subplot(111)
for px, py, col in zip(x, y, c):
        # loadtxt returns floats, so cast the cluster id before indexing
        rect = patches.Rectangle((px, py), 1, 1, color=colorarray[int(col)])
ax.add_patch(rect)
plt.xlim([0, size])
plt.ylim([0, size])
plt.savefig('./plots/nice/' + f.replace('.txt', '.pdf'))
    plt.close(fig)  # close the figure; clf() alone keeps figures alive and leaks memory
|
files = listdir('./data/plots/')
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from __future__ import print_function<|fim▁hole|>from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
from ImmOrbit.api import router
admin.autodiscover()
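# Serve all URLs below under an i18n language prefix (e.g. /en/admin/).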
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^select2/', include('django_select2.urls')),
url(r'^api/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA<|fim▁end|>
|
from cms.sitemaps import CMSSitemap
|
<|file_name|>rm.py<|end_file_name|><|fim▁begin|># Short help
def display_summary():
print("{:<13}{}".format( 'rm', "Removes a previously copied SCM Repository" ))<|fim▁hole|>USAGE="""
Removes a previously 'copied' repository
===============================================================================
usage: evie [common-opts] rm [options] <dst> <repo> <origin> <id>
evie [common-opts] rm [options] get-success-msg
evie [common-opts] rm [options] get-error-msg
Arguments:
<dst> PARENT directory for where the package was copied. The
directory is specified as a relative path to the root
of primary repository.
<repo> Name of the repository to remove
<origin> Path/URL to the repository
<id> Label/Tag/Hash/Version of code to be remove
get-success-msg Returns a SCM specific message that informs the end user
of additional action(s) that may be required when
the command is successful
get-error-msg Returns a SCM specific message that informs the end user
of additional action(s) that may be required when
the command fails
Options:
-p PKGNAME Specifies the Package name if different from the <repo>
name
-b BRANCH Specifies the source branch in <repo>. The use/need
of this option in dependent on the <repo> SCM type.
Options:
-h, --help Display help for this command
Notes:
o The command MUST be run in the root of the primary respostiory.
o This command only applied to repositories previously mounted using
the 'copy' command.
"""<|fim▁end|>
|
# DOCOPT command line definition
|
<|file_name|>report.service.ts<|end_file_name|><|fim▁begin|>import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';<|fim▁hole|>export class ReportService {
constructor (private http: HttpClient, private global: GlobalService) {
}
public postReport (data: FormData): Observable<any> {
return this.http.post(this.global.url + `/report`, data).map((res: any) => {
if ( res.status === 'success' ) {
return res.result;
} else {
alert('[ERROR]: ' + res.result);
}
});
}
public getReportList (): Observable<any> {
return this.http.get(this.global.url + `/report`).map((res: any) => {
if ( res.status === 'success' ) {
return res.result;
} else {
alert('[ERROR]: ' + res.result);
}
})
}
public getReportById (id: number): Observable<any> {
return this.http.get(this.global.url + `/report/${id}`).map((res: any) => {
if ( res.status === 'success' ) {
return res.result;
} else {
alert('[ERROR]: ' + res.result);
}
})
}
}<|fim▁end|>
|
import { Observable } from 'rxjs/Observable';
import { GlobalService } from '../service/global.service';
@Injectable()
|
<|file_name|>bloom9.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package types
import (
"fmt"
"math/big"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
)
type bytesBacked interface {
Bytes() []byte
}
const bloomLength = 256
// Bloom represents a 256 bit bloom filter.
type Bloom [bloomLength]byte
// BytesToBloom converts a byte slice to a bloom filter.
// It panics if b is not of suitable size.
func BytesToBloom(b []byte) Bloom {
var bloom Bloom
bloom.SetBytes(b)
return bloom
}
// SetBytes sets the content of b to the given bytes.
// It panics if d is not of suitable size.
func (b *Bloom) SetBytes(d []byte) {
if len(b) < len(d) {
panic(fmt.Sprintf("bloom bytes too big %d %d", len(b), len(d)))
}
copy(b[bloomLength-len(d):], d)
}
// Add adds d to the filter. Future calls of Test(d) will return true.
func (b *Bloom) Add(d *big.Int) {
bin := new(big.Int).SetBytes(b[:])
bin.Or(bin, bloom9(d.Bytes()))
b.SetBytes(bin.Bytes())
}
// Big converts b to a big integer.
func (b Bloom) Big() *big.Int {
return new(big.Int).SetBytes(b[:])
}
func (b Bloom) Bytes() []byte {
return b[:]
}
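// Test reports whether the given topic may be contained in the filter (false positives are possible).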
func (b Bloom) Test(test *big.Int) bool {
return BloomLookup(b, test)
}
func (b Bloom) TestBytes(test []byte) bool {
return b.Test(new(big.Int).SetBytes(test))
}
// MarshalText encodes b as a hex string with 0x prefix.
func (b Bloom) MarshalText() ([]byte, error) {
return hexutil.Bytes(b[:]).MarshalText()
}
// UnmarshalText decodes b from a hex string with 0x prefix.
func (b *Bloom) UnmarshalText(input []byte) error {
return hexutil.UnmarshalFixedText("Bloom", input, b[:])
}
func CreateBloom(receipts Receipts) Bloom {
bin := new(big.Int)
for _, receipt := range receipts {
bin.Or(bin, LogsBloom(receipt.Logs))
}
return BytesToBloom(bin.Bytes())
}
func LogsBloom(logs []*Log) *big.Int {
bin := new(big.Int)
for _, log := range logs {
bin.Or(bin, bloom9(log.Address.Bytes()))
for _, b := range log.Topics {<|fim▁hole|> }
return bin
}
func bloom9(b []byte) *big.Int {
b = crypto.Keccak256(b[:])
r := new(big.Int)
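	// Set three bits in the 2048-bit filter: each 11-bit index comes
	// from a pair of hash bytes (masking with 2047 keeps 11 bits).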
for i := 0; i < 6; i += 2 {
t := big.NewInt(1)
b := (uint(b[i+1]) + (uint(b[i]) << 8)) & 2047
r.Or(r, t.Lsh(t, b))
}
return r
}
var Bloom9 = bloom9
func BloomLookup(bin Bloom, topic bytesBacked) bool {
bloom := bin.Big()
cmp := bloom9(topic.Bytes()[:])
return bloom.And(bloom, cmp).Cmp(cmp) == 0
}<|fim▁end|>
|
bin.Or(bin, bloom9(b[:]))
}
|
<|file_name|>test_game.ts<|end_file_name|><|fim▁begin|>/* eslint-disable no-self-assign */
import Game from '../../../../gui/html/js/game';
import { Plugin } from '../../../../gui/html/js/plugin';
import Translator from '../../../../gui/html/js/translator';
jest.mock('../../../../gui/html/js/dom');
jest.mock('../../../../gui/html/js/filters');
describe('Game', () => {
const l10n = new Translator();
const defaultDerivedPluginMetadata = {
name: 'test',
isActive: false,
isDirty: false,
isEmpty: false,
isMaster: false,
isLightPlugin: false,
loadsArchive: false,
messages: [],
suggestedTags: [],
currentTags: []
};
const gameData = {
folder: 'test',
generalMessages: [
{
type: 'say',
text: 'test',
language: 'en',
condition: ''
}
],
masterlist: { revision: '0', date: '' },
groups: { masterlist: [], userlist: [] },
plugins: [defaultDerivedPluginMetadata],
bashTags: []
};
describe('#constructor()', () => {
test("should set folder to the object's value", () => {
const game = new Game(gameData, l10n);
expect(game.folder).toBe('test');
});
test("should set generalMessages to the object's value", () => {
const game = new Game(gameData, l10n);
expect(game.generalMessages).toEqual([
{
type: 'say',
text: 'test',
language: 'en',
condition: ''
}
]);
});
test("should set masterlist to the object's value", () => {
const game = new Game(gameData, l10n);
expect(game.masterlist).toEqual({ revision: '0', date: '' });
});
test("should construct plugins from the object's plugins value", () => {
const game = new Game(gameData, l10n);
expect(game.plugins.length).toBe(1);
expect(game.plugins[0]).toHaveProperty('update');
expect(game.plugins[0].name).toBe('test');
expect(game.plugins[0].cardZIndex).toBe(1);
});
test("should set oldLoadOrder to an empty array even if the object's value if defined", () => {
const game = new Game(gameData, l10n);
expect(game.oldLoadOrder).toEqual([]);
});
});
describe('#folder', () => {
let game: Game;
// It's not worth the hassle of defining and checking the event type in test
// code.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let handleEvent: (evt: any) => void;
beforeEach(() => {
game = new Game(gameData, l10n);
});
afterEach(() => {
document.removeEventListener('loot-game-folder-change', handleEvent);
});
test('setting value should not dispatch an event if the new value is equal to the old one', done => {
handleEvent = () => {
done(new Error('Should not have fired an event'));
};
document.addEventListener('loot-game-folder-change', handleEvent);
game.folder = game.folder;
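      // Passing done to setTimeout succeeds the test if no event fired within 100 ms.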
setTimeout(done, 100);
});
test('setting value should dispatch an event if the new value differs from the old one', done => {
handleEvent = evt => {
expect(evt.detail.folder).toBe('other test');
done();
};
document.addEventListener('loot-game-folder-change', handleEvent);
game.folder = 'other test';
});
});
describe('#generalMessages', () => {
let game: Game;
// It's not worth the hassle of defining and checking the event type in test
// code.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let handleEvent: (evt: any) => void;
beforeEach(() => {
game = new Game(gameData, l10n);
});
afterEach(() => {
document.removeEventListener(
'loot-game-global-messages-change',
handleEvent
);
});
test('setting value should not dispatch an event if the new value is equal to the old one', done => {
handleEvent = () => {
done(new Error('Should not have fired an event'));
};
document.addEventListener(
'loot-game-global-messages-change',
handleEvent
);
game.generalMessages = game.generalMessages;
setTimeout(done, 100);
});
test('setting value should dispatch an event if the new and old message strings differ', done => {
const newMessages = [
{
type: 'say',
text: 'bar',
language: 'en',
condition: ''
}
];
handleEvent = evt => {
expect(evt.detail.messages).toEqual(newMessages);
expect(evt.detail.totalDiff).toBe(0);
expect(evt.detail.errorDiff).toBe(0);
expect(evt.detail.warningDiff).toBe(0);
done();
};
document.addEventListener(
'loot-game-global-messages-change',
handleEvent
);
game.generalMessages = newMessages;
});
test('setting value should dispatch an event if the new and old message type counts differ', done => {
const newMessages = [
{
type: 'warn',
text: 'foo',
language: 'en',
condition: ''
},
{
type: 'error',
text: 'bar',
language: 'en',
condition: ''
}
];
game.generalMessages = [];
handleEvent = evt => {
expect(evt.detail.messages).toEqual(newMessages);
expect(evt.detail.totalDiff).toBe(2);
expect(evt.detail.errorDiff).toBe(1);
expect(evt.detail.warningDiff).toBe(1);
done();
};
document.addEventListener(
'loot-game-global-messages-change',
handleEvent
);
game.generalMessages = newMessages;
});
});
describe('#masterlist', () => {
let game: Game;
// It's not worth the hassle of defining and checking the event type in test
// code.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let handleEvent: (evt: any) => void;
beforeEach(() => {
game = new Game(gameData, l10n);
});
afterEach(() => {
document.removeEventListener('loot-game-masterlist-change', handleEvent);
});
test('setting value should not dispatch an event if the new value is equal to the old one', done => {
handleEvent = () => {
done(new Error('Should not have fired an event'));
};
document.addEventListener('loot-game-masterlist-change', handleEvent);
game.masterlist = game.masterlist;
setTimeout(done, 100);
});
test('setting value should dispatch an event if the new value differs from the old one', done => {
const newMasterlist = {
revision: 'foo',
date: 'bar'
};
handleEvent = evt => {
expect(evt.detail).toEqual(newMasterlist);
done();
};
document.addEventListener('loot-game-masterlist-change', handleEvent);
game.masterlist = newMasterlist;
});
});
describe('#plugins', () => {
let game: Game;
// It's not worth the hassle of defining and checking the event type in test
// code.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let handleEvent: (evt: any) => void;
beforeEach(() => {
game = new Game(gameData, l10n);
});
afterEach(() => {
document.removeEventListener('loot-game-plugins-change', handleEvent);
});
test('setting value should dispatch an event if the new value is equal to the old one', done => {
handleEvent = () => {
done();
};
document.addEventListener('loot-game-plugins-change', handleEvent);
game.plugins = game.plugins;
});
test('setting value should dispatch an event if the new value differs from the old one', done => {
const newMessages = [
{
type: 'warn',
text: 'foo',
language: 'en',
condition: ''
},
{
type: 'error',
text: 'bar',
language: 'en',
condition: ''
}
];
const newPlugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'a',
isActive: true,
messages: [newMessages[0]]
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'b',
isDirty: true,
messages: [{ ...newMessages[0], type: 'say' }]
})
];
handleEvent = evt => {
expect(evt.detail.valuesAreTotals).toBe(true);
expect(evt.detail.totalMessageNo).toBe(4);
expect(evt.detail.warnMessageNo).toBe(2);
expect(evt.detail.errorMessageNo).toBe(1);
expect(evt.detail.totalPluginNo).toBe(2);
expect(evt.detail.activePluginNo).toBe(1);
expect(evt.detail.dirtyPluginNo).toBe(1);
done();
};
document.addEventListener('loot-game-plugins-change', handleEvent);
game.generalMessages = newMessages;
game.plugins = newPlugins;
});
});
describe('groups', () => {
test("get should return the game's groups", () => {
const groups = {
masterlist: [{ name: 'a', after: [] }],
userlist: [{ name: 'b', after: [] }]
};
const game = new Game({ ...gameData, groups }, l10n);
expect(game.groups).toStrictEqual([
{ name: 'a', after: [], isUserAdded: false },
{ name: 'b', after: [], isUserAdded: true }
]);
});
});
describe('#setGroups()', () => {
// It's not worth the hassle of defining and checking the event type in test
// code.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let handleEvent: (evt: any) => void;
afterEach(() => {
document.removeEventListener('loot-game-groups-change', handleEvent);
});
test('should merge the given masterlist and userlist groups arrays', () => {
const game = new Game(gameData, l10n);
const groups = {
masterlist: [{ name: 'a', after: [] }],
userlist: [{ name: 'b', after: [] }]
};
game.setGroups(groups);
expect(game.groups).toStrictEqual([
{ name: 'a', after: [], isUserAdded: false },
{ name: 'b', after: [], isUserAdded: true }
]);
});
test('should dispatch an event', done => {
handleEvent = () => {
done();
};
document.addEventListener('loot-game-groups-change', handleEvent);
const game = new Game(gameData, l10n);
const groups = {
masterlist: [{ name: 'a', after: [] }],
userlist: [{ name: 'b', after: [] }]
};
game.setGroups(groups);
});
});
describe('#getContent()', () => {
let game: Game;
beforeEach(() => {
game = new Game(gameData, l10n);
});
test('should return an object of two empty arrays if there is no game data', () => {
game.plugins = [];
game.generalMessages = [];
expect(game.getContent()).toEqual({
messages: [],
plugins: []
});
});
test('should return a structure containing converted plugin and message structures', () => {
game.generalMessages = [
{
type: 'say',
condition: 'file("foo.esp")',
language: 'fr',
text: 'Bonjour le monde'
}
];
game.plugins = [
new Plugin({
name: 'foo',
crc: 0xdeadbeef,<|fim▁hole|> isEmpty: true,
isMaster: false,
isLightPlugin: false,
loadsArchive: true,
group: 'group1',
messages: [
{
type: 'warn',
condition: 'file("bar.esp")',
language: 'en',
text: 'Hello world'
}
],
currentTags: [
{
name: 'Relev',
isAddition: true,
condition: ''
}
],
suggestedTags: [
{
name: 'Delev',
isAddition: true,
condition: ''
}
],
isDirty: true
})
];
expect(game.getContent()).toEqual({
messages: game.generalMessages,
plugins: [
{
name: game.plugins[0].name,
crc: game.plugins[0].crc,
version: game.plugins[0].version,
isActive: game.plugins[0].isActive,
isEmpty: game.plugins[0].isEmpty,
loadsArchive: game.plugins[0].loadsArchive,
group: game.plugins[0].group,
messages: game.plugins[0].messages,
currentTags: game.plugins[0].currentTags,
suggestedTags: game.plugins[0].suggestedTags,
isDirty: game.plugins[0].isDirty
}
]
});
});
});
describe('#getPluginNames()', () => {
let game: Game;
beforeEach(() => {
game = new Game(gameData, l10n);
});
test('should return an empty array if there are no plugins', () => {
game.plugins = [];
expect(game.getPluginNames().length).toBe(0);
});
test('should return an array of plugin filenames if there are plugins', () => {
game.plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
isActive: true,
messages: [{ type: 'warn', text: '', language: 'en', condition: '' }]
})
];
expect(game.getPluginNames()).toEqual(['foo']);
});
});
describe('#getGroupPluginNames()', () => {
let game: Game;
beforeEach(() => {
const plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
group: 'test group'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar',
group: 'other group'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foobar',
group: 'test group'
})
];
game = new Game({ ...gameData, plugins }, l10n);
});
test('should return an empty array if there are no plugins in the given group', () => {
expect(game.getGroupPluginNames('empty group').length).toBe(0);
});
test('should return an array of filenames of plugins in the given group', () => {
expect(game.getGroupPluginNames('test group')).toEqual(['foo', 'foobar']);
});
});
describe('#setSortedPlugins', () => {
let game: Game;
// It's not worth the hassle of defining and checking the event type in test
// code.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let handleEvent: (evt: any) => void;
beforeEach(() => {
game = new Game(gameData, l10n);
});
afterEach(() => {
document.removeEventListener('loot-game-plugins-change', handleEvent);
});
test('should append new plugins to the plugins array', () => {
game.setSortedPlugins([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
})
]);
expect(game.plugins[0].name).toBe('foo');
});
test('should update existing plugins with new data', () => {
game.plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
isActive: true,
messages: [{ type: 'warn', text: '', language: 'en', condition: '' }]
})
];
game.setSortedPlugins([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
crc: 0xdeadbeef
})
]);
expect(game.plugins[0].crc).toBe(0xdeadbeef);
expect(game.plugins[0].isActive).toBe(false);
});
test('should reorder plugins to given order', () => {
game.plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar'
})
];
game.setSortedPlugins([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
})
]);
expect(game.plugins[0].name).toBe('bar');
expect(game.plugins[1].name).toBe('foo');
});
test('should store old load order', () => {
game.plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar'
})
];
game.setSortedPlugins([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
})
]);
expect(game.oldLoadOrder[0].name).toBe('foo');
expect(game.oldLoadOrder[1].name).toBe('bar');
});
test('should dispatch an event', done => {
handleEvent = () => {
done();
};
document.addEventListener('loot-game-plugins-change', handleEvent);
game.setSortedPlugins([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
})
]);
});
});
describe('#applySort', () => {
let game: Game;
beforeEach(() => {
game = new Game(gameData, l10n);
});
test('should delete the stored old load order', () => {
game.oldLoadOrder = gameData.plugins.map(p => new Plugin(p));
game.applySort();
expect(game.oldLoadOrder).toEqual([]);
});
});
describe('#cancelSort', () => {
let game: Game;
beforeEach(() => {
game = new Game(gameData, l10n);
});
test('should set the current load order to the given plugins', () => {
const oldLoadOrder = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'baz'
})
];
game.oldLoadOrder = oldLoadOrder;
game.plugins = [oldLoadOrder[2], oldLoadOrder[1], oldLoadOrder[0]];
game.cancelSort(
[{ name: 'baz', loadOrderIndex: 0 }, { name: 'foo' }],
[]
);
expect(game.plugins).toEqual([oldLoadOrder[2], oldLoadOrder[0]]);
});
test('should delete the stored old load order', () => {
game.oldLoadOrder = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar'
})
];
game.cancelSort([], []);
expect(game.oldLoadOrder).toEqual([]);
});
test('should set plugin load order indices using the array passed as the first parameter', () => {
game.oldLoadOrder = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
loadOrderIndex: 1
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar',
loadOrderIndex: 0
})
];
game.cancelSort(
[
{ name: 'bar', loadOrderIndex: 0 },
{ name: 'foo', loadOrderIndex: 2 }
],
[]
);
expect(game.plugins[0].name).toBe('bar');
expect(game.plugins[0].loadOrderIndex).toBe(0);
expect(game.plugins[1].name).toBe('foo');
expect(game.plugins[1].loadOrderIndex).toBe(2);
});
test('should set the general messages to the second passed parameter', () => {
game.oldLoadOrder = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
loadOrderIndex: 1
}),
new Plugin({
...defaultDerivedPluginMetadata,
name: 'bar',
loadOrderIndex: 0
})
];
const messages = [
{
type: 'say',
text: 'foo',
language: 'en',
condition: ''
}
];
game.cancelSort([], messages);
expect(game.generalMessages).toEqual(messages);
});
});
describe('#clearMetadata', () => {
let game: Game;
beforeEach(() => {
game = new Game(gameData, l10n);
});
test('should delete stored userlist data for existing plugins', () => {
game.plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
userlist: {
name: '',
after: [],
req: [],
inc: [],
msg: [],
tag: [],
dirty: [],
clean: [],
url: []
}
})
];
game.clearMetadata([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo'
})
]);
expect(game.plugins[0].userlist).toBe(undefined);
});
test('should update existing plugin data', () => {
game.plugins = [
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
isActive: true
})
];
game.clearMetadata([
new Plugin({
...defaultDerivedPluginMetadata,
name: 'foo',
crc: 0xdeadbeef
})
]);
expect(game.plugins[0].crc).toBe(0xdeadbeef);
expect(game.plugins[0].isActive).toBe(false);
});
});
});<|fim▁end|>
|
version: '1.0',
isActive: true,
|
<|file_name|>get_item_details.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, throw
from frappe.utils import flt, cint, add_days, cstr, add_months
import json
from erpnext.accounts.doctype.pricing_rule.pricing_rule import get_pricing_rule_for_item, set_transaction_type
from erpnext.setup.utils import get_exchange_rate
from frappe.model.meta import get_field_precision
from erpnext.stock.doctype.batch.batch import get_batch_no
from erpnext import get_company_currency
from erpnext.stock.doctype.item.item import get_item_defaults, get_uom_conv_factor
from erpnext.setup.doctype.item_group.item_group import get_item_group_defaults
from six import string_types, iteritems
sales_doctypes = ['Quotation', 'Sales Order', 'Delivery Note', 'Sales Invoice']
purchase_doctypes = ['Material Request', 'Supplier Quotation', 'Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
@frappe.whitelist()
def get_item_details(args):
"""
	args = {
		"item_code": "",
		"warehouse": None,
		"customer": "",
		"conversion_rate": 1.0,
		"selling_price_list": None,
		"price_list_currency": None,
		"plc_conversion_rate": 1.0,
		"doctype": "",
		"name": "",
		"supplier": None,
		"transaction_date": None,
		"buying_price_list": None,
		"is_subcontracted": "Yes" / "No",
		"ignore_pricing_rule": 0/1,
		"project": "",
		"set_warehouse": ""
	}
"""
args = process_args(args)
item = frappe.get_cached_doc("Item", args.item_code)
validate_item_details(args, item)
out = get_basic_details(args, item)
get_party_item_code(args, item, out)
set_valuation_rate(out, args)
update_party_blanket_order(args, out)
get_price_list_rate(args, item, out)
if args.customer and cint(args.is_pos):
out.update(get_pos_profile_item_details(args.company, args))
if out.get("warehouse"):
out.update(get_bin_details(args.item_code, out.warehouse))
# update args with out, if key or value not exists
for key, value in iteritems(out):
if args.get(key) is None:
args[key] = value
out.update(get_pricing_rule_for_item(args))
update_stock(args, out)
if args.transaction_date and item.lead_time_days:
out.schedule_date = out.lead_time_date = add_days(args.transaction_date,
item.lead_time_days)
if args.get("is_subcontracted") == "Yes":
out.bom = args.get('bom') or get_default_bom(args.item_code)
get_gross_profit(out)
if args.doctype == 'Material Request':
out.rate = args.rate or out.price_list_rate
out.amount = flt(args.qty * out.rate)
return out
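# A minimal usage sketch (hypothetical item, company and price list names; a
# real call needs existing records). The whitelisted endpoint accepts either
# a dict or a JSON string, since process_args() handles both:
#
#     get_item_details({
#         "item_code": "ITEM-0001",
#         "company": "My Company",
#         "doctype": "Sales Invoice",
#         "customer": "My Customer",
#         "conversion_rate": 1.0,
#         "price_list": "Standard Selling",
#         "qty": 1
#     })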
def update_stock(args, out):
if (args.get("doctype") == "Delivery Note" or
(args.get("doctype") == "Sales Invoice" and args.get('update_stock'))) \
and out.warehouse and out.stock_qty > 0:
if out.has_batch_no and not args.get("batch_no"):
out.batch_no = get_batch_no(out.item_code, out.warehouse, out.qty)
actual_batch_qty = get_batch_qty(out.batch_no, out.warehouse, out.item_code)
if actual_batch_qty:
out.update(actual_batch_qty)
if out.has_serial_no and args.get('batch_no'):
reserved_so = get_so_reservation_for_item(args)
out.batch_no = args.get('batch_no')
out.serial_no = get_serial_no(out, args.serial_no, sales_order=reserved_so)
elif out.has_serial_no:
reserved_so = get_so_reservation_for_item(args)
out.serial_no = get_serial_no(out, args.serial_no, sales_order=reserved_so)
def set_valuation_rate(out, args):
if frappe.db.exists("Product Bundle", args.item_code, cache=True):
valuation_rate = 0.0
bundled_items = frappe.get_doc("Product Bundle", args.item_code)
for bundle_item in bundled_items.items:
valuation_rate += \
flt(get_valuation_rate(bundle_item.item_code, args.company, out.get("warehouse")).get("valuation_rate") \
* bundle_item.qty)
out.update({
"valuation_rate": valuation_rate
})
else:
out.update(get_valuation_rate(args.item_code, args.company, out.get("warehouse")))
def process_args(args):
if isinstance(args, string_types):
args = json.loads(args)
args = frappe._dict(args)
if not args.get("price_list"):
args.price_list = args.get("selling_price_list") or args.get("buying_price_list")
if args.barcode:
args.item_code = get_item_code(barcode=args.barcode)
elif not args.item_code and args.serial_no:
args.item_code = get_item_code(serial_no=args.serial_no)
set_transaction_type(args)
return args
@frappe.whitelist()
def get_item_code(barcode=None, serial_no=None):
if barcode:
item_code = frappe.db.get_value("Item Barcode", {"barcode": barcode}, fieldname=["parent"])
if not item_code:
frappe.throw(_("No Item with Barcode {0}").format(barcode))
elif serial_no:
item_code = frappe.db.get_value("Serial No", serial_no, "item_code")
if not item_code:
frappe.throw(_("No Item with Serial No {0}").format(serial_no))
return item_code
def validate_item_details(args, item):
if not args.company:
throw(_("Please specify Company"))
from erpnext.stock.doctype.item.item import validate_end_of_life
validate_end_of_life(item.name, item.end_of_life, item.disabled)
if args.transaction_type == "selling" and cint(item.has_variants):
throw(_("Item {0} is a template, please select one of its variants").format(item.name))
elif args.transaction_type == "buying" and args.doctype != "Material Request":
if args.get("is_subcontracted") == "Yes" and item.is_sub_contracted_item != 1:
throw(_("Item {0} must be a Sub-contracted Item").format(item.name))
def get_basic_details(args, item):
"""
	:param args: {
			"item_code": "",
			"warehouse": None,
			"customer": "",
			"conversion_rate": 1.0,
			"selling_price_list": None,
			"price_list_currency": None,
			"price_list_uom_dependant": None,
			"plc_conversion_rate": 1.0,
			"doctype": "",
			"name": "",
			"supplier": None,
			"transaction_date": None,
			"buying_price_list": None,
			"is_subcontracted": "Yes" / "No",
			"ignore_pricing_rule": 0/1,
			"project": "",
			"barcode": "",
			"serial_no": "",
			"currency": "",
			"update_stock": "",
			"price_list": "",
			"company": "",
			"order_type": "",
			"is_pos": "",
			"qty": "",
			"stock_qty": "",
			"conversion_factor": ""
		}
	:param item: Item document object (fetched via args.item_code if not passed in)
	:return: frappe._dict of basic item details
"""
if not item:
item = frappe.get_doc("Item", args.get("item_code"))
if item.variant_of:
item.update_template_tables()
from frappe.defaults import get_user_default_as_list
user_default_warehouse_list = get_user_default_as_list('Warehouse')
user_default_warehouse = user_default_warehouse_list[0] \
if len(user_default_warehouse_list) == 1 else ""
item_defaults = get_item_defaults(item.name, args.company)
item_group_defaults = get_item_group_defaults(item.name, args.company)
warehouse = args.get("set_warehouse") or user_default_warehouse or item_defaults.get("default_warehouse") or\
item_group_defaults.get("default_warehouse") or args.warehouse
if args.get('doctype') == "Material Request" and not args.get('material_request_type'):
args['material_request_type'] = frappe.db.get_value('Material Request',
args.get('name'), 'material_request_type', cache=True)
	# Set the UOM to the Default Sales UOM or Default Purchase UOM if configured in the Item Master
if not args.uom:
if args.get('doctype') in sales_doctypes:
args.uom = item.sales_uom if item.sales_uom else item.stock_uom
elif (args.get('doctype') in ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']) or \
(args.get('doctype') == 'Material Request' and args.get('material_request_type') == 'Purchase'):
args.uom = item.purchase_uom if item.purchase_uom else item.stock_uom
else:
args.uom = item.stock_uom
out = frappe._dict({
"item_code": item.name,
"item_name": item.item_name,
"description": cstr(item.description).strip(),
"image": cstr(item.image).strip(),
"warehouse": warehouse,
"income_account": get_default_income_account(args, item_defaults, item_group_defaults),
"expense_account": get_default_expense_account(args, item_defaults, item_group_defaults),
"cost_center": get_default_cost_center(args, item_defaults, item_group_defaults),
'has_serial_no': item.has_serial_no,
'has_batch_no': item.has_batch_no,
"batch_no": None,
"item_tax_rate": json.dumps(dict(([d.tax_type, d.tax_rate] for d in
item.get("taxes")))),
"uom": args.uom,
"min_order_qty": flt(item.min_order_qty) if args.doctype == "Material Request" else "",
"qty": args.qty or 1.0,
"stock_qty": args.qty or 1.0,
"price_list_rate": 0.0,
"base_price_list_rate": 0.0,
"rate": 0.0,
"base_rate": 0.0,
"amount": 0.0,
"base_amount": 0.0,
"net_rate": 0.0,
"net_amount": 0.0,
"discount_percentage": 0.0,
"supplier": get_default_supplier(args, item_defaults, item_group_defaults),
"update_stock": args.get("update_stock") if args.get('doctype') in ['Sales Invoice', 'Purchase Invoice'] else 0,
"delivered_by_supplier": item.delivered_by_supplier if args.get("doctype") in ["Sales Order", "Sales Invoice"] else 0,
"is_fixed_asset": item.is_fixed_asset,
"weight_per_unit":item.weight_per_unit,
"weight_uom":item.weight_uom,
"last_purchase_rate": item.last_purchase_rate if args.get("doctype") in ["Purchase Order"] else 0,
"transaction_date": args.get("transaction_date")
})
if item.get("enable_deferred_revenue") or item.get("enable_deferred_expense"):
out.update(calculate_service_end_date(args, item))
# calculate conversion factor
if item.stock_uom == args.uom:
out.conversion_factor = 1.0
else:
out.conversion_factor = args.conversion_factor or \
get_conversion_factor(item.name, args.uom).get("conversion_factor")
args.conversion_factor = out.conversion_factor
out.stock_qty = out.qty * out.conversion_factor
# calculate last purchase rate
if args.get('doctype') in purchase_doctypes:
from erpnext.buying.doctype.purchase_order.purchase_order import item_last_purchase_rate
out.last_purchase_rate = item_last_purchase_rate(args.name, args.conversion_rate, item.name, out.conversion_factor)
# if default specified in item is for another company, fetch from company
for d in [
["Account", "income_account", "default_income_account"],
["Account", "expense_account", "default_expense_account"],
["Cost Center", "cost_center", "cost_center"],
["Warehouse", "warehouse", ""]]:
if not out[d[1]]:
out[d[1]] = frappe.get_cached_value('Company', args.company, d[2]) if d[2] else None
for fieldname in ("item_name", "item_group", "barcodes", "brand", "stock_uom"):
out[fieldname] = item.get(fieldname)
child_doctype = args.doctype + ' Item'
meta = frappe.get_meta(child_doctype)
if meta.get_field("barcode"):
update_barcode_value(out)
return out
def update_barcode_value(out):
from erpnext.accounts.doctype.sales_invoice.pos import get_barcode_data
barcode_data = get_barcode_data([out])
# If item has one barcode then update the value of the barcode field
if barcode_data and len(barcode_data.get(out.item_code)) == 1:
out['barcode'] = barcode_data.get(out.item_code)[0]
@frappe.whitelist()
def calculate_service_end_date(args, item=None):
args = process_args(args)
if not item:
item = frappe.get_cached_doc("Item", args.item_code)
doctype = args.get("parenttype") or args.get("doctype")
if doctype == "Sales Invoice":
enable_deferred = "enable_deferred_revenue"
no_of_months = "no_of_months"
account = "deferred_revenue_account"
else:
enable_deferred = "enable_deferred_expense"
no_of_months = "no_of_months_exp"
account = "deferred_expense_account"
service_start_date = args.service_start_date if args.service_start_date else args.transaction_date
service_end_date = add_months(service_start_date, item.get(no_of_months))
deferred_detail = {
"service_start_date": service_start_date,
"service_end_date": service_end_date
}
deferred_detail[enable_deferred] = item.get(enable_deferred)
deferred_detail[account] = get_default_deferred_account(args, item, fieldname=account)
return deferred_detail
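# Worked example (hypothetical values): for a Sales Invoice item with
# enable_deferred_revenue set and no_of_months = 12, a service_start_date of
# 2019-01-01 yields a service_end_date of 2020-01-01 via add_months().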
def get_default_income_account(args, item, item_group):
return (item.get("income_account")
or item_group.get("income_account")
or args.income_account)
def get_default_expense_account(args, item, item_group):
return (item.get("expense_account")
or item_group.get("expense_account")
or args.expense_account)
def get_default_deferred_account(args, item, fieldname=None):
if item.get("enable_deferred_revenue") or item.get("enable_deferred_expense"):
return (item.get(fieldname)
or args.get(fieldname)
or frappe.get_cached_value('Company', args.company, "default_"+fieldname))
else:
return None
def get_default_cost_center(args, item, item_group, company=None):
cost_center = None
if args.get('project'):
cost_center = frappe.db.get_value("Project", args.get("project"), "cost_center", cache=True)
if not cost_center:
if args.get('customer'):
cost_center = item.get('selling_cost_center') or item_group.get('selling_cost_center')
else:
cost_center = item.get('buying_cost_center') or item_group.get('buying_cost_center')
cost_center = cost_center or args.get("cost_center")
if (company and cost_center
and frappe.get_cached_value("Cost Center", cost_center, "company") != company):
return None
return cost_center
def get_default_supplier(args, item, item_group):
return (item.get("default_supplier")
or item_group.get("default_supplier"))
def get_price_list_rate(args, item_doc, out):
meta = frappe.get_meta(args.parenttype or args.doctype)
if meta.get_field("currency") or args.get('currency'):
pl_details = get_price_list_currency_and_exchange_rate(args)
args.update(pl_details)
validate_price_list(args)
if meta.get_field("currency"):
validate_conversion_rate(args, meta)
price_list_rate = get_price_list_rate_for(args, item_doc.name) or 0
# variant
if not price_list_rate and item_doc.variant_of:
price_list_rate = get_price_list_rate_for(args, item_doc.variant_of)
# insert in database
if not price_list_rate:
if args.price_list and args.rate:
insert_item_price(args)
return {}
out.price_list_rate = flt(price_list_rate) * flt(args.plc_conversion_rate) \
/ flt(args.conversion_rate)
if not out.price_list_rate and args.transaction_type=="buying":
from erpnext.stock.doctype.item.item import get_last_purchase_details
out.update(get_last_purchase_details(item_doc.name,
args.name, args.conversion_rate))
def insert_item_price(args):
"""Insert Item Price if Price List and Price List Rate are specified and currency is the same"""
if frappe.db.get_value("Price List", args.price_list, "currency", cache=True) == args.currency \
and cint(frappe.db.get_single_value("Stock Settings", "auto_insert_price_list_rate_if_missing")):
if frappe.has_permission("Item Price", "write"):
price_list_rate = (args.rate / args.get('conversion_factor')
if args.get("conversion_factor") else args.rate)
item_price = frappe.db.get_value('Item Price',
{'item_code': args.item_code, 'price_list': args.price_list, 'currency': args.currency},
['name', 'price_list_rate'], as_dict=1)
if item_price and item_price.name:
if item_price.price_list_rate != price_list_rate:
frappe.db.set_value('Item Price', item_price.name, "price_list_rate", price_list_rate)
frappe.msgprint(_("Item Price updated for {0} in Price List {1}").format(args.item_code,
args.price_list), alert=True)
else:
item_price = frappe.get_doc({
"doctype": "Item Price",
"price_list": args.price_list,
"item_code": args.item_code,
"currency": args.currency,
"price_list_rate": price_list_rate
})
item_price.insert()
frappe.msgprint(_("Item Price added for {0} in Price List {1}").format(args.item_code,
args.price_list), alert=True)
def get_item_price(args, item_code, ignore_party=False):
"""
	Get name, price_list_rate and uom from Item Price, filtered by the given conditions.
	:param args: dict (or frappe._dict) with mandatory fields price_list, uom and
		optional fields min_qty, transaction_date, customer, supplier
	:param item_code: str, Item Doctype field item_code
"""
args['item_code'] = item_code
conditions = """where item_code=%(item_code)s
and price_list=%(price_list)s
and ifnull(uom, '') in ('', %(uom)s)"""
if not ignore_party:
if args.get("customer"):
conditions += " and customer=%(customer)s"
elif args.get("supplier"):
conditions += " and supplier=%(supplier)s"
else:
conditions += " and (customer is null or customer = '') and (supplier is null or supplier = '')"
if args.get('min_qty'):
conditions += " and ifnull(min_qty, 0) <= %(min_qty)s"
if args.get('transaction_date'):
conditions += """ and %(transaction_date)s between
ifnull(valid_from, '2000-01-01') and ifnull(valid_upto, '2500-12-31')"""
return frappe.db.sql(""" select name, price_list_rate, uom
from `tabItem Price` {conditions}
order by uom desc, min_qty desc """.format(conditions=conditions), args)
def get_price_list_rate_for(args, item_code):
"""
	Return the price list rate based on the min_qty of each Item Price.
	For example, if the desired qty is 10 and Item Prices exist
	for min_qty 9 and min_qty 20, the Item Price with min_qty 9 is returned,
	being the best fit among the available min_qty values.
:param customer: link to Customer DocType
:param supplier: link to Supplier DocType
:param price_list: str (Standard Buying or Standard Selling)
:param item_code: str, Item Doctype field item_code
:param qty: Desired Qty
:param transaction_date: Date of the price
"""
item_price_args = {
"item_code": item_code,
"price_list": args.get('price_list'),
"customer": args.get('customer'),
"supplier": args.get('supplier'),
"uom": args.get('uom'),
"min_qty": args.get('qty'),
"transaction_date": args.get('transaction_date'),
}
item_price_data = 0
price_list_rate = get_item_price(item_price_args, item_code)
if price_list_rate:
desired_qty = args.get("qty")
if desired_qty and check_packing_list(price_list_rate[0][0], desired_qty, item_code):
item_price_data = price_list_rate
else:
for field in ["customer", "supplier", "min_qty"]:
del item_price_args[field]
general_price_list_rate = get_item_price(item_price_args, item_code, ignore_party=args.get("ignore_party"))
if not general_price_list_rate and args.get("uom") != args.get("stock_uom"):
item_price_args["uom"] = args.get("stock_uom")
general_price_list_rate = get_item_price(item_price_args, item_code, ignore_party=args.get("ignore_party"))
if general_price_list_rate:
item_price_data = general_price_list_rate
if item_price_data:
if item_price_data[0][2] == args.get("uom"):
return item_price_data[0][1]
elif not args.get('price_list_uom_dependant'):
return flt(item_price_data[0][1] * flt(args.get("conversion_factor", 1)))
else:
return item_price_data[0][1]
def check_packing_list(price_list_rate_name, desired_qty, item_code):
"""
Check if the desired qty is within the increment of the packing list.
:param price_list_rate_name: Name of Item Price
	:param desired_qty: Desired Qty
	:param item_code: str, Item Doctype field item_code
"""
flag = True
item_price = frappe.get_doc("Item Price", price_list_rate_name)
if item_price.packing_unit:
packing_increment = desired_qty % item_price.packing_unit
if packing_increment != 0:
flag = False
return flag
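# Worked example (hypothetical values): with packing_unit = 5, a desired_qty
# of 12 gives 12 % 5 = 2, so the function returns False; a desired_qty of 15
# gives 15 % 5 = 0, so it returns True.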
def validate_price_list(args):
if args.get("price_list"):
if not frappe.db.get_value("Price List",
{"name": args.price_list, args.transaction_type: 1, "enabled": 1}):
throw(_("Price List {0} is disabled or does not exist").format(args.price_list))
elif not args.get("supplier"):
throw(_("Price List not selected"))
def validate_conversion_rate(args, meta):
from erpnext.controllers.accounts_controller import validate_conversion_rate
if (not args.conversion_rate
and args.currency==frappe.get_cached_value('Company', args.company, "default_currency")):
args.conversion_rate = 1.0
# validate currency conversion rate
validate_conversion_rate(args.currency, args.conversion_rate,
meta.get_label("conversion_rate"), args.company)
args.conversion_rate = flt(args.conversion_rate,
get_field_precision(meta.get_field("conversion_rate"),
frappe._dict({"fields": args})))
if args.price_list:
if (not args.plc_conversion_rate
and args.price_list_currency==frappe.db.get_value("Price List", args.price_list, "currency", cache=True)):
args.plc_conversion_rate = 1.0
# validate price list currency conversion rate
if not args.get("price_list_currency"):
throw(_("Price List Currency not selected"))
else:
validate_conversion_rate(args.price_list_currency, args.plc_conversion_rate,
meta.get_label("plc_conversion_rate"), args.company)
if meta.get_field("plc_conversion_rate"):
args.plc_conversion_rate = flt(args.plc_conversion_rate,
get_field_precision(meta.get_field("plc_conversion_rate"),
frappe._dict({"fields": args})))
def get_party_item_code(args, item_doc, out):
if args.transaction_type=="selling" and args.customer:
out.customer_item_code = None
if args.quotation_to and args.quotation_to != 'Customer':
return
customer_item_code = item_doc.get("customer_items", {"customer_name": args.customer})
if customer_item_code:
out.customer_item_code = customer_item_code[0].ref_code
else:
customer_group = frappe.get_cached_value("Customer", args.customer, "customer_group")
customer_group_item_code = item_doc.get("customer_items", {"customer_group": customer_group})
if customer_group_item_code and not customer_group_item_code[0].customer_name:
out.customer_item_code = customer_group_item_code[0].ref_code
if args.transaction_type=="buying" and args.supplier:
item_supplier = item_doc.get("supplier_items", {"supplier": args.supplier})
out.supplier_part_no = item_supplier[0].supplier_part_no if item_supplier else None
def get_pos_profile_item_details(company, args, pos_profile=None, update_data=False):
res = frappe._dict()
if not frappe.flags.pos_profile and not pos_profile:
pos_profile = frappe.flags.pos_profile = get_pos_profile(company, args.get('pos_profile'))
if pos_profile:
for fieldname in ("income_account", "cost_center", "warehouse", "expense_account"):
if (not args.get(fieldname) or update_data) and pos_profile.get(fieldname):
res[fieldname] = pos_profile.get(fieldname)
if res.get("warehouse"):
res.actual_qty = get_bin_details(args.item_code,
res.warehouse).get("actual_qty")
return res
@frappe.whitelist()
def get_pos_profile(company, pos_profile=None, user=None):
if pos_profile: return frappe.get_cached_doc('POS Profile', pos_profile)
if not user:
user = frappe.session['user']
condition = "pfu.user = %(user)s AND pfu.default=1"
if user and company:
condition = "pfu.user = %(user)s AND pf.company = %(company)s AND pfu.default=1"
pos_profile = frappe.db.sql("""SELECT pf.*
FROM
`tabPOS Profile` pf LEFT JOIN `tabPOS Profile User` pfu
ON
pf.name = pfu.parent
WHERE
{cond} AND pf.disabled = 0
""".format(cond = condition), {
'user': user,
'company': company
}, as_dict=1)
if not pos_profile and company:
pos_profile = frappe.db.sql("""SELECT pf.*
FROM
`tabPOS Profile` pf LEFT JOIN `tabPOS Profile User` pfu
ON
pf.name = pfu.parent
WHERE
pf.company = %(company)s AND pf.disabled = 0
""", {
'company': company
}, as_dict=1)
return pos_profile and pos_profile[0] or None
def get_serial_nos_by_fifo(args, sales_order=None):
if frappe.db.get_single_value("Stock Settings", "automatically_set_serial_nos_based_on_fifo"):
return "\n".join(frappe.db.sql_list("""select name from `tabSerial No`
where item_code=%(item_code)s and warehouse=%(warehouse)s and
sales_order=IF(%(sales_order)s IS NULL, sales_order, %(sales_order)s)
order by timestamp(purchase_date, purchase_time)
asc limit %(qty)s""",
{
"item_code": args.item_code,
"warehouse": args.warehouse,
"qty": abs(cint(args.stock_qty)),
"sales_order": sales_order
}))
def get_serial_no_batchwise(args, sales_order=None):
if frappe.db.get_single_value("Stock Settings", "automatically_set_serial_nos_based_on_fifo"):
return "\n".join(frappe.db.sql_list("""select name from `tabSerial No`
where item_code=%(item_code)s and warehouse=%(warehouse)s and
sales_order=IF(%(sales_order)s IS NULL, sales_order, %(sales_order)s)
and batch_no=IF(%(batch_no)s IS NULL, batch_no, %(batch_no)s) order
by timestamp(purchase_date, purchase_time) asc limit %(qty)s""", {
"item_code": args.item_code,
"warehouse": args.warehouse,
"batch_no": args.batch_no,
"qty": abs(cint(args.stock_qty)),
"sales_order": sales_order
}))
@frappe.whitelist()
def get_conversion_factor(item_code, uom):
variant_of = frappe.db.get_value("Item", item_code, "variant_of", cache=True)
filters = {"parent": item_code, "uom": uom}
if variant_of:
filters["parent"] = ("in", (item_code, variant_of))
conversion_factor = frappe.db.get_value("UOM Conversion Detail",
filters, "conversion_factor")
if not conversion_factor:
stock_uom = frappe.db.get_value("Item", item_code, "stock_uom")
conversion_factor = get_uom_conv_factor(uom, stock_uom)
return {"conversion_factor": conversion_factor or 1.0}
@frappe.whitelist()
def get_projected_qty(item_code, warehouse):
return {"projected_qty": frappe.db.get_value("Bin",
{"item_code": item_code, "warehouse": warehouse}, "projected_qty")}
@frappe.whitelist()
def get_bin_details(item_code, warehouse):
return frappe.db.get_value("Bin", {"item_code": item_code, "warehouse": warehouse},
["projected_qty", "actual_qty", "reserved_qty"], as_dict=True, cache=True) \
or {"projected_qty": 0, "actual_qty": 0, "reserved_qty": 0}
@frappe.whitelist()
def get_serial_no_details(item_code, warehouse, stock_qty, serial_no):
args = frappe._dict({"item_code":item_code, "warehouse":warehouse, "stock_qty":stock_qty, "serial_no":serial_no})
serial_no = get_serial_no(args)
return {'serial_no': serial_no}
@frappe.whitelist()
def get_bin_details_and_serial_nos(item_code, warehouse, has_batch_no, stock_qty=None, serial_no=None):
bin_details_and_serial_nos = {}
bin_details_and_serial_nos.update(get_bin_details(item_code, warehouse))
if flt(stock_qty) > 0:
if has_batch_no:
args = frappe._dict({"item_code":item_code, "warehouse":warehouse, "stock_qty":stock_qty})
serial_no = get_serial_no(args)
bin_details_and_serial_nos.update({'serial_no': serial_no})
return bin_details_and_serial_nos
bin_details_and_serial_nos.update(get_serial_no_details(item_code, warehouse, stock_qty, serial_no))
return bin_details_and_serial_nos
@frappe.whitelist()
def get_batch_qty_and_serial_no(batch_no, stock_qty, warehouse, item_code, has_serial_no):
batch_qty_and_serial_no = {}
batch_qty_and_serial_no.update(get_batch_qty(batch_no, warehouse, item_code))
if (flt(batch_qty_and_serial_no.get('actual_batch_qty')) >= flt(stock_qty)) and has_serial_no:
args = frappe._dict({"item_code":item_code, "warehouse":warehouse, "stock_qty":stock_qty, "batch_no":batch_no})<|fim▁hole|> batch_qty_and_serial_no.update({'serial_no': serial_no})
return batch_qty_and_serial_no
@frappe.whitelist()
def get_batch_qty(batch_no, warehouse, item_code):
from erpnext.stock.doctype.batch import batch
if batch_no:
return {'actual_batch_qty': batch.get_batch_qty(batch_no, warehouse)}
@frappe.whitelist()
def apply_price_list(args, as_doc=False):
"""Apply pricelist on a document-like dict object and return as
{'parent': dict, 'children': list}
:param args: See below
:param as_doc: Updates value in the passed dict
	args = {
		"doctype": "",
		"name": "",
		"items": [{"doctype": "", "name": "", "item_code": "", "brand": "", "item_group": ""}, ...],
		"conversion_rate": 1.0,
		"selling_price_list": None,
		"price_list_currency": None,
		"price_list_uom_dependant": None,
		"plc_conversion_rate": 1.0,
		"supplier": None,
		"transaction_date": None,
		"buying_price_list": None,
		"ignore_pricing_rule": 0/1
	}
"""
args = process_args(args)
parent = get_price_list_currency_and_exchange_rate(args)
children = []
if "items" in args:
item_list = args.get("items")
args.update(parent)
for item in item_list:
args_copy = frappe._dict(args.copy())
args_copy.update(item)
item_details = apply_price_list_on_item(args_copy)
children.append(item_details)
if as_doc:
		args.price_list_currency = parent.price_list_currency
args.plc_conversion_rate = parent.plc_conversion_rate
if args.get('items'):
for i, item in enumerate(args.get('items')):
for fieldname in children[i]:
# if the field exists in the original doc
# update the value
if fieldname in item and fieldname not in ("name", "doctype"):
item[fieldname] = children[i][fieldname]
return args
else:
return {
"parent": parent,
"children": children
}
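# A minimal usage sketch (hypothetical names; a real call needs existing
# Company, Price List and Item records): apply a selling price list to a
# document-like dict and get the updated parent/children values back.
#
#     apply_price_list({
#         "doctype": "Sales Order",
#         "company": "My Company",
#         "conversion_rate": 1.0,
#         "selling_price_list": "Standard Selling",
#         "plc_conversion_rate": 1.0,
#         "items": [{"item_code": "ITEM-0001", "qty": 1}]
#     })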
def apply_price_list_on_item(args):
item_details = frappe._dict()
item_doc = frappe.get_doc("Item", args.item_code)
get_price_list_rate(args, item_doc, item_details)
item_details.update(get_pricing_rule_for_item(args))
return item_details
def get_price_list_currency(price_list):
if price_list:
result = frappe.db.get_value("Price List", {"name": price_list,
"enabled": 1}, ["name", "currency"], as_dict=True)
if not result:
throw(_("Price List {0} is disabled or does not exist").format(price_list))
return result.currency
def get_price_list_uom_dependant(price_list):
if price_list:
result = frappe.db.get_value("Price List", {"name": price_list,
"enabled": 1}, ["name", "price_not_uom_dependent"], as_dict=True)
if not result:
throw(_("Price List {0} is disabled or does not exist").format(price_list))
return not result.price_not_uom_dependent
def get_price_list_currency_and_exchange_rate(args):
if not args.price_list:
return {}
if args.doctype in ['Quotation', 'Sales Order', 'Delivery Note', 'Sales Invoice']:
args.update({"exchange_rate": "for_selling"})
elif args.doctype in ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']:
args.update({"exchange_rate": "for_buying"})
price_list_currency = get_price_list_currency(args.price_list)
price_list_uom_dependant = get_price_list_uom_dependant(args.price_list)
plc_conversion_rate = args.plc_conversion_rate
company_currency = get_company_currency(args.company)
if (not plc_conversion_rate) or (price_list_currency and args.price_list_currency \
and price_list_currency != args.price_list_currency):
# cksgb 19/09/2016: added args.transaction_date as posting_date argument for get_exchange_rate
plc_conversion_rate = get_exchange_rate(price_list_currency, company_currency,
args.transaction_date, args.exchange_rate) or plc_conversion_rate
return frappe._dict({
"price_list_currency": price_list_currency,
"price_list_uom_dependant": price_list_uom_dependant,
"plc_conversion_rate": plc_conversion_rate
})
@frappe.whitelist()
def get_default_bom(item_code=None):
if item_code:
bom = frappe.db.get_value("BOM", {"docstatus": 1, "is_default": 1, "is_active": 1, "item": item_code})
if bom:
return bom
def get_valuation_rate(item_code, company, warehouse=None):
item = get_item_defaults(item_code, company)
item_group = get_item_group_defaults(item_code, company)
# item = frappe.get_doc("Item", item_code)
if item.get("is_stock_item"):
if not warehouse:
warehouse = item.get("default_warehouse") or item_group.get("default_warehouse")
return frappe.db.get_value("Bin", {"item_code": item_code, "warehouse": warehouse},
["valuation_rate"], as_dict=True) or {"valuation_rate": 0}
elif not item.get("is_stock_item"):
		valuation_rate = frappe.db.sql("""select sum(base_net_amount) / sum(qty*conversion_factor)
from `tabPurchase Invoice Item`
where item_code = %s and docstatus=1""", item_code)
if valuation_rate:
return {"valuation_rate": valuation_rate[0][0] or 0.0}
else:
return {"valuation_rate": 0.0}
def get_gross_profit(out):
if out.valuation_rate:
out.update({
"gross_profit": ((out.base_rate - out.valuation_rate) * out.stock_qty)
})
return out
@frappe.whitelist()
def get_serial_no(args, serial_nos=None, sales_order=None):
serial_no = None
if isinstance(args, string_types):
args = json.loads(args)
args = frappe._dict(args)
if args.get('doctype') == 'Sales Invoice' and not args.get('update_stock'):
return ""
if args.get('warehouse') and args.get('stock_qty') and args.get('item_code'):
has_serial_no = frappe.get_value('Item', {'item_code': args.item_code}, "has_serial_no")
if args.get('batch_no') and has_serial_no == 1:
return get_serial_no_batchwise(args, sales_order)
elif has_serial_no == 1:
args = json.dumps({"item_code": args.get('item_code'),"warehouse": args.get('warehouse'),"stock_qty": args.get('stock_qty')})
args = process_args(args)
serial_no = get_serial_nos_by_fifo(args, sales_order)
if not serial_no and serial_nos:
# For POS
serial_no = serial_nos
return serial_no
def update_party_blanket_order(args, out):
blanket_order_details = get_blanket_order_details(args)
if blanket_order_details:
out.update(blanket_order_details)
@frappe.whitelist()
def get_blanket_order_details(args):
if isinstance(args, string_types):
args = frappe._dict(json.loads(args))
blanket_order_details = None
condition = ''
if args.item_code:
if args.customer and args.doctype == "Sales Order":
condition = ' and bo.customer=%(customer)s'
elif args.supplier and args.doctype == "Purchase Order":
condition = ' and bo.supplier=%(supplier)s'
if args.blanket_order:
condition += ' and bo.name =%(blanket_order)s'
if args.transaction_date:
condition += ' and bo.to_date>=%(transaction_date)s'
blanket_order_details = frappe.db.sql('''
select boi.rate as blanket_order_rate, bo.name as blanket_order
from `tabBlanket Order` bo, `tabBlanket Order Item` boi
where bo.company=%(company)s and boi.item_code=%(item_code)s
and bo.docstatus=1 and bo.name = boi.parent {0}
'''.format(condition), args, as_dict=True)
blanket_order_details = blanket_order_details[0] if blanket_order_details else ''
return blanket_order_details
def get_so_reservation_for_item(args):
reserved_so = None
if args.get('against_sales_order'):
if get_reserved_qty_for_so(args.get('against_sales_order'), args.get('item_code')):
reserved_so = args.get('against_sales_order')
elif args.get('against_sales_invoice'):
sales_order = frappe.db.sql("""select sales_order from `tabSales Invoice Item` where
parent=%s and item_code=%s""", (args.get('against_sales_invoice'), args.get('item_code')))
if sales_order and sales_order[0]:
if get_reserved_qty_for_so(sales_order[0][0], args.get('item_code')):
reserved_so = sales_order[0]
elif args.get("sales_order"):
if get_reserved_qty_for_so(args.get('sales_order'), args.get('item_code')):
reserved_so = args.get('sales_order')
return reserved_so
def get_reserved_qty_for_so(sales_order, item_code):
reserved_qty = frappe.db.sql("""select sum(qty) from `tabSales Order Item`
where parent=%s and item_code=%s and ensure_delivery_based_on_produced_serial_no=1
""", (sales_order, item_code))
if reserved_qty and reserved_qty[0][0]:
return reserved_qty[0][0]
else:
return 0<|fim▁end|>
|
serial_no = get_serial_no(args)
|
<|file_name|>monitor_correction_test.py<|end_file_name|><|fim▁begin|># Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: monitor_correction_test
:platform: Unix
:synopsis: tests the monitor correction
.. moduleauthor:: Aaron Parsons <[email protected]>
"""
import unittest
from savu.test import test_utils as tu
from savu.test.travis.framework_tests.plugin_runner_test import \
run_protected_plugin_runner
class MonitorCorrectionTest(unittest.TestCase):
def test_monitor_correction(self):
data_file = tu.get_test_data_path('mm.nxs')
process_file = tu.get_test_process_path('monitor_correction_test.nxs')
run_protected_plugin_runner(tu.set_options(data_file,
process_file=process_file))
if __name__ == "__main__":<|fim▁hole|><|fim▁end|>
|
unittest.main()
|
<|file_name|>exporter_spec.js<|end_file_name|><|fim▁begin|>/*globals describe, afterEach, beforeEach, it*/
var should = require('should'),
sinon = require('sinon'),
Promise = require('bluebird'),
// Stuff we're testing
db = require('../../server/data/db'),
errors = require('../../server/errors'),
exporter = require('../../server/data/export'),
schema = require('../../server/data/schema'),
settings = require('../../server/api/settings'),
schemaTables = Object.keys(schema.tables),
sandbox = sinon.sandbox.create();
require('should-sinon');
describe('Exporter', function () {
var versionStub, tablesStub, queryMock, knexMock, knexStub;
afterEach(function () {
sandbox.restore();
knexStub.restore();
});
describe('doExport', function () {
beforeEach(function () {
versionStub = sandbox.stub(schema.versioning, 'getDatabaseVersion').returns(new Promise.resolve('004'));
tablesStub = sandbox.stub(schema.commands, 'getTables').returns(schemaTables);
queryMock = {
select: sandbox.stub()
};
knexMock = sandbox.stub().returns(queryMock);
// this MUST use sinon, not sandbox, see sinonjs/sinon#781
knexStub = sinon.stub(db, 'knex', {get: function () { return knexMock; }});
});
it('should try to export all the correct tables', function (done) {
// Setup for success
queryMock.select.returns(new Promise.resolve({}));
// Execute
exporter.doExport().then(function (exportData) {
                // Number of tables, less the number of excluded tables
var expectedCallCount = schemaTables.length - 4;
should.exist(exportData);
versionStub.should.be.calledOnce();
tablesStub.should.be.calledOnce();
knexStub.get.should.be.called();
knexMock.should.be.called();
queryMock.select.should.be.called();
knexMock.should.have.callCount(expectedCallCount);
queryMock.select.should.have.callCount(expectedCallCount);
<|fim▁hole|> knexMock.getCall(4).should.be.calledWith('permissions');
knexMock.getCall(5).should.be.calledWith('permissions_users');
knexMock.getCall(6).should.be.calledWith('permissions_roles');
knexMock.getCall(7).should.be.calledWith('permissions_apps');
knexMock.getCall(8).should.be.calledWith('settings');
knexMock.getCall(9).should.be.calledWith('tags');
knexMock.getCall(10).should.be.calledWith('posts_tags');
knexMock.getCall(11).should.be.calledWith('apps');
knexMock.getCall(12).should.be.calledWith('app_settings');
knexMock.getCall(13).should.be.calledWith('app_fields');
knexMock.should.not.be.calledWith('clients');
knexMock.should.not.be.calledWith('client_trusted_domains');
knexMock.should.not.be.calledWith('refreshtokens');
knexMock.should.not.be.calledWith('accesstokens');
done();
}).catch(done);
});
it('should catch and log any errors', function (done) {
// Setup for failure
var errorStub = sandbox.stub(errors, 'logAndThrowError');
queryMock.select.returns(new Promise.reject({}));
// Execute
exporter.doExport().then(function (exportData) {
should.not.exist(exportData);
errorStub.should.be.calledOnce();
done();
}).catch(done);
});
});
describe('exportFileName', function () {
it('should return a correctly structured filename', function (done) {
var settingsStub = sandbox.stub(settings, 'read').returns(
new Promise.resolve({settings: [{value: 'testblog'}]})
);
exporter.fileName().then(function (result) {
should.exist(result);
settingsStub.should.be.calledOnce();
result.should.match(/^testblog\.ghost\.[0-9]{4}-[0-9]{2}-[0-9]{2}\.json$/);
done();
}).catch(done);
});
it('should return a correctly structured filename if settings is empty', function (done) {
var settingsStub = sandbox.stub(settings, 'read').returns(
new Promise.resolve()
);
exporter.fileName().then(function (result) {
should.exist(result);
settingsStub.should.be.calledOnce();
result.should.match(/^ghost\.[0-9]{4}-[0-9]{2}-[0-9]{2}\.json$/);
done();
}).catch(done);
});
it('should return a correctly structured filename if settings errors', function (done) {
var settingsStub = sandbox.stub(settings, 'read').returns(
new Promise.reject()
);
exporter.fileName().then(function (result) {
should.exist(result);
settingsStub.should.be.calledOnce();
result.should.match(/^ghost\.[0-9]{4}-[0-9]{2}-[0-9]{2}\.json$/);
done();
}).catch(done);
});
});
});<|fim▁end|>
|
knexMock.getCall(0).should.be.calledWith('posts');
knexMock.getCall(1).should.be.calledWith('users');
knexMock.getCall(2).should.be.calledWith('roles');
knexMock.getCall(3).should.be.calledWith('roles_users');
|
<|file_name|>balances.go<|end_file_name|><|fim▁begin|>/*
Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
Copyright (C) ITsysCOM GmbH
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package engine
import (
"errors"
"fmt"
"sort"
"strings"
"time"
"github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/utils"
)
// Balance can hold different units, such as seconds or monetary units
type Balance struct {
Uuid string //system wide unique
ID string // account wide unique
Value float64
ExpirationDate time.Time
Weight float64
DestinationIDs utils.StringMap
RatingSubject string
Categories utils.StringMap
SharedGroups utils.StringMap
Timings []*RITiming
TimingIDs utils.StringMap
Disabled bool
Factor ValueFactor
Blocker bool
precision int
account *Account // used to store ub reference for shared balances
dirty bool
}
func (b *Balance) Equal(o *Balance) bool {
if len(b.DestinationIDs) == 0 {
b.DestinationIDs = utils.StringMap{utils.MetaAny: true}
}
if len(o.DestinationIDs) == 0 {
o.DestinationIDs = utils.StringMap{utils.MetaAny: true}
}
return b.Uuid == o.Uuid &&
b.ID == o.ID &&
b.ExpirationDate.Equal(o.ExpirationDate) &&
b.Weight == o.Weight &&
b.DestinationIDs.Equal(o.DestinationIDs) &&
b.RatingSubject == o.RatingSubject &&
b.Categories.Equal(o.Categories) &&
b.SharedGroups.Equal(o.SharedGroups) &&
b.Disabled == o.Disabled &&
b.Blocker == o.Blocker
}
func (b *Balance) MatchFilter(o *BalanceFilter, skipIds, skipExpiry bool) bool {
if o == nil {
return true
}
if !skipIds && o.Uuid != nil && *o.Uuid != "" {
return b.Uuid == *o.Uuid
}
if !skipIds && o.ID != nil && *o.ID != "" {
return b.ID == *o.ID
}
if !skipExpiry {
if o.ExpirationDate != nil && !b.ExpirationDate.Equal(*o.ExpirationDate) {
return false
}
}
return (o.Weight == nil || b.Weight == *o.Weight) &&
(o.Blocker == nil || b.Blocker == *o.Blocker) &&
(o.Disabled == nil || b.Disabled == *o.Disabled) &&
(o.DestinationIDs == nil || b.DestinationIDs.Includes(*o.DestinationIDs)) &&
(o.Categories == nil || b.Categories.Includes(*o.Categories)) &&
(o.TimingIDs == nil || b.TimingIDs.Includes(*o.TimingIDs)) &&
(o.SharedGroups == nil || b.SharedGroups.Includes(*o.SharedGroups)) &&
(o.RatingSubject == nil || b.RatingSubject == *o.RatingSubject)
}
func (b *Balance) HardMatchFilter(o *BalanceFilter, skipIds bool) bool {
if o == nil {
return true
}
if !skipIds && o.Uuid != nil && *o.Uuid != "" {
return b.Uuid == *o.Uuid
}
if !skipIds && o.ID != nil && *o.ID != "" {
return b.ID == *o.ID
}
return (o.ExpirationDate == nil || b.ExpirationDate.Equal(*o.ExpirationDate)) &&
(o.Weight == nil || b.Weight == *o.Weight) &&
(o.Blocker == nil || b.Blocker == *o.Blocker) &&
(o.Disabled == nil || b.Disabled == *o.Disabled) &&
(o.DestinationIDs == nil || b.DestinationIDs.Equal(*o.DestinationIDs)) &&
(o.Categories == nil || b.Categories.Equal(*o.Categories)) &&
(o.TimingIDs == nil || b.TimingIDs.Equal(*o.TimingIDs)) &&
(o.SharedGroups == nil || b.SharedGroups.Equal(*o.SharedGroups)) &&
(o.RatingSubject == nil || b.RatingSubject == *o.RatingSubject)
}
// the default balance has the standard ID
func (b *Balance) IsDefault() bool {
return b.ID == utils.MetaDefault
}
// IsExpiredAt check if ExpirationDate is before time t
func (b *Balance) IsExpiredAt(t time.Time) bool {
return !b.ExpirationDate.IsZero() && b.ExpirationDate.Before(t)
}
func (b *Balance) IsActive() bool {
return b.IsActiveAt(time.Now())
}
func (b *Balance) IsActiveAt(t time.Time) bool {
if b.Disabled {
return false
}
if len(b.Timings) == 0 {
return true
}
for _, tim := range b.Timings {
if tim.IsActiveAt(t) {
return true
}
}
return false
}
func (b *Balance) MatchCategory(category string) bool {
return len(b.Categories) == 0 || b.Categories[category] == true
}
func (b *Balance) HasDestination() bool {
return len(b.DestinationIDs) > 0 && b.DestinationIDs[utils.MetaAny] == false
}
func (b *Balance) MatchDestination(destinationID string) bool {
return !b.HasDestination() || b.DestinationIDs[destinationID] == true
}
func (b *Balance) MatchActionTrigger(at *ActionTrigger) bool {
return b.HardMatchFilter(at.Balance, false)
}
func (b *Balance) Clone() *Balance {
if b == nil {
return nil
}
n := &Balance{
Uuid: b.Uuid,
ID: b.ID,
Value: b.Value, // this value is in seconds
ExpirationDate: b.ExpirationDate,
Weight: b.Weight,
RatingSubject: b.RatingSubject,
Categories: b.Categories,
SharedGroups: b.SharedGroups,
TimingIDs: b.TimingIDs,
Timings: b.Timings, // should not be a problem with aliasing
Blocker: b.Blocker,
Disabled: b.Disabled,
dirty: b.dirty,
}
if b.DestinationIDs != nil {
n.DestinationIDs = b.DestinationIDs.Clone()
}
return n
}
func (b *Balance) getMatchingPrefixAndDestID(dest string) (prefix, destID string) {
if len(b.DestinationIDs) != 0 && b.DestinationIDs[utils.MetaAny] == false {
for _, p := range utils.SplitPrefix(dest, MIN_PREFIX_MATCH) {
if destIDs, err := dm.GetReverseDestination(p, true, true, utils.NonTransactional); err == nil {
for _, dID := range destIDs {
if b.DestinationIDs[dID] == true {
return p, dID
}
}
}
}
}
return
}
// Returns the available number of seconds for a specified credit
func (b *Balance) GetMinutesForCredit(origCD *CallDescriptor, initialCredit float64) (duration time.Duration, credit float64) {
cd := origCD.Clone()
availableDuration := time.Duration(b.GetValue()) * time.Second
duration = availableDuration
credit = initialCredit
cc, err := b.GetCost(cd, false)
if err != nil {
utils.Logger.Err(fmt.Sprintf("Error getting new cost for balance subject: %v", err))
return 0, credit
}
if cc.deductConnectFee {
connectFee := cc.GetConnectFee()
if connectFee <= credit {
credit -= connectFee
// remove connect fee from the total cost
cc.Cost -= connectFee
} else {
return 0, credit
}
}
if cc.Cost > 0 {
duration = 0
for _, ts := range cc.Timespans {
ts.createIncrementsSlice()
if cd.MaxRate > 0 && cd.MaxRateUnit > 0 {
rate, _, rateUnit := ts.RateInterval.GetRateParameters(ts.GetGroupStart())
if rate/float64(rateUnit.Nanoseconds()) > cd.MaxRate/float64(cd.MaxRateUnit.Nanoseconds()) {
return
}
}
for _, incr := range ts.Increments {
if incr.Cost <= credit && availableDuration-incr.Duration >= 0 {
credit -= incr.Cost
duration += incr.Duration
availableDuration -= incr.Duration
} else {
return
}
}
}
}
return
}
// Gets the cost using the balance RatingSubject if present, otherwise
// returns a CallCost obtained using standard rating
func (b *Balance) GetCost(cd *CallDescriptor, getStandardIfEmpty bool) (*CallCost, error) {
// testing only
if cd.testCallcost != nil {
return cd.testCallcost, nil
}
if b.RatingSubject != "" && !strings.HasPrefix(b.RatingSubject, utils.MetaRatingSubjectPrefix) {
origSubject := cd.Subject
cd.Subject = b.RatingSubject
origAccount := cd.Account
cd.Account = cd.Subject
cd.RatingInfos = nil
cc, err := cd.getCost()
// restore orig values
cd.Subject = origSubject
cd.Account = origAccount
return cc, err
}
if getStandardIfEmpty {
cd.RatingInfos = nil
return cd.getCost()
} else {
cc := cd.CreateCallCost()
cc.Cost = 0
return cc, nil
}
}
func (b *Balance) GetValue() float64 {
return b.Value
}
func (b *Balance) AddValue(amount float64) {
b.SetValue(b.GetValue() + amount)
}
func (b *Balance) SubstractValue(amount float64) {
b.SetValue(b.GetValue() - amount)
}
func (b *Balance) SetValue(amount float64) {
b.Value = amount
b.Value = utils.Round(b.GetValue(), globalRoundingDecimals, utils.MetaRoundingMiddle)
b.dirty = true
}
func (b *Balance) SetDirty() {
b.dirty = true
}
// debitUnits will debit units for call descriptor.
// returns the amount debited within cc
func (b *Balance) debitUnits(cd *CallDescriptor, ub *Account, moneyBalances Balances, count bool, dryRun, debitConnectFee bool) (cc *CallCost, err error) {
if !b.IsActiveAt(cd.TimeStart) || b.GetValue() <= 0 {
return
}
if duration, err := utils.ParseZeroRatingSubject(cd.ToR, b.RatingSubject, config.CgrConfig().RalsCfg().BalanceRatingSubject); err == nil {
// we have *zero based units
cc = cd.CreateCallCost()
cc.Timespans = append(cc.Timespans, &TimeSpan{
TimeStart: cd.TimeStart,
TimeEnd: cd.TimeEnd,
})
ts := cc.Timespans[0]
ts.RoundToDuration(duration)
ts.RateInterval = &RateInterval{
Rating: &RIRate{
Rates: RateGroups{
&RGRate{
GroupIntervalStart: 0,
Value: 0,
RateIncrement: duration,
RateUnit: duration,
},
},
},
}
prefix, destid := b.getMatchingPrefixAndDestID(cd.Destination)
if prefix == "" {
prefix = cd.Destination
}
if destid == "" {
destid = utils.MetaAny
}
ts.setRatingInfo(&RatingInfo{
MatchedSubject: b.Uuid,
MatchedPrefix: prefix,
MatchedDestId: destid,
RatingPlanId: utils.MetaNone,
})
ts.createIncrementsSlice()
//log.Printf("CC: %+v", ts)
for incIndex, inc := range ts.Increments {
//log.Printf("INCREMENET: %+v", inc)
amount := float64(inc.Duration.Nanoseconds())
if b.Factor != nil {
amount = utils.Round(amount/b.Factor.GetValue(cd.ToR),
globalRoundingDecimals, utils.MetaRoundingUp)
}
if b.GetValue() >= amount {
b.SubstractValue(amount)
inc.BalanceInfo.Unit = &UnitInfo{
UUID: b.Uuid,
ID: b.ID,
Value: b.Value,
DestinationID: cc.Destination,
Consumed: amount,
ToR: cc.ToR,
RateInterval: nil,
}
inc.BalanceInfo.AccountID = ub.ID
inc.Cost = 0
inc.paid = true
if count {
ub.countUnits(amount, cc.ToR, cc, b)
}
} else {
inc.paid = false
// delete the rest of the unpaid increments/timespans
if incIndex == 0 {
// cut the entire current timespan
cc.Timespans = nil
} else {
ts.SplitByIncrement(incIndex)
}
if len(cc.Timespans) == 0 {
cc = nil
}
return cc, nil
}
}
} else {
// get the cost from balance
//log.Printf("::::::: %+v", cd)
var debitedConnectFeeBalance Balance
var ok bool
cc, err = b.GetCost(cd, true)
if err != nil {
return nil, err
}
if debitConnectFee {
// this is the first add, debit the connect fee
if ok, debitedConnectFeeBalance = ub.DebitConnectionFee(cc, moneyBalances, count, true); !ok {
// found blocker balance
return nil, nil
}
}
cc.Timespans.Decompress()
//log.Printf("CC: %+v", cc)
for tsIndex, ts := range cc.Timespans {
if ts.Increments == nil {
ts.createIncrementsSlice()
}
if ts.RateInterval == nil {
utils.Logger.Err(fmt.Sprintf("Nil RateInterval ERROR on TS: %+v, CC: %+v, from CD: %+v", ts, cc, cd))
return nil, errors.New("timespan with no rate interval assigned")
}
if tsIndex == 0 && ts.RateInterval.Rating.ConnectFee > 0 && debitConnectFee && cc.deductConnectFee && ok {
inc := &Increment{
Duration: 0,
Cost: ts.RateInterval.Rating.ConnectFee,
BalanceInfo: &DebitInfo{
Monetary: &MonetaryInfo{
UUID: debitedConnectFeeBalance.Uuid,
ID: debitedConnectFeeBalance.ID,
Value: debitedConnectFeeBalance.Value,
},
AccountID: ub.ID,
},
}
incs := []*Increment{inc}
ts.Increments = append(incs, ts.Increments...)
}
maxCost, strategy := ts.RateInterval.GetMaxCost()
for incIndex, inc := range ts.Increments {
if tsIndex == 0 && incIndex == 0 && ts.RateInterval.Rating.ConnectFee > 0 && debitConnectFee && cc.deductConnectFee && ok {
// go to next increment
continue
}
// debit minutes and money
amount := float64(inc.Duration.Nanoseconds())
if b.Factor != nil {
amount = utils.Round(amount/b.Factor.GetValue(cd.ToR), globalRoundingDecimals, utils.MetaRoundingUp)
}
cost := inc.Cost
inc.paid = false
if strategy == utils.MetaMaxCostDisconnect && cd.MaxCostSoFar >= maxCost {
// cut the entire current timespan
cc.maxCostDisconect = true
if dryRun {
if incIndex == 0 {
// cut the entire current timespan
cc.Timespans = cc.Timespans[:tsIndex]
} else {
ts.SplitByIncrement(incIndex)
cc.Timespans = cc.Timespans[:tsIndex+1]
}
return cc, nil
}
}
if strategy == utils.MetaMaxCostFree && cd.MaxCostSoFar >= maxCost {
cost, inc.Cost = 0.0, 0.0
inc.BalanceInfo.Monetary = &MonetaryInfo{
UUID: b.Uuid,
ID: b.ID,
Value: b.Value,
RateInterval: ts.RateInterval,
}
inc.BalanceInfo.AccountID = ub.ID
inc.paid = true
if count {
ub.countUnits(cost, utils.MetaMonetary, cc, b)
}
// go to next increment
continue
}
var moneyBal *Balance
for _, mb := range moneyBalances {
if mb.GetValue() >= cost {
moneyBal = mb
break
}
}
if cost != 0 && moneyBal == nil && (!dryRun || ub.AllowNegative) { // Fix for issue #685
utils.Logger.Warning(fmt.Sprintf("<RALs> Going negative on account %s with AllowNegative: false", cd.GetAccountKey()))
moneyBal = ub.GetDefaultMoneyBalance()
}
if b.GetValue() >= amount && (moneyBal != nil || cost == 0) {
b.SubstractValue(amount)
inc.BalanceInfo.Unit = &UnitInfo{
UUID: b.Uuid,
ID: b.ID,
Value: b.Value,
DestinationID: cc.Destination,
Consumed: amount,
ToR: cc.ToR,
RateInterval: ts.RateInterval,
}
inc.BalanceInfo.AccountID = ub.ID
if cost != 0 {
moneyBal.SubstractValue(cost)
inc.BalanceInfo.Monetary = &MonetaryInfo{
UUID: moneyBal.Uuid,
ID: moneyBal.ID,
Value: moneyBal.Value,
}
cd.MaxCostSoFar += cost
}
inc.paid = true
if count {
ub.countUnits(amount, cc.ToR, cc, b)
if cost != 0 {
ub.countUnits(cost, utils.MetaMonetary, cc, moneyBal)
}
}
} else {
inc.paid = false
// delete the rest of the unpaid increments/timespans
if incIndex == 0 {
// cut the entire current timespan
cc.Timespans = cc.Timespans[:tsIndex]
} else {
ts.SplitByIncrement(incIndex)
cc.Timespans = cc.Timespans[:tsIndex+1]
}
if len(cc.Timespans) == 0 {
cc = nil
}
return cc, nil
}
}
}
}
return
}
func (b *Balance) debitMoney(cd *CallDescriptor, ub *Account, moneyBalances Balances, count bool, dryRun, debitConnectFee bool) (cc *CallCost, err error) {
if !b.IsActiveAt(cd.TimeStart) || b.GetValue() <= 0 {
return
}
//log.Print("B: ", utils.ToJSON(b))
//log.Printf("}}}}}}} %+v", cd.testCallcost)
cc, err = b.GetCost(cd, true)
if err != nil {
return nil, err
}
var debitedConnectFeeBalance Balance
var ok bool
//log.Print("cc: " + utils.ToJSON(cc))
if debitConnectFee {
// this is the first add, debit the connect fee
if ok, debitedConnectFeeBalance = ub.DebitConnectionFee(cc, moneyBalances, count, true); !ok {
// balance is blocker
return nil, nil
}
}
cc.Timespans.Decompress()
//log.Printf("CallCost In Debit: %+v", cc)
//for _, ts := range cc.Timespans {
// log.Printf("CC_TS: %+v", ts.RateInterval.Rating.Rates[0])
//}
for tsIndex, ts := range cc.Timespans {
if ts.Increments == nil {
ts.createIncrementsSlice()
}
//log.Printf("TS: %+v", ts)
if ts.RateInterval == nil {
utils.Logger.Err(fmt.Sprintf("Nil RateInterval ERROR on TS: %+v, CC: %+v, from CD: %+v", ts, cc, cd))
return nil, errors.New("timespan with no rate interval assigned")
}
if tsIndex == 0 && ts.RateInterval.Rating.ConnectFee > 0 && debitConnectFee && cc.deductConnectFee && ok {
inc := &Increment{
Duration: 0,
Cost: ts.RateInterval.Rating.ConnectFee,
BalanceInfo: &DebitInfo{
Monetary: &MonetaryInfo{
UUID: debitedConnectFeeBalance.Uuid,
ID: debitedConnectFeeBalance.ID,
Value: debitedConnectFeeBalance.Value,
},<|fim▁hole|> AccountID: ub.ID,
},
}
incs := []*Increment{inc}
ts.Increments = append(incs, ts.Increments...)
}
maxCost, strategy := ts.RateInterval.GetMaxCost()
//log.Printf("Timing: %+v", ts.RateInterval.Timing)
//log.Printf("RGRate: %+v", ts.RateInterval.Rating)
for incIndex, inc := range ts.Increments {
// check standard subject tags
//log.Printf("INC: %+v", inc)
if tsIndex == 0 && incIndex == 0 && ts.RateInterval.Rating.ConnectFee > 0 && cc.deductConnectFee && ok {
// go to next increment
continue
}
amount := inc.Cost
inc.paid = false
if strategy == utils.MetaMaxCostDisconnect && cd.MaxCostSoFar >= maxCost {
// cut the entire current timespan
cc.maxCostDisconect = true
if dryRun {
if incIndex == 0 {
// cut the entire current timespan
cc.Timespans = cc.Timespans[:tsIndex]
} else {
ts.SplitByIncrement(incIndex)
cc.Timespans = cc.Timespans[:tsIndex+1]
}
return cc, nil
}
}
if strategy == utils.MetaMaxCostFree && cd.MaxCostSoFar >= maxCost {
amount, inc.Cost = 0.0, 0.0
inc.BalanceInfo.Monetary = &MonetaryInfo{
UUID: b.Uuid,
ID: b.ID,
Value: b.Value,
}
inc.BalanceInfo.AccountID = ub.ID
if b.RatingSubject != "" {
inc.BalanceInfo.Monetary.RateInterval = ts.RateInterval
}
inc.paid = true
if count {
ub.countUnits(amount, utils.MetaMonetary, cc, b)
}
//log.Printf("TS: %+v", cc.Cost)
// go to next increment
continue
}
if b.GetValue() >= amount {
b.SubstractValue(amount)
cd.MaxCostSoFar += amount
inc.BalanceInfo.Monetary = &MonetaryInfo{
UUID: b.Uuid,
ID: b.ID,
Value: b.Value,
}
inc.BalanceInfo.AccountID = ub.ID
if b.RatingSubject != "" {
inc.BalanceInfo.Monetary.RateInterval = ts.RateInterval
}
inc.paid = true
if count {
ub.countUnits(amount, utils.MetaMonetary, cc, b)
}
} else {
inc.paid = false
// delete the rest of the unpaid increments/timespans
if incIndex == 0 {
// cut the entire current timespan
cc.Timespans = cc.Timespans[:tsIndex]
} else {
ts.SplitByIncrement(incIndex)
cc.Timespans = cc.Timespans[:tsIndex+1]
}
if len(cc.Timespans) == 0 {
cc = nil
}
return cc, nil
}
}
}
//log.Printf("END: %+v", cd.testCallcost)
if len(cc.Timespans) == 0 {
cc = nil
}
return cc, nil
}
// AsBalanceSummary converts the balance into compressed information suitable for display
func (b *Balance) AsBalanceSummary(typ string) *BalanceSummary {
bd := &BalanceSummary{UUID: b.Uuid, ID: b.ID, Type: typ, Value: b.Value, Disabled: b.Disabled}
if bd.ID == "" {
bd.ID = b.Uuid
}
return bd
}
/*
Structure to store minute buckets according to weight, precision or price.
*/
type Balances []*Balance
func (bc Balances) Len() int {
return len(bc)
}
func (bc Balances) Swap(i, j int) {
bc[i], bc[j] = bc[j], bc[i]
}
// we need the better ones at the beginning
func (bc Balances) Less(j, i int) bool {
return bc[i].precision < bc[j].precision ||
(bc[i].precision == bc[j].precision && bc[i].Weight < bc[j].Weight)
}
func (bc Balances) Sort() {
sort.Sort(bc)
}
func (bc Balances) GetTotalValue() (total float64) {
for _, b := range bc {
if !b.IsExpiredAt(time.Now()) && b.IsActive() {
total += b.GetValue()
}
}
total = utils.Round(total, globalRoundingDecimals, utils.MetaRoundingMiddle)
return
}
func (bc Balances) Equal(o Balances) bool {
if len(bc) != len(o) {
return false
}
bc.Sort()
o.Sort()
for i := 0; i < len(bc); i++ {
if !bc[i].Equal(o[i]) {
return false
}
}
return true
}
func (bc Balances) Clone() Balances {
var newChain Balances
for _, b := range bc {
newChain = append(newChain, b.Clone())
}
return newChain
}
func (bc Balances) GetBalance(uuid string) *Balance {
for _, balance := range bc {
if balance.Uuid == uuid {
return balance
}
}
return nil
}
func (bc Balances) HasBalance(balance *Balance) bool {
for _, b := range bc {
if b.Equal(balance) {
return true
}
}
return false
}
func (bc Balances) SaveDirtyBalances(acc *Account) {
savedAccounts := make(map[string]*Account)
for _, b := range bc {
if b.account != nil && b.account != acc && b.dirty && savedAccounts[b.account.ID] == nil {
dm.SetAccount(b.account)
savedAccounts[b.account.ID] = b.account
}
}
if len(savedAccounts) != 0 {
for _, acnt := range savedAccounts {
acntSummary := acnt.AsAccountSummary()
cgrEv := &utils.CGREvent{
Tenant: acntSummary.Tenant,
ID: utils.GenUUID(),
Time: utils.TimePointer(time.Now()),
Event: acntSummary.AsMapInterface(),
Opts: map[string]interface{}{
utils.MetaEventType: utils.AccountUpdate,
},
}
if len(config.CgrConfig().RalsCfg().ThresholdSConns) != 0 {
var tIDs []string
if err := connMgr.Call(config.CgrConfig().RalsCfg().ThresholdSConns, nil,
utils.ThresholdSv1ProcessEvent, &ThresholdsArgsProcessEvent{
CGREvent: cgrEv,
}, &tIDs); err != nil &&
err.Error() != utils.ErrNotFound.Error() {
utils.Logger.Warning(
fmt.Sprintf("<AccountS> error: %s processing account event %+v with ThresholdS.", err.Error(), cgrEv))
}
}
if len(config.CgrConfig().RalsCfg().StatSConns) != 0 {
var stsIDs []string
if err := connMgr.Call(config.CgrConfig().RalsCfg().StatSConns, nil,
utils.StatSv1ProcessEvent, &StatsArgsProcessEvent{
CGREvent: cgrEv,
}, &stsIDs); err != nil &&
err.Error() != utils.ErrNotFound.Error() {
utils.Logger.Warning(
fmt.Sprintf("<AccountS> error: %s processing account event %+v with StatS.", err.Error(), cgrEv))
}
}
}
}
}
type ValueFactor map[string]float64
func (f ValueFactor) GetValue(tor string) float64 {
if value, ok := f[tor]; ok {
return value
}
return 1.0
}
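// Illustrative sketch of the fallback behavior above (the "*voice" key is
// an example value only, not a constant defined in this file):
//
//	vf := ValueFactor{"*voice": 60.0}
//	vf.GetValue("*voice") // 60.0
//	vf.GetValue("*data")  // 1.0, unknown ToRs get the neutral factor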
// BalanceSummary represents compressed information about a balance
type BalanceSummary struct {
UUID string // Balance UUID
ID string // Balance ID; falls back to the UUID if not defined
Type string // *voice, *data, etc
Initial float64 // initial value before the debit operation
Value float64
Disabled bool
}
// BalanceSummaries is a list of BalanceSummaries
type BalanceSummaries []*BalanceSummary
// BalanceSummaryWithUUD returns a BalanceSummary based on a UUID
func (bs BalanceSummaries) BalanceSummaryWithUUD(bsUUID string) (b *BalanceSummary) {
for _, blc := range bs {
if blc.UUID == bsUUID {
b = blc
break
}
}
return
}
// FieldAsInterface func to help EventCost FieldAsInterface
func (bl *BalanceSummary) FieldAsInterface(fldPath []string) (val interface{}, err error) {
if len(fldPath) != 1 {
return nil, utils.ErrNotFound
}
switch fldPath[0] {
default:
return nil, fmt.Errorf("unsupported field prefix: <%s>", fldPath[0])
case utils.UUID:
return bl.UUID, nil
case utils.ID:
return bl.ID, nil
case utils.Type:
return bl.Type, nil
case utils.Value:
return bl.Value, nil
case utils.Disabled:
return bl.Disabled, nil
}
}<|fim▁end|>
| |
<|file_name|>global.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package command
import (
"crypto/tls"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"strings"
"time"
"github.com/bgentry/speakeasy"
"go.etcd.io/etcd/clientv3"
"go.etcd.io/etcd/pkg/flags"
"go.etcd.io/etcd/pkg/srv"
"go.etcd.io/etcd/pkg/transport"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"go.uber.org/zap"
"google.golang.org/grpc/grpclog"
)
// GlobalFlags are flags that are defined globally
// and are inherited by all sub-commands.
type GlobalFlags struct {
Insecure bool
InsecureSkipVerify bool
InsecureDiscovery bool
Endpoints []string
DialTimeout time.Duration
CommandTimeOut time.Duration
KeepAliveTime time.Duration
KeepAliveTimeout time.Duration
DNSClusterServiceName string
TLS transport.TLSInfo
OutputFormat string
IsHex bool
User string
Password string
Debug bool
}
type secureCfg struct {
cert string
key string
cacert string
serverName string
insecureTransport bool
insecureSkipVerify bool
}
type authCfg struct {
username string
password string
}
type discoveryCfg struct {
domain string
insecure bool
serviceName string
}
var display printer = &simplePrinter{}
func initDisplayFromCmd(cmd *cobra.Command) {
isHex, err := cmd.Flags().GetBool("hex")
if err != nil {
ExitWithError(ExitError, err)
}
outputType, err := cmd.Flags().GetString("write-out")
if err != nil {
ExitWithError(ExitError, err)
}
if display = NewPrinter(outputType, isHex); display == nil {
ExitWithError(ExitBadFeature, errors.New("unsupported output format"))
}
}
type clientConfig struct {
endpoints []string
dialTimeout time.Duration
keepAliveTime time.Duration
keepAliveTimeout time.Duration
scfg *secureCfg
acfg *authCfg
}
type discardValue struct{}
func (*discardValue) String() string { return "" }
func (*discardValue) Set(string) error { return nil }
func (*discardValue) Type() string { return "" }
func clientConfigFromCmd(cmd *cobra.Command) *clientConfig {
fs := cmd.InheritedFlags()
if strings.HasPrefix(cmd.Use, "watch") {
// silence "pkg/flags: unrecognized environment variable ETCDCTL_WATCH_KEY=foo" warnings
// silence "pkg/flags: unrecognized environment variable ETCDCTL_WATCH_RANGE_END=bar" warnings
fs.AddFlag(&pflag.Flag{Name: "watch-key", Value: &discardValue{}})
fs.AddFlag(&pflag.Flag{Name: "watch-range-end", Value: &discardValue{}})
}
flags.SetPflagsFromEnv("ETCDCTL", fs)
debug, err := cmd.Flags().GetBool("debug")
if err != nil {
ExitWithError(ExitError, err)
}
if debug {
clientv3.SetLogger(grpclog.NewLoggerV2WithVerbosity(os.Stderr, os.Stderr, os.Stderr, 4))
fs.VisitAll(func(f *pflag.Flag) {
fmt.Fprintf(os.Stderr, "%s=%v\n", flags.FlagToEnv("ETCDCTL", f.Name), f.Value)
})
} else {
// WARNING logs contain important information like TLS misconfiguration, but spam
// too many routine connection disconnects to turn on by default.
//
// See https://github.com/etcd-io/etcd/pull/9623 for background
clientv3.SetLogger(grpclog.NewLoggerV2(ioutil.Discard, ioutil.Discard, os.Stderr))
}
cfg := &clientConfig{}
cfg.endpoints, err = endpointsFromCmd(cmd)
if err != nil {
ExitWithError(ExitError, err)
}
cfg.dialTimeout = dialTimeoutFromCmd(cmd)
cfg.keepAliveTime = keepAliveTimeFromCmd(cmd)
cfg.keepAliveTimeout = keepAliveTimeoutFromCmd(cmd)
cfg.scfg = secureCfgFromCmd(cmd)
cfg.acfg = authCfgFromCmd(cmd)
initDisplayFromCmd(cmd)
return cfg
}
func mustClientCfgFromCmd(cmd *cobra.Command) *clientv3.Config {
cc := clientConfigFromCmd(cmd)
cfg, err := newClientCfg(cc.endpoints, cc.dialTimeout, cc.keepAliveTime, cc.keepAliveTimeout, cc.scfg, cc.acfg)
if err != nil {
ExitWithError(ExitBadArgs, err)
}
return cfg
}
func mustClientFromCmd(cmd *cobra.Command) *clientv3.Client {
cfg := clientConfigFromCmd(cmd)
return cfg.mustClient()
}
func (cc *clientConfig) mustClient() *clientv3.Client {
cfg, err := newClientCfg(cc.endpoints, cc.dialTimeout, cc.keepAliveTime, cc.keepAliveTimeout, cc.scfg, cc.acfg)
if err != nil {
ExitWithError(ExitBadArgs, err)
}
client, err := clientv3.New(*cfg)
if err != nil {
ExitWithError(ExitBadConnection, err)
}
return client
}
func newClientCfg(endpoints []string, dialTimeout, keepAliveTime, keepAliveTimeout time.Duration, scfg *secureCfg, acfg *authCfg) (*clientv3.Config, error) {
// set tls if any one tls option set
var cfgtls *transport.TLSInfo
tlsinfo := transport.TLSInfo{}
tlsinfo.Logger, _ = zap.NewProduction()
if scfg.cert != "" {
tlsinfo.CertFile = scfg.cert
cfgtls = &tlsinfo
}
if scfg.key != "" {
tlsinfo.KeyFile = scfg.key
cfgtls = &tlsinfo
}
if scfg.cacert != "" {
tlsinfo.TrustedCAFile = scfg.cacert
cfgtls = &tlsinfo
}
if scfg.serverName != "" {
tlsinfo.ServerName = scfg.serverName
cfgtls = &tlsinfo
}
cfg := &clientv3.Config{
Endpoints: endpoints,
DialTimeout: dialTimeout,
DialKeepAliveTime: keepAliveTime,
DialKeepAliveTimeout: keepAliveTimeout,
}
if cfgtls != nil {
clientTLS, err := cfgtls.ClientConfig()
if err != nil {
return nil, err
}
cfg.TLS = clientTLS
}
// if key/cert is not given but the user wants a secure connection, we
// should still set up an empty TLS configuration for gRPC to establish
// a secure connection.
if cfg.TLS == nil && !scfg.insecureTransport {
cfg.TLS = &tls.Config{}
}
// If the user wants to skip TLS verification then we should set
// the InsecureSkipVerify flag in the TLS configuration.
if scfg.insecureSkipVerify && cfg.TLS != nil {
cfg.TLS.InsecureSkipVerify = true
}
if acfg != nil {
cfg.Username = acfg.username
cfg.Password = acfg.password
}
return cfg, nil
}
func argOrStdin(args []string, stdin io.Reader, i int) (string, error) {
if i < len(args) {
return args[i], nil
}
bytes, err := ioutil.ReadAll(stdin)
if string(bytes) == "" || err != nil {
return "", errors.New("no available argument and stdin")
}
return string(bytes), nil
}
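// Illustrative call sketch for the helper above (hypothetical args slice,
// not taken from a real command): index 1 is out of range here, so the
// value falls back to stdin:
//
//	val, err := argOrStdin([]string{"key"}, os.Stdin, 1)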
func dialTimeoutFromCmd(cmd *cobra.Command) time.Duration {
dialTimeout, err := cmd.Flags().GetDuration("dial-timeout")
if err != nil {
ExitWithError(ExitError, err)
}
return dialTimeout
}
func keepAliveTimeFromCmd(cmd *cobra.Command) time.Duration {
keepAliveTime, err := cmd.Flags().GetDuration("keepalive-time")
if err != nil {
ExitWithError(ExitError, err)
}
return keepAliveTime
}
func keepAliveTimeoutFromCmd(cmd *cobra.Command) time.Duration {
keepAliveTimeout, err := cmd.Flags().GetDuration("keepalive-timeout")
if err != nil {
ExitWithError(ExitError, err)
}
return keepAliveTimeout
}
func secureCfgFromCmd(cmd *cobra.Command) *secureCfg {
cert, key, cacert := keyAndCertFromCmd(cmd)
insecureTr := insecureTransportFromCmd(cmd)
skipVerify := insecureSkipVerifyFromCmd(cmd)
discoveryCfg := discoveryCfgFromCmd(cmd)
if discoveryCfg.insecure {
discoveryCfg.domain = ""
}
return &secureCfg{
cert: cert,
key: key,
cacert: cacert,
serverName: discoveryCfg.domain,
insecureTransport: insecureTr,
insecureSkipVerify: skipVerify,
}
}
func insecureTransportFromCmd(cmd *cobra.Command) bool {
insecureTr, err := cmd.Flags().GetBool("insecure-transport")
if err != nil {
ExitWithError(ExitError, err)
}
return insecureTr<|fim▁hole|>}
func insecureSkipVerifyFromCmd(cmd *cobra.Command) bool {
skipVerify, err := cmd.Flags().GetBool("insecure-skip-tls-verify")
if err != nil {
ExitWithError(ExitError, err)
}
return skipVerify
}
func keyAndCertFromCmd(cmd *cobra.Command) (cert, key, cacert string) {
var err error
if cert, err = cmd.Flags().GetString("cert"); err != nil {
ExitWithError(ExitBadArgs, err)
} else if cert == "" && cmd.Flags().Changed("cert") {
ExitWithError(ExitBadArgs, errors.New("empty string is passed to --cert option"))
}
if key, err = cmd.Flags().GetString("key"); err != nil {
ExitWithError(ExitBadArgs, err)
} else if key == "" && cmd.Flags().Changed("key") {
ExitWithError(ExitBadArgs, errors.New("empty string is passed to --key option"))
}
if cacert, err = cmd.Flags().GetString("cacert"); err != nil {
ExitWithError(ExitBadArgs, err)
} else if cacert == "" && cmd.Flags().Changed("cacert") {
ExitWithError(ExitBadArgs, errors.New("empty string is passed to --cacert option"))
}
return cert, key, cacert
}
func authCfgFromCmd(cmd *cobra.Command) *authCfg {
userFlag, err := cmd.Flags().GetString("user")
if err != nil {
ExitWithError(ExitBadArgs, err)
}
passwordFlag, err := cmd.Flags().GetString("password")
if err != nil {
ExitWithError(ExitBadArgs, err)
}
if userFlag == "" {
return nil
}
var cfg authCfg
if passwordFlag == "" {
splitted := strings.SplitN(userFlag, ":", 2)
if len(splitted) < 2 {
cfg.username = userFlag
cfg.password, err = speakeasy.Ask("Password: ")
if err != nil {
ExitWithError(ExitError, err)
}
} else {
cfg.username = splitted[0]
cfg.password = splitted[1]
}
} else {
cfg.username = userFlag
cfg.password = passwordFlag
}
return &cfg
}
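// Illustrative flag forms handled above (credentials are made up):
//
//	--user alice:secret            // username and password split on the first colon
//	--user alice                   // password prompted interactively via speakeasy
//	--user alice --password s3cr3t // --password used as-is, no colon splitting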
func insecureDiscoveryFromCmd(cmd *cobra.Command) bool {
discovery, err := cmd.Flags().GetBool("insecure-discovery")
if err != nil {
ExitWithError(ExitError, err)
}
return discovery
}
func discoverySrvFromCmd(cmd *cobra.Command) string {
domainStr, err := cmd.Flags().GetString("discovery-srv")
if err != nil {
ExitWithError(ExitBadArgs, err)
}
return domainStr
}
func discoveryDNSClusterServiceNameFromCmd(cmd *cobra.Command) string {
serviceNameStr, err := cmd.Flags().GetString("discovery-srv-name")
if err != nil {
ExitWithError(ExitBadArgs, err)
}
return serviceNameStr
}
func discoveryCfgFromCmd(cmd *cobra.Command) *discoveryCfg {
return &discoveryCfg{
domain: discoverySrvFromCmd(cmd),
insecure: insecureDiscoveryFromCmd(cmd),
serviceName: discoveryDNSClusterServiceNameFromCmd(cmd),
}
}
func endpointsFromCmd(cmd *cobra.Command) ([]string, error) {
eps, err := endpointsFromFlagValue(cmd)
if err != nil {
return nil, err
}
// If domain discovery returns no endpoints, check endpoints flag
if len(eps) == 0 {
eps, err = cmd.Flags().GetStringSlice("endpoints")
if err == nil {
for i, ip := range eps {
eps[i] = strings.TrimSpace(ip)
}
}
}
return eps, err
}
func endpointsFromFlagValue(cmd *cobra.Command) ([]string, error) {
discoveryCfg := discoveryCfgFromCmd(cmd)
// If we still don't have domain discovery, return nothing
if discoveryCfg.domain == "" {
return []string{}, nil
}
srvs, err := srv.GetClient("etcd-client", discoveryCfg.domain, discoveryCfg.serviceName)
if err != nil {
return nil, err
}
eps := srvs.Endpoints
if discoveryCfg.insecure {
return eps, err
}
// strip insecure connections
ret := []string{}
for _, ep := range eps {
if strings.HasPrefix(ep, "http://") {
fmt.Fprintf(os.Stderr, "ignoring discovered insecure endpoint %q\n", ep)
continue
}
ret = append(ret, ep)
}
return ret, err
}<|fim▁end|>
| |
<|file_name|>app.ts<|end_file_name|><|fim▁begin|>/// <reference path="../framework/angular.d.ts"/>
/// <reference path="../controllers/home-controller.ts"/>
/// <reference path="./factories/gamevars-factory.ts"/>
/// <reference path="./factories/work-factory.ts"/>
/// <reference path="./factories/food-factory.ts"/>
/// <reference path="./factories/sleep-factory.ts"/>
/// <reference path="./directives/choose-work.ts"/>
/// <reference path="./services/worklist-service.ts"/>
var appModule = angular.module("tamangularApp", []);
appModule.controller("HomeController",
["$scope", "gamevarsfactory", "$interval", "workfactory", "foodfactory", "sleepfactory",
($scope, gamevarsfactory, $interval, workfactory, foodfactory, sleepfactory) =>
new Application.Controllers.HomeController($scope, gamevarsfactory, $interval, workfactory, foodfactory, sleepfactory)]);
/** Declare factory for the game variables (life, mood, tired, money) **/
appModule.factory("gamevarsfactory", () => Application.Factories.gamevarsfactory);
// Go to work factory
appModule.factory("workfactory", () => Application.Factories.workfactory);
// Go to food factory
appModule.factory("foodfactory", () => Application.Factories.foodfactory);
// Go to sleep factory
appModule.factory("sleepfactory", () => Application.Factories.sleepfactory);
// Go to work list service
appModule.service("worklistService", ["$http", "$q", ($http, $q) => new Application.Services.worklistService($http, $q)]);
// Choose work directive<|fim▁hole|>appModule.directive("chooseworkDirective", ["worklistService", (worklistService) => new Application.Directives.chooseworkDirective(worklistService)]);<|fim▁end|>
| |
<|file_name|>EuropeanaExtractor.java<|end_file_name|><|fim▁begin|>/*
* WANDORA
* Knowledge Extraction, Management, and Publishing Application
* http://wandora.org
*
* Copyright (C) 2004-2016 Wandora Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package org.wandora.application.tools.extractors.europeana;
import org.wandora.application.Wandora;
import org.wandora.application.WandoraTool;
import org.wandora.application.contexts.Context;
/**
*
* @author nlaitinen
*/
public class EuropeanaExtractor extends AbstractEuropeanaExtractor {
private EuropeanaExtractorUI ui = null;
@Override
public void execute(Wandora wandora, Context context) {
try {
if(ui == null) {
ui = new EuropeanaExtractorUI();
}
<|fim▁hole|> ui.open(wandora, context);
if(ui.wasAccepted()) {
WandoraTool[] extrs = null;
try{
extrs = ui.getExtractors(this);
} catch(Exception e) {
log(e.getMessage());
return;
}
if(extrs != null && extrs.length > 0) {
setDefaultLogger();
int c = 0;
log("Performing Europeana API query...");
for(int i=0; i<extrs.length && !forceStop(); i++) {
try {
WandoraTool e = extrs[i];
e.setToolLogger(getDefaultLogger());
e.execute(wandora);
setState(EXECUTE);
c++;
}
catch(Exception e) {
log(e);
}
}
log("Ready.");
}
else {
log("Couldn't find a suitable subextractor to perform or there was an error with an extractor.");
}
}
}
catch(Exception e) {
singleLog(e);
}
if(ui != null && ui.wasAccepted()) setState(WAIT);
else setState(CLOSE);
}
}<|fim▁end|>
| |
<|file_name|>test.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 VIB/BEG/UGent - Tim Diels <[email protected]>
#
# This file is part of pytil.
#
# pytil is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pytil is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pytil. If not, see <http://www.gnu.org/licenses/>.
from contextlib import contextmanager
from pathlib import Path
from pytil import path as path_ # yay, 'resolving' circular dependencies
import pytest
import os
# Used by cedalion
@pytest.fixture
def temp_dir_cwd(tmpdir):
'''
pytest fixture which sets current working directory to a temporary
directory.
'''
original_cwd = Path.cwd()
os.chdir(str(tmpdir))
yield tmpdir
# ensure the user has full permissions on temp dir (so that pytest can remove it later)
path_.chmod(Path(str(tmpdir)), 0o700, '+', recursive=True)
os.chdir(str(original_cwd))
# Used by cedalion
@contextmanager
def assert_dir_unchanged(path, ignore=()):<|fim▁hole|>
Parameters
----------
path : ~pathlib.Path
Dir to assert for changes.
ignore : ~typing.Collection[~pathlib.Path]
Paths to ignore in comparison.
Examples
--------
>>> with assert_dir_unchanged(Path('input')):
... Path('input/child').mkdir()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AssertionError: ...
'''
def contents():
children = set(path.iterdir())
ignored_children = {
child
for child in children
for path in ignore
if path_.is_descendant_or_self(child, path)
}
return set(map(str, children - ignored_children))
expected = contents()
yield
actual = contents()
assert actual == expected, f'\nActual: {actual}\nExpected: {expected}'<|fim▁end|>
|
'''
Assert dir unchanged after code block.
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(slice_patterns)]
use std::io;
use std::io::Read;
use std::fs::{self, OpenOptions};
mod engine;
use engine::BrainFuck;
use std::env;<|fim▁hole|> let program = if let Ok(f) = fs::metadata(&src) {
if f.is_file() {
let mut s = String::new();
let mut f = OpenOptions::new().read(true).open(&src).unwrap();
let _ = f.read_to_string(&mut s);
s
} else {
src
}
} else {
src
};
let mut brain = BrainFuck::new(&program);
let res = brain.eval();
println!("{}", res.unwrap());
} else {
println!("Provide the src file/string as an argument");
}
}<|fim▁end|>
|
fn main() {
if let Some(src) = env::args().skip(1).take(1).next() {
|
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|>import csv
import datetime
import logging
import os
from celery.task import task
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.timezone import now
from libya_elections.constants import REMINDER_CHECKIN, REMINDER_REPORT, \
REMINDER_LAST_REPORT, REMINDER_CLOSE
from polling_reports.models import CenterOpen, PollingReport, StaffPhone
from register.models import Whitelist
from text_messages.utils import get_message
from .models import Batch, Broadcast
from .utils import Line<|fim▁hole|>logger = logging.getLogger(__name__)
def read_messages_from_file(file_path):
"""
Read uploaded bulk SMS file.
Generate tuples: (phone_number, message, from_shortcode).
Delete file afterward.
:param file_path:
:return:
"""
# We don't currently enable customization of the from_shortcode via file upload.
# Just use the default.
from_shortcode = None
with open(file_path, encoding='utf-8') as f:
reader = csv.reader(f)
for row in reader:
if any(row):
line = Line._make(row)
number = int(line.number)
yield number, line.message, from_shortcode
os.remove(file_path)
@task
def upload_bulk_sms_file(batch_id, file_path):
"""
Upload a batch of bulk SMS messages for the given batch. Delete
the temp file after we're done.
Assumes the file is valid (run is_file_valid on it first!)
:param batch_id:
:param _file:
:return: message_for_user
"""
batch = Batch.objects.get(id=batch_id)
batch.add_messages(read_messages_from_file(file_path))
batch.status = Batch.PENDING
batch.save()
# Break out some of the logic for sending polling report reminder messages
# for easier testing
class PollingReportReminderMessage(object):
"""
Capture some of the common logic for polling report reminders.
(Do not instantiate, use the subclasses.)
"""
def __init__(self, message_number, reminder_number):
self.message_number = message_number
self.reminder_number = reminder_number
def get_message_code(self):
raise NotImplementedError
def get_message_text(self):
context = {'message_number': self.message_number,
'reminder_number': self.reminder_number}
return get_message(self.get_message_code()).msg.format(**context)
def get_phone_numbers_to_send_to(self):
"""
Generator that yields (phone_number, message_text, from_shortcode) tuples
for the phone numbers that we need to send this reminder to.
"""
# Get the phone numbers we want to send to, excluding those that have
# already done the thing we want to remind them of
phone_numbers = self.PhoneModel.objects.exclude(phone_number__in=self.to_exclude())\
.values_list('phone_number', flat=True)
message_text = self.get_message_text()
# Set from_number to REPORTS_SHORT_CODE so that recipient can
# simply just respond to this message with their report.
from_shortcode = settings.REPORTS_SHORT_CODE
for phone_number in phone_numbers:
yield phone_number, message_text, from_shortcode
def to_exclude(self):
raise NotImplementedError
class CheckinReminderMessage(PollingReportReminderMessage):
"""
Message telling user to check in (activate phone, roll call)
"""
def __init__(self, message_number, reminder_number):
super(CheckinReminderMessage, self).__init__(message_number, reminder_number)
self.PhoneModel = Whitelist
def get_message_code(self):
return REMINDER_CHECKIN
def to_exclude(self):
"""Return list of phone numbers to exclude"""
midnight = now().replace(hour=0, minute=0, microsecond=0)
return CenterOpen.objects.filter(
creation_date__gte=midnight,
).values_list('phone_number', flat=True)
class PollingDayReportReminderMessage(PollingReportReminderMessage):
"""
Message telling user to send in polling day statistics report
"""
def __init__(self, message_number, reminder_number):
super(PollingDayReportReminderMessage, self).__init__(message_number, reminder_number)
self.PhoneModel = StaffPhone
def get_message_code(self):
return {
4: REMINDER_REPORT,
5: REMINDER_REPORT,
6: REMINDER_LAST_REPORT,
7: REMINDER_CLOSE,
}[self.message_number]
def to_exclude(self):
"""Return list of phone numbers to exclude"""
reporting_period = self.message_number - 3
one_day_ago = now() - datetime.timedelta(hours=24)
return PollingReport.objects.filter(
period_number=reporting_period,
creation_date__gte=one_day_ago,
).values_list('phone_number', flat=True)
@task
def message_reminder_task(message_number, reminder_number, audience, election):
"""
Make a batch to send out a bunch of reminder messages to a given audience,
iff they haven't sent us the expected report yet.
"""
logger.debug("Start message_reminder_task")
if audience not in ('whitelist', 'registered'):
raise ValueError("Unknown audience type %s - expected whitelist or registered" % audience)
# Batches need to be owned by somebody - pick a non-random superuser
user = get_user_model().objects.filter(is_active=True, is_superuser=True)[0]
batch = Batch.objects.create(
name="Reminder %d for message_number %d" % (reminder_number, message_number),
created_by=user,
priority=Batch.PRIORITY_TIME_CRITICAL)
# create the corresponding broadcast object
broadcast = Broadcast.objects.create(
created_by=batch.created_by,
batch=batch,
audience=Broadcast.STAFF_ONLY,
message=batch.name, # this message is only temporary
)
try:
if audience == 'whitelist':
msg = CheckinReminderMessage(message_number, reminder_number)
else:
msg = PollingDayReportReminderMessage(message_number, reminder_number)
batch.add_messages(msg.get_phone_numbers_to_send_to())
batch.status = Batch.APPROVED
batch.reviewed_by = user
batch.save()
# update the message for the broadcast.
broadcast.message = msg.get_message_text()
broadcast.save()
logger.debug("Batch saved")
except Exception:
logger.exception("Error while creating message reminder batch")
# If anything went wrong, don't leave partial batch lying around in unknown state
batch.delete()
broadcast.delete()
raise
@task
def approve_broadcast(broadcast_id):
"""Creates messages for each individual in the audience and
changes batch status to approved."""
broadcast = Broadcast.objects.get(pk=broadcast_id)
messages = broadcast.get_messages()
batch = broadcast.batch
batch.add_messages(messages)
batch.status = Batch.APPROVED
batch.save()<|fim▁end|>
| |
<|file_name|>extenv-not-defined-default.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() { env!("__HOPEFULLY_NOT_DEFINED__"); } //~ ERROR: environment variable `__HOPEFULLY_NOT_DEFINED__` not defined<|fim▁end|>
|
// http://rust-lang.org/COPYRIGHT.
|
<|file_name|>dbscan_.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
DBSCAN: Density-Based Spatial Clustering of Applications with Noise
"""
# Author: Robert Layton <[email protected]>
#
# License: BSD
import warnings
import numpy as np
from ..base import BaseEstimator
from ..metrics import pairwise_distances
from ..utils import check_random_state<|fim▁hole|>
def dbscan(X, eps=0.5, min_samples=5, metric='euclidean',
random_state=None):
"""Perform DBSCAN clustering from vector array or distance matrix.
Parameters
----------
X: array [n_samples, n_samples] or [n_samples, n_features]
Array of distances between samples, or a feature array.
The array is treated as a feature array unless the metric is given as
'precomputed'.
eps: float, optional
The maximum distance between two samples for them to be considered
as in the same neighborhood.
min_samples: int, optional
The number of samples in a neighborhood for a point to be considered
as a core point.
metric: string, or callable
The metric to use when calculating distance between instances in a
feature array. If metric is a string or callable, it must be one of
the options allowed by metrics.pairwise.calculate_distance for its
metric parameter.
If metric is "precomputed", X is assumed to be a distance matrix and
must be square.
random_state: numpy.RandomState, optional
The generator used to initialize the centers. Defaults to numpy.random.
Returns
-------
core_samples: array [n_core_samples]
Indices of core samples.
labels : array [n_samples]
Cluster labels for each point. Noisy samples are given the label -1.
Notes
-----
See examples/plot_dbscan.py for an example.
References
----------
Ester, M., H. P. Kriegel, J. Sander, and X. Xu, “A Density-Based
Algorithm for Discovering Clusters in Large Spatial Databases with Noise”.
In: Proceedings of the 2nd International Conference on Knowledge Discovery
and Data Mining, Portland, OR, AAAI Press, pp. 226–231. 1996
"""
X = np.asarray(X)
n = X.shape[0]
# If index order not given, create random order.
random_state = check_random_state(random_state)
index_order = np.arange(n)
random_state.shuffle(index_order)
D = pairwise_distances(X, metric=metric)
# Calculate neighborhood for all samples. This leaves the original point
# in, which needs to be considered later (i.e. point i is in the
# neighborhood of point i. While true, it's useless information)
neighborhoods = [np.where(x <= eps)[0] for x in D]
# Initially, all samples are noise.
labels = -np.ones(n)
# A list of all core samples found.
core_samples = []
# label_num is the label given to the new cluster
label_num = 0
# Look at all samples and determine if they are core.
# If they are then build a new cluster from them.
for index in index_order:
if labels[index] != -1 or len(neighborhoods[index]) < min_samples:
# This point is already classified, or not enough for a core point.
continue
core_samples.append(index)
labels[index] = label_num
# candidates for new core samples in the cluster.
candidates = [index]
while len(candidates) > 0:
new_candidates = []
# A candidate is a core point in the current cluster that has
# not yet been used to expand the current cluster.
for c in candidates:
noise = np.where(labels[neighborhoods[c]] == -1)[0]
noise = neighborhoods[c][noise]
labels[noise] = label_num
for neighbor in noise:
# check if its a core point as well
if len(neighborhoods[neighbor]) >= min_samples:
# is new core point
new_candidates.append(neighbor)
core_samples.append(neighbor)
# Update candidates for next round of cluster expansion.
candidates = new_candidates
# Current cluster finished.
# Next core point found will start a new cluster.
label_num += 1
return core_samples, labels
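# Minimal usage sketch for the function above (synthetic data; make_blobs is
# a separate scikit-learn helper and is assumed importable, it is not part of
# this module):
#
#     from sklearn.datasets import make_blobs
#     X, _ = make_blobs(n_samples=100, centers=3, random_state=0)
#     core_samples, labels = dbscan(X, eps=0.9, min_samples=5)
#     # samples labelled -1 are noise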
class DBSCAN(BaseEstimator):
"""Perform DBSCAN clustering from vector array or distance matrix.
DBSCAN - Density-Based Spatial Clustering of Applications with Noise.
Finds core samples of high density and expands clusters from them.
Good for data which contains clusters of similar density.
Parameters
----------
eps : float, optional
The maximum distance between two samples for them to be considered
as in the same neighborhood.
min_samples : int, optional
The number of samples in a neighborhood for a point to be considered
as a core point.
metric : string, or callable
The metric to use when calculating distance between instances in a
feature array. If metric is a string or callable, it must be one of
the options allowed by metrics.pairwise.calculate_distance for its
metric parameter.
If metric is "precomputed", X is assumed to be a distance matrix and
must be square.
random_state : numpy.RandomState, optional
The generator used to initialize the centers. Defaults to numpy.random.
Attributes
----------
`core_sample_indices_` : array, shape = [n_core_samples]
Indices of core samples.
`components_` : array, shape = [n_core_samples, n_features]
Copy of each core sample found by training.
`labels_` : array, shape = [n_samples]
Cluster labels for each point in the dataset given to fit().
Noisy samples are given the label -1.
Notes
-----
See examples/plot_dbscan.py for an example.
References
----------
Ester, M., H. P. Kriegel, J. Sander, and X. Xu, “A Density-Based
Algorithm for Discovering Clusters in Large Spatial Databases with Noise”.
In: Proceedings of the 2nd International Conference on Knowledge Discovery
and Data Mining, Portland, OR, AAAI Press, pp. 226–231. 1996
"""
def __init__(self, eps=0.5, min_samples=5, metric='euclidean',
random_state=None):
self.eps = eps
self.min_samples = min_samples
self.metric = metric
self.random_state = check_random_state(random_state)
def fit(self, X, **params):
"""Perform DBSCAN clustering from vector array or distance matrix.
Parameters
----------
X: array [n_samples, n_samples] or [n_samples, n_features]
Array of distances between samples, or a feature array.
The array is treated as a feature array unless the metric is
given as 'precomputed'.
params: dict
Overwrite keywords from __init__.
"""
if params:
warnings.warn('Passing parameters to fit methods is '
'depreciated', stacklevel=2)
self.set_params(**params)
self.core_sample_indices_, self.labels_ = dbscan(X,
**self.get_params())
self.components_ = X[self.core_sample_indices_].copy()
return self<|fim▁end|>
| |
<|file_name|>0011_auto__add_fare__add_unique_fare_source_fare_id__add_unique_shape_sourc.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Fare'
db.create_table('gtfs_fare', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['gtfs.Source'], null=True)),
('fare_id', self.gf('django.db.models.fields.CharField')(max_length=20, db_index=True)),
('price', self.gf('django.db.models.fields.FloatField')()),
('currency_type', self.gf('django.db.models.fields.CharField')(max_length=3)),
('payment_method', self.gf('django.db.models.fields.IntegerField')()),
('transfers', self.gf('django.db.models.fields.IntegerField')(null=True)),
('transfer_duration', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal('gtfs', ['Fare'])
# Adding unique constraint on 'Fare', fields ['source', 'fare_id']
db.create_unique('gtfs_fare', ['source_id', 'fare_id'])
# Adding unique constraint on 'Shape', fields ['source', 'shape_id']
db.create_unique('gtfs_shape', ['source_id', 'shape_id'])
# Adding field 'Zone.source'
db.add_column('gtfs_zone', 'source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['gtfs.Source'], null=True), keep_default=False)
# Adding unique constraint on 'Zone', fields ['source', 'zone_id']
db.create_unique('gtfs_zone', ['source_id', 'zone_id'])
# Deleting field 'FareRule.payment_method'
db.delete_column('gtfs_farerule', 'payment_method')
# Deleting field 'FareRule.price'
db.delete_column('gtfs_farerule', 'price')
# Deleting field 'FareRule.currency_type'
db.delete_column('gtfs_farerule', 'currency_type')
# Deleting field 'FareRule.transfer_duration'
db.delete_column('gtfs_farerule', 'transfer_duration')
# Deleting field 'FareRule.transfers'
db.delete_column('gtfs_farerule', 'transfers')
# Deleting field 'FareRule.farerule_id'
db.delete_column('gtfs_farerule', 'farerule_id')
# Deleting field 'FareRule.agency'
db.delete_column('gtfs_farerule', 'agency_id')
# Adding field 'FareRule.fare'
db.add_column('gtfs_farerule', 'fare', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['gtfs.Fare']), keep_default=False)
<|fim▁hole|> # Removing unique constraint on 'Zone', fields ['source', 'zone_id']
db.delete_unique('gtfs_zone', ['source_id', 'zone_id'])
# Removing unique constraint on 'Shape', fields ['source', 'shape_id']
db.delete_unique('gtfs_shape', ['source_id', 'shape_id'])
# Removing unique constraint on 'Fare', fields ['source', 'fare_id']
db.delete_unique('gtfs_fare', ['source_id', 'fare_id'])
# Deleting model 'Fare'
db.delete_table('gtfs_fare')
# Deleting field 'Zone.source'
db.delete_column('gtfs_zone', 'source_id')
# User chose to not deal with backwards NULL issues for 'FareRule.payment_method'
raise RuntimeError("Cannot reverse this migration. 'FareRule.payment_method' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'FareRule.price'
raise RuntimeError("Cannot reverse this migration. 'FareRule.price' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'FareRule.currency_type'
raise RuntimeError("Cannot reverse this migration. 'FareRule.currency_type' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'FareRule.transfer_duration'
raise RuntimeError("Cannot reverse this migration. 'FareRule.transfer_duration' and its values cannot be restored.")
# Adding field 'FareRule.transfers'
db.add_column('gtfs_farerule', 'transfers', self.gf('django.db.models.fields.IntegerField')(null=True), keep_default=False)
# User chose to not deal with backwards NULL issues for 'FareRule.farerule_id'
raise RuntimeError("Cannot reverse this migration. 'FareRule.farerule_id' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'FareRule.agency'
raise RuntimeError("Cannot reverse this migration. 'FareRule.agency' and its values cannot be restored.")
# Deleting field 'FareRule.fare'
db.delete_column('gtfs_farerule', 'fare_id')
models = {
'gtfs.agency': {
'Meta': {'unique_together': "(('source', 'agency_id'),)", 'object_name': 'Agency'},
'agency_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'name': ('django.db.models.fields.TextField', [], {}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'}),
'timezone': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'gtfs.block': {
'Meta': {'unique_together': "(('source', 'block_id'),)", 'object_name': 'Block'},
'block_id': ('django.db.models.fields.TextField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'})
},
'gtfs.calendar': {
'Meta': {'object_name': 'Calendar'},
'end_date': ('django.db.models.fields.DateField', [], {}),
'friday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'saturday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'service': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['gtfs.Service']", 'unique': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'sunday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'thursday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tuesday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wednesday': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'gtfs.calendardate': {
'Meta': {'object_name': 'CalendarDate'},
'date': ('django.db.models.fields.DateField', [], {}),
'exception_type': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Service']"})
},
'gtfs.fare': {
'Meta': {'unique_together': "(('source', 'fare_id'),)", 'object_name': 'Fare'},
'currency_type': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'fare_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payment_method': ('django.db.models.fields.IntegerField', [], {}),
'price': ('django.db.models.fields.FloatField', [], {}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'}),
'transfer_duration': ('django.db.models.fields.IntegerField', [], {}),
'transfers': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'gtfs.farerule': {
'Meta': {'object_name': 'FareRule'},
'contains': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fare_rule_contains'", 'null': 'True', 'to': "orm['gtfs.Zone']"}),
'destination': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fare_rule_destinations'", 'null': 'True', 'to': "orm['gtfs.Zone']"}),
'fare': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Fare']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'origin': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fare_rule_origins'", 'null': 'True', 'to': "orm['gtfs.Zone']"}),
'route': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Route']", 'null': 'True'})
},
'gtfs.frequency': {
'Meta': {'object_name': 'Frequency'},
'end_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'end_time_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'headway_secs': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'start_time_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Trip']"})
},
'gtfs.route': {
'Meta': {'unique_together': "(('agency', 'route_id'),)", 'object_name': 'Route'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Agency']", 'null': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'desc': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'long_name': ('django.db.models.fields.TextField', [], {}),
'route_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'route_type': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'text_color': ('django.db.models.fields.TextField', [], {'max_length': '6', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'blank': 'True'})
},
'gtfs.service': {
'Meta': {'unique_together': "(('source', 'service_id'),)", 'object_name': 'Service'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service_id': ('django.db.models.fields.TextField', [], {'max_length': '20', 'db_index': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'})
},
'gtfs.shape': {
'Meta': {'unique_together': "(('source', 'shape_id'),)", 'object_name': 'Shape'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.contrib.gis.db.models.fields.LineStringField', [], {'null': 'True'}),
'shape_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'})
},
'gtfs.source': {
'Meta': {'object_name': 'Source'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'gtfs.stop': {
'Meta': {'unique_together': "(('source', 'stop_id'),)", 'object_name': 'Stop'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'location_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'name': ('django.db.models.fields.TextField', [], {}),
'parent_station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Stop']", 'null': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'}),
'stop_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zone': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Zone']", 'null': 'True'})
},
'gtfs.stoptime': {
'Meta': {'object_name': 'StopTime'},
'arrival_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'arrival_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'departure_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'departure_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'drop_off_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pickup_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shape_dist_travelled': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'stop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Stop']"}),
'stop_headsign': ('django.db.models.fields.TextField', [], {}),
'stop_sequence': ('django.db.models.fields.IntegerField', [], {}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Trip']"})
},
'gtfs.transfer': {
'Meta': {'object_name': 'Transfer'},
'from_stop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'transfer_from_stop'", 'to': "orm['gtfs.Stop']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_transfer_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'to_stop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'transfer_to_stop'", 'to': "orm['gtfs.Stop']"}),
'transfer_type': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'gtfs.trip': {
'Meta': {'unique_together': "(('service', 'trip_id'), ('route', 'trip_id'))", 'object_name': 'Trip'},
'block': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Block']", 'null': 'True'}),
'direction_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'headsign': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'route': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Route']"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Service']"}),
'shape': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Shape']", 'null': 'True'}),
'short_name': ('django.db.models.fields.TextField', [], {}),
'trip_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'})
},
'gtfs.zone': {
'Meta': {'unique_together': "(('source', 'zone_id'),)", 'object_name': 'Zone'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'}),
'zone_id': ('django.db.models.fields.TextField', [], {'max_length': '20', 'db_index': 'True'})
}
}
complete_apps = ['gtfs']<|fim▁end|>
|
def backwards(self, orm):
|
<|file_name|>simulation_test.go<|end_file_name|><|fim▁begin|>package wrapper
import (
"encoding/json"
"testing"
"github.com/SpectoLabs/hoverfly/core/handlers/v2"
"github.com/SpectoLabs/hoverfly/core/matching/matchers"
. "github.com/onsi/gomega"
)
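// Note on the test setup below (an editorial summary, not from the original
// file): each test points a running Hoverfly instance at a simulation of its
// own admin API ("/api/v2/simulation"), then asserts that the wrapper
// functions behave correctly against that stubbed endpoint.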
func Test_ExportSimulation_GetsModeFromHoverfly(t *testing.T) {
RegisterTestingT(t)
responseBody := `{"simulation": true}`
simulationList := v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "GET",
},
},
Path: []v2.MatcherViewV5{
{<|fim▁hole|> Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 200,
Body: responseBody,
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
}
simulationListBytes, err := json.Marshal(simulationList)
Expect(err).To(BeNil())
simulationList.RequestResponsePairs[0].Response.Body = string(simulationListBytes[:])
hoverfly.ReplaceSimulation(simulationList)
simulationList.RequestResponsePairs[0].Response.Body = responseBody
view, err := ExportSimulation(target, "")
Expect(err).To(BeNil())
Expect(view).To(Equal(simulationList))
}
func Test_ExportSimulation_WithUrlPattern(t *testing.T) {
RegisterTestingT(t)
responseBody := `{"simulation": true}`
simulationList := v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "GET",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
Query: &v2.QueryMatcherViewV5{
"urlPattern": []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "test-(.+).com",
},
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 200,
Body: responseBody,
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
}
simulationListBytes, err := json.Marshal(simulationList)
Expect(err).To(BeNil())
simulationList.RequestResponsePairs[0].Response.Body = string(simulationListBytes[:])
hoverfly.ReplaceSimulation(simulationList)
simulationList.RequestResponsePairs[0].Response.Body = responseBody
view, err := ExportSimulation(target, "test-(.+).com")
Expect(err).To(BeNil())
Expect(view).To(Equal(simulationList))
}
func Test_ExportSimulation_ErrorsWhen_HoverflyNotAccessible(t *testing.T) {
RegisterTestingT(t)
_, err := ExportSimulation(inaccessibleTarget, "")
Expect(err).ToNot(BeNil())
Expect(err.Error()).To(Equal("Could not connect to Hoverfly at something:1234"))
}
func Test_ExportSimulation_ErrorsWhen_HoverflyReturnsNon200(t *testing.T) {
RegisterTestingT(t)
hoverfly.DeleteSimulation()
hoverfly.PutSimulation(v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "GET",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 400,
Body: "{\"error\":\"test error\"}",
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
})
_, err := ExportSimulation(target, "")
Expect(err).ToNot(BeNil())
Expect(err.Error()).To(Equal("Could not retrieve simulation\n\ntest error"))
}
func Test_ImportSimulation_SendsCorrectHTTPRequest(t *testing.T) {
RegisterTestingT(t)
hoverfly.DeleteSimulation()
hoverfly.PutSimulation(v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "PUT",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
Body: []v2.MatcherViewV5{
{
Matcher: "json",
Value: `{"simulation": true}`,
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 200,
Body: `{"simulation": true}`,
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
})
err := ImportSimulation(target, `{"simulation": true}`)
Expect(err).To(BeNil())
}
func Test_ImportSimulation_ErrorsWhen_HoverflyNotAccessible(t *testing.T) {
RegisterTestingT(t)
err := ImportSimulation(inaccessibleTarget, "")
Expect(err).ToNot(BeNil())
Expect(err.Error()).To(Equal("Could not connect to Hoverfly at something:1234"))
}
func Test_ImportSimulation_ErrorsWhen_HoverflyReturnsNon200(t *testing.T) {
RegisterTestingT(t)
hoverfly.DeleteSimulation()
hoverfly.PutSimulation(v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "PUT",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 400,
Body: "{\"error\":\"test error\"}",
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
})
err := ImportSimulation(target, "")
Expect(err).ToNot(BeNil())
Expect(err.Error()).To(Equal("Could not import simulation\n\ntest error"))
}
func Test_AddSimulation_SendsCorrectHTTPRequest(t *testing.T) {
RegisterTestingT(t)
hoverfly.DeleteSimulation()
hoverfly.PutSimulation(v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "POST",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
Body: []v2.MatcherViewV5{
{
Matcher: "json",
Value: `{"simulation": true}`,
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 200,
Body: `{"simulation": true}`,
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
})
err := AddSimulation(target, `{"simulation": true}`)
Expect(err).To(BeNil())
}
func Test_DeleteSimulations_SendsCorrectHTTPRequest(t *testing.T) {
RegisterTestingT(t)
hoverfly.DeleteSimulation()
hoverfly.PutSimulation(v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "DELETE",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 200,
Body: `{"simulation": true}`,
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
})
err := DeleteSimulations(target)
Expect(err).To(BeNil())
}
func Test_DeleteSimulations_ErrorsWhen_HoverflyNotAccessible(t *testing.T) {
RegisterTestingT(t)
err := DeleteSimulations(inaccessibleTarget)
Expect(err).ToNot(BeNil())
Expect(err.Error()).To(Equal("Could not connect to Hoverfly at something:1234"))
}
func Test_DeleteSimulations_ErrorsWhen_HoverflyReturnsNon200(t *testing.T) {
RegisterTestingT(t)
hoverfly.DeleteSimulation()
hoverfly.PutSimulation(v2.SimulationViewV5{
DataViewV5: v2.DataViewV5{
RequestResponsePairs: []v2.RequestMatcherResponsePairViewV5{
{
RequestMatcher: v2.RequestMatcherViewV5{
Method: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "DELETE",
},
},
Path: []v2.MatcherViewV5{
{
Matcher: matchers.Exact,
Value: "/api/v2/simulation",
},
},
},
Response: v2.ResponseDetailsViewV5{
Status: 400,
Body: "{\"error\":\"test error\"}",
},
},
},
},
MetaView: v2.MetaView{
SchemaVersion: "v2",
},
})
err := DeleteSimulations(target)
Expect(err).ToNot(BeNil())
Expect(err.Error()).To(Equal("Could not delete simulation\n\ntest error"))
}<|fim▁end|>
| |
<|file_name|>test_t1197_telem.py<|end_file_name|><|fim▁begin|>import json
import pytest
from common.utils.attack_utils import ScanStatus
from infection_monkey.model import VictimHost
from infection_monkey.telemetry.attack.t1197_telem import T1197Telem
DOMAIN_NAME = "domain-name"
IP = "127.0.0.1"
MACHINE = VictimHost(IP, DOMAIN_NAME)
STATUS = ScanStatus.USED
USAGE_STR = "[Usage info]"
@pytest.fixture
def T1197_telem_test_instance():
return T1197Telem(STATUS, MACHINE, USAGE_STR)
def test_T1197_send(T1197_telem_test_instance, spy_send_telemetry):
T1197_telem_test_instance.send()
expected_data = {
"status": STATUS.value,
"technique": "T1197",
"machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP},<|fim▁hole|> expected_data = json.dumps(expected_data, cls=T1197_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"<|fim▁end|>
|
"usage": USAGE_STR,
}
|
<|file_name|>pyRona.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# Copyright 2016-2018 Francisco Pina Martins <[email protected]>
# This file is part of pyRona.
# pyRona is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pyRona is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pyRona. If not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict
from sys import argv
import numpy as np
try:
import md_outlier_remover as mor
import plotters.general_plotter as gp
import file_parser as fp
from argparser import argument_parser
except ImportError:
import pyRona.md_outlier_remover as mor
import pyRona.plotters.general_plotter as gp
import pyRona.file_parser as fp
from pyRona.argparser import argument_parser
class RonaClass:
"""
Stores the RONA values for each covar
"""
POP_NAMES = []
def __init__(self, covar):
self.name = covar
self.pop_ronas = defaultdict(list)
self.corr_coef = {}
self.avg_ronas = []
self.stderr_ronas = []
def basic_stats(self, use_weights):
"""
Gets the average RONA and stdev per population for each associated
covariate. Stores the values in variables inside the class instance.
"""
if len(self.pop_ronas) > 1:
# Sort markers:
markers = sorted(list(self.corr_coef.keys()))
list_of_marker_values = np.array([self.pop_ronas[x] for x in
markers],
dtype=float)
corr_weights = np.array([self.corr_coef[x] for x in markers],
dtype=float)
for i in list_of_marker_values.T:
not_nans = ~np.isnan(i)
if use_weights is True:
if True in not_nans:
self.avg_ronas += [np.average(i[not_nans],
weights=corr_weights
[not_nans])]<|fim▁hole|> else:
self.avg_ronas += [np.average(i[not_nans])]
self.stderr_ronas += [np.std(i[not_nans])
/ np.sqrt(len(i[not_nans]))]
else:
self.avg_ronas = [x for x in self.pop_ronas.values()][0]
self.stderr_ronas = [0.0] * len(list(self.pop_ronas.values())[0])
def count_markers(self):
"""
Counts the number of markers in the instance.
"""
return len(self.pop_ronas)
def calculate_rona(marker_name, rona, present_covar, future_covar,
allele_freqs, plot, outliers, rtype):
"""
Calculates the "Risk of non adaptation" (RONA) of each popuation for a
given association.
Also plots the associations if requested.
"""
# Remove outliers
if outliers is True:
outlier_pos = mor.md_remove_outliers(present_covar, allele_freqs)
outlier_pos = np.array(outlier_pos, dtype='int')
for i in outlier_pos:
present_covar[i] = np.nan
future_covar[i] = np.nan
allele_freqs[i] = np.nan
rona.pop_names = np.delete(RonaClass.POP_NAMES, outlier_pos)
else:
rona.pop_names = RonaClass.POP_NAMES
# Calculate trendline:
not_nan = ~np.isnan(present_covar)
fit = np.polyfit(present_covar[not_nan], allele_freqs[not_nan], 1)
fit_fn = np.poly1d(fit)
# Get R²:
rona.corr_coef[marker_name] = np.corrcoef(present_covar[not_nan],
allele_freqs[not_nan])[1, 0] ** 2
for pres, fut, freq in zip(present_covar, future_covar, allele_freqs):
pres_distance = freq - fit_fn(pres)
fut_distance = freq - fit_fn(fut)
distance_diff = abs(pres_distance) - abs(fut_distance)
amplitude = max(allele_freqs) - min(allele_freqs)
if rtype == "diff":
rel_distance = distance_diff / amplitude
elif rtype == "absdiff":
rel_distance = abs(distance_diff) / amplitude
elif rtype == "dist":
rel_distance = abs(fut_distance)
rona.pop_ronas[marker_name] += [rel_distance]
if plot is not None:
gp.draw_individual_plots(present_covar, future_covar, rona,
marker_name, allele_freqs, fit_fn, plot)
def results_summary(ronas, use_weights):
"""
This function outputs a summary of the RONAS for each population and
covariate.
"""
pop_names = ronas[0].pop_names
for i, j in enumerate(pop_names):
if i == 0:
print("Covar\t%s" % "\t".join([x.name for x in ronas]))
print("#SNPs\t%s" % "\t".join([str(x.count_markers()) for x in
ronas]))
print("%s\t%s" % (j, "\t".join([str(x.avg_ronas[i]) for x in ronas])))
print("Min R^2\t%s" %
"\t".join([str(np.nanmin(list(x.corr_coef.values()))) for x in
ronas]))
print("Max R^2\t%s" %
"\t".join([str(np.nanmax(list(x.corr_coef.values()))) for x in
ronas]))
# if use_weights is True:
# means = [str(np.average(list(x.corr_coef.values()),
# weights=list(x.corr_coef.values()))) for x in
# ronas]
# else:
means = [str(np.nanmean(list(x.corr_coef.values()))) for x in ronas]
print("Average R^2\t%s" % "\t".join(means))
def ronas_filterer(ronas, use_weights, num_covars):
"""
Filters RONAS to remove immutable covars and returns only the top "n" most
represented covariables.
"""
sortable_representation = {}
for k, rona in ronas.items():
rona.basic_stats(use_weights)
sortable_representation[k] = len(rona.pop_ronas)
top_represented = sorted(sortable_representation,
key=sortable_representation.get,
reverse=True)[:num_covars]
top_ronas = [ronas[x] for x in top_represented]
return top_ronas
def main():
"""
Main function. Takes all the inputs as arguments and runs the remaining
functions of the program.
"""
if len(argv) < 2:
arg_list = ["-h"]
else:
arg_list = argv[1:]
arg = argument_parser(arg_list)
if arg.upstream == "baypass":
present_covariates = fp.parse_baypass_envfile(arg.present_covars_file)
future_covariates = fp.parse_baypass_envfile(arg.future_covars_file)
RonaClass.POP_NAMES = fp.popnames_parser(arg.popnames_file)
assocs = fp.baypass_summary_betai_parser(
arg.baypass_summary_betai_file,
arg.bayes_factor, arg.immutables)
al_freqs = fp.baypass_pij_parser(arg.baypass_pij_file, assocs)
elif arg.upstream == "lfmm":
present_covariates = fp.parse_lfmm_envfile(arg.present_covars_file)
future_covariates = fp.parse_lfmm_envfile(arg.future_covars_file)
assocs = fp.lfmm_results_parser(arg.lfmm_assoc_file,
arg.p_thres,
arg.immutables)
RonaClass.POP_NAMES, al_freqs = fp.lfmm_to_pop_allele_freqs(
arg.allele_freqs_file,
arg.present_covars_file,
assocs,
popnames=True)
ronas = {}
for assoc in assocs:
marker, covar = assoc
# Instantiate the class
if covar not in ronas:
rona = RonaClass(covar)
else:
rona = ronas[covar]
calculate_rona(marker, rona, present_covariates[int(covar) - 1],
future_covariates[int(covar) - 1],
al_freqs[marker],
arg.plots, arg.outliers, arg.rtype)
ronas[covar] = rona
ronas = ronas_filterer(ronas, arg.use_weights, arg.num_covars)
results_summary(ronas, arg.use_weights)
gp.draw_rona_plot(ronas, arg.outfile)
if arg.map_filename is not None:
# The map plotting module is only imported if a map plot is requested.
# This is to be able to keep 'cartopy' as an optional dependency.
try:
import plotters.map_plotter as mapper
except ImportError:
import pyRona.plotters.map_plotter as mapper
mapper.map_plotter(ronas, present_covariates[1], present_covariates[0],
arg.map_filename)
if __name__ == "__main__":
main()<|fim▁end|>
|
else:
self.avg_ronas += [np.nan]
|
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
from tests import mongoutils
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},<|fim▁hole|> 'default': {
'name': 'dumb',
},
},
INSTALLED_APPS=(
'tests',
),
MIDDLEWARE_CLASSES=(),
ROOT_URLCONF=None,
SECRET_KEY='foobar',
TEST_RUNNER='tests.mongoutils.TestRunner'
)
def runtests():
mongoutils.mongo_connect()
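# Splice the 'test' subcommand into argv, i.e. run the equivalent of
# "manage.py test <extra args>".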
argv = sys.argv[:1] + ['test'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()<|fim▁end|>
|
},
MONGO_DATABASES={
|
<|file_name|>i16.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for signed 16-bit integers (`i16` type)
#![allow(non_uppercase_statics)]
use prelude::*;
use default::Default;
use from_str::FromStr;
use num::{Bitwise, Bounded, CheckedAdd, CheckedSub, CheckedMul};
use num::{CheckedDiv, Zero, One, strconv};
use num::{ToStrRadix, FromStrRadix};
use option::{Option, Some, None};
use str;
use intrinsics;
int_module!(i16, 16)
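// Illustrative behaviour of the checked arithmetic implemented below
// (a sketch, not part of the original module):
//   assert_eq!(1i16.checked_add(&2), Some(3));
//   assert_eq!(32767i16.checked_add(&1), None); // overflow yields None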
impl Bitwise for i16 {
/// Returns the number of ones in the binary representation of the number.
#[inline]
fn count_ones(&self) -> i16 { unsafe { intrinsics::ctpop16(*self as u16) as i16 } }
/// Returns the number of leading zeros in the binary representation
/// of the number.
#[inline]
fn leading_zeros(&self) -> i16 { unsafe { intrinsics::ctlz16(*self as u16) as i16 } }
/// Returns the number of trailing zeros in the binary representation
/// of the number.
#[inline]
fn trailing_zeros(&self) -> i16 { unsafe { intrinsics::cttz16(*self as u16) as i16 } }
}
impl CheckedAdd for i16 {
#[inline]
fn checked_add(&self, v: &i16) -> Option<i16> {
unsafe {
let (x, y) = intrinsics::i16_add_with_overflow(*self, *v);
if y { None } else { Some(x) }
}
}
}
impl CheckedSub for i16 {
#[inline]
fn checked_sub(&self, v: &i16) -> Option<i16> {<|fim▁hole|> }
}
impl CheckedMul for i16 {
#[inline]
fn checked_mul(&self, v: &i16) -> Option<i16> {
unsafe {
let (x, y) = intrinsics::i16_mul_with_overflow(*self, *v);
if y { None } else { Some(x) }
}
}
}<|fim▁end|>
|
unsafe {
let (x, y) = intrinsics::i16_sub_with_overflow(*self, *v);
if y { None } else { Some(x) }
}
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-13 18:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]<|fim▁hole|> name='ProxyGrantingTicket',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('session_key', models.CharField(blank=True, max_length=255, null=True)),
('pgtiou', models.CharField(blank=True, max_length=255, null=True)),
('pgt', models.CharField(blank=True, max_length=255, null=True)),
('date', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='SessionTicket',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('session_key', models.CharField(max_length=255)),
('ticket', models.CharField(max_length=255)),
],
),
migrations.AlterUniqueTogether(
name='proxygrantingticket',
unique_together=set([('session_key', 'user')]),
),
]<|fim▁end|>
|
operations = [
migrations.CreateModel(
|
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for meetingroom project.
This module contains the WSGI application used by Django's development server<|fim▁hole|>named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meetingroom.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)<|fim▁end|>
|
and any production WSGI deployments. It should expose a module-level variable
|
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use super::timestamp::TimeStamp;
use bitflags::bitflags;
use byteorder::{ByteOrder, NativeEndian};
use collections::HashMap;
use kvproto::kvrpcpb;
use std::fmt::{self, Debug, Display, Formatter};
use tikv_util::codec;
use tikv_util::codec::bytes;
use tikv_util::codec::bytes::BytesEncoder;
use tikv_util::codec::number::{self, NumberEncoder};
// Short value max len must <= 255.
pub const SHORT_VALUE_MAX_LEN: usize = 255;
pub const SHORT_VALUE_PREFIX: u8 = b'v';
pub fn is_short_value(value: &[u8]) -> bool {
value.len() <= SHORT_VALUE_MAX_LEN
}
/// Value type which is essentially raw bytes.
pub type Value = Vec<u8>;
/// Key-value pair type.
///
/// The value is simply raw bytes; the key is a little bit tricky, which is
/// encoded bytes.
pub type KvPair = (Vec<u8>, Value);
/// Key type.
///
/// Keys have 2 types of binary representation - raw and encoded. The raw
/// representation is for public interface, the encoded representation is for
/// internal storage. We can get both representations from an instance of this
/// type.
///
/// Orthogonal to binary representation, keys may or may not embed a timestamp,
/// but this information is transparent to this type, the caller must use it
/// consistently.
#[derive(Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Key(Vec<u8>);
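// Illustrative round trip (a hedged sketch, not from the original source):
//   let key = Key::from_raw(b"k1").append_ts(5.into());
//   let (user_key, ts) = Key::split_on_ts_for(key.as_encoded()).unwrap();
//   // `user_key` is still in *encoded* form; use truncate_ts()/into_raw()
//   // to recover the raw bytes.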
/// Core functions for `Key`.
impl Key {
/// Creates a key from raw bytes.
#[inline]
pub fn from_raw(key: &[u8]) -> Key {
// adding extra length for appending timestamp
let len = codec::bytes::max_encoded_bytes_size(key.len()) + codec::number::U64_SIZE;
let mut encoded = Vec::with_capacity(len);
encoded.encode_bytes(key, false).unwrap();
Key(encoded)
}
/// Creates a key from raw bytes but returns None if the key is an empty slice.
#[inline]<|fim▁hole|> } else {
Some(Key::from_raw(key))
}
}
/// Gets and moves the raw representation of this key.
#[inline]
pub fn into_raw(self) -> Result<Vec<u8>, codec::Error> {
let mut k = self.0;
bytes::decode_bytes_in_place(&mut k, false)?;
Ok(k)
}
/// Gets the raw representation of this key.
#[inline]
pub fn to_raw(&self) -> Result<Vec<u8>, codec::Error> {
bytes::decode_bytes(&mut self.0.as_slice(), false)
}
/// Creates a key from encoded bytes vector.
#[inline]
pub fn from_encoded(encoded_key: Vec<u8>) -> Key {
Key(encoded_key)
}
/// Creates a key with reserved capacity for timestamp from encoded bytes slice.
#[inline]
pub fn from_encoded_slice(encoded_key: &[u8]) -> Key {
let mut k = Vec::with_capacity(encoded_key.len() + number::U64_SIZE);
k.extend_from_slice(encoded_key);
Key(k)
}
/// Gets the encoded representation of this key.
#[inline]
pub fn as_encoded(&self) -> &Vec<u8> {
&self.0
}
/// Gets and moves the encoded representation of this key.
#[inline]
pub fn into_encoded(self) -> Vec<u8> {
self.0
}
/// Creates a new key by appending a `u64` timestamp to this key.
#[inline]
pub fn append_ts(mut self, ts: TimeStamp) -> Key {
self.0.encode_u64_desc(ts.into_inner()).unwrap();
self
}
/// Gets the timestamp contained in this key.
///
/// Preconditions: the caller must ensure this is actually a timestamped
/// key.
#[inline]
pub fn decode_ts(&self) -> Result<TimeStamp, codec::Error> {
Self::decode_ts_from(&self.0)
}
/// Creates a new key by truncating the timestamp from this key.
///
/// Preconditions: the caller must ensure this is actually a timestamped key.
#[inline]
pub fn truncate_ts(mut self) -> Result<Key, codec::Error> {
let len = self.0.len();
if len < number::U64_SIZE {
// TODO: IMHO, this should be an assertion failure instead of
// returning an error. If this happens, it indicates a bug in
// the caller module, have to make code change to fix it.
//
// Even if it passed the length check, it still could be buggy,
// a better way is to introduce a type `TimestampedKey`, and
// functions to convert between `TimestampedKey` and `Key`.
// `TimestampedKey` is in a higher (MVCC) layer, while `Key` is
// in the core storage engine layer.
Err(codec::Error::KeyLength)
} else {
self.0.truncate(len - number::U64_SIZE);
Ok(self)
}
}
/// Split a ts encoded key, return the user key and timestamp.
#[inline]
pub fn split_on_ts_for(key: &[u8]) -> Result<(&[u8], TimeStamp), codec::Error> {
if key.len() < number::U64_SIZE {
Err(codec::Error::KeyLength)
} else {
let pos = key.len() - number::U64_SIZE;
let k = &key[..pos];
let mut ts = &key[pos..];
Ok((k, number::decode_u64_desc(&mut ts)?.into()))
}
}
/// Extract the user key from a ts encoded key.
#[inline]
pub fn truncate_ts_for(key: &[u8]) -> Result<&[u8], codec::Error> {
let len = key.len();
if len < number::U64_SIZE {
return Err(codec::Error::KeyLength);
}
Ok(&key[..key.len() - number::U64_SIZE])
}
/// Decode the timestamp from a ts encoded key.
#[inline]
pub fn decode_ts_from(key: &[u8]) -> Result<TimeStamp, codec::Error> {
let len = key.len();
if len < number::U64_SIZE {
return Err(codec::Error::KeyLength);
}
let mut ts = &key[len - number::U64_SIZE..];
Ok(number::decode_u64_desc(&mut ts)?.into())
}
/// Whether the user key part of a ts encoded key `ts_encoded_key` equals to the encoded
/// user key `user_key`.
///
/// There is an optimization in this function, which is to compare the last 8 encoded bytes
/// first, before comparing the rest. This is because in TiDB many records end with an
/// 8-byte row id, and in many situations only this part differs when this function is called.
//
// TODO: If the last 8 byte is memory aligned, it would be better.
#[inline]
pub fn is_user_key_eq(ts_encoded_key: &[u8], user_key: &[u8]) -> bool {
let user_key_len = user_key.len();
if ts_encoded_key.len() != user_key_len + number::U64_SIZE {
return false;
}
if user_key_len >= number::U64_SIZE {
// We compare last 8 bytes as u64 first, then compare the rest.
// TODO: Can we just use == to check the left part and right part? `memcmp` might
// be smart enough.
let left = NativeEndian::read_u64(&ts_encoded_key[user_key_len - 8..]);
let right = NativeEndian::read_u64(&user_key[user_key_len - 8..]);
if left != right {
return false;
}
ts_encoded_key[..user_key_len - 8] == user_key[..user_key_len - 8]
} else {
ts_encoded_key[..user_key_len] == user_key[..]
}
}
/// Returns whether the encoded key is encoded from `raw_key`.
pub fn is_encoded_from(&self, raw_key: &[u8]) -> bool {
bytes::is_encoded_from(&self.0, raw_key, false)
}
/// TiDB uses the same hash algorithm.
pub fn gen_hash(&self) -> u64 {
farmhash::fingerprint64(&self.to_raw().unwrap())
}
#[allow(clippy::len_without_is_empty)]
#[inline]
pub fn len(&self) -> usize {
self.0.len()
}
}
impl Clone for Key {
fn clone(&self) -> Self {
// the default Clone implementation uses self.len() to reserve capacity;
// for the sake of appending ts, we need to reserve more
let mut key = Vec::with_capacity(self.0.capacity());
key.extend_from_slice(&self.0);
Key(key)
}
}
impl Debug for Key {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", &log_wrappers::Value::key(&self.0))
}
}
impl Display for Key {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", &log_wrappers::Value::key(&self.0))
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum MutationType {
Put,
Delete,
Lock,
Insert,
Other,
}
/// A row mutation.
#[derive(Debug, Clone)]
pub enum RawMutation {
/// Put `Value` into `Key` with TTL. The TTL will overwrite the existing TTL value.
Put { key: Key, value: Value, ttl: u64 },
/// Delete `Key`.
Delete { key: Key },
}
impl RawMutation {
pub fn key(&self) -> &Key {
match self {
RawMutation::Put {
ref key,
value: _,
ttl: _,
} => key,
RawMutation::Delete { ref key } => key,
}
}
}
/// A row mutation.
#[derive(Debug, Clone)]
pub enum Mutation {
/// Put `Value` into `Key`, overwriting any existing value.
Put((Key, Value)),
/// Delete `Key`.
Delete(Key),
/// Set a lock on `Key`.
Lock(Key),
/// Put `Value` into `Key` if `Key` does not yet exist.
///
/// Returns `kvrpcpb::KeyError::AlreadyExists` if the key already exists.
Insert((Key, Value)),
/// Check that `key` does not exist.
///
/// Returns `kvrpcpb::KeyError::AlreadyExists` if the key already exists.
CheckNotExists(Key),
}
impl Mutation {
pub fn key(&self) -> &Key {
match self {
Mutation::Put((ref key, _)) => key,
Mutation::Delete(ref key) => key,
Mutation::Lock(ref key) => key,
Mutation::Insert((ref key, _)) => key,
Mutation::CheckNotExists(ref key) => key,
}
}
pub fn mutation_type(&self) -> MutationType {
match self {
Mutation::Put(_) => MutationType::Put,
Mutation::Delete(_) => MutationType::Delete,
Mutation::Lock(_) => MutationType::Lock,
Mutation::Insert(_) => MutationType::Insert,
_ => MutationType::Other,
}
}
pub fn into_key_value(self) -> (Key, Option<Value>) {
match self {
Mutation::Put((key, value)) => (key, Some(value)),
Mutation::Delete(key) => (key, None),
Mutation::Lock(key) => (key, None),
Mutation::Insert((key, value)) => (key, Some(value)),
Mutation::CheckNotExists(key) => (key, None),
}
}
pub fn should_not_exists(&self) -> bool {
matches!(self, Mutation::Insert(_) | Mutation::CheckNotExists(_))
}
pub fn should_not_write(&self) -> bool {
matches!(self, Mutation::CheckNotExists(_))
}
}
impl From<kvrpcpb::Mutation> for Mutation {
fn from(mut m: kvrpcpb::Mutation) -> Mutation {
match m.get_op() {
kvrpcpb::Op::Put => Mutation::Put((Key::from_raw(m.get_key()), m.take_value())),
kvrpcpb::Op::Del => Mutation::Delete(Key::from_raw(m.get_key())),
kvrpcpb::Op::Lock => Mutation::Lock(Key::from_raw(m.get_key())),
kvrpcpb::Op::Insert => Mutation::Insert((Key::from_raw(m.get_key()), m.take_value())),
kvrpcpb::Op::CheckNotExists => Mutation::CheckNotExists(Key::from_raw(m.get_key())),
_ => panic!("mismatch Op in prewrite mutations"),
}
}
}
/// `OldValue` is used by cdc to read the previous value associated with some key during the prewrite process
#[derive(Debug, Clone, PartialEq)]
pub enum OldValue {
/// A real `OldValue`
Value { value: Value },
/// A timestamp of an old value in case a value is not inlined in Write
ValueTimeStamp { start_ts: TimeStamp },
/// `None` means we didn't find a previous value
None,
/// `Unspecified` means one of the following:
/// - The user doesn't care about the previous value
/// - We aren't sure whether there is a previous value
Unspecified,
}
impl Default for OldValue {
fn default() -> Self {
OldValue::Unspecified
}
}
impl OldValue {
pub fn valid(&self) -> bool {
!matches!(self, OldValue::Unspecified)
}
pub fn size(&self) -> usize {
let value_size = match self {
OldValue::Value { value } => value.len(),
_ => 0,
};
value_size + std::mem::size_of::<OldValue>()
}
}
// Returned by MvccTxn when extra_op is set to kvrpcpb::ExtraOp::ReadOldValue.
// key with current ts -> (short value of the prev txn, start ts of the prev txn).
// The value of the map will be None when the mutation is `Insert`.
// MutationType is the type of mutation of the current write.
pub type OldValues = HashMap<Key, (OldValue, Option<MutationType>)>;
// Extra data fields filled by kvrpcpb::ExtraOp.
#[derive(Default, Debug, Clone)]
pub struct TxnExtra {
pub old_values: OldValues,
// Marks that this transaction is a 1PC transaction. RaftKv should set this flag
// in the raft command request.
pub one_pc: bool,
}
impl TxnExtra {
pub fn is_empty(&self) -> bool {
self.old_values.is_empty()
}
}
pub trait TxnExtraScheduler: Send + Sync {
fn schedule(&self, txn_extra: TxnExtra);
}
bitflags! {
/// Additional flags for a write batch.
/// They should be set in the `flags` field in `RaftRequestHeader`.
pub struct WriteBatchFlags: u64 {
/// Indicates this request is from a 1PC transaction.
/// It helps CDC recognize 1PC transactions and handle them correctly.
const ONE_PC = 0b00000001;
/// Indicates this request is from a stale read-only transaction.
const STALE_READ = 0b00000010;
}
}
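// Example (a sketch): bitflags combine bitwise and can be queried, e.g.
//   let flags = WriteBatchFlags::ONE_PC | WriteBatchFlags::STALE_READ;
//   assert!(flags.contains(WriteBatchFlags::ONE_PC));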
impl WriteBatchFlags {
/// Converts from the underlying bit representation.
/// Panics if it contains bits that do not correspond to a flag.
pub fn from_bits_check(bits: u64) -> WriteBatchFlags {
match WriteBatchFlags::from_bits(bits) {
None => panic!("unrecognized flags: {:b}", bits),
// zero or more flags
Some(f) => f,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_flags() {
assert!(WriteBatchFlags::from_bits_check(0).is_empty());
assert_eq!(
WriteBatchFlags::from_bits_check(WriteBatchFlags::ONE_PC.bits()),
WriteBatchFlags::ONE_PC
);
assert_eq!(
WriteBatchFlags::from_bits_check(WriteBatchFlags::STALE_READ.bits()),
WriteBatchFlags::STALE_READ
);
}
#[test]
fn test_flags_panic() {
for _ in 0..100 {
assert!(
panic_hook::recover_safe(|| {
// r must be an invalid flags if it is not zero
let r = rand::random::<u64>() & !WriteBatchFlags::all().bits();
WriteBatchFlags::from_bits_check(r);
if r == 0 {
panic!("panic for zero");
}
})
.is_err()
);
}
}
#[test]
fn test_is_user_key_eq() {
// make a short name to keep format for the test.
fn eq(a: &[u8], b: &[u8]) -> bool {
Key::is_user_key_eq(a, b)
}
assert_eq!(false, eq(b"", b""));
assert_eq!(false, eq(b"12345", b""));
assert_eq!(true, eq(b"12345678", b""));
assert_eq!(true, eq(b"x12345678", b"x"));
assert_eq!(false, eq(b"x12345", b"x"));
// user key len == 3
assert_eq!(true, eq(b"xyz12345678", b"xyz"));
assert_eq!(true, eq(b"xyz________", b"xyz"));
assert_eq!(false, eq(b"xyy12345678", b"xyz"));
assert_eq!(false, eq(b"yyz12345678", b"xyz"));
assert_eq!(false, eq(b"xyz12345678", b"xy"));
assert_eq!(false, eq(b"xyy12345678", b"xy"));
assert_eq!(false, eq(b"yyz12345678", b"xy"));
// user key len == 7
assert_eq!(true, eq(b"abcdefg12345678", b"abcdefg"));
assert_eq!(true, eq(b"abcdefgzzzzzzzz", b"abcdefg"));
assert_eq!(false, eq(b"abcdefg12345678", b"abcdef"));
assert_eq!(false, eq(b"abcdefg12345678", b"bcdefg"));
assert_eq!(false, eq(b"abcdefv12345678", b"abcdefg"));
assert_eq!(false, eq(b"vbcdefg12345678", b"abcdefg"));
assert_eq!(false, eq(b"abccefg12345678", b"abcdefg"));
// user key len == 8
assert_eq!(true, eq(b"abcdefgh12345678", b"abcdefgh"));
assert_eq!(true, eq(b"abcdefghyyyyyyyy", b"abcdefgh"));
assert_eq!(false, eq(b"abcdefgh12345678", b"abcdefg"));
assert_eq!(false, eq(b"abcdefgh12345678", b"bcdefgh"));
assert_eq!(false, eq(b"abcdefgz12345678", b"abcdefgh"));
assert_eq!(false, eq(b"zbcdefgh12345678", b"abcdefgh"));
assert_eq!(false, eq(b"abcddfgh12345678", b"abcdefgh"));
// user key len == 9
assert_eq!(true, eq(b"abcdefghi12345678", b"abcdefghi"));
assert_eq!(true, eq(b"abcdefghixxxxxxxx", b"abcdefghi"));
assert_eq!(false, eq(b"abcdefghi12345678", b"abcdefgh"));
assert_eq!(false, eq(b"abcdefghi12345678", b"bcdefghi"));
assert_eq!(false, eq(b"abcdefghy12345678", b"abcdefghi"));
assert_eq!(false, eq(b"ybcdefghi12345678", b"abcdefghi"));
assert_eq!(false, eq(b"abcddfghi12345678", b"abcdefghi"));
// user key len == 11
assert_eq!(true, eq(b"abcdefghijk87654321", b"abcdefghijk"));
assert_eq!(true, eq(b"abcdefghijkabcdefgh", b"abcdefghijk"));
assert_eq!(false, eq(b"abcdefghijk87654321", b"abcdefghij"));
assert_eq!(false, eq(b"abcdefghijk87654321", b"bcdefghijk"));
assert_eq!(false, eq(b"abcdefghijx87654321", b"abcdefghijk"));
assert_eq!(false, eq(b"xbcdefghijk87654321", b"abcdefghijk"));
assert_eq!(false, eq(b"abxdefghijk87654321", b"abcdefghijk"));
assert_eq!(false, eq(b"axcdefghijk87654321", b"abcdefghijk"));
assert_eq!(false, eq(b"abcdeffhijk87654321", b"abcdefghijk"));
}
#[test]
fn test_is_encoded_from() {
for raw_len in 0..=24 {
let raw: Vec<u8> = (0..raw_len).collect();
let encoded = Key::from_raw(&raw);
assert!(encoded.is_encoded_from(&raw));
let encoded_len = encoded.as_encoded().len();
// Should return false if we modify one byte in raw
for i in 0..raw.len() {
let mut invalid_raw = raw.clone();
invalid_raw[i] = raw[i].wrapping_add(1);
assert!(!encoded.is_encoded_from(&invalid_raw));
}
// Should return false if we modify one byte in encoded
for i in 0..encoded_len {
let mut invalid_encoded = encoded.clone();
invalid_encoded.0[i] = encoded.0[i].wrapping_add(1);
assert!(!invalid_encoded.is_encoded_from(&raw));
}
// Should return false if encoded length is not a multiple of 9
let mut invalid_encoded = encoded.clone();
invalid_encoded.0.pop();
assert!(!invalid_encoded.is_encoded_from(&raw));
// Should return false if encoded has less or more chunks
let shorter_encoded = Key::from_encoded_slice(&encoded.0[..encoded_len - 9]);
assert!(!shorter_encoded.is_encoded_from(&raw));
let mut longer_encoded = encoded.as_encoded().clone();
longer_encoded.extend(&[0, 0, 0, 0, 0, 0, 0, 0, 0xFF]);
let longer_encoded = Key::from_encoded(longer_encoded);
assert!(!longer_encoded.is_encoded_from(&raw));
// Should return false if raw is longer or shorter
if !raw.is_empty() {
let shorter_raw = &raw[..raw.len() - 1];
assert!(!encoded.is_encoded_from(shorter_raw));
}
let mut longer_raw = raw.to_vec();
longer_raw.push(0);
assert!(!encoded.is_encoded_from(&longer_raw));
}
}
#[test]
fn test_old_value_valid() {
let cases = vec![
(OldValue::Unspecified, false),
(OldValue::None, true),
(OldValue::Value { value: vec![] }, true),
(OldValue::ValueTimeStamp { start_ts: 0.into() }, true),
];
for (old_value, v) in cases {
assert_eq!(old_value.valid(), v);
}
}
}<|fim▁end|>
|
pub fn from_raw_maybe_unbounded(key: &[u8]) -> Option<Key> {
if key.is_empty() {
None
|
<|file_name|>AstVisitor.java<|end_file_name|><|fim▁begin|>package yokohama.unit.ast;
public abstract class AstVisitor<T> {
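// Editorial note: concrete node kinds are abstract visit methods, while
// union-like nodes (Definition, Predicate, Expr, ...) get default
// implementations that dispatch via accept() to the matching concrete method.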
public abstract T visitGroup(Group group);
public abstract T visitAbbreviation(Abbreviation abbreviation);
public T visitDefinition(Definition definition) {
return definition.accept(
this::visitTest,
this::visitFourPhaseTest,
this::visitTable,
this::visitCodeBlock,
this::visitHeading);
}
public abstract T visitTest(Test test);
public abstract T visitAssertion(Assertion assertion);
public abstract T visitClause(Clause clause);
public abstract T visitProposition(Proposition proposition);
public T visitPredicate(Predicate predicate) {
return predicate.accept(
this::visitIsPredicate,
this::visitIsNotPredicate,
this::visitThrowsPredicate,
this::visitMatchesPredicate,
this::visitDoesNotMatchPredicate);
};
public abstract T visitIsPredicate(IsPredicate isPredicate);
public abstract T visitIsNotPredicate(IsNotPredicate isNotPredicate);
public abstract T visitThrowsPredicate(ThrowsPredicate throwsPredicate);
public abstract T visitMatchesPredicate(MatchesPredicate matchesPredicate);
public abstract T visitDoesNotMatchPredicate(DoesNotMatchPredicate doesNotMatchPredicate);
public T visitMatcher(Matcher matcher) {
return matcher.accept(
this::visitEqualToMatcher,
this::visitInstanceOfMatcher,
this::visitInstanceSuchThatMatcher,
this::visitNullValueMatcher);
}
public abstract T visitEqualToMatcher(EqualToMatcher equalTo);
public abstract T visitInstanceOfMatcher(InstanceOfMatcher instanceOf);
public abstract T visitInstanceSuchThatMatcher(InstanceSuchThatMatcher instanceSuchThat);
public abstract T visitNullValueMatcher(NullValueMatcher nullValue);
public T visitPattern(Pattern pattern) {
return pattern.accept((java.util.function.Function<RegExpPattern, T>)(this::visitRegExpPattern));
}
public abstract T visitRegExpPattern(RegExpPattern regExpPattern);
public T visitExpr(Expr expr) {
return expr.accept(
this::visitQuotedExpr,
this::visitStubExpr,
this::visitInvocationExpr,
this::visitIntegerExpr,
this::visitFloatingPointExpr,
this::visitBooleanExpr,
this::visitCharExpr,
this::visitStringExpr,
this::visitAnchorExpr,
this::visitAsExpr,
this::visitResourceExpr,
this::visitTempFileExpr);
}
public abstract T visitQuotedExpr(QuotedExpr quotedExpr);
public abstract T visitStubExpr(StubExpr stubExpr);
public abstract T visitInvocationExpr(InvocationExpr invocationExpr);
public abstract T visitIntegerExpr(IntegerExpr integerExpr);
public abstract T visitFloatingPointExpr(FloatingPointExpr floatingPointExpr);
public abstract T visitBooleanExpr(BooleanExpr booleanExpr);
public abstract T visitCharExpr(CharExpr charExpr);
public abstract T visitStringExpr(StringExpr stringExpr);
public abstract T visitAnchorExpr(AnchorExpr anchorExpr);
public abstract T visitAsExpr(AsExpr asExpr);
public abstract T visitResourceExpr(ResourceExpr resourceExpr);
public abstract T visitTempFileExpr(TempFileExpr tempFileExpr);
public T visitStubBehavior(StubBehavior behavior) {
return behavior.accept(<|fim▁hole|> this::visitStubReturns,
this::visitStubThrows);
}
public abstract T visitStubReturns(StubReturns stubReturns);
public abstract T visitStubThrows(StubThrows stubThrows);
public abstract T visitMethodPattern(MethodPattern methodPattern);
public abstract T visitType(Type type);
public T visitNonArrayType(NonArrayType nonArrayType) {
return nonArrayType.accept(
this::visitPrimitiveType,
this::visitClassType);
}
public abstract T visitPrimitiveType(PrimitiveType primitiveType);
public abstract T visitClassType(ClassType classType);
public T visitFixture(Fixture fixture) {
return fixture.accept(
this::visitNone,
this::visitTableRef,
this::visitBindings);
}
public abstract T visitNone();
public abstract T visitTableRef(TableRef tableRef);
public abstract T visitBindings(Bindings bindings);
public T visitBinding(Binding binding) {
return binding.accept(
this::visitSingleBinding,
this::visitChoiceBinding,
this::visitTableBinding);
}
public abstract T visitSingleBinding(SingleBinding singleBinding);
public abstract T visitChoiceBinding(ChoiceBinding choiceBinding);
public abstract T visitTableBinding(TableBinding tableBinding);
public abstract T visitFourPhaseTest(FourPhaseTest fourPhaseTest);
public abstract T visitPhase(Phase phase);
public abstract T visitVerifyPhase(VerifyPhase verifyPhase);
public abstract T visitLetStatement(LetStatement letStatement);
public T visitStatement(Statement statement) {
return statement.accept(
this::visitExecution,
this::visitInvoke);
}
public abstract T visitExecution(Execution execution);
public abstract T visitInvoke(Invoke invoke);
public abstract T visitTable(Table table);
public abstract T visitRow(Row row);
public T visitCell(Cell cell) {
return cell.accept(
this::visitExprCell,
this::visitPredCell);
}
public abstract T visitExprCell(ExprCell exprCell);
public abstract T visitPredCell(PredCell predCell);
public abstract T visitCodeBlock(CodeBlock codeBlock);
public abstract T visitHeading(Heading heading);
public abstract T visitIdent(Ident ident);
}<|fim▁end|>
| |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import stripe <|fim▁hole|>from django.http import HttpResponse
from django.shortcuts import render
from django.views.generic import TemplateView, DetailView, ListView
from carton.cart import Cart
from .models import Sale, SaleProduct, SaleError
from products.models import Product
class AddView(TemplateView):
def get(self, request, *args, **kwargs):
cart = Cart(request.session)
product = Product.objects.get(id=kwargs["pk"])
cart.add(product, price=product.price)
return HttpResponse("Added")
class RemoveSingleView(TemplateView):
def get(self, request, *args, **kwargs):
cart = Cart(request.session)
product = Product.objects.get(id=kwargs["pk"])
cart.remove_single(product)
return HttpResponse("Removed Single " + str(product))
class RemoveView(TemplateView):
def get(self, request, *args, **kwargs):
cart = Cart(request.session)
product = Product.objects.get(id=kwargs["pk"])
cart.remove(product)
return HttpResponse("Removed")
class CartTemplateView(TemplateView):
template_name = "shoppingcontent/cart.html"
def get_context_data(self, **kwargs):
# add the Stripe config and cart total needed by the checkout template
context = super(CartTemplateView, self).get_context_data(**kwargs)
context['STRIPE_PUBLIC_KEY'] = settings.STRIPE_PUBLIC_KEY
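# Stripe expects amounts in cents, hence the * 100 conversion below.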
context['cart_stripe_total'] = int(Cart(self.request.session).total * 100)
return context
class SaleDetailView(DetailView):
template_name = "shoppingcontent/sale.html"
model = Sale
class SaleListView(ListView):
template_name = "shoppingcontent/sales_list.html"
model = Sale
class SaleErrorListView(ListView):
template_name = "shoppingcontent/sales_error_list.html"
model = SaleError
def charge(request):
''' This function is split into 4 separate parts, each wrapped in its own try/except block.
The first part is where we charge the card through Stripe. Stripe will either
accept the payment or send back various error messages depending on
what went wrong.
The second thing we do is very simple: empty the user's cart.
The third thing is that we're going to create a Sale object for our own records.
This is where we'll see what items have been sold, and also manage orders.
The last thing is creating the sale items, which is really part of creating the
sale, but it is separated into a different try block so that the sale record is
still created even if something goes wrong when creating the items associated
with the sale.
Each try block will write to the SaleError model if something goes wrong.
'''
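# For reference (inferred from the code below): the SaleError "location"
# codes are "C" = card charge, "A" = cart clearing, "S" = sale creation,
# "I" = sale item creation.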
if request.method == "POST":
stripe.api_key = settings.STRIPE_SECRET_KEY
email = request.POST['stripeEmail']
stripe_amount = request.POST['stripeAmount']
sale_products_string = request.POST['products']
# Create the charge on Stripe's servers - this will charge the user's card
try:
charge = stripe.Charge.create(
amount=stripe_amount, # amount in cents, again
currency="usd",
card=request.POST['stripeToken'],
description=email
)
except stripe.CardError as e:
# The card has been declined
text = "There was an error processing your card, and we were not able to charge. "
return render(request, "shoppingcontent/error.html",
{'text': text, 'error': e})
except stripe.InvalidRequestError as e:
# Attempt to use the token multiple times.
text = "Your order has already been processed, and it can only be processed once. " \
"If you refreshed this page, that is why you're seeing this error."
return render(request, "shoppingcontent/error.html",
{'text': text})
except stripe.APIConnectionError as e:
# Connection error with Stripe
text = "There was a connection error with our payment processor. Please try again in a few minutes, your cart should still be intact."
return render(request, "shoppingcontent/error.html",
{'text': text, 'error': e})
finally:
# write to the error log if needed
try:
SaleError.objects.create(message=e, location="C")
except:
pass
try:
# clear the cart. If there is an error just keep going.
cart = Cart(request.session).clear()
except Exception as e:
SaleError.objects.create(message=e, location="A")
pass
try:
# create the sale object
if "test" in stripe.api_key:
live = False
elif "live" in stripe.api_key:
live = True
sale = Sale.objects.create(live=live, email=email, total="%.2f" % (int(stripe_amount) / 100))
except Exception as e:
SaleError.objects.create(message=e, location="S", problem="email -" + email + ", amount - " + amount)
text = "Sorry, there was an error processing your order. You have been billed, and " + \
"we have general information about your order, but will be contacting you to get the details."
return render(request, "shoppingcontent/error.html",
{'text': text})
try:
# create the product sales objects
obj = json.loads(sale_products_string)
for i in obj:
id = i.get('id')
quantity = i.get('quantity')
product = Product.objects.get(id=id)
SaleProduct.objects.create(
product=product,
quantity=quantity,
price=product.price,
sale=sale
)
return render(request, "shoppingcontent/success.html",
{'sale': sale, 'email': email})
except Exception as e:
SaleError.objects.create(message=e, location="I", problem=sale_products_string, sale=sale)
text = "There was a problem processing your purchase. The charge went through successfully, " + \
"and we have recorded the sale. However, the product details we will need to contact you about. " + \
"Sorry for the inconvenience."
return render(request, "shoppingcontent/error.html",
{'text': text})
else:
text = "Error: This page is only meant to be hit by the server after a payment."
return render(request, "shoppingcontent/error.html",
{'text': text})<|fim▁end|>
|
import json
from django.conf import settings
|
<|file_name|>socialevents.client.routes.js<|end_file_name|><|fim▁begin|>'use strict';
// Setting up route
angular.module('socialevents').config(['$stateProvider',
function($stateProvider) {
// SocialEvents state routing
$stateProvider.
state('listSocialEvents', {
url: '/socialevents',
templateUrl: 'modules/socialevents/views/list-socialevents.client.view.html'<|fim▁hole|> templateUrl: 'modules/socialevents/views/create-socialevent.client.view.html'
}).
state('viewSocialEvent', {
url: '/socialevents/:socialeventId',
templateUrl: 'modules/socialevents/views/view-socialevent.client.view.html'
}).
state('editSocialEvent', {
url: '/socialevents/:socialeventId/edit',
templateUrl: 'modules/socialevents/views/edit-socialevent.client.view.html'
});
}
]);<|fim▁end|>
|
}).
state('createSocialEvent', {
url: '/socialevents/create',
|
<|file_name|>ServiceContext.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2012-2017 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.api.core.rest;
import javax.ws.rs.core.UriBuilder;
/**
* Helps to deliver context of RESTful request to components.
*
* @author <a href="mailto:[email protected]">Andrey Parfonov</a>
*/
public interface ServiceContext {
/**
* Get UriBuilder which already contains base URI of RESTful application and URL pattern of RESTful service that produces this
* instance.
*/<|fim▁hole|> UriBuilder getBaseUriBuilder();
}<|fim▁end|>
|
UriBuilder getServiceUriBuilder();
/** Get UriBuilder which already contains base URI of RESTful application. */
|
<|file_name|>A_out.js<|end_file_name|><|fim▁begin|>public class X {
void bar() {
for (final int i; 0 < (i = 1); i = i + 1) {
}
}
}
<|fim▁hole|>//public class X {
// public static void main(String[] args) {
// final int i;
// for ( ; ; i = 1)
// break;
// i = 1;
// }
//}
//public class X {
// final int blank;
// {
// while ((null == null || true)) {
// blank = 1;
// break;
// }
// }
// X(){
// }
// public static void main(String[] argv){
// System.out.println("SUCCESS");
// }
//}
// should fail
//class X {
//
// public static void main(String[] args) {
// final boolean b;
// do
// break;
// while ((b = true) && false);
// b = true;
// }
//}
// ?
//class X {
//
// public static void main(String[] args) {
// final boolean b;
// do
// break;
// while ((b = true) && false);
// b = true;
// }
//}<|fim▁end|>
| |
<|file_name|>client-common.ts<|end_file_name|><|fim▁begin|>/*! *****************************************************************************
Copyright (c) 2016 Tangra Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
***************************************************************************** */
import * as definition from "../client";
import { MobileServiceTable } from "../table";
import { MobileServiceUser, AuthenticationProvider } from "../user";
import { MobileServicePush } from "../push";
export abstract class MobileServiceClient implements definition.MobileServiceClient {
protected _msClient;
protected _url;
public user: MobileServiceUser;
public push: MobileServicePush;<|fim▁hole|>
constructor(url: string) {
this._url = url;
}
abstract getTable(tableName: string): MobileServiceTable;
abstract login(provider: AuthenticationProvider, urlScheme?: string): Promise<MobileServiceUser>;
abstract loginFromCache(): boolean;
}<|fim▁end|>
|
public static configureClientAuthAppDelegate(): void {
// nothing to do by default
}
|
<|file_name|>privacy_reexport.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub extern crate core;
pub use foo as bar;
pub mod foo {
pub fn frob() {}
}<|fim▁end|>
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(path_ext)]
extern crate delivery;
extern crate rustc_serialize;
extern crate tempdir;
#[macro_use] extern crate log;
// Thanks, Cargo.
macro_rules! test {
($name:ident $expr:expr) => (
#[test]
fn $name() {
setup();
$expr;
}
)
}
macro_rules! panic_on_error {
($expr:expr) => (
match $expr {<|fim▁hole|> }
)
}
mod support;
mod config;
mod cli;
mod utils;<|fim▁end|>
|
Ok(val) => val,
Err(e) => {
panic!("{:?}", e)
}
|
<|file_name|>astropyautosummary.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This sphinx extension builds off of `sphinx.ext.autosummary` to
clean up some issues it presents in the Astropy docs.
The main issue this fixes is the summary tables getting cut off before the
end of the sentence in some cases.
Note: Sphinx 1.2 appears to have fixed the main issues in the stock
autosummary extension that are addressed by this extension. So use of this
extension with newer versions of Sphinx is deprecated.
"""
import re
from distutils.version import LooseVersion
import sphinx
from sphinx.ext.autosummary import Autosummary
from ...utils import deprecated
# used in AstropyAutosummary.get_items
_itemsummrex = re.compile(r'^([A-Z].*?\.(?:\s|$))')
@deprecated('1.0', message='AstropyAutosummary is only needed when used '
'with Sphinx versions less than 1.2')
class AstropyAutosummary(Autosummary):
def get_items(self, names):
"""Try to import the given names, and return a list of
``[(name, signature, summary_string, real_name), ...]``.
"""
from sphinx.ext.autosummary import (get_import_prefixes_from_env,
import_by_name, get_documenter, mangle_signature)
<|fim▁hole|> items = []
max_item_chars = 50
for name in names:
display_name = name
if name.startswith('~'):
name = name[1:]
display_name = name.split('.')[-1]
try:
import_by_name_values = import_by_name(name, prefixes=prefixes)
except ImportError:
self.warn('[astropyautosummary] failed to import %s' % name)
items.append((name, '', '', name))
continue
# to accommodate Sphinx v1.2.2 and v1.2.3
if len(import_by_name_values) == 3:
real_name, obj, parent = import_by_name_values
elif len(import_by_name_values) == 4:
real_name, obj, parent, module_name = import_by_name_values
# NB. using real_name here is important, since Documenters
# handle module prefixes slightly differently
documenter = get_documenter(obj, parent)(self, real_name)
if not documenter.parse_name():
self.warn('[astropyautosummary] failed to parse name %s' % real_name)
items.append((display_name, '', '', real_name))
continue
if not documenter.import_object():
self.warn('[astropyautosummary] failed to import object %s' % real_name)
items.append((display_name, '', '', real_name))
continue
# -- Grab the signature
sig = documenter.format_signature()
if not sig:
sig = ''
else:
max_chars = max(10, max_item_chars - len(display_name))
sig = mangle_signature(sig, max_chars=max_chars)
sig = sig.replace('*', r'\*')
# -- Grab the summary
doc = list(documenter.process_doc(documenter.get_doc()))
while doc and not doc[0].strip():
doc.pop(0)
m = _itemsummrex.search(" ".join(doc).strip())
if m:
summary = m.group(1).strip()
elif doc:
summary = doc[0].strip()
else:
summary = ''
items.append((display_name, sig, summary, real_name))
return items
def setup(app):
# need autosummary, of course
app.setup_extension('sphinx.ext.autosummary')
# Don't make the replacement if Sphinx is at least 1.2
if LooseVersion(sphinx.__version__) < LooseVersion('1.2.0'):
# this replaces the default autosummary with the astropy one
app.add_directive('autosummary', AstropyAutosummary)<|fim▁end|>
|
env = self.state.document.settings.env
prefixes = get_import_prefixes_from_env(env)
|
<|file_name|>buffer.js<|end_file_name|><|fim▁begin|>var buf = new Buffer(39);
var len = buf.write("www.baidu.com");
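// "www.baidu.com" is 13 ASCII bytes, so len is 13 -- which is why the
// fill loop (in the elided section) starts at index 13.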
<|fim▁hole|>}
var json = buf.toJSON();
console.log(json);<|fim▁end|>
|
console.log("写入字节数为 : "+len);
for(var i=13;i < 39;i++){
buf[i] = i+84;
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from "./graphic";<|fim▁end|>
| |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>"""
Test models, managers, and validators.
"""
from __future__ import absolute_import, division, unicode_literals
import six
from completion import models, waffle
from completion.test_utils import CompletionWaffleTestMixin, submit_completions_for_testing
from django.core.exceptions import ValidationError
from django.test import TestCase
from opaque_keys.edx.keys import CourseKey, UsageKey
from six.moves import range, zip
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import CourseEnrollmentFactory, UserFactory
SELECT = 1
UPDATE = 1
SAVEPOINT = 1
OTHER = 1
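# Query-count weights: each kind of statement (SELECT, UPDATE, SAVEPOINT,
# or other) counts as one query inside the assertNumQueries checks below.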
@skip_unless_lms
class PercentValidatorTestCase(TestCase):
"""
Test that validate_percent only allows floats (and ints) between 0.0 and 1.0.
"""
def test_valid_percents(self):
for value in [1.0, 0.0, 1, 0, 0.5, 0.333081348071397813987230871]:
models.validate_percent(value)
def test_invalid_percent(self):
for value in [-0.00000000001, 1.0000000001, 47.1, 1000, None, float('inf'), float('nan')]:
self.assertRaises(ValidationError, models.validate_percent, value)
class CompletionSetUpMixin(CompletionWaffleTestMixin):
"""
Mixin that provides a helper to create a test BlockCompletion object.
"""
def set_up_completion(self):
self.user = UserFactory()
self.block_key = UsageKey.from_string(u'block-v1:edx+test+run+type@video+block@doggos')
self.completion = models.BlockCompletion.objects.create(
user=self.user,
course_key=self.block_key.course_key,
block_type=self.block_key.block_type,
block_key=self.block_key,
completion=0.5,
)
@skip_unless_lms
class SubmitCompletionTestCase(CompletionSetUpMixin, TestCase):
"""
Test that BlockCompletion.objects.submit_completion has the desired
semantics.
"""
def setUp(self):
super(SubmitCompletionTestCase, self).setUp()
self.override_waffle_switch(True)
self.set_up_completion()
def test_changed_value(self):
with self.assertNumQueries(SELECT + UPDATE + 2 * SAVEPOINT + 2 * OTHER):
# OTHER = user exists, completion exists
completion, isnew = models.BlockCompletion.objects.submit_completion(
user=self.user,
course_key=self.block_key.course_key,
block_key=self.block_key,
completion=0.9,
)
completion.refresh_from_db()
self.assertEqual(completion.completion, 0.9)
self.assertFalse(isnew)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
def test_unchanged_value(self):
with self.assertNumQueries(SELECT + 2 * SAVEPOINT):
completion, isnew = models.BlockCompletion.objects.submit_completion(
user=self.user,
course_key=self.block_key.course_key,
block_key=self.block_key,
completion=0.5,
)
completion.refresh_from_db()
self.assertEqual(completion.completion, 0.5)
self.assertFalse(isnew)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
def test_new_user(self):
newuser = UserFactory()<|fim▁hole|> user=newuser,
course_key=self.block_key.course_key,
block_key=self.block_key,
completion=0.0,
)
self.assertTrue(isnew)
self.assertEqual(models.BlockCompletion.objects.count(), 2)
def test_new_block(self):
newblock = UsageKey.from_string(u'block-v1:edx+test+run+type@video+block@puppers')
with self.assertNumQueries(SELECT + UPDATE + 4 * SAVEPOINT):
_, isnew = models.BlockCompletion.objects.submit_completion(
user=self.user,
course_key=newblock.course_key,
block_key=newblock,
completion=1.0,
)
self.assertTrue(isnew)
self.assertEqual(models.BlockCompletion.objects.count(), 2)
def test_invalid_completion(self):
with self.assertRaises(ValidationError):
models.BlockCompletion.objects.submit_completion(
user=self.user,
course_key=self.block_key.course_key,
block_key=self.block_key,
completion=1.2
)
completion = models.BlockCompletion.objects.get(user=self.user, block_key=self.block_key)
self.assertEqual(completion.completion, 0.5)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
@skip_unless_lms
class CompletionDisabledTestCase(CompletionSetUpMixin, TestCase):
"""
Tests that the completion API is not called when the feature is disabled.
"""
def setUp(self):
super(CompletionDisabledTestCase, self).setUp()
# insert one completion record...
self.set_up_completion()
# ...then disable the feature.
self.override_waffle_switch(False)
def test_cannot_call_submit_completion(self):
self.assertEqual(models.BlockCompletion.objects.count(), 1)
with self.assertRaises(RuntimeError):
models.BlockCompletion.objects.submit_completion(
user=self.user,
course_key=self.block_key.course_key,
block_key=self.block_key,
completion=0.9,
)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
@skip_unless_lms
class SubmitBatchCompletionTestCase(CompletionWaffleTestMixin, TestCase):
"""
Test that BlockCompletion.objects.submit_batch_completion has the desired
semantics.
"""
def setUp(self):
super(SubmitBatchCompletionTestCase, self).setUp()
self.override_waffle_switch(True)
self.block_key = UsageKey.from_string('block-v1:edx+test+run+type@video+block@doggos')
self.course_key_obj = CourseKey.from_string('course-v1:edx+test+run')
self.user = UserFactory()
CourseEnrollmentFactory.create(user=self.user, course_id=six.text_type(self.course_key_obj))
def test_submit_batch_completion(self):
blocks = [(self.block_key, 1.0)]
models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
self.assertEqual(models.BlockCompletion.objects.last().completion, 1.0)
def test_submit_batch_completion_without_waffle(self):
with waffle.waffle().override(waffle.ENABLE_COMPLETION_TRACKING, False):
with self.assertRaises(RuntimeError):
blocks = [(self.block_key, 1.0)]
models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
def test_submit_batch_completion_with_same_block_new_completion_value(self):
blocks = [(self.block_key, 0.0)]
self.assertEqual(models.BlockCompletion.objects.count(), 0)
models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
model = models.BlockCompletion.objects.first()
self.assertEqual(model.completion, 0.0)
blocks = [
(UsageKey.from_string('block-v1:edx+test+run+type@video+block@doggos'), 1.0),
]
models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
self.assertEqual(models.BlockCompletion.objects.count(), 1)
model = models.BlockCompletion.objects.first()
self.assertEqual(model.completion, 1.0)
@skip_unless_lms
class BatchCompletionMethodTests(CompletionWaffleTestMixin, TestCase):
"""
Tests for the classmethods that retrieve course/block completion data.
"""
def setUp(self):
super(BatchCompletionMethodTests, self).setUp()
self.override_waffle_switch(True)
self.user = UserFactory.create()
self.other_user = UserFactory.create()
self.course_key = CourseKey.from_string("edX/MOOC101/2049_T2")
self.other_course_key = CourseKey.from_string("course-v1:ReedX+Hum110+1904")
self.block_keys = [UsageKey.from_string("i4x://edX/MOOC101/video/{}".format(number)) for number in range(5)]
submit_completions_for_testing(self.user, self.course_key, self.block_keys[:3])
submit_completions_for_testing(self.other_user, self.course_key, self.block_keys[2:])
submit_completions_for_testing(self.user, self.other_course_key, [self.block_keys[4]])
def test_get_course_completions_missing_runs(self):
actual_completions = models.BlockCompletion.get_course_completions(self.user, self.course_key)
expected_block_keys = [key.replace(course_key=self.course_key) for key in self.block_keys[:3]]
expected_completions = dict(list(zip(expected_block_keys, [1.0, 0.8, 0.6])))
self.assertEqual(expected_completions, actual_completions)
def test_get_course_completions_empty_result_set(self):
self.assertEqual(
models.BlockCompletion.get_course_completions(self.other_user, self.other_course_key),
{}
)
def test_get_latest_block_completed(self):
self.assertEqual(
models.BlockCompletion.get_latest_block_completed(self.user, self.course_key).block_key,
self.block_keys[2]
)
def test_get_latest_completed_none_exist(self):
self.assertIsNone(models.BlockCompletion.get_latest_block_completed(self.other_user, self.other_course_key))<|fim▁end|>
|
with self.assertNumQueries(SELECT + UPDATE + 4 * SAVEPOINT):
_, isnew = models.BlockCompletion.objects.submit_completion(
|
<|file_name|>server.py<|end_file_name|><|fim▁begin|>import random, copy
<|fim▁hole|>def grade(data):
raise Exception('deliberately broken grading function')<|fim▁end|>
|
def generate(data):
data['correct_answers']['x'] = 3
|
<|file_name|>pg_psql.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from django.core.management.base import BaseCommand, CommandError
import subprocess
from pgrunner import bin_path
from pgrunner.commands import get_port
class Command(BaseCommand):
help = 'Run psql with correct database'
def run_from_argv(self, argv):
port = get_port()
cmd = [bin_path('psql'), '-p', str(port), '-h', '127.0.0.1', 'django']
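# e.g. ['<pgrunner bin>/psql', '-p', '5432', '-h', '127.0.0.1', 'django']
# (the port shown is illustrative; the real value comes from get_port())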
cmd.extend(argv[2:])
print(' '.join(cmd))
subprocess.call(cmd)<|fim▁end|>
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
<|file_name|>toString_test.js<|end_file_name|><|fim▁begin|>import { run } from '@ember/runloop';
import { guidFor, setName } from 'ember-utils';
import { context } from 'ember-environment';
import EmberObject from '../../../lib/system/object';
import Namespace from '../../../lib/system/namespace';
import { moduleFor, AbstractTestCase } from 'internal-test-helpers';
let originalLookup = context.lookup;
let lookup;
moduleFor(
'system/object/toString',
class extends AbstractTestCase {
beforeEach() {
context.lookup = lookup = {};
}
afterEach() {
context.lookup = originalLookup;
}
['@test toString() returns the same value if called twice'](assert) {
let Foo = Namespace.create();
Foo.toString = function() {
return 'Foo';
};
Foo.Bar = EmberObject.extend();
assert.equal(Foo.Bar.toString(), 'Foo.Bar');
assert.equal(Foo.Bar.toString(), 'Foo.Bar');
let obj = Foo.Bar.create();
assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
assert.equal(Foo.Bar.toString(), 'Foo.Bar');
run(Foo, 'destroy');
}
['@test toString on a class returns a useful value when nested in a namespace'](assert) {<|fim▁hole|> let obj;
let Foo = Namespace.create();
Foo.toString = function() {
return 'Foo';
};
Foo.Bar = EmberObject.extend();
assert.equal(Foo.Bar.toString(), 'Foo.Bar');
obj = Foo.Bar.create();
assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
Foo.Baz = Foo.Bar.extend();
assert.equal(Foo.Baz.toString(), 'Foo.Baz');
obj = Foo.Baz.create();
assert.equal(obj.toString(), '<Foo.Baz:' + guidFor(obj) + '>');
obj = Foo.Bar.create();
assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
run(Foo, 'destroy');
}
['@test toString on a namespace finds the namespace in lookup'](assert) {
let Foo = (lookup.Foo = Namespace.create());
assert.equal(Foo.toString(), 'Foo');
run(Foo, 'destroy');
}
['@test toString on a class within a namespace found in lookup'](assert) {
let Foo = (lookup.Foo = Namespace.create());
let obj;
Foo.Bar = EmberObject.extend();
assert.equal(Foo.Bar.toString(), 'Foo.Bar');
obj = Foo.Bar.create();
assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
run(Foo, 'destroy');
}
['@test toString on a namespace falls back to modulePrefix, if defined'](assert) {
let Foo = Namespace.create({ modulePrefix: 'foo' });
assert.equal(Foo.toString(), 'foo');
run(Foo, 'destroy');
}
['@test toString includes toStringExtension if defined'](assert) {
let Foo = EmberObject.extend({
toStringExtension() {
return 'fooey';
},
});
let foo = Foo.create();
let Bar = EmberObject.extend({});
let bar = Bar.create();
// simulate these classes being defined on a Namespace
setName(Foo, 'Foo');
setName(Bar, 'Bar');
assert.equal(
bar.toString(),
'<Bar:' + guidFor(bar) + '>',
'does not include toStringExtension part'
);
assert.equal(
foo.toString(),
'<Foo:' + guidFor(foo) + ':fooey>',
'Includes toStringExtension result'
);
}
}
);<|fim▁end|>
| |
<|file_name|>core_macro_forms.rs<|end_file_name|><|fim▁begin|>use crate::{
ast,
ast::Ast,
ast_walk::{
LazyWalkReses,
WalkRule::{Body, LiteralLike, NotWalked},
},
beta::{Beta, Beta::*, ExportBeta},
core_forms::{strip_ee, strip_ql},
core_type_forms::{less_quoted_ty, more_quoted_ty},
form::{EitherPN::*, Form},
grammar::{
FormPat::{self, *},
SynEnv,
},
name::*,
runtime::{eval::Eval, reify::Reifiable},
ty::SynthTy,
util::assoc::Assoc,
walk_mode::WalkElt,
};
use std::rc::Rc;
// Macros!
//
// Macro expansion happens after typechecking. Macro expansion happens after typechecking.
// You'd think I'd remember that better, what with that being the whole point of Unseemly, but nope.
// Here's how macros work:
// The programmer writes a macro definition, e.g.:
//
// extend_syntax macro
// Expr ::=also
// forall T . '{ (lit if) ,{Expr<Bool> | cond},
// (lit then) ,{Expr<T> | then_e},
// (lit else) ,{Expr<T> | else_e}, }'
// conditional ->
// '[Expr | match ,[cond], {
// +[True]+ => ,[then_e],
// +[False]+ => ,[else_e], } ]'
// in
// if (zero? five) then three else eight
//
// The parser parses the `if` as a macro invocation, but doesn't lose the '{…}'!
// It spits out an `Ast` in which the `extend` binds `conditional` and `if ⋯` references it.
// Under the hood, `conditional` has the type
// `∀ T . [ *[ cond : Expr<Bool> then : Expr<T> else : Expr<T> -> Expr<T> ]* ]
// ... even though it's a macro, not a function. (A kind-checker is needed here!)
//
// Everything is typechecked (including the `.{ ⋯ }.` implementation and the invocation).
// The macro name (`conditional`) is a bit of a hack.
// The syntax extension is typechecked, much like a function definition is.
// (`cond`, `then_e`, and `else_e` are assumed to be their respective types,
// and the macro is shown to produce an `Expr<T>`.)
// So is the invocation, which subtypes away the `T`, checks its arguments, and produces `T`.
//
// Macro expansion evaluates the macro with its arguments, `(zero? five)`, `three`, and `eight`,
// producing a match statement.
//
// Finally, phase-0 evaluation produces a result of `8`!
// It's best to read this file in the voice of the /Pushing Daisies/ narrator.
// This macro is a helper for generating `FormPat`-generating syntactic forms.
macro_rules! syntax_syntax {
// atomic FormPat
(( $($gram:tt)* ) $syntax_name:ident ) => {
Rc::new(Form {
name: n(&stringify!($syntax_name).to_lowercase()),
grammar: Rc::new(form_pat!( $($gram)* )),
type_compare: Both(NotWalked,NotWalked), // Not a type
// Binds nothing
synth_type: Both(NotWalked, cust_rc_box!(|_parts| { Ok(Assoc::new())}) ),
eval: Positive(cust_rc_box!(|_parts| {
Ok($syntax_name.reify())}
)),
quasiquote: Both(LiteralLike, LiteralLike)
})
};
// FormPat with arguments
(( $($gram:tt)* ) $syntax_name:ident ( $($arg:ident => $e:expr),* ) ) => {
Rc::new(Form {
name: n(&stringify!($syntax_name).to_lowercase()),
grammar: Rc::new(form_pat!( $($gram)* )),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Negative(cust_rc_box!(|parts| {
let mut out = Assoc::<Name, Ast>::new();
$(
out = out.set_assoc(&parts.get_res(n(&stringify!($arg)))?);
)*
Ok(out)
})),
eval: Positive(cust_rc_box!(|parts| {
Ok($syntax_name(
$( { let $arg = parts.get_res(n(&stringify!($arg)))?; $e } ),*
).reify())}
)),
quasiquote: Both(LiteralLike, LiteralLike)
})
};
// FormPat with arguments, and just doing `get_res` on everything doesn't work:
(( $($gram:tt)* ) $syntax_name:ident { $type:expr } { $eval:expr }) => {
Rc::new(Form {
name: n(&stringify!($syntax_name).to_lowercase()),
grammar: Rc::new(form_pat!( $($gram)* )),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Negative(cust_rc_box!( $type )), // Produces a typed value
eval: Positive(cust_rc_box!( $eval )),
quasiquote: Both(LiteralLike, LiteralLike)
})
};
}
// Macros have types!
// ...but they're not higher-order (i.e., you can't do anything with a macro other than invoke it).
// This means that we can just generate a type for them at the location of invocation.
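// For example (cf. the `macro_types` test below): a macro taking `a : Expr<Int>`
// and producing an `Expr<Int>` gets the type `[ *[a : Expr<Int>]* -> Expr<Int> ]`;
// any type parameters wrap that arrow type in a `forall_type`.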
fn macro_type(forall_ty_vars: &[Name], arguments: Vec<(Name, Ast)>, output: Ast) -> Ast {
let mut components = vec![];
for (k, v) in arguments.iter() {
// The fields in a struct type are not renamed like normal during freshening,
// so roll back any freshening that happened during evaluation, hence `unhygienic_orig`.
// TODO: this can go wrong if a macro-defining macro collides two term names.
// Fixing this probably requires rethinking how "component_name" works.
// Perhaps not using structs at all might also work.
components.push(mbe!("component_name" => (, ast!(k.unhygienic_orig())),
"component" => (, v.to_ast())));
}
let argument_struct = raw_ast!(Node(
crate::core_forms::find_core_form("Type", "struct"),
crate::util::mbe::EnvMBE::new_from_anon_repeat(components),
ExportBeta::Nothing
));
let mac_fn = u!({Type fn : [(, argument_struct)] (, output.to_ast())});
if forall_ty_vars.is_empty() {
mac_fn
} else {
ast!({"Type" "forall_type" :
"body" => (import [* [forall "param"]] (, mac_fn)),
"param" => (,seq forall_ty_vars.iter().map(|n: &Name| { ast!(*n) }).collect::<Vec<_>>())
})
}
}
fn type_macro_invocation(
parts: &LazyWalkReses<SynthTy>,
expected_return: Ast,
grammar: &FormPat,
) -> Result<Assoc<Name, Ast>, crate::ty::TypeError> {
// Typecheck the subterms, and then quote them:
let mut q_arguments = vec![];
for (binder, depth) in grammar.binders() {
// Things like `token := (/some_stuff/)` are well-typed in all invocations;
// examine things like `token := (,{Expr<T>},)
let term_ty = if let Some(nt) = grammar.find_named_call(binder) {
// For example, `body := (,{Expr<T>},)`
if crate::core_type_forms::nt_is_positive(nt) {
parts.flatten_res_at_depth(
binder,
depth,
&|ty: Ast| more_quoted_ty(&ty, nt),
&|ty_vec: Vec<Ast>| ast!({"Type" "tuple" : "component" => (,seq ty_vec) }),
)?
} else {
parts.flatten_generate_at_depth(
binder,
depth,
&|| crate::ty_compare::Subtype::underspecified(binder),
&|ty_vec: Vec<Ast>| ast!({"Type" "tuple" : "component" => (,seq ty_vec) }),
)
}
} else {
// For example, `token := /(foo)/`.
// HACK: currently this is the only other type possible,
// but if multiple ones become available, figuring out the right one is tricky.
// (and ∀ T. T doesn't work: it's the opposite of what we want!)
ast!({"Type" "Ident" :})
};
q_arguments.push((binder, term_ty));
}
// This is lifted almost verbatim from "Expr" "apply". Maybe they should be unified?
use crate::walk_mode::WalkMode;
let _ = crate::ty_compare::is_subtype(
¯o_type(&[], q_arguments.clone(), expected_return),
&parts.get_res(n("macro_name"))?,
&parts,
)
.map_err(|e| crate::util::err::sp(e, parts.this_ast.clone()))?;
// TODO: `Assoc` should implement `From<Vec<(K,V)>>`
let mut res = Assoc::new();
for (k, v) in q_arguments {
res = res.set(k, v.clone())
}
Ok(res)
}
// This will be called at parse-time to generate the `Ast` for a macro invocation.
// The form it emits is analogous to the "Expr" "apply" form.
// Public for use in `expand.rs` tests.
pub fn macro_invocation(
grammar: FormPat,
macro_name: Name,
implementation: crate::runtime::eval::Closure,
export_names: Vec<Name>,
) -> Rc<Form> {
use crate::{ty_compare, walk_mode::WalkMode};
// TODO: instead of stripping the lambda, we should be invoking it with the syntax arguments.
let prefab_impl = implementation.prefab();
// Strip off the lambda (`env` now binds its names for us):
node_let!(prefab_impl => {Expr lambda}
impl_body__with__ee = body);
// ...and its binding:
let impl_body = strip_ee(&impl_body__with__ee).clone();
let grammar1 = grammar.clone();
let grammar2 = grammar.clone();
Rc::new(Form {
name: n("macro_invocation"), // TODO: maybe generate a fresh name?
grammar: Rc::new(form_pat!([
// `type_macro_invocation` expects "macro_name" to be set
(named "macro_name", (anyways (vr macro_name))),
// Capture this here so that its environmental names get freshened properly.
// Need to store this one phase unquoted.
(named "impl", (-- 1 (anyways (,impl_body)))),
(, grammar.clone())
])),
type_compare: Both(NotWalked, NotWalked),
// Invoked at typechecking time.
// The macro_name part will be bound to a type of the form
// ∀ T . [*[x : Nt<T> ⋯ ]* -> Nt<T>]
// ... which you can imagine is the type of the implementation of the macro
synth_type: Both(
cust_rc_box!(move |parts| {
let return_type = ty_compare::Subtype::underspecified(n("<return_type>"));
let _ = type_macro_invocation(&parts, return_type.clone(), &grammar1)?;
// What return type made that work?
let q_result = ty_compare::unification.with(|unif| {
let resolved = ty_compare::resolve(
crate::ast_walk::Clo { it: return_type, env: parts.env.clone() },
&unif.borrow(),
);
// Canonicalize the type in its environment:
let resolved = ty_compare::canonicalize(&resolved.it, resolved.env);
resolved.map_err(|e| crate::util::err::sp(e, parts.this_ast.clone()))
})?;
less_quoted_ty(&q_result, Some(n("Expr")), &parts.this_ast)
}),
cust_rc_box!(move |parts| {
// From the macro's point of view, its parts are all positive;
// they all produce (well, expand to), rather than consume, syntax.
let parts_positive = parts.switch_mode::<SynthTy>();
let expected_return_type = more_quoted_ty(parts.context_elt(), n("Pat"));
let arguments =
type_macro_invocation(&parts_positive, expected_return_type, &grammar2)?;
// What argument types made that work?
let mut res: Assoc<Name, Ast> = Assoc::new();
ty_compare::unification.with(|unif| {
for binder in &export_names {
let ty = arguments.find_or_panic(binder);
let binder_clo = ty_compare::resolve(
crate::ast_walk::Clo { it: ty.clone(), env: parts.env.clone() },
&unif.borrow(),
);
let binder_ty = ty_compare::canonicalize(&binder_clo.it, binder_clo.env)
.map_err(|e| crate::util::err::sp(e, parts.this_ast.clone()))?;
for (ty_n, ty) in
parts.with_context(binder_ty).get_res(*binder)?.iter_pairs()
{
res = res
.set(*ty_n, less_quoted_ty(ty, Some(n("Pat")), &parts.this_ast)?);
}
}
Ok(res)
})
}),
),
// Kind of a HACK, but we re-use `eval` instead of having a separate field.
eval: Positive(cust_rc_box!(move |parts| {
use crate::runtime::eval::Value;
// This code is like that for "apply".
let mut env = parts.env.clone();
for (param, depth) in &grammar.binders() {
let nt = grammar.find_named_call(*param);
if nt != Some(n("DefaultAtom")) && nt != Some(n("Ident")) {
// TODO: why not for those two NTs?
let rhs = parts.map_flatten_term_at_depth(
*param,
*depth,
&|mut a: &Ast| {
// Nuke all binding, since we're abandoning its context.
// The user will† deposit this syntax inside a replacement binding form.
// (†still not enforced until issue #31 is fixed)
while let ast::ExtendEnv(ref body, _)
| ast::ExtendEnvPhaseless(ref body, _) = a.c()
{
a = &*body;
}
Value::from_ast(a)
},
&|vec: Vec<Value>| Value::Sequence(vec.into_iter().map(Rc::new).collect()),
);
env = env.set(*param, rhs);
}
}
let expanded = Ast::reflect(&crate::runtime::eval::eval(
crate::core_forms::strip_ql(parts.get_term_ref(n("impl"))),
env,
)?);
// Expand any macros produced by expansion, or that were already present in subterms:
Ok(crate::expand::expand(&expanded)?.reify())
})),
quasiquote: Both(LiteralLike, LiteralLike),
})
}
/// What should `t` be, if matched under a repetition?
/// A tuple, driven by whatever names are `forall`ed in `env`.
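/// For example (as exercised by the `macro_definitions` test below): repeating
/// `Expr<T>` where `T` is a `forall` parameter yields a tuple type whose single
/// component is a `dotdotdot_type` driven by `T` with body `Expr<T>`.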
fn repeated_type(t: &Ast, env: &Assoc<Name, Ast>) -> Result<Ast, crate::ty::TypeError> {
let mut drivers = vec![];
for v in t.free_vrs() {
if env.find(&v).map(|a| a.c()) == Some(&ast::VariableReference(v)) {
drivers.push(env.find_or_panic(&v).clone());
}
}
if drivers.is_empty() {
// TODO: this is just a tuple where every element has the same type...
// ...but how do we constrain the length?
ty_err!(NeedsDriver (()) at t);
}
Ok(ast!({"Type" "tuple" : "component" => (,seq vec![ast!({"Type" "dotdotdot_type" :
"driver" => (,seq drivers),
"body" => (, t.clone())
})])}))
}
pub fn make_core_macro_forms() -> SynEnv {
let trivial_type_form = crate::core_type_forms::type_defn("unused", form_pat!((impossible)));
let beta_grammar = forms_to_form_pat_export![
syntax_syntax!( ((lit "nothing")) Nothing ) => [],
syntax_syntax!(
([(named "name", (call "DefaultReference")),
(lit ":"), (named "type", (call "DefaultReference"))]) Basic {
|_| icp!("Betas are not typed")
} {
|parts| {
Ok(Basic(parts.get_term(n("name")).vr_to_name(),
parts.get_term(n("type")).vr_to_name()).reify())
}
}) => [],
syntax_syntax!(
([(named "name", (call "DefaultReference")),
(lit "="), (named "type", (call "Type"))]) SameAs {
|_| icp!("Betas are not typed")
} {
|parts| {
Ok(SameAs(parts.get_term(n("name")).vr_to_name(),
Box::new(parts.get_term(n("type")))).reify())
}
}) => [],
syntax_syntax!(
([(lit "prot"), (named "name", (call "DefaultReference"))]) Protected {
|_| icp!("Betas are not typed")
} {
|parts| {
Ok(Protected(parts.get_term(n("name")).vr_to_name()).reify())
}
}) => [],
syntax_syntax!(
([(lit "forall"), (named "name", (call "DefaultReference"))]) Underspecified {
|_| icp!("Betas are not typed")
} {
|parts| {
Ok(Underspecified(parts.get_term(n("name")).vr_to_name()).reify())
}
}) => [],
syntax_syntax!(
((delim "...[", "[", (named "sub", (call "Beta")))) ShadowAll {
|_| icp!("Betas are not typed")
} {
|parts| {
let sub = Beta::reflect(&parts.get_res(n("sub"))?);
let drivers = sub.names_mentioned();
Ok(ShadowAll(Box::new(sub), drivers).reify())
}
}) => [],
syntax_syntax!(
((delim "[", "[",
[(named "over", (call "Beta")), (lit "o>"), (named "under", (call "Beta"))])) Shadow {
|_| icp!("Betas are not typed")
} {
|parts| {
Ok(Beta::Shadow(
Box::new(Beta::reflect(&parts.get_res(n("over"))?)),
Box::new(Beta::reflect(&parts.get_res(n("under"))?))).reify())
}
}) => []
];
let capture_language_form = typed_form!("capture_language",
(extend_nt [(lit "capture_language")], "OnlyNt",
crate::core_extra_forms::extend__capture_language),
Body(n("body")),
Body(n("body")));
// Most of "Syntax" is a negative walk (because it produces an environment),
// but lacking a `negative_ret_val`.
let grammar_grammar = forms_to_form_pat_export![
syntax_syntax!( ( (delim "anyways{", "{", (named "body", (call "Expr"))) ) Anyways (
body => Ast::reflect(&body)
)) => ["body"],
// HACK: expanding to `'[Expr| capture_language]'` doesn't do what you want, so do this:
Rc::new(Form {
name: n("capture_language_form"),
grammar: Rc::new(form_pat!( (lit "capture_language_form") )),
type_compare: Both(NotWalked, NotWalked),
synth_type: Negative(cust_rc_box!(|_| Ok(Assoc::new()))),
eval: Positive(cust_rc_box!(move |_| {
Ok(FormPat::Scope(capture_language_form.clone(),
crate::beta::ExportBeta::Nothing).reify())
})),
quasiquote: Both(LiteralLike, LiteralLike)
}) => [],
syntax_syntax!( ((lit "impossible")) Impossible ) => [],
syntax_syntax!( ([(named "body", (call "Syntax")), (lit "reserving"),
(star (named "words", (scan r"\s*'((?:[^'\\]|\\'|\\\\)*)'")))
]) Reserved {
|parts| {
parts.get_res(n("body"))
}
} {
|parts| {
Ok(Reserved(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)),
parts.get_rep_term(n("words")).iter().map(Ast::to_name).collect::<Vec<_>>()
).reify())
}
}) => ["body"],
syntax_syntax!( ( // TODO: this might have to be both positive and negative
[(lit "lit"), (named "body", (call "Syntax")),
// Allow \\ and \' as escapes:
(lit "="), (named "expected", (scan r"\s*'((?:[^'\\]|\\'|\\\\)*)'"))])
Literal {
|parts| {
parts.get_res(n("body"))
}
} {
|parts| {
let literal = parts.get_term(n("expected")).to_name().orig_sp()
.replace(r#"\'"#, r#"'"#).replace(r#"\\"#, r#"\"#);
Ok(FormPat::Literal(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)),
n(&literal)).reify())
}
}) => [],
syntax_syntax!( ([(lit "vr"), (delim "(", "(", (named "body", (call "Syntax")))]) VarRef (
body => Rc::new(FormPat::reflect(&body))
)) => [],
// TODO: split out a separate SyntaxSeq, so that we can get rid of the [ ] delimiters
syntax_syntax!( ( (delim "[", "[", (star (named "elt", (call "Syntax"))))) Seq {
|parts| {
let mut out = Assoc::<Name, Ast>::new();
for sub in &parts.get_rep_res(n("elt"))? {
out = out.set_assoc(sub);
}
Ok(out)
}
} {
|parts| {
Ok(Seq(parts.get_rep_res(n("elt"))?.iter().map(|val| {
Rc::new(FormPat::reflect(val))
}).collect()).reify())
}
}) => [* ["elt"]],
syntax_syntax!( ([(named "body", (call "Syntax")), (lit "*")]) Star {
|parts| {
let body : Assoc<Name, Ast> = parts.get_res(n("body"))?;
body.map(|t| repeated_type(t, &parts.env)).lift_result()
}
} {
|parts| {
Ok(Star(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify())
}
}) => ["body"],
syntax_syntax!( ([(named "body", (call "Syntax")), (lit "+")]) Plus {
|parts| {
let body : Assoc<Name, Ast> = parts.get_res(n("body"))?;
body.map(|t| repeated_type(t, &parts.env)).lift_result()
}
} {
|parts| {
Ok(Plus(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify())
}
}) => ["body"],
// TODO: support seprators, and add a separator here
syntax_syntax!( ( (delim "alt[", "[", (star [(named "elt", (call "Syntax"))]))) Alt {
|parts| {
let mut out = Assoc::<Name, Ast>::new();
for sub in &parts.get_rep_res(n("elt"))? {
out = out.set_assoc(sub);
}
Ok(out)
}
} {
|parts| {
Ok(Alt(parts.get_rep_res(n("elt"))?.iter().map(|val| {
Rc::new(FormPat::reflect(val))
}).collect()).reify())
}
}) => [* ["elt"]],
syntax_syntax!( ([(named "plan_a", (call "Syntax")),
(delim "or{", "{", (named "plan_b", (call "Syntax"))) ]) Biased (
plan_a => Rc::new(FormPat::reflect(&plan_a)),
plan_b => Rc::new(FormPat::reflect(&plan_b))
)) => ["plan_a" "plan_b"],
// `Named` switches to a positive mode for typechecking its body.
// TODO: I don't think this makes sense, now that `Named` and `Call` are split apart:
// TODO: replace `binder` with a `Pat`, and make the following true:
// This has to have the same named parts as `unquote`, because it reuses its typechecker
// But the type walk (as an overall quotation and locally) is always negative.
syntax_syntax!( ([(named "part_name", atom), (lit ":="),
(delim "(", "(", (named "body", (call "Syntax")))])
Named {
|parts| {
let binder = parts.get_term(n("part_name")).to_name();
Ok(Assoc::new().set(binder, parts.switch_mode::<SynthTy>().get_res(n("body"))?))
}
} {
|parts| {
Ok(Named(
parts.get_term(n("part_name")).to_name(),
Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify())
}
}) => ["part_name"],
// `Call` without a type
syntax_syntax!( ((delim ",{", "{", (named "nt", atom))) Call {
|_| {
Ok(Assoc::new()) // We should check that the nt is defined, but we can't here
}
} {
|parts| {
Ok(Call(parts.get_term(n("nt")).to_name()).reify())
}
}) => [],
// `Call` with a type is positive (has to be under a `Named`)
Rc::new(Form {
name: n("call_with_type"),
grammar: Rc::new(form_pat!(
(delim ",{", "{",
[(named "nt", atom),
(call "DefaultSeparator"), (scan r"(<)"),
(named "ty_annot", (call "Type")),
(call "DefaultSeparator"), (scan r"(>)")]))),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Both(cust_rc_box!(|parts| {
let expected_type = parts.get_res(n("ty_annot"))?;
let nt = parts.get_term(n("nt")).to_name();
Ok(more_quoted_ty(&expected_type, nt))
}), NotWalked),
eval: Positive(cust_rc_box!(|parts| {
let nt = parts.get_term(n("nt")).to_name();
Ok(Rc::new(Call(nt)).reify())
})),
quasiquote: Both(LiteralLike, LiteralLike)
}) => [],
// `Scan` can be positive or negative (may be under a `Named`)
Rc::new(Form {
name: n("scan"),
grammar: Rc::new(form_pat!(
[(call "DefaultSeparator"),
(named "pat", (scan_cat r"/((?:[^/\\]|\\.)*)/", "string.regexp")),
(alt [], [
(lit "as"), (call "DefaultSeparator"),
(named "category", (scan r"((?:\p{Letter}|[-.])*)"))])])),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Both(
cust_rc_box!(|_| { Ok(ast!({"Type" "Ident" :})) }),
cust_rc_box!(|_| { Ok(Assoc::new()) } )),
eval: Positive(cust_rc_box!(|parts| {
let regex = parts.get_term(n("pat")).to_name().orig_sp()
.replace(r#"\/"#, r#"/"#);
Ok(crate::grammar::new_scan(
®ex,
parts.maybe_get_term(n("category")).map(|a| a.to_name().orig_sp())
).reify())
})),
quasiquote: Both(LiteralLike, LiteralLike)
}) => [],
// `Common` can be positive or negative (may be under a `Named`)
Rc::new(Form {
name: n("common"),
grammar: Rc::new(form_pat!(
[(lit "common"), (delim "(", "(", (named "body", (call "Syntax")))])),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Both(
cust_rc_box!(|parts| { parts.get_res(n("body")) }),
cust_rc_box!(|parts| { parts.get_res(n("body")) })),
eval: Positive(cust_rc_box!(|parts| {
Ok(Common(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify())
})),
quasiquote: Both(LiteralLike, LiteralLike)
}) => ["body"],
// `Import` is positive (has to be under a `Named`)
Rc::new(Form {
name: n("import"),
grammar: Rc::new(form_pat!(
[(named "body", (call "Syntax")), (lit "<--"), (named "imported", (call "Beta"))])),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Both(cust_rc_box!(|parts| {
parts.get_res(n("body"))
}),
cust_rc_box!(|_| panic!("TODO prevent `import`s outside of `named`s"))),
eval: Positive(cust_rc_box!(|parts| {
Ok(NameImport(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)),
Beta::reflect(&parts.get_res(n("imported"))?)).reify())
})),
quasiquote: Both(LiteralLike, LiteralLike)
}) => [],
// `Pick` is positive (has to be under a `Named`), but its body is negative.
Rc::new(Form {
name: n("pick"),
grammar: Rc::new(form_pat!(
[(lit "pick"), (named "selection", atom), (lit "in"),
(named "body", (call "Syntax"))])),
type_compare: Both(NotWalked,NotWalked), // Not a type
synth_type: Both(cust_rc_box!(|parts| {
let env = parts.switch_to_negative().get_res(n("body"))?;
Ok(env.find_or_panic(&parts.get_term(n("selection")).to_name()).clone())
}),
cust_rc_box!(|_| panic!("TODO prevent `pick`s outside of `named`s"))),
eval: Positive(cust_rc_box!(|parts| {
Ok(Pick(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)),
parts.get_term(n("selection")).to_name()).reify())
})),
quasiquote: Both(LiteralLike, LiteralLike)
}) => [],
// TODO: implement syntax for ComputeSyntax
// Not sure if `Scope` syntax should be positive or negative.
syntax_syntax!( ([(lit "forall"), (star (named "param", atom)), (lit "."),
(delim "'{", "{",
(named "syntax",
(import [unusable "syntax"],
(import [* [forall "param"]], (call "Syntax"))))),
(named "macro_name", atom), (lit "->"),
(delim ".{", "{", (named "implementation",
// TODO: `beta!` needs `Shadow` so we can combine these `import`s.
// TODO: Why can't these be regular imports,
// and why can't the `--` be on the outside?
// (Things have to be this way to have the `--` at all.)
(import_phaseless [* [forall "param"]],
// Arbitrary context element:
(import_phaseless ["syntax" == {trivial_type_form ; }],
(-- 1 (call "Expr")))))),
(alt [], // TODO: needs proper `beta` structure, not just a name list:
[(lit "=>"), (star (named "export", atom))])])
Scope {
|parts| {
let return_ty = parts.switch_mode::<SynthTy>().get_res(n("implementation"))?;
let mut arguments : Vec<(Name, Ast)> = parts.get_res(n("syntax"))?
.iter_pairs().map(|(n, t)| (*n, t.clone())).collect();
arguments.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0) ); // Pick a canonical order
let ty_params = &parts.get_rep_term(n("param")).iter().map(Ast::to_name
).collect::<Vec<_>>();
Ok(Assoc::new().set(parts.get_term(n("macro_name")).to_name(),
macro_type(&ty_params, arguments, return_ty)))
}
} {
|parts| {
// TODO: This is the right thing to do, right?
let macro_params = crate::beta::bound_from_export_beta(
&ebeta!(["syntax"]), &parts.this_ast.node_parts(), 0);
let implementation = strip_ql(&strip_ee(
&strip_ee(&parts.get_term(n("implementation"))))).clone();
let mut export = ExportBeta::Nothing;
let export_names = parts.get_rep_term(n("export")).iter()
.map(Ast::to_name).collect::<Vec<Name>>();
for name in &export_names {
export = ExportBeta::Shadow(
Box::new(ExportBeta::Use(*name)),
Box::new(export));
}
// This macro invocation (will replace `syntax`):
Ok(Scope(macro_invocation(
FormPat::reflect(&parts.get_res(n("syntax"))?),
parts.get_term(n("macro_name")).to_name(),
crate::runtime::eval::Closure{ body: implementation,
params: macro_params,
env: parts.env
},
export_names),
export).reify())
}
}) => ["macro_name"] // This exports a macro, not syntax (like `binders` does)!
];
assoc_n!(
"Syntax" => Rc::new(grammar_grammar),
"Beta" => Rc::new(beta_grammar))
}
thread_local! {
/// Per-file highlighting keeps track of the successive languages,
/// and looks for the syntax extensions that trigger them:
pub static syn_envs__for__highlighting: std::cell::RefCell<Vec<(Ast, SynEnv)>>
= std::cell::RefCell::new(vec![]);
}
pub fn extend_syntax() -> Rc<Form> {
use crate::earley::ParseContext;
let perform_extension = move |pc: ParseContext, extension_info: Ast| -> ParseContext {
let bnf_parts =
// TODO: getting a `Shape` (the second element is the `(lit "in")`) must be a parser bug
extract!((extension_info.c()) ast::Shape = (ref subs) =>
extract!((subs[0].c()) ast::IncompleteNode = (ref parts) => parts));
let nts: Vec<Name> =
bnf_parts.get_rep_leaf_or_panic(n("nt")).iter().map(|a| a.to_name()).collect();
let ops: Vec<bool> = bnf_parts
.get_rep_leaf_or_panic(n("operator"))
.iter()
.map(|a| a.to_name() == n("::=also"))
.collect();
let rhses: Vec<&Ast> = bnf_parts.get_rep_leaf_or_panic(n("rhs"));
// Figure out the syntax extension:
let mut syn_env = pc.grammar;
for ((nt, extend), rhs) in nts.into_iter().zip(ops.into_iter()).zip(rhses.into_iter()) {
let expanded_rhs = crate::expand::expand(&rhs).unwrap();
let rhs_form_pat =
FormPat::reflect(&crate::ast_walk::walk(&expanded_rhs, &pc.eval_ctxt).unwrap());
syn_env = syn_env.set(
nt,
Rc::new(if extend {
form_pat!((alt (, rhs_form_pat), (, (**syn_env.find_or_panic(&nt)).clone())))
} else {
rhs_form_pat
}),
)
}
syn_envs__for__highlighting.with(|envs| {
// For per-file highlighting:
envs.borrow_mut().push((extension_info, syn_env.clone()));
});
ParseContext { grammar: syn_env, type_ctxt: pc.type_ctxt, eval_ctxt: pc.eval_ctxt }
};
// The `Syntax` argument, `rhs`, doesn't care about the context element, so we make one up:
let trivial_type_form = crate::core_type_forms::type_defn("unused", form_pat!((impossible)));
Rc::new(Form {<|fim▁hole|> [(star [(named "nt", atom),
(named "operator", (alt (lit "::="), (lit "::=also"))),
(named "rhs", (call "Syntax")),
(lit ";")]),
(lit "in")],
(named "body",
(import_phaseless [* ["rhs" == {trivial_type_form ; }]], (call "Expr"))),
perform_extension)])),
type_compare: Both(NotWalked, NotWalked),
synth_type: Positive(Body(n("body"))),
eval: Positive(cust_rc_box!(move |extend_syntax_parts| {
// HACK: since the macros have been expanded away, `rhs` needs to be be unbound
crate::ast_walk::walk::<Eval>(
strip_ee(extend_syntax_parts.get_term_ref(n("body"))),
&extend_syntax_parts,
)
})),
quasiquote: Both(LiteralLike, LiteralLike),
})
}
#[test]
fn formpat_reflection() {
use crate::{core_forms::find_form, runtime::eval::eval_top};
let macro_forms = make_core_macro_forms()
.set(n("DefaultToken"), Rc::new(crate::grammar::new_scan(r"\s*(\S+)", None)))
.set(n("OpenDelim"), Rc::new(crate::grammar::new_scan(r"\s*(\S+)", None)))
.set(n("CloseDelim"), Rc::new(crate::grammar::new_scan(r"\s*(\S+)", None)))
.set(n("DefaultAtom"), Rc::new(FormPat::Call(n("DefaultToken"))))
.set(n("AtomNotInPat"), Rc::new(FormPat::Call(n("DefaultToken"))))
.set(n("DefaultReference"), Rc::new(VarRef(Rc::new(FormPat::Call(n("DefaultToken"))))))
.set(n("Type"), Rc::new(FormPat::Call(n("DefaultReference"))))
.set(n("DefaultSeparator"), Rc::new(crate::grammar::new_scan(r"(\s*)", None)));
fn syntax_to_form_pat(a: Ast) -> FormPat { FormPat::reflect(&eval_top(&a).unwrap()) }
assert_eq!(
syntax_to_form_pat(ast!({
find_form(¯o_forms, "Syntax", "impossible");
})),
Impossible
);
assert_eq!(
syntax_to_form_pat(ast!({find_form(¯o_forms, "Syntax", "literal");
"expected" => "<--->",
"body" => {find_form(¯o_forms, "Syntax", "call");
"nt" => "DefaultToken"
}})),
Literal(std::rc::Rc::new(Call(n("DefaultToken"))), n("<--->"))
);
let string_to_form_pat = |s: &str| -> FormPat {
syntax_to_form_pat(
crate::earley::parse_in_syn_env(&form_pat!((call "Syntax")), macro_forms.clone(), s)
.unwrap(),
)
};
assert_eq!(string_to_form_pat(r"/\s*(\S+)/"), crate::grammar::new_scan(r"\s*(\S+)", None));
assert_eq!(string_to_form_pat(r"lit /\s*(\S+)/ = 'x'"), form_pat!((lit_aat "x")));
assert_eq!(
string_to_form_pat(r"[ lit /\s*(\S+)/ = 'write_this' ,{ Expr < Int > }, <-- nothing ]"),
form_pat!([(lit_aat "write_this"), (import [], (call "Expr"))])
);
assert_eq!(
string_to_form_pat(r"[ lit /\s*(\S+)/ = 'write_this' ,{ Expr < Int > }, <-- a : b ]"),
form_pat!([(lit_aat "write_this"), (import ["a" : "b"], (call "Expr"))])
);
assert_eq!(
string_to_form_pat(r",{ Expr < Int > }, <-- [ forall thing o> a = b ]"),
form_pat!((import [forall "thing" "a" = "b"], (call "Expr")))
);
}
#[test]
fn macro_definitions() {
let int_expr_type = uty!({type_apply : (prim Expr) [{Int :}]});
let env = assoc_n!("ie" => int_expr_type.clone(), "T" => uty!(T))
.set(negative_ret_val(), ast!((trivial)));
assert_eq!(
crate::ty::neg_synth_type(
&u!({Syntax star => ["body"] :
{named => ["part_name"] : x {call_with_type : Expr T}}}),
env.clone()
),
Ok(
assoc_n!("x" => uty!({tuple : [{dotdotdot_type : [T] {type_apply : (prim Expr) [T]}}]}))
)
);
let t_expr_type = uty!({type_apply : (prim Expr) [T]});
let s_expr_type = uty!({type_apply : (prim Expr) [S]});
let t_pat_type = uty!({type_apply : (prim Pat) [T]});
assert_eq!(
crate::ty::neg_synth_type(
&u!({Syntax scope : [T; S]
{seq => [* ["elt"]] :
[{named => ["part_name"] : body {call_with_type : Expr S}};
{named => ["part_name"] : val {call_with_type : Expr T}};
{named => ["part_name"] : binding {call_with_type : Pat T}}]
}
some_macro
ie
}),
env.clone()
),
Ok(assoc_n!(
"some_macro" => macro_type(&vec![n("T"), n("S")],
vec![(n("binding"), t_pat_type.clone()),
(n("body"), s_expr_type.clone()),
(n("val"), t_expr_type.clone())],
int_expr_type.clone())))
);
}
#[test]
fn macro_types() {
let int_expr_type = uty!({type_apply : (prim Expr) [{Int :}]});
let t_expr_type = uty!({type_apply : (prim Expr) [T]});
assert_eq!(
macro_type(&vec![], vec![(n("a"), int_expr_type.clone())], int_expr_type.clone()),
uty!({fn :
[{struct : [a {type_apply : (prim Expr) [{Int :}]}]}]
{type_apply : (prim Expr) [{Int :}]}})
);
assert_eq!(
macro_type(&vec![n("T")], vec![(n("a"), t_expr_type.clone())], t_expr_type.clone()),
uty!({forall_type : [T]
{fn : [{struct : [a {type_apply : (prim Expr) [T]}]}]
{type_apply : (prim Expr) [T]}}})
);
}
#[test]
fn type_basic_macro_invocation() {
let int_expr_type = uty!({type_apply : (prim Expr) [{Int :}]});
let t_expr_type = uty!({type_apply : (prim Expr) [T]});
let s_expr_type = uty!({type_apply : (prim Expr) [S]});
let t_pat_type = uty!({type_apply : (prim Pat) [T]});
let t_type_type = uty!({type_apply : (prim Type) [S]});
let env = assoc_n!(
"int_var" => uty!({Int :}),
"nat_var" => uty!({Nat :}),
"basic_int_macro" =>
macro_type(&vec![], vec![(n("a"), int_expr_type.clone())], int_expr_type.clone()),
"basic_t_macro" =>
macro_type(&vec![n("T")], vec![(n("a"), t_expr_type.clone())], t_expr_type.clone()),
"basic_pattern_macro" =>
macro_type(&vec![n("T")], vec![(n("a"), t_pat_type.clone())], t_pat_type.clone()),
"let_like_macro" =>
macro_type(&vec![n("T"), n("S")],
vec![(n("val"), t_expr_type.clone()),
(n("binding"), t_pat_type.clone()),
(n("body"), s_expr_type.clone())],
s_expr_type.clone()),
"pattern_cond_like_macro" =>
macro_type(&vec![n("T"), n("S")],
vec![(n("t"), t_type_type.clone()),
(n("body"), t_pat_type.clone()),
(n("cond_expr"), int_expr_type.clone())], // (would really be a bool)
t_pat_type.clone())
);
let impl_clo =
crate::runtime::eval::Closure { body: ast!((trivial)), params: vec![], env: Assoc::new() };
assert_eq!(
crate::ty::synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke basic_int_macro"), (named "a", (call "Expr"))]),
n("basic_int_macro"), impl_clo.clone(), vec![]) ;
"macro_name" => (vr "basic_int_macro"),
"a" => (vr "int_var")
}),
env.clone()
),
Ok(ast!({ "Type" "Int" :}))
);
assert_eq!(
crate::ty::synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke basic_t_macro"), (named "a", (call "Expr"))]),
n("basic_t_macro"), impl_clo.clone(), vec![]) ;
"macro_name" => (vr "basic_t_macro"),
"a" => (vr "nat_var")
}),
env.clone()
),
Ok(ast!({ "Type" "Nat" :}))
);
assert_m!(
crate::ty::synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke basic_int_macro"), (named "a", (call "Expr"))]),
n("basic_int_macro"), impl_clo.clone(), vec![]) ;
"macro_name" => (vr "basic_int_macro"),
"a" => (vr "nat_var")
}),
env.clone()
),
Err(_)
);
assert_eq!(
crate::ty::neg_synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke basic_pattern_macro"), (named "a", (call "Pat"))]),
n("basic_pattern_macro"), impl_clo.clone(), vec![n("a")]) => ["a"];
"macro_name" => (vr "basic_pattern_macro"),
"a" => "should_be_nat"
}),
env.clone().set(negative_ret_val(), ast!({"Type" "Nat" :}))
),
Ok(assoc_n!("should_be_nat" => ast!({"Type" "Nat" :})))
);
assert_eq!(
crate::ty::synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke let_like_macro"),
(named "val", (call "Expr")),
(named "binding", (call "Pat")),
(named "body", (import ["binding" = "val"], (call "Expr")))]),
n("let_like_macro"), impl_clo.clone(), vec![]) ;
"macro_name" => (vr "let_like_macro"),
"val" => (vr "nat_var"),
"binding" => "x",
"body" => (import ["binding" = "val"] (vr "x"))
}),
env.clone()
),
Ok(ast!({ "Type" "Nat" :}))
);
assert_eq!(
crate::ty::neg_synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke pattern_cond_like_macro"),
(named "t", (call "Type")),
(named "body", (call "Pat")),
(named "cond_expr", (import ["body" : "t"], (call "Expr")))]),
n("pattern_cond_like_macro"), impl_clo.clone(), vec![n("body")]) ;
"macro_name" => (vr "pattern_cond_like_macro"),
"t" => {"Type" "Int" :},
"body" => "x",
"cond_expr" => (import ["body" : "t"] (vr "x"))
}),
env.set(negative_ret_val(), ast!({"Type" "Int" :})).clone()
),
Ok(assoc_n!("x" => ast!({ "Type" "Int" :})))
);
}
#[test]
fn type_ddd_macro() {
let t_rep_expr_type = uty!({tuple : [{dotdotdot_type : [T] {type_apply : (prim Expr) [T]}}]});
let s_expr_type = uty!({type_apply : (prim Expr) [S]});
let t_rep_pat_type = uty!({tuple : [{dotdotdot_type : [T] {type_apply : (prim Pat) [T]}}]});
let impl_clo =
crate::runtime::eval::Closure { body: ast!((trivial)), params: vec![], env: Assoc::new() };
let env = assoc_n!(
"int_var" => uty!({Int :}),
"nat_var" => uty!({Nat :}),
"let_like_macro" =>
macro_type(&vec![n("T"), n("S")],
vec![(n("val"), t_rep_expr_type.clone()),
(n("binding"), t_rep_pat_type.clone()),
(n("body"), s_expr_type.clone())],
s_expr_type.clone()));
assert_eq!(
crate::ty::synth_type(
&ast!({
macro_invocation(
form_pat!([(lit "invoke let_like_macro"),
(star (named "val", (call "Expr"))),
(star (named "binding", (call "Pat"))),
(named "body", (import [* ["binding" = "val"]], (call "Expr")))]),
n("let_like_macro"), impl_clo, vec![]) ;
"macro_name" => (vr "let_like_macro"),
"val" => [@"arm" (vr "nat_var"), (vr "nat_var")],
"binding" => [@"arm" "x1", "x2"],
"body" => (import [* ["binding" = "val"]] (vr "x1"))
}),
env.clone()
),
Ok(uty!({Nat :}))
);
}
#[test]
fn define_and_parse_macros() {
crate::grammar::parse(
&form_pat!((scope extend_syntax())),
crate::core_forms::outermost__parse_context(),
"extend_syntax
Expr ::=also forall T . '{
[ lit ,{ OpenDelim }, = '['
body := ( ,{ Expr< Int > }, )
lit ,{ CloseDelim }, = ']'
]
}' add_one__macro -> .{ '[ Expr | (plus one ,[body], ) ]' }. ;
in [ [ [ one ] ] ]",
)
.unwrap();
}<|fim▁end|>
|
name: n("extend_syntax"),
grammar: Rc::new(form_pat!(
[(lit "extend_syntax"),
(extend
|