prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>sha2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module implements only the Sha256 function since that is all that is needed for internal
//! use. This implementation is not intended for external use or for any use where security is
//! important.
use std::iter::range_step;
use std::num::Zero;
use std::slice::bytes::{MutableByteVector, copy_memory};
use serialize::hex::ToHex;
/// Write a u32 into a vector, which must be 4 bytes long. The value is written in big-endian
/// format.
fn write_u32_be(dst: &mut[u8], input: u32) {
use std::mem::to_be32;
assert!(dst.len() == 4);
unsafe {
let x = dst.unsafe_mut_ref(0) as *mut _ as *mut u32;
*x = to_be32(input);
}
}
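// Illustrative note (added commentary, not in the original source): big-endian means the most
// significant byte is stored first, so write_u32_be(dst, 0x01020304) leaves dst holding
// [0x01, 0x02, 0x03, 0x04].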
/// Read a vector of bytes into a vector of u32s. The values are read in big-endian format.
fn read_u32v_be(dst: &mut[u32], input: &[u8]) {
use std::mem::to_be32;
assert!(dst.len() * 4 == input.len());
unsafe {
let mut x = dst.unsafe_mut_ref(0) as *mut _ as *mut u32;
let mut y = input.unsafe_ref(0) as *_ as *u32;
for _ in range(0, dst.len()) {
*x = to_be32(*y);
x = x.offset(1);
y = y.offset(1);
}
}
}
trait ToBits {
/// Convert the value in bytes to the number of bits, as a tuple where the 1st item is the
/// high-order value and the 2nd item is the low-order value.
fn to_bits(self) -> (Self, Self);
}
impl ToBits for u64 {
fn to_bits(self) -> (u64, u64) {
return (self >> 61, self << 3);
}
}
/// Adds the specified number of bytes to the bit count. Calls fail!() if this would cause numeric
/// overflow.
fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
let (new_high_bits, new_low_bits) = bytes.to_bits();
if new_high_bits > Zero::zero() {
fail!("numeric overflow occured.")
}
match bits.checked_add(&new_low_bits) {
Some(x) => return x,
None => fail!("numeric overflow occurred.")
}
}
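// Worked example (added commentary, not in the original source): for a u64 byte count of 100,
// to_bits() returns (100 >> 61, 100 << 3) == (0, 800), i.e. 800 bits with an empty high word,
// so add_bytes_to_bits(100u64, 100) == 900. The high word only becomes non-zero once the byte
// count exceeds 2^61, which add_bytes_to_bits treats as overflow and fail!()s on.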
/// A FixedBuffer, like its name implies, is a fixed size buffer. When the buffer becomes full, it
/// must be processed. The input() method takes care of processing and then clearing the buffer
/// automatically. However, other methods do not and require the caller to process the buffer. Any
/// method that modifies the buffer directly or provides the caller with bytes that can be modified
/// results in those bytes being marked as used by the buffer.
trait FixedBuffer {
/// Input a vector of bytes. If the buffer becomes full, process it with the provided
/// function and then clear the buffer.
fn input(&mut self, input: &[u8], func: |&[u8]|);
/// Reset the buffer.
fn reset(&mut self);
/// Zero the buffer up until the specified index. The current buffer position must not be
/// greater than that index.
fn zero_until(&mut self, idx: uint);
/// Get a slice of the buffer of the specified size. There must be at least that many bytes
/// remaining in the buffer.
fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8];
/// Get the current buffer. The buffer must already be full. This clears the buffer as well.
fn full_buffer<'s>(&'s mut self) -> &'s [u8];
/// Get the current position of the buffer.
fn position(&self) -> uint;
/// Get the number of bytes remaining in the buffer until it is full.
fn remaining(&self) -> uint;
/// Get the size of the buffer
fn size(&self) -> uint;
}
/// A FixedBuffer of 64 bytes useful for implementing Sha256 which has a 64 byte blocksize.
struct FixedBuffer64 {
buffer: [u8, ..64],
buffer_idx: uint,
}
impl FixedBuffer64 {
/// Create a new FixedBuffer64
fn new() -> FixedBuffer64 {
return FixedBuffer64 {
buffer: [0u8, ..64],
buffer_idx: 0
};
}
}
impl FixedBuffer for FixedBuffer64 {
fn input(&mut self, input: &[u8], func: |&[u8]|) {
let mut i = 0;
let size = self.size();
// If there is already data in the buffer, copy as much as we can into it and process
// the data if the buffer becomes full.
if self.buffer_idx != 0 {
let buffer_remaining = size - self.buffer_idx;
if input.len() >= buffer_remaining {
copy_memory(
self.buffer.mut_slice(self.buffer_idx, size),
input.slice_to(buffer_remaining));
self.buffer_idx = 0;
func(self.buffer);
i += buffer_remaining;
} else {
copy_memory(
self.buffer.mut_slice(self.buffer_idx, self.buffer_idx + input.len()),
input);
self.buffer_idx += input.len();
return;
}
}
// While we have at least a full buffer size chunk's worth of data, process that data
// without copying it into the buffer<|fim▁hole|> while input.len() - i >= size {
func(input.slice(i, i + size));
i += size;
}
// Copy any input data into the buffer. At this point in the method, the amount of
// data left in the input vector will be less than the buffer size and the buffer will
// be empty.
let input_remaining = input.len() - i;
copy_memory(
self.buffer.mut_slice(0, input_remaining),
input.slice_from(i));
self.buffer_idx += input_remaining;
}
fn reset(&mut self) {
self.buffer_idx = 0;
}
fn zero_until(&mut self, idx: uint) {
assert!(idx >= self.buffer_idx);
self.buffer.mut_slice(self.buffer_idx, idx).set_memory(0);
self.buffer_idx = idx;
}
fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8] {
self.buffer_idx += len;
return self.buffer.mut_slice(self.buffer_idx - len, self.buffer_idx);
}
fn full_buffer<'s>(&'s mut self) -> &'s [u8] {
assert!(self.buffer_idx == 64);
self.buffer_idx = 0;
return self.buffer.slice_to(64);
}
fn position(&self) -> uint { self.buffer_idx }
fn remaining(&self) -> uint { 64 - self.buffer_idx }
fn size(&self) -> uint { 64 }
}
/// The StandardPadding trait adds a method useful for Sha256 to a FixedBuffer struct.
trait StandardPadding {
/// Add padding to the buffer. The buffer must not be full when this method is called and is
/// guaranteed to have exactly rem remaining bytes when it returns. If there are not at least
/// rem bytes available, the buffer will be zero padded, processed, cleared, and then filled
/// with zeros again until only rem bytes are remaining.
fn standard_padding(&mut self, rem: uint, func: |&[u8]|);
}
impl <T: FixedBuffer> StandardPadding for T {
fn standard_padding(&mut self, rem: uint, func: |&[u8]|) {
let size = self.size();
self.next(1)[0] = 128;
if self.remaining() < rem {
self.zero_until(size);
func(self.full_buffer());
}
self.zero_until(size - rem);
}
}
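// Illustrative sketch (added commentary, not in the original source): for the 3-byte message
// "abc" the final 64-byte block processed by Sha256 is laid out as
//
//     0x61 0x62 0x63 0x80 0x00 ... 0x00 | 8-byte big-endian bit length (24)
//
// standard_padding(8, ...) appends the 0x80 marker and zero-fills up to byte 56, leaving the
// last 8 bytes for the length that Engine256::finish() writes with write_u32_be().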
/// The Digest trait specifies an interface common to digest functions, such as SHA-1 and the SHA-2
/// family of digest functions.
pub trait Digest {
/// Provide message data.
///
/// # Arguments
///
/// * input - A vector of message data
fn input(&mut self, input: &[u8]);
/// Retrieve the digest result. This method may be called multiple times.
///
/// # Arguments
///
/// * out - the vector to hold the result. Must be large enough to contain output_bits().
fn result(&mut self, out: &mut [u8]);
/// Reset the digest. This method must be called after result() and before supplying more
/// data.
fn reset(&mut self);
/// Get the output size in bits.
fn output_bits(&self) -> uint;
/// Convenience function that feeds a string into a digest.
///
/// # Arguments
///
/// * `input` The string to feed into the digest
fn input_str(&mut self, input: &str) {
self.input(input.as_bytes());
}
/// Convenience function that retrieves the result of a digest as a
/// newly allocated vec of bytes.
fn result_bytes(&mut self) -> Vec<u8> {
let mut buf = Vec::from_elem((self.output_bits()+7)/8, 0u8);
self.result(buf.as_mut_slice());
buf
}
/// Convenience function that retrieves the result of a digest as a
/// String in hexadecimal format.
fn result_str(&mut self) -> String {
self.result_bytes().as_slice().to_hex().to_string()
}
}
// A structure that represents the state of a digest computation for the SHA-2 256 family of digest
// functions
struct Engine256State {
h0: u32,
h1: u32,
h2: u32,
h3: u32,
h4: u32,
h5: u32,
h6: u32,
h7: u32,
}
impl Engine256State {
fn new(h: &[u32, ..8]) -> Engine256State {
return Engine256State {
h0: h[0],
h1: h[1],
h2: h[2],
h3: h[3],
h4: h[4],
h5: h[5],
h6: h[6],
h7: h[7]
};
}
fn reset(&mut self, h: &[u32, ..8]) {
self.h0 = h[0];
self.h1 = h[1];
self.h2 = h[2];
self.h3 = h[3];
self.h4 = h[4];
self.h5 = h[5];
self.h6 = h[6];
self.h7 = h[7];
}
fn process_block(&mut self, data: &[u8]) {
fn ch(x: u32, y: u32, z: u32) -> u32 {
((x & y) ^ ((!x) & z))
}
fn maj(x: u32, y: u32, z: u32) -> u32 {
((x & y) ^ (x & z) ^ (y & z))
}
fn sum0(x: u32) -> u32 {
((x >> 2) | (x << 30)) ^ ((x >> 13) | (x << 19)) ^ ((x >> 22) | (x << 10))
}
fn sum1(x: u32) -> u32 {
((x >> 6) | (x << 26)) ^ ((x >> 11) | (x << 21)) ^ ((x >> 25) | (x << 7))
}
fn sigma0(x: u32) -> u32 {
((x >> 7) | (x << 25)) ^ ((x >> 18) | (x << 14)) ^ (x >> 3)
}
fn sigma1(x: u32) -> u32 {
((x >> 17) | (x << 15)) ^ ((x >> 19) | (x << 13)) ^ (x >> 10)
}
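// Note (added commentary, not in the original source): each `(x >> n) | (x << (32 - n))`
// pair above is a 32-bit right-rotation by n bits, so sum0/sum1/sigma0/sigma1 match the
// Σ0, Σ1, σ0 and σ1 functions defined for SHA-256 in FIPS 180-4.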
let mut a = self.h0;
let mut b = self.h1;
let mut c = self.h2;
let mut d = self.h3;
let mut e = self.h4;
let mut f = self.h5;
let mut g = self.h6;
let mut h = self.h7;
let mut w = [0u32, ..64];
// Sha-512 and Sha-256 use basically the same calculations which are implemented
// by these macros. Inlining the calculations seems to result in better generated code.
macro_rules! schedule_round( ($t:expr) => (
w[$t] = sigma1(w[$t - 2]) + w[$t - 7] + sigma0(w[$t - 15]) + w[$t - 16];
)
)
macro_rules! sha2_round(
($A:ident, $B:ident, $C:ident, $D:ident,
$E:ident, $F:ident, $G:ident, $H:ident, $K:ident, $t:expr) => (
{
$H += sum1($E) + ch($E, $F, $G) + $K[$t] + w[$t];
$D += $H;
$H += sum0($A) + maj($A, $B, $C);
}
)
)
read_u32v_be(w.mut_slice(0, 16), data);
// Putting the message schedule inside the same loop as the round calculations allows
// the compiler to generate better code.
for t in range_step(0u, 48, 8) {
schedule_round!(t + 16);
schedule_round!(t + 17);
schedule_round!(t + 18);
schedule_round!(t + 19);
schedule_round!(t + 20);
schedule_round!(t + 21);
schedule_round!(t + 22);
schedule_round!(t + 23);
sha2_round!(a, b, c, d, e, f, g, h, K32, t);
sha2_round!(h, a, b, c, d, e, f, g, K32, t + 1);
sha2_round!(g, h, a, b, c, d, e, f, K32, t + 2);
sha2_round!(f, g, h, a, b, c, d, e, K32, t + 3);
sha2_round!(e, f, g, h, a, b, c, d, K32, t + 4);
sha2_round!(d, e, f, g, h, a, b, c, K32, t + 5);
sha2_round!(c, d, e, f, g, h, a, b, K32, t + 6);
sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7);
}
for t in range_step(48u, 64, 8) {
sha2_round!(a, b, c, d, e, f, g, h, K32, t);
sha2_round!(h, a, b, c, d, e, f, g, K32, t + 1);
sha2_round!(g, h, a, b, c, d, e, f, K32, t + 2);
sha2_round!(f, g, h, a, b, c, d, e, K32, t + 3);
sha2_round!(e, f, g, h, a, b, c, d, K32, t + 4);
sha2_round!(d, e, f, g, h, a, b, c, K32, t + 5);
sha2_round!(c, d, e, f, g, h, a, b, K32, t + 6);
sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7);
}
self.h0 += a;
self.h1 += b;
self.h2 += c;
self.h3 += d;
self.h4 += e;
self.h5 += f;
self.h6 += g;
self.h7 += h;
}
}
static K32: [u32, ..64] = [
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
];
// A structure that keeps track of the state of the Sha-256 operation and contains the logic
// necessary to perform the final calculations.
struct Engine256 {
length_bits: u64,
buffer: FixedBuffer64,
state: Engine256State,
finished: bool,
}
impl Engine256 {
fn new(h: &[u32, ..8]) -> Engine256 {
return Engine256 {
length_bits: 0,
buffer: FixedBuffer64::new(),
state: Engine256State::new(h),
finished: false
}
}
fn reset(&mut self, h: &[u32, ..8]) {
self.length_bits = 0;
self.buffer.reset();
self.state.reset(h);
self.finished = false;
}
fn input(&mut self, input: &[u8]) {
assert!(!self.finished)
// Assumes that input.len() can be converted to u64 without overflow
self.length_bits = add_bytes_to_bits(self.length_bits, input.len() as u64);
let self_state = &mut self.state;
self.buffer.input(input, |input: &[u8]| { self_state.process_block(input) });
}
fn finish(&mut self) {
if self.finished {
return;
}
let self_state = &mut self.state;
self.buffer.standard_padding(8, |input: &[u8]| { self_state.process_block(input) });
write_u32_be(self.buffer.next(4), (self.length_bits >> 32) as u32 );
write_u32_be(self.buffer.next(4), self.length_bits as u32);
self_state.process_block(self.buffer.full_buffer());
self.finished = true;
}
}
/// The SHA-256 hash algorithm
pub struct Sha256 {
engine: Engine256
}
impl Sha256 {
/// Construct a new instance of a SHA-256 digest.
pub fn new() -> Sha256 {
Sha256 {
engine: Engine256::new(&H256)
}
}
}
impl Digest for Sha256 {
fn input(&mut self, d: &[u8]) {
self.engine.input(d);
}
fn result(&mut self, out: &mut [u8]) {
self.engine.finish();
write_u32_be(out.mut_slice(0, 4), self.engine.state.h0);
write_u32_be(out.mut_slice(4, 8), self.engine.state.h1);
write_u32_be(out.mut_slice(8, 12), self.engine.state.h2);
write_u32_be(out.mut_slice(12, 16), self.engine.state.h3);
write_u32_be(out.mut_slice(16, 20), self.engine.state.h4);
write_u32_be(out.mut_slice(20, 24), self.engine.state.h5);
write_u32_be(out.mut_slice(24, 28), self.engine.state.h6);
write_u32_be(out.mut_slice(28, 32), self.engine.state.h7);
}
fn reset(&mut self) {
self.engine.reset(&H256);
}
fn output_bits(&self) -> uint { 256 }
}
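// Example usage (added commentary, not in the original source), using the standard "abc"
// test vector from FIPS 180-4:
//
//     let mut sh = Sha256::new();
//     sh.input_str("abc");
//     assert_eq!(sh.result_str().as_slice(),
//                "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");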
static H256: [u32, ..8] = [
0x6a09e667,
0xbb67ae85,
0x3c6ef372,
0xa54ff53a,
0x510e527f,
0x9b05688c,
0x1f83d9ab,
0x5be0cd19
];
#[cfg(test)]
mod tests {
extern crate rand;
use super::{Digest, Sha256, FixedBuffer};
use std::num::Bounded;
use self::rand::isaac::IsaacRng;
use self::rand::Rng;
use serialize::hex::FromHex;
// A normal addition - no overflow occurs
#[test]
fn test_add_bytes_to_bits_ok() {
assert!(super::add_bytes_to_bits::<u64>(100, 10) == 180);
}
// A simple failure case - adding 1 to the max value
#[test]
#[should_fail]
fn test_add_bytes_to_bits_overflow() {
super::add_bytes_to_bits::<u64>(Bounded::max_value(), 1);
}
struct Test {
input: String,
output_str: String,
}
fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
// Test that it works when accepting the message all at once
for t in tests.iter() {
sh.reset();
sh.input_str(t.input.as_slice());
let out_str = sh.result_str();
assert!(out_str == t.output_str);
}
// Test that it works when accepting the message in pieces
for t in tests.iter() {
sh.reset();
let len = t.input.len();
let mut left = len;
while left > 0u {
let take = (left + 1u) / 2u;
sh.input_str(t.input
.as_slice()
.slice(len - left, take + len - left));
left = left - take;
}
let out_str = sh.result_str();
assert!(out_str == t.output_str);
}
}
#[test]
fn test_sha256() {
// Examples from Wikipedia
let wikipedia_tests = vec!(
Test {
input: "".to_string(),
output_str: "e3b0c44298fc1c149afb\
f4c8996fb92427ae41e4649b934ca495991b7852b855".to_string()
},
Test {
input: "The quick brown fox jumps over the lazy \
dog".to_string(),
output_str: "d7a8fbb307d7809469ca\
9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_string()
},
Test {
input: "The quick brown fox jumps over the lazy \
dog.".to_string(),
output_str: "ef537f25c895bfa78252\
6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_string()
});
let tests = wikipedia_tests;
let mut sh = box Sha256::new();
test_hash(sh, tests.as_slice());
}
/// Feed 1,000,000 'a's into the digest with varying input sizes and check that the result is
/// correct.
fn test_digest_1million_random<D: Digest>(digest: &mut D, blocksize: uint, expected: &str) {
let total_size = 1000000;
let buffer = Vec::from_elem(blocksize * 2, 'a' as u8);
let mut rng = IsaacRng::new_unseeded();
let mut count = 0;
digest.reset();
while count < total_size {
let next: uint = rng.gen_range(0, 2 * blocksize + 1);
let remaining = total_size - count;
let size = if next > remaining { remaining } else { next };
digest.input(buffer.slice_to(size));
count += size;
}
let result_str = digest.result_str();
let result_bytes = digest.result_bytes();
assert_eq!(expected, result_str.as_slice());
let expected_vec: Vec<u8> = expected.from_hex()
.unwrap()
.move_iter()
.collect();
assert_eq!(expected_vec, result_bytes);
}
#[test]
fn test_1million_random_sha256() {
let mut sh = Sha256::new();
test_digest_1million_random(
&mut sh,
64,
"cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0");
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::Bencher;
use super::{Sha256, FixedBuffer, Digest};
#[bench]
pub fn sha256_10(b: &mut Bencher) {
let mut sh = Sha256::new();
let bytes = [1u8, ..10];
b.iter(|| {
sh.input(bytes);
});
b.bytes = bytes.len() as u64;
}
#[bench]
pub fn sha256_1k(b: &mut Bencher) {
let mut sh = Sha256::new();
let bytes = [1u8, ..1024];
b.iter(|| {
sh.input(bytes);
});
b.bytes = bytes.len() as u64;
}
#[bench]
pub fn sha256_64k(b: &mut Bencher) {
let mut sh = Sha256::new();
let bytes = [1u8, ..65536];
b.iter(|| {
sh.input(bytes);
});
b.bytes = bytes.len() as u64;
}
}<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/*!
Contains everything related to vertex buffers.
The main struct is the `VertexBuffer`, which represents a buffer in the video memory,
containing a list of vertices.
In order to create a vertex buffer, you must first create a struct that represents each vertex,
and implement the `glium::vertex::Vertex` trait on it. The `implement_vertex!` macro helps you
with that.
```
# #[macro_use]
# extern crate glium;
# extern crate glutin;
# fn main() {
#[derive(Copy)]
struct Vertex {
position: [f32; 3],
texcoords: [f32; 2],
}
implement_vertex!(Vertex, position, texcoords);
# }
```
Next, build a `Vec` of the vertices that you want to upload, and pass it to
`VertexBuffer::new`.
```no_run
# let display: glium::Display = unsafe { ::std::mem::uninitialized() };
# #[derive(Copy)]
# struct Vertex {
# position: [f32; 3],
# texcoords: [f32; 2],
# }
# impl glium::vertex::Vertex for Vertex {
# fn build_bindings() -> glium::vertex::VertexFormat {
# unimplemented!() }
# }
let data = vec![
Vertex {
position: [0.0, 0.0, 0.4],
texcoords: [0.0, 1.0]
},
Vertex {
position: [12.0, 4.5, -1.8],
texcoords: [1.0, 0.5]
},
Vertex {
position: [-7.124, 0.1, 0.0],
texcoords: [0.0, 0.4]<|fim▁hole|>
let vertex_buffer = glium::vertex::VertexBuffer::new(&display, data);
```
*/
use std::marker::MarkerTrait;
use std::sync::mpsc::Sender;
use sync::LinearSyncFence;
use std::iter::Chain;
use std::option::IntoIter;
pub use self::buffer::{VertexBuffer, VertexBufferAny, Mapping};
pub use self::buffer::{VertexBufferSlice, VertexBufferAnySlice};
pub use self::format::{AttributeType, VertexFormat};
pub use self::per_instance::{PerInstanceAttributesBuffer, PerInstanceAttributesBufferAny};
pub use self::per_instance::Mapping as PerInstanceAttributesBufferMapping;
mod buffer;
mod format;
mod per_instance;
/// Describes the source to use for the vertices when drawing.
#[derive(Clone)]
pub enum VerticesSource<'a> {
/// A buffer uploaded in the video memory.
///
/// If the second parameter is `Some`, then a fence *must* be sent with this sender for
/// when the buffer stops being used.
///
/// The third and fourth parameters are the offset and length of the buffer.
VertexBuffer(&'a VertexBufferAny, Option<Sender<LinearSyncFence>>, usize, usize),
/// A buffer uploaded in the video memory.
///
/// If the second parameter is `Some`, then a fence *must* be sent with this sender for
/// when the buffer stops being used.
PerInstanceBuffer(&'a PerInstanceAttributesBufferAny, Option<Sender<LinearSyncFence>>),
}
/// Objects that can be used as vertex sources.
pub trait IntoVerticesSource<'a> {
/// Builds the `VerticesSource`.
fn into_vertices_source(self) -> VerticesSource<'a>;
}
impl<'a> IntoVerticesSource<'a> for VerticesSource<'a> {
fn into_vertices_source(self) -> VerticesSource<'a> {
self
}
}
/// Objects that describe multiple vertex sources.
pub trait MultiVerticesSource<'a> {
type Iterator: Iterator<Item = VerticesSource<'a>>;
/// Iterates over the `VerticesSource`.
fn iter(self) -> Self::Iterator;
}
impl<'a, T> MultiVerticesSource<'a> for T
where T: IntoVerticesSource<'a>
{
type Iterator = IntoIter<VerticesSource<'a>>;
fn iter(self) -> IntoIter<VerticesSource<'a>> {
Some(self.into_vertices_source()).into_iter()
}
}
macro_rules! impl_for_tuple {
($t:ident) => (
impl<'a, $t> MultiVerticesSource<'a> for ($t,)
where $t: IntoVerticesSource<'a>
{
type Iterator = IntoIter<VerticesSource<'a>>;
fn iter(self) -> IntoIter<VerticesSource<'a>> {
Some(self.0.into_vertices_source()).into_iter()
}
}
);
($t1:ident, $t2:ident) => (
#[allow(non_snake_case)]
impl<'a, $t1, $t2> MultiVerticesSource<'a> for ($t1, $t2)
where $t1: IntoVerticesSource<'a>, $t2: IntoVerticesSource<'a>
{
type Iterator = Chain<<($t1,) as MultiVerticesSource<'a>>::Iterator,
<($t2,) as MultiVerticesSource<'a>>::Iterator>;
fn iter(self) -> Chain<<($t1,) as MultiVerticesSource<'a>>::Iterator,
<($t2,) as MultiVerticesSource<'a>>::Iterator>
{
let ($t1, $t2) = self;
Some($t1.into_vertices_source()).into_iter().chain(($t2,).iter())
}
}
impl_for_tuple!($t2);
);
($t1:ident, $($t2:ident),+) => (
#[allow(non_snake_case)]
impl<'a, $t1, $($t2),+> MultiVerticesSource<'a> for ($t1, $($t2),+)
where $t1: IntoVerticesSource<'a>, $($t2: IntoVerticesSource<'a>),+
{
type Iterator = Chain<<($t1,) as MultiVerticesSource<'a>>::Iterator,
<($($t2),+) as MultiVerticesSource<'a>>::Iterator>;
fn iter(self) -> Chain<<($t1,) as MultiVerticesSource<'a>>::Iterator,
<($($t2),+) as MultiVerticesSource<'a>>::Iterator>
{
let ($t1, $($t2),+) = self;
Some($t1.into_vertices_source()).into_iter().chain(($($t2),+).iter())
}
}
impl_for_tuple!($($t2),+);
);
}
impl_for_tuple!(A, B, C, D, E, F, G);
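// Illustrative sketch (added commentary, not in the original source): the impls generated above
// let callers pass either a single source or a tuple of sources wherever a `MultiVerticesSource`
// bound is expected, e.g.
//
//     draw(&vertex_buffer);                          // single vertex source
//     draw((&vertex_buffer, &per_instance_buffer));  // tuple of sources, iterated in order
//
// where `draw` stands in for any hypothetical function with a `MultiVerticesSource` parameter.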
/// Trait for structures that represent a vertex.
///
/// Instead of implementing this trait yourself, it is recommended to use the `implement_vertex!`
/// macro instead.
// TODO: this should be `unsafe`, but that would break the syntax extension
pub trait Vertex: Copy + MarkerTrait {
/// Builds the `VertexFormat` representing the layout of this element.
fn build_bindings() -> VertexFormat;
}
/// Trait for types that can be used as vertex attributes.
pub unsafe trait Attribute: MarkerTrait {
/// Get the type of data.
fn get_type() -> AttributeType;
}<|fim▁end|>
|
},
];
|
<|file_name|>summary_test_internal.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import sqlite3
from tensorflow.contrib.summary import summary_ops
from tensorflow.python.framework import test_util
class SummaryDbTest(test_util.TensorFlowTestCase):
"""Helper for summary database testing."""
def setUp(self):
super(SummaryDbTest, self).setUp()
self.db_path = os.path.join(self.get_temp_dir(), 'DbTest.sqlite')
if os.path.exists(self.db_path):
os.unlink(self.db_path)
self.db = sqlite3.connect(self.db_path)
self.create_summary_db_writer = functools.partial(
summary_ops.create_summary_db_writer,
db_uri=self.db_path,
experiment_name='experiment',
run_name='run',
user_name='user')
def tearDown(self):
self.db.close()
super(SummaryDbTest, self).tearDown()
def get_one(db, q, *p):
return db.execute(q, p).fetchone()[0]
def get_all(db, q, *p):
return unroll(db.execute(q, p).fetchall())
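# Illustrative note (added commentary, not in the original file): `unroll` below flattens the
# list of row tuples returned by fetchall(); summing tuples with an empty-tuple start value
# concatenates them, so unroll([(1, 2), (3,)]) == (1, 2, 3).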
def unroll(list_of_tuples):
return sum(list_of_tuples, ())<|fim▁end|>
|
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Internal helpers for tests in this directory."""
|
<|file_name|>FrogJump.cpp<|end_file_name|><|fim▁begin|>/*
A small frog wants to get to the other side of the road. The frog is currently located at position X and wants to get to a position greater than or equal to Y. The small frog always jumps a fixed distance, D.
Count the minimal number of jumps that the small frog must perform to reach its target.
Write a function:
int solution(int X, int Y, int D);<|fim▁hole|>*/
// you can write to stdout for debugging purposes, e.g.
// printf("this is a debug message\n");
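// Worked example (added commentary, not part of the original submission): with X = 10, Y = 85
// and D = 30 the frog must cover diff = 75; 75 % 30 != 0, so the answer is 75 / 30 + 1 = 3 jumps.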
int solution(int X, int Y, int D) {
// write your code in C99
int diff=Y-X;
if(diff%D==0)
return diff/D;
else
return diff/D+1;
}<|fim▁end|>
|
that, given three integers X, Y and D, returns the minimal number of jumps from position X to a position equal to or greater than Y.
|
<|file_name|>mapper.py<|end_file_name|><|fim▁begin|>import sys
from geopy import Point
from django.apps import apps as django_apps
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style
from .geo_mixin import GeoMixin
LANDMARK_NAME = 0
LATITUDE = 2
LETTERS = list(map(chr, range(65, 91)))
LONGITUDE = 1
style = color_style()
class Mapper(GeoMixin):
center_lat = None
center_lon = None
landmarks = None # format ((name, longitude, latitude), )
map_area = None
radius = 5.5
mapper_model = None
def __init__(self):
self.name = self.map_area or f'mapper {self.__class__.__name__}'
app_config = django_apps.get_app_config('edc_map')
mapper_model = self.mapper_model or app_config.mapper_model
if not mapper_model:
raise ImproperlyConfigured(
f'Invalid mapper_model. Got None. See {repr(self)}.')
try:
self.item_model = django_apps.get_model(*mapper_model.split('.'))
except LookupError as e:
sys.stdout.write(style.WARNING(
f'\n Warning. Lookup error in mapper. See {repr(self)}. Got {e} '
'edc_map.apps.AppConfig\n'))
else:
self.item_model_cls = self.item_model
self.item_label = self.item_model._meta.verbose_name
self.load()
def __repr__(self):
return 'Mapper({0.map_area!r})'.format(self)
def __str__(self):
return '({0.map_area!r})'.format(self)
def load(self):
return None
@property
def __dict__(self):
return {
'map_area': self.map_area,
'center_lat': self.center_lat,
'center_lon': self.center_lon,
'radius': self.radius}
@property
def area_center_point(self):
return Point(self.center_lat, self.center_lon)
@property
def area_radius(self):
return self.radius
def point_in_map_area(self, point):
"""Return True if point is within mapper area radius."""
return self.point_in_radius(
point, self.area_center_point, self.area_radius)
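# Illustrative sketch (added commentary, not in the original file): a concrete subclass is
# expected to provide the map geometry, e.g.
#
#     class StudyAreaMapper(Mapper):   # hypothetical subclass
#         map_area = 'study_area'
#         center_lat = -24.65
#         center_lon = 25.91
#         radius = 5.5
#
# after which point_in_map_area(Point(lat, lon)) answers whether a point falls inside the
# configured radius around the centre.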
<|fim▁hole|> units='km', label=self.map_area)<|fim▁end|>
|
def raise_if_not_in_map_area(self, point):
self.raise_if_not_in_radius(
point, self.area_center_point, self.area_radius,
|
<|file_name|>test_filters.py<|end_file_name|><|fim▁begin|>from pygraz_website import filters
class TestFilters(object):
def test_url_detection(self):
"""
Test that urls are found correctly.
"""
no_urls_string = '''This is a test without any urls in it.'''
urls_string = '''This string has one link in it: http://pygraz.org . But it also has some text after it :D'''
assert filters.urlize(no_urls_string) == no_urls_string
assert filters.urlize(urls_string) == '''This string has one link in it: <a href="http://pygraz.org">http://pygraz.org</a> . But it also has some text after it :D'''
assert filters.urlize(urls_string, True).matches == {'urls': set(['http://pygraz.org'])}
assert filters.urlize(None) == u''
assert filters.urlize("'http://test.com'") == """'<a href="http://test.com">http://test.com</a>'"""
def test_namehandles(self):
"""
Tests the discovery of linkable names.<|fim▁hole|> """
string_with_handles = 'Hallo @pygraz.'
assert filters.urlize(string_with_handles) == 'Hallo <a href="http://twitter.com/pygraz">@pygraz</a>.'
assert filters.urlize(string_with_handles, True).matches == {'handles': set(['pygraz'])}
def test_hashtags(self):
string_with_tags = 'This is a #test for #hashtags'
assert filters.urlize(string_with_tags) == 'This is a <a href="http://twitter.com/search?q=%23test">#test</a> for <a href="http://twitter.com/search?q=%23hashtags">#hashtags</a>'
assert filters.urlize(string_with_tags, True).matches == {'hashtags': set(['test', 'hashtags'])}<|fim▁end|>
| |
<|file_name|>window.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A windowing implementation using glutin.
use NestedEventLoopListener;
use compositing::compositor_thread::EventLoopWaker;
use compositing::windowing::{AnimationState, MouseWindowEvent};
use compositing::windowing::{WindowEvent, WindowMethods};
use euclid::{Point2D, Size2D, TypedPoint2D, TypedVector2D, ScaleFactor, TypedSize2D};
#[cfg(target_os = "windows")]
use gdi32;
use gleam::gl;
use glutin;
use glutin::{Api, ElementState, Event, GlRequest, MouseButton, MouseScrollDelta, VirtualKeyCode};
#[cfg(not(target_os = "windows"))]
use glutin::ScanCode;
use glutin::TouchPhase;
#[cfg(target_os = "macos")]
use glutin::os::macos::{ActivationPolicy, WindowBuilderExt};
use msg::constellation_msg::{self, Key};
use msg::constellation_msg::{ALT, CONTROL, KeyState, NONE, SHIFT, SUPER, TraversalDirection};
use net_traits::net_error_list::NetError;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use osmesa_sys;
use script_traits::{LoadData, TouchEventType, TouchpadPressurePhase};
use servo::ipc_channel::ipc::IpcSender;
use servo_config::opts;
use servo_config::prefs::PREFS;
use servo_config::resource_files;
use servo_geometry::DeviceIndependentPixel;
use servo_url::ServoUrl;
use std::cell::{Cell, RefCell};
#[cfg(any(target_os = "linux", target_os = "macos"))]
use std::ffi::CString;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use std::mem;
use std::os::raw::c_void;
use std::ptr;
use std::rc::Rc;
use style_traits::DevicePixel;
use style_traits::cursor::Cursor;
#[cfg(target_os = "windows")]
use user32;
use webrender_api::{DeviceUintRect, DeviceUintSize, ScrollLocation};
#[cfg(target_os = "windows")]
use winapi;
static mut G_NESTED_EVENT_LOOP_LISTENER: Option<*mut (NestedEventLoopListener + 'static)> = None;
bitflags! {
flags KeyModifiers: u8 {
const LEFT_CONTROL = 1,
const RIGHT_CONTROL = 2,
const LEFT_SHIFT = 4,
const RIGHT_SHIFT = 8,
const LEFT_ALT = 16,
const RIGHT_ALT = 32,
const LEFT_SUPER = 64,
const RIGHT_SUPER = 128,
}
}
// Some shortcuts use Cmd on Mac and Control on other systems.
#[cfg(target_os = "macos")]
const CMD_OR_CONTROL: constellation_msg::KeyModifiers = SUPER;
#[cfg(not(target_os = "macos"))]
const CMD_OR_CONTROL: constellation_msg::KeyModifiers = CONTROL;
// Some shortcuts use Cmd on Mac and Alt on other systems.
#[cfg(target_os = "macos")]
const CMD_OR_ALT: constellation_msg::KeyModifiers = SUPER;
#[cfg(not(target_os = "macos"))]
const CMD_OR_ALT: constellation_msg::KeyModifiers = ALT;
// This should vary by zoom level and maybe actual text size (focused or under cursor)
const LINE_HEIGHT: f32 = 38.0;
const MULTISAMPLES: u16 = 16;
#[cfg(target_os = "macos")]
fn builder_with_platform_options(mut builder: glutin::WindowBuilder) -> glutin::WindowBuilder {
if opts::get().headless || opts::get().output_file.is_some() {
// Prevent the window from showing in Dock.app, stealing focus,
// or appearing at all when running in headless mode or generating an
// output file.
builder = builder.with_activation_policy(ActivationPolicy::Prohibited)
}
builder.with_app_name(String::from("Servo"))
}
#[cfg(not(target_os = "macos"))]
fn builder_with_platform_options(builder: glutin::WindowBuilder) -> glutin::WindowBuilder {
builder
}
#[cfg(any(target_os = "linux", target_os = "macos"))]
struct HeadlessContext {
width: u32,
height: u32,
_context: osmesa_sys::OSMesaContext,
_buffer: Vec<u32>,
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
struct HeadlessContext {
width: u32,
height: u32,
}
impl HeadlessContext {
#[cfg(any(target_os = "linux", target_os = "macos"))]
fn new(width: u32, height: u32) -> HeadlessContext {
let mut attribs = Vec::new();
attribs.push(osmesa_sys::OSMESA_PROFILE);
attribs.push(osmesa_sys::OSMESA_CORE_PROFILE);
attribs.push(osmesa_sys::OSMESA_CONTEXT_MAJOR_VERSION);
attribs.push(3);
attribs.push(osmesa_sys::OSMESA_CONTEXT_MINOR_VERSION);
attribs.push(3);
attribs.push(0);
let context = unsafe {
osmesa_sys::OSMesaCreateContextAttribs(attribs.as_ptr(), ptr::null_mut())
};
assert!(!context.is_null());
let mut buffer = vec![0; (width * height) as usize];
unsafe {
let ret = osmesa_sys::OSMesaMakeCurrent(context,
buffer.as_mut_ptr() as *mut _,
gl::UNSIGNED_BYTE,
width as i32,
height as i32);
assert!(ret != 0);
};
HeadlessContext {
width: width,
height: height,
_context: context,
_buffer: buffer,
}
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
fn new(width: u32, height: u32) -> HeadlessContext {
HeadlessContext {
width: width,
height: height,
}
}
#[cfg(any(target_os = "linux", target_os = "macos"))]
fn get_proc_address(s: &str) -> *const c_void {
let c_str = CString::new(s).expect("Unable to create CString");
unsafe {
mem::transmute(osmesa_sys::OSMesaGetProcAddress(c_str.as_ptr()))
}
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
fn get_proc_address(_: &str) -> *const c_void {
ptr::null() as *const _
}
}
enum WindowKind {
Window(glutin::Window),
Headless(HeadlessContext),
}
/// The type of a window.
pub struct Window {
kind: WindowKind,
mouse_down_button: Cell<Option<glutin::MouseButton>>,
mouse_down_point: Cell<Point2D<i32>>,
event_queue: RefCell<Vec<WindowEvent>>,
mouse_pos: Cell<Point2D<i32>>,
key_modifiers: Cell<KeyModifiers>,
current_url: RefCell<Option<ServoUrl>>,
#[cfg(not(target_os = "windows"))]
/// The contents of the last ReceivedCharacter event for use in a subsequent KeyEvent.
pending_key_event_char: Cell<Option<char>>,
#[cfg(target_os = "windows")]
last_pressed_key: Cell<Option<constellation_msg::Key>>,
/// The list of keys that have been pressed but not yet released, to allow providing
/// the equivalent ReceivedCharacter data as was received for the press event.
#[cfg(not(target_os = "windows"))]
pressed_key_map: RefCell<Vec<(ScanCode, char)>>,
animation_state: Cell<AnimationState>,
gl: Rc<gl::Gl>,
}
#[cfg(not(target_os = "windows"))]
fn window_creation_scale_factor() -> ScaleFactor<f32, DeviceIndependentPixel, DevicePixel> {
ScaleFactor::new(1.0)
}
#[cfg(target_os = "windows")]
fn window_creation_scale_factor() -> ScaleFactor<f32, DeviceIndependentPixel, DevicePixel> {
let hdc = unsafe { user32::GetDC(::std::ptr::null_mut()) };
let ppi = unsafe { gdi32::GetDeviceCaps(hdc, winapi::wingdi::LOGPIXELSY) };
ScaleFactor::new(ppi as f32 / 96.0)
}
impl Window {
pub fn new(is_foreground: bool,
window_size: TypedSize2D<u32, DeviceIndependentPixel>,
parent: Option<glutin::WindowID>) -> Rc<Window> {
let win_size: TypedSize2D<u32, DevicePixel> =
(window_size.to_f32() * window_creation_scale_factor())
.to_usize().cast().expect("Window size should fit in u32");
let width = win_size.to_untyped().width;
let height = win_size.to_untyped().height;
// If there's no chrome, start off with the window invisible. It will be set to visible in
// `load_end()`. This avoids an ugly flash of unstyled content (especially important since
// unstyled content is white and chrome often has a transparent background). See issue
// #9996.
let visible = is_foreground && !opts::get().no_native_titlebar;
let window_kind = if opts::get().headless {
WindowKind::Headless(HeadlessContext::new(width, height))
} else {
let mut builder =
glutin::WindowBuilder::new().with_title("Servo".to_string())
.with_decorations(!opts::get().no_native_titlebar)
.with_transparency(opts::get().no_native_titlebar)
.with_dimensions(width, height)
.with_gl(Window::gl_version())
.with_visibility(visible)
.with_parent(parent)
.with_multitouch();
if let Ok(mut icon_path) = resource_files::resources_dir_path() {
icon_path.push("servo.png");
builder = builder.with_icon(icon_path);
}
if opts::get().enable_vsync {
builder = builder.with_vsync();
}
if opts::get().use_msaa {
builder = builder.with_multisampling(MULTISAMPLES)
}
builder = builder_with_platform_options(builder);
let mut glutin_window = builder.build().expect("Failed to create window.");
unsafe { glutin_window.make_current().expect("Failed to make context current!") }
glutin_window.set_window_resize_callback(Some(Window::nested_window_resize as fn(u32, u32)));
WindowKind::Window(glutin_window)
};
let gl = match window_kind {
WindowKind::Window(ref window) => {
match gl::GlType::default() {
gl::GlType::Gl => {
unsafe {
gl::GlFns::load_with(|s| window.get_proc_address(s) as *const _)
}
}
gl::GlType::Gles => {
unsafe {
gl::GlesFns::load_with(|s| window.get_proc_address(s) as *const _)
}
}
}
}
WindowKind::Headless(..) => {
unsafe {
gl::GlFns::load_with(|s| HeadlessContext::get_proc_address(s))
}
}
};
if opts::get().headless {
// Print some information about the headless renderer that
// can be useful in diagnosing CI failures on build machines.
println!("{}", gl.get_string(gl::VENDOR));
println!("{}", gl.get_string(gl::RENDERER));
println!("{}", gl.get_string(gl::VERSION));
}
gl.clear_color(0.6, 0.6, 0.6, 1.0);
gl.clear(gl::COLOR_BUFFER_BIT);
gl.finish();
let window = Window {
kind: window_kind,
event_queue: RefCell::new(vec!()),
mouse_down_button: Cell::new(None),
mouse_down_point: Cell::new(Point2D::new(0, 0)),
mouse_pos: Cell::new(Point2D::new(0, 0)),
key_modifiers: Cell::new(KeyModifiers::empty()),
current_url: RefCell::new(None),
#[cfg(not(target_os = "windows"))]
pending_key_event_char: Cell::new(None),
#[cfg(not(target_os = "windows"))]
pressed_key_map: RefCell::new(vec![]),
#[cfg(target_os = "windows")]
last_pressed_key: Cell::new(None),
gl: gl.clone(),
animation_state: Cell::new(AnimationState::Idle),
};
window.present();
Rc::new(window)
}
pub fn platform_window(&self) -> glutin::WindowID {
match self.kind {
WindowKind::Window(ref window) => {
unsafe { glutin::WindowID::new(window.platform_window()) }
}
WindowKind::Headless(..) => {
unreachable!();
}
}
}
fn nested_window_resize(width: u32, height: u32) {
unsafe {
if let Some(listener) = G_NESTED_EVENT_LOOP_LISTENER {
(*listener).handle_event_from_nested_event_loop(
WindowEvent::Resize(TypedSize2D::new(width, height)));
}
}
}
#[cfg(not(any(target_arch = "arm", target_arch = "aarch64")))]
fn gl_version() -> GlRequest {
return GlRequest::Specific(Api::OpenGl, (3, 2));
}
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
fn gl_version() -> GlRequest {
GlRequest::Specific(Api::OpenGlEs, (3, 0))
}
#[cfg(not(target_os = "windows"))]
fn handle_received_character(&self, ch: char) {
if !ch.is_control() {
self.pending_key_event_char.set(Some(ch));
}
}
#[cfg(target_os = "windows")]
fn handle_received_character(&self, ch: char) {
let modifiers = Window::glutin_mods_to_script_mods(self.key_modifiers.get());
if let Some(last_pressed_key) = self.last_pressed_key.get() {
let event = WindowEvent::KeyEvent(Some(ch), last_pressed_key, KeyState::Pressed, modifiers);
self.event_queue.borrow_mut().push(event);
} else {
// Only send the character if we can print it (by ignoring characters like backspace)
if !ch.is_control() {
match Window::char_to_script_key(ch) {
Some(key) => {
let event = WindowEvent::KeyEvent(Some(ch),
key,
KeyState::Pressed,
modifiers);
self.event_queue.borrow_mut().push(event);
}
None => {}
}
}
}
self.last_pressed_key.set(None);
}
fn toggle_keyboard_modifiers(&self, virtual_key_code: VirtualKeyCode) {
match virtual_key_code {
VirtualKeyCode::LControl => self.toggle_modifier(LEFT_CONTROL),
VirtualKeyCode::RControl => self.toggle_modifier(RIGHT_CONTROL),
VirtualKeyCode::LShift => self.toggle_modifier(LEFT_SHIFT),
VirtualKeyCode::RShift => self.toggle_modifier(RIGHT_SHIFT),
VirtualKeyCode::LAlt => self.toggle_modifier(LEFT_ALT),
VirtualKeyCode::RAlt => self.toggle_modifier(RIGHT_ALT),
VirtualKeyCode::LWin => self.toggle_modifier(LEFT_SUPER),
VirtualKeyCode::RWin => self.toggle_modifier(RIGHT_SUPER),
_ => {}
}
}
#[cfg(not(target_os = "windows"))]
fn handle_keyboard_input(&self, element_state: ElementState, _scan_code: u8, virtual_key_code: VirtualKeyCode) {
self.toggle_keyboard_modifiers(virtual_key_code);
let ch = match element_state {
ElementState::Pressed => {
// Retrieve any previously stored ReceivedCharacter value.
// Store the association between the scan code and the actual
// character value, if there is one.
let ch = self.pending_key_event_char
.get()
.and_then(|ch| filter_nonprintable(ch, virtual_key_code));
self.pending_key_event_char.set(None);
if let Some(ch) = ch {
self.pressed_key_map.borrow_mut().push((_scan_code, ch));
}
ch
}
ElementState::Released => {
// Retrieve the associated character value for this release key,
// if one was previously stored.
let idx = self.pressed_key_map
.borrow()
.iter()
.position(|&(code, _)| code == _scan_code);
idx.map(|idx| self.pressed_key_map.borrow_mut().swap_remove(idx).1)
}
};
if let Ok(key) = Window::glutin_key_to_script_key(virtual_key_code) {
let state = match element_state {
ElementState::Pressed => KeyState::Pressed,
ElementState::Released => KeyState::Released,
};
let modifiers = Window::glutin_mods_to_script_mods(self.key_modifiers.get());
self.event_queue.borrow_mut().push(WindowEvent::KeyEvent(ch, key, state, modifiers));
}
}
#[cfg(target_os = "windows")]
fn handle_keyboard_input(&self, element_state: ElementState, _scan_code: u8, virtual_key_code: VirtualKeyCode) {
self.toggle_keyboard_modifiers(virtual_key_code);
if let Ok(key) = Window::glutin_key_to_script_key(virtual_key_code) {
let state = match element_state {
ElementState::Pressed => KeyState::Pressed,
ElementState::Released => KeyState::Released,
};
if element_state == ElementState::Pressed {
if is_printable(virtual_key_code) {
self.last_pressed_key.set(Some(key));
}
}
let modifiers = Window::glutin_mods_to_script_mods(self.key_modifiers.get());
self.event_queue.borrow_mut().push(WindowEvent::KeyEvent(None, key, state, modifiers));
}
}
fn handle_window_event(&self, event: glutin::Event) -> bool {
match event {
Event::ReceivedCharacter(ch) => {
self.handle_received_character(ch)
}
Event::KeyboardInput(element_state, _scan_code, Some(virtual_key_code)) => {
self.handle_keyboard_input(element_state, _scan_code, virtual_key_code);
}
Event::KeyboardInput(_, _, None) => {
debug!("Keyboard input without virtual key.");
}
Event::Resized(width, height) => {
self.event_queue.borrow_mut().push(WindowEvent::Resize(TypedSize2D::new(width, height)));
}
Event::MouseInput(element_state, mouse_button, pos) => {
if mouse_button == MouseButton::Left ||
mouse_button == MouseButton::Right {
match pos {
Some((x, y)) => {
self.mouse_pos.set(Point2D::new(x, y));
self.event_queue.borrow_mut().push(
WindowEvent::MouseWindowMoveEventClass(TypedPoint2D::new(x as f32, y as f32)));
self.handle_mouse(mouse_button, element_state, x, y);
}
None => {
let mouse_pos = self.mouse_pos.get();
self.handle_mouse(mouse_button, element_state, mouse_pos.x, mouse_pos.y);
}
}
}
}
Event::MouseMoved(x, y) => {
self.mouse_pos.set(Point2D::new(x, y));
self.event_queue.borrow_mut().push(
WindowEvent::MouseWindowMoveEventClass(TypedPoint2D::new(x as f32, y as f32)));
}
Event::MouseWheel(delta, phase, pos) => {
let (dx, dy) = match delta {
MouseScrollDelta::LineDelta(dx, dy) => (dx, dy * LINE_HEIGHT),
MouseScrollDelta::PixelDelta(dx, dy) => (dx, dy),
};
let scroll_location = ScrollLocation::Delta(TypedVector2D::new(dx, dy));
if let Some((x, y)) = pos {
self.mouse_pos.set(Point2D::new(x, y));
self.event_queue.borrow_mut().push(
WindowEvent::MouseWindowMoveEventClass(TypedPoint2D::new(x as f32, y as f32)));
};
let phase = glutin_phase_to_touch_event_type(phase);
self.scroll_window(scroll_location, phase);
},
Event::Touch(touch) => {
use script_traits::TouchId;
let phase = glutin_phase_to_touch_event_type(touch.phase);
let id = TouchId(touch.id as i32);
let point = TypedPoint2D::new(touch.location.0 as f32, touch.location.1 as f32);
self.event_queue.borrow_mut().push(WindowEvent::Touch(phase, id, point));
}
Event::TouchpadPressure(pressure, stage) => {
let m = self.mouse_pos.get();
let point = TypedPoint2D::new(m.x as f32, m.y as f32);
let phase = glutin_pressure_stage_to_touchpad_pressure_phase(stage);
self.event_queue.borrow_mut().push(WindowEvent::TouchpadPressure(point, pressure, phase));
}
Event::Refresh => {
self.event_queue.borrow_mut().push(WindowEvent::Refresh);
}
Event::Closed => {
return true
}
_ => {}
}
false
}
fn toggle_modifier(&self, modifier: KeyModifiers) {
let mut modifiers = self.key_modifiers.get();
modifiers.toggle(modifier);
self.key_modifiers.set(modifiers);
}
/// Helper function to send a scroll event.
fn scroll_window(&self, mut scroll_location: ScrollLocation, phase: TouchEventType) {
// Scroll events snap to the major axis of movement, with vertical
// preferred over horizontal.
if let ScrollLocation::Delta(ref mut delta) = scroll_location {
if delta.y.abs() >= delta.x.abs() {
delta.x = 0.0;
} else {
delta.y = 0.0;
}
}
let mouse_pos = self.mouse_pos.get();
let event = WindowEvent::Scroll(scroll_location,
TypedPoint2D::new(mouse_pos.x as i32, mouse_pos.y as i32),
phase);
self.event_queue.borrow_mut().push(event);
}
/// Helper function to handle a click
fn handle_mouse(&self, button: glutin::MouseButton, action: glutin::ElementState, x: i32, y: i32) {
use script_traits::MouseButton;
// FIXME(tkuehn): max pixel dist should be based on pixel density
let max_pixel_dist = 10f64;
let event = match action {
ElementState::Pressed => {
self.mouse_down_point.set(Point2D::new(x, y));
self.mouse_down_button.set(Some(button));
MouseWindowEvent::MouseDown(MouseButton::Left, TypedPoint2D::new(x as f32, y as f32))
}
ElementState::Released => {
let mouse_up_event = MouseWindowEvent::MouseUp(MouseButton::Left,
TypedPoint2D::new(x as f32, y as f32));
match self.mouse_down_button.get() {
None => mouse_up_event,
Some(but) if button == but => {
let pixel_dist = self.mouse_down_point.get() - Point2D::new(x, y);
let pixel_dist = ((pixel_dist.x * pixel_dist.x +
pixel_dist.y * pixel_dist.y) as f64).sqrt();
if pixel_dist < max_pixel_dist {
self.event_queue.borrow_mut().push(WindowEvent::MouseWindowEventClass(mouse_up_event));
MouseWindowEvent::Click(MouseButton::Left, TypedPoint2D::new(x as f32, y as f32))
} else {
mouse_up_event
}
},
Some(_) => mouse_up_event,
}
}
};
self.event_queue.borrow_mut().push(WindowEvent::MouseWindowEventClass(event));
}
#[cfg(any(target_os = "macos", target_os = "windows"))]
fn handle_next_event(&self) -> bool {
match self.kind {
WindowKind::Window(ref window) => {
let event = match window.wait_events().next() {
None => {
warn!("Window event stream closed.");
return true;
},
Some(event) => event,
};
let mut close = self.handle_window_event(event);
if !close {
while let Some(event) = window.poll_events().next() {
if self.handle_window_event(event) {
close = true;
break
}
}
}
close
}
WindowKind::Headless(..) => {
false
}
}
}
#[cfg(any(target_os = "linux", target_os = "android"))]
fn handle_next_event(&self) -> bool {
match self.kind {
WindowKind::Window(ref window) => {
let event = match window.wait_events().next() {
None => {
warn!("Window event stream closed.");
return true;
},
Some(event) => event,
};
let mut close = self.handle_window_event(event);
if !close {
while let Some(event) = window.poll_events().next() {
if self.handle_window_event(event) {
close = true;
break
}
}
}
close
}
WindowKind::Headless(..) => {
false
}
}
}
pub fn wait_events(&self) -> Vec<WindowEvent> {
use std::mem;
let mut events = mem::replace(&mut *self.event_queue.borrow_mut(), Vec::new());
let mut close_event = false;
let poll = self.animation_state.get() == AnimationState::Animating ||
opts::get().output_file.is_some() ||
opts::get().exit_after_load ||
opts::get().headless;
// When writing to a file then exiting, use event
// polling so that we don't block on a GUI event
// such as mouse click.
if poll {
match self.kind {
WindowKind::Window(ref window) => {
while let Some(event) = window.poll_events().next() {
close_event = self.handle_window_event(event) || close_event;
}
}
WindowKind::Headless(..) => {}
}
} else {
close_event = self.handle_next_event();
}
if close_event {
events.push(WindowEvent::Quit)
}
events.extend(mem::replace(&mut *self.event_queue.borrow_mut(), Vec::new()).into_iter());
events
}
pub unsafe fn set_nested_event_loop_listener(
&self,
listener: *mut (NestedEventLoopListener + 'static)) {
G_NESTED_EVENT_LOOP_LISTENER = Some(listener)
}
pub unsafe fn remove_nested_event_loop_listener(&self) {
G_NESTED_EVENT_LOOP_LISTENER = None
}
#[cfg(target_os = "windows")]
fn char_to_script_key(c: char) -> Option<constellation_msg::Key> {
match c {
' ' => Some(Key::Space),
'"' => Some(Key::Apostrophe),
'\'' => Some(Key::Apostrophe),
'<' => Some(Key::Comma),
',' => Some(Key::Comma),
'_' => Some(Key::Minus),
'-' => Some(Key::Minus),
'>' => Some(Key::Period),
'.' => Some(Key::Period),
'?' => Some(Key::Slash),
'/' => Some(Key::Slash),
'~' => Some(Key::GraveAccent),
'`' => Some(Key::GraveAccent),
')' => Some(Key::Num0),
'0' => Some(Key::Num0),
'!' => Some(Key::Num1),
'1' => Some(Key::Num1),
'@' => Some(Key::Num2),
'2' => Some(Key::Num2),
'#' => Some(Key::Num3),
'3' => Some(Key::Num3),
'$' => Some(Key::Num4),
'4' => Some(Key::Num4),
'%' => Some(Key::Num5),
'5' => Some(Key::Num5),
'^' => Some(Key::Num6),
'6' => Some(Key::Num6),
'&' => Some(Key::Num7),
'7' => Some(Key::Num7),
'*' => Some(Key::Num8),
'8' => Some(Key::Num8),
'(' => Some(Key::Num9),
'9' => Some(Key::Num9),
':' => Some(Key::Semicolon),
';' => Some(Key::Semicolon),
'+' => Some(Key::Equal),
'=' => Some(Key::Equal),
'A' => Some(Key::A),
'a' => Some(Key::A),
'B' => Some(Key::B),
'b' => Some(Key::B),
'C' => Some(Key::C),
'c' => Some(Key::C),
'D' => Some(Key::D),
'd' => Some(Key::D),
'E' => Some(Key::E),
'e' => Some(Key::E),
'F' => Some(Key::F),
'f' => Some(Key::F),
'G' => Some(Key::G),
'g' => Some(Key::G),
'H' => Some(Key::H),
'h' => Some(Key::H),
'I' => Some(Key::I),
'i' => Some(Key::I),
'J' => Some(Key::J),
'j' => Some(Key::J),
'K' => Some(Key::K),
'k' => Some(Key::K),
'L' => Some(Key::L),
'l' => Some(Key::L),
'M' => Some(Key::M),
'm' => Some(Key::M),
'N' => Some(Key::N),
'n' => Some(Key::N),
'O' => Some(Key::O),
'o' => Some(Key::O),
'P' => Some(Key::P),
'p' => Some(Key::P),
'Q' => Some(Key::Q),
'q' => Some(Key::Q),
'R' => Some(Key::R),
'r' => Some(Key::R),
'S' => Some(Key::S),
's' => Some(Key::S),
'T' => Some(Key::T),
't' => Some(Key::T),
'U' => Some(Key::U),
'u' => Some(Key::U),
'V' => Some(Key::V),
'v' => Some(Key::V),
'W' => Some(Key::W),
'w' => Some(Key::W),
'X' => Some(Key::X),
'x' => Some(Key::X),
'Y' => Some(Key::Y),
'y' => Some(Key::Y),
'Z' => Some(Key::Z),
'z' => Some(Key::Z),
'{' => Some(Key::LeftBracket),
'[' => Some(Key::LeftBracket),
'|' => Some(Key::Backslash),
'\\' => Some(Key::Backslash),
'}' => Some(Key::RightBracket),
']' => Some(Key::RightBracket),
_ => None
}
}
fn glutin_key_to_script_key(key: glutin::VirtualKeyCode) -> Result<constellation_msg::Key, ()> {
// TODO(negge): add more key mappings
match key {
VirtualKeyCode::A => Ok(Key::A),
VirtualKeyCode::B => Ok(Key::B),
VirtualKeyCode::C => Ok(Key::C),
VirtualKeyCode::D => Ok(Key::D),
VirtualKeyCode::E => Ok(Key::E),
VirtualKeyCode::F => Ok(Key::F),
VirtualKeyCode::G => Ok(Key::G),
VirtualKeyCode::H => Ok(Key::H),
VirtualKeyCode::I => Ok(Key::I),
VirtualKeyCode::J => Ok(Key::J),
VirtualKeyCode::K => Ok(Key::K),
VirtualKeyCode::L => Ok(Key::L),
VirtualKeyCode::M => Ok(Key::M),
VirtualKeyCode::N => Ok(Key::N),
VirtualKeyCode::O => Ok(Key::O),
VirtualKeyCode::P => Ok(Key::P),
VirtualKeyCode::Q => Ok(Key::Q),
VirtualKeyCode::R => Ok(Key::R),
VirtualKeyCode::S => Ok(Key::S),
VirtualKeyCode::T => Ok(Key::T),
VirtualKeyCode::U => Ok(Key::U),
VirtualKeyCode::V => Ok(Key::V),
VirtualKeyCode::W => Ok(Key::W),
VirtualKeyCode::X => Ok(Key::X),
VirtualKeyCode::Y => Ok(Key::Y),
VirtualKeyCode::Z => Ok(Key::Z),
VirtualKeyCode::Numpad0 => Ok(Key::Kp0),
VirtualKeyCode::Numpad1 => Ok(Key::Kp1),
VirtualKeyCode::Numpad2 => Ok(Key::Kp2),
VirtualKeyCode::Numpad3 => Ok(Key::Kp3),
VirtualKeyCode::Numpad4 => Ok(Key::Kp4),
VirtualKeyCode::Numpad5 => Ok(Key::Kp5),
VirtualKeyCode::Numpad6 => Ok(Key::Kp6),
VirtualKeyCode::Numpad7 => Ok(Key::Kp7),
VirtualKeyCode::Numpad8 => Ok(Key::Kp8),
VirtualKeyCode::Numpad9 => Ok(Key::Kp9),
VirtualKeyCode::Key0 => Ok(Key::Num0),
VirtualKeyCode::Key1 => Ok(Key::Num1),
VirtualKeyCode::Key2 => Ok(Key::Num2),
VirtualKeyCode::Key3 => Ok(Key::Num3),
VirtualKeyCode::Key4 => Ok(Key::Num4),
VirtualKeyCode::Key5 => Ok(Key::Num5),
VirtualKeyCode::Key6 => Ok(Key::Num6),
VirtualKeyCode::Key7 => Ok(Key::Num7),
VirtualKeyCode::Key8 => Ok(Key::Num8),
VirtualKeyCode::Key9 => Ok(Key::Num9),
VirtualKeyCode::Return => Ok(Key::Enter),
VirtualKeyCode::Space => Ok(Key::Space),
VirtualKeyCode::Escape => Ok(Key::Escape),
VirtualKeyCode::Equals => Ok(Key::Equal),
VirtualKeyCode::Minus => Ok(Key::Minus),
VirtualKeyCode::Back => Ok(Key::Backspace),
VirtualKeyCode::PageDown => Ok(Key::PageDown),
VirtualKeyCode::PageUp => Ok(Key::PageUp),
VirtualKeyCode::Insert => Ok(Key::Insert),
VirtualKeyCode::Home => Ok(Key::Home),
VirtualKeyCode::Delete => Ok(Key::Delete),
VirtualKeyCode::End => Ok(Key::End),
VirtualKeyCode::Left => Ok(Key::Left),
VirtualKeyCode::Up => Ok(Key::Up),
VirtualKeyCode::Right => Ok(Key::Right),
VirtualKeyCode::Down => Ok(Key::Down),
VirtualKeyCode::LShift => Ok(Key::LeftShift),
VirtualKeyCode::LControl => Ok(Key::LeftControl),
VirtualKeyCode::LAlt => Ok(Key::LeftAlt),
VirtualKeyCode::LWin => Ok(Key::LeftSuper),
VirtualKeyCode::RShift => Ok(Key::RightShift),
VirtualKeyCode::RControl => Ok(Key::RightControl),
VirtualKeyCode::RAlt => Ok(Key::RightAlt),
VirtualKeyCode::RWin => Ok(Key::RightSuper),
VirtualKeyCode::Apostrophe => Ok(Key::Apostrophe),
VirtualKeyCode::Backslash => Ok(Key::Backslash),
VirtualKeyCode::Comma => Ok(Key::Comma),
VirtualKeyCode::Grave => Ok(Key::GraveAccent),
VirtualKeyCode::LBracket => Ok(Key::LeftBracket),
VirtualKeyCode::Period => Ok(Key::Period),
VirtualKeyCode::RBracket => Ok(Key::RightBracket),
VirtualKeyCode::Semicolon => Ok(Key::Semicolon),
VirtualKeyCode::Slash => Ok(Key::Slash),
VirtualKeyCode::Tab => Ok(Key::Tab),
VirtualKeyCode::Subtract => Ok(Key::Minus),
VirtualKeyCode::F1 => Ok(Key::F1),
VirtualKeyCode::F2 => Ok(Key::F2),
VirtualKeyCode::F3 => Ok(Key::F3),
VirtualKeyCode::F4 => Ok(Key::F4),
VirtualKeyCode::F5 => Ok(Key::F5),
VirtualKeyCode::F6 => Ok(Key::F6),
VirtualKeyCode::F7 => Ok(Key::F7),
VirtualKeyCode::F8 => Ok(Key::F8),
VirtualKeyCode::F9 => Ok(Key::F9),
VirtualKeyCode::F10 => Ok(Key::F10),
VirtualKeyCode::F11 => Ok(Key::F11),
VirtualKeyCode::F12 => Ok(Key::F12),
VirtualKeyCode::NavigateBackward => Ok(Key::NavigateBackward),
VirtualKeyCode::NavigateForward => Ok(Key::NavigateForward),
_ => Err(()),
}
}
fn glutin_mods_to_script_mods(modifiers: KeyModifiers) -> constellation_msg::KeyModifiers {
let mut result = constellation_msg::KeyModifiers::empty();
if modifiers.intersects(LEFT_SHIFT | RIGHT_SHIFT) {
result.insert(SHIFT);
}
if modifiers.intersects(LEFT_CONTROL | RIGHT_CONTROL) {
result.insert(CONTROL);
}
if modifiers.intersects(LEFT_ALT | RIGHT_ALT) {
result.insert(ALT);
}
if modifiers.intersects(LEFT_SUPER | RIGHT_SUPER) {
result.insert(SUPER);
}
result
}
#[cfg(not(target_os = "windows"))]
fn platform_handle_key(&self, key: Key, mods: constellation_msg::KeyModifiers) {
match (mods, key) {
(CMD_OR_CONTROL, Key::LeftBracket) => {
self.event_queue.borrow_mut().push(WindowEvent::Navigation(TraversalDirection::Back(1)));
}
(CMD_OR_CONTROL, Key::RightBracket) => {
self.event_queue.borrow_mut().push(WindowEvent::Navigation(TraversalDirection::Forward(1)));
}
_ => {}
}
}
#[cfg(target_os = "windows")]
fn platform_handle_key(&self, key: Key, mods: constellation_msg::KeyModifiers) {
}
}
fn create_window_proxy(window: &Window) -> Option<glutin::WindowProxy> {
match window.kind {
WindowKind::Window(ref window) => {
Some(window.create_window_proxy())
}
WindowKind::Headless(..) => {
None
}
}
}
impl WindowMethods for Window {
fn gl(&self) -> Rc<gl::Gl> {
self.gl.clone()
}
fn framebuffer_size(&self) -> DeviceUintSize {
match self.kind {
WindowKind::Window(ref window) => {
let scale_factor = window.hidpi_factor() as u32;
// TODO(ajeffrey): can this fail?
let (width, height) = window.get_inner_size().expect("Failed to get window inner size.");
DeviceUintSize::new(width, height) * scale_factor
}
WindowKind::Headless(ref context) => {
DeviceUintSize::new(context.width, context.height)
}
}
}
fn window_rect(&self) -> DeviceUintRect {
let size = self.framebuffer_size();
let origin = TypedPoint2D::zero();
DeviceUintRect::new(origin, size)
}
fn size(&self) -> TypedSize2D<f32, DeviceIndependentPixel> {
match self.kind {
WindowKind::Window(ref window) => {
// TODO(ajeffrey): can this fail?
let (width, height) = window.get_inner_size().expect("Failed to get window inner size.");
TypedSize2D::new(width as f32, height as f32)
}
WindowKind::Headless(ref context) => {
TypedSize2D::new(context.width as f32, context.height as f32)
}
}
}
fn client_window(&self) -> (Size2D<u32>, Point2D<i32>) {
match self.kind {
WindowKind::Window(ref window) => {
// TODO(ajeffrey): can this fail?
let (width, height) = window.get_outer_size().expect("Failed to get window outer size.");
let size = Size2D::new(width, height);
// TODO(ajeffrey): can this fail?
let (x, y) = window.get_position().expect("Failed to get window position.");
let origin = Point2D::new(x as i32, y as i32);
(size, origin)
}
WindowKind::Headless(ref context) => {
let size = TypedSize2D::new(context.width, context.height);
(size, Point2D::zero())
}
}
}
fn set_animation_state(&self, state: AnimationState) {
self.animation_state.set(state);
}
fn set_inner_size(&self, size: Size2D<u32>) {
match self.kind {
WindowKind::Window(ref window) => {
window.set_inner_size(size.width as u32, size.height as u32)
}
WindowKind::Headless(..) => {}
}
}
fn set_position(&self, point: Point2D<i32>) {
match self.kind {
WindowKind::Window(ref window) => {
window.set_position(point.x, point.y)
}
WindowKind::Headless(..) => {}
}<|fim▁hole|> fn set_fullscreen_state(&self, _state: bool) {
match self.kind {
WindowKind::Window(..) => {
warn!("Fullscreen is not implemented!")
},
WindowKind::Headless(..) => {}
}
}
fn present(&self) {
match self.kind {
WindowKind::Window(ref window) => {
if let Err(err) = window.swap_buffers() {
warn!("Failed to swap window buffers ({}).", err);
}
}
WindowKind::Headless(..) => {}
}
}
fn create_event_loop_waker(&self) -> Box<EventLoopWaker> {
struct GlutinEventLoopWaker {
window_proxy: Option<glutin::WindowProxy>,
}
impl EventLoopWaker for GlutinEventLoopWaker {
fn wake(&self) {
// kick the OS event loop awake.
if let Some(ref window_proxy) = self.window_proxy {
window_proxy.wakeup_event_loop()
}
}
fn clone(&self) -> Box<EventLoopWaker + Send> {
box GlutinEventLoopWaker {
window_proxy: self.window_proxy.clone(),
}
}
}
let window_proxy = create_window_proxy(self);
box GlutinEventLoopWaker {
window_proxy: window_proxy,
}
}
#[cfg(not(target_os = "windows"))]
fn hidpi_factor(&self) -> ScaleFactor<f32, DeviceIndependentPixel, DevicePixel> {
match self.kind {
WindowKind::Window(ref window) => {
ScaleFactor::new(window.hidpi_factor())
}
WindowKind::Headless(..) => {
ScaleFactor::new(1.0)
}
}
}
#[cfg(target_os = "windows")]
fn hidpi_factor(&self) -> ScaleFactor<f32, DeviceIndependentPixel, DevicePixel> {
let hdc = unsafe { user32::GetDC(::std::ptr::null_mut()) };
let ppi = unsafe { gdi32::GetDeviceCaps(hdc, winapi::wingdi::LOGPIXELSY) };
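        // 96 DPI is the Windows baseline, so ppi / 96.0 yields the HiDPI scale factor.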
ScaleFactor::new(ppi as f32 / 96.0)
}
fn set_page_title(&self, title: Option<String>) {
match self.kind {
WindowKind::Window(ref window) => {
let fallback_title: String = if let Some(ref current_url) = *self.current_url.borrow() {
current_url.to_string()
} else {
String::from("Untitled")
};
let title = match title {
Some(ref title) if title.len() > 0 => &**title,
_ => &fallback_title,
};
let title = format!("{} - Servo", title);
window.set_title(&title);
}
WindowKind::Headless(..) => {}
}
}
fn status(&self, _: Option<String>) {
}
fn load_start(&self) {
}
fn load_end(&self) {
if opts::get().no_native_titlebar {
match self.kind {
WindowKind::Window(ref window) => {
window.show();
}
WindowKind::Headless(..) => {}
}
}
}
fn history_changed(&self, history: Vec<LoadData>, current: usize) {
*self.current_url.borrow_mut() = Some(history[current].url.clone());
}
fn load_error(&self, _: NetError, _: String) {
}
fn head_parsed(&self) {
}
/// Has no effect on Android.
fn set_cursor(&self, c: Cursor) {
match self.kind {
WindowKind::Window(ref window) => {
use glutin::MouseCursor;
let glutin_cursor = match c {
Cursor::None => MouseCursor::NoneCursor,
Cursor::Default => MouseCursor::Default,
Cursor::Pointer => MouseCursor::Hand,
Cursor::ContextMenu => MouseCursor::ContextMenu,
Cursor::Help => MouseCursor::Help,
Cursor::Progress => MouseCursor::Progress,
Cursor::Wait => MouseCursor::Wait,
Cursor::Cell => MouseCursor::Cell,
Cursor::Crosshair => MouseCursor::Crosshair,
Cursor::Text => MouseCursor::Text,
Cursor::VerticalText => MouseCursor::VerticalText,
Cursor::Alias => MouseCursor::Alias,
Cursor::Copy => MouseCursor::Copy,
Cursor::Move => MouseCursor::Move,
Cursor::NoDrop => MouseCursor::NoDrop,
Cursor::NotAllowed => MouseCursor::NotAllowed,
Cursor::Grab => MouseCursor::Grab,
Cursor::Grabbing => MouseCursor::Grabbing,
Cursor::EResize => MouseCursor::EResize,
Cursor::NResize => MouseCursor::NResize,
Cursor::NeResize => MouseCursor::NeResize,
Cursor::NwResize => MouseCursor::NwResize,
Cursor::SResize => MouseCursor::SResize,
Cursor::SeResize => MouseCursor::SeResize,
Cursor::SwResize => MouseCursor::SwResize,
Cursor::WResize => MouseCursor::WResize,
Cursor::EwResize => MouseCursor::EwResize,
Cursor::NsResize => MouseCursor::NsResize,
Cursor::NeswResize => MouseCursor::NeswResize,
Cursor::NwseResize => MouseCursor::NwseResize,
Cursor::ColResize => MouseCursor::ColResize,
Cursor::RowResize => MouseCursor::RowResize,
Cursor::AllScroll => MouseCursor::AllScroll,
Cursor::ZoomIn => MouseCursor::ZoomIn,
Cursor::ZoomOut => MouseCursor::ZoomOut,
};
window.set_cursor(glutin_cursor);
}
WindowKind::Headless(..) => {}
}
}
fn set_favicon(&self, _: ServoUrl) {
}
fn prepare_for_composite(&self, _width: usize, _height: usize) -> bool {
true
}
/// Helper function to handle keyboard events.
fn handle_key(&self, ch: Option<char>, key: Key, mods: constellation_msg::KeyModifiers) {
match (mods, ch, key) {
(_, Some('+'), _) => {
if mods & !SHIFT == CMD_OR_CONTROL {
self.event_queue.borrow_mut().push(WindowEvent::Zoom(1.1));
} else if mods & !SHIFT == CMD_OR_CONTROL | ALT {
self.event_queue.borrow_mut().push(WindowEvent::PinchZoom(1.1));
}
}
(CMD_OR_CONTROL, Some('-'), _) => {
self.event_queue.borrow_mut().push(WindowEvent::Zoom(1.0 / 1.1));
}
(_, Some('-'), _) if mods == CMD_OR_CONTROL | ALT => {
self.event_queue.borrow_mut().push(WindowEvent::PinchZoom(1.0 / 1.1));
}
(CMD_OR_CONTROL, Some('0'), _) => {
self.event_queue.borrow_mut().push(WindowEvent::ResetZoom);
}
(NONE, None, Key::NavigateForward) => {
self.event_queue.borrow_mut().push(WindowEvent::Navigation(TraversalDirection::Forward(1)));
}
(NONE, None, Key::NavigateBackward) => {
self.event_queue.borrow_mut().push(WindowEvent::Navigation(TraversalDirection::Back(1)));
}
(NONE, None, Key::Escape) => {
if let Some(true) = PREFS.get("shell.builtin-key-shortcuts.enabled").as_boolean() {
self.event_queue.borrow_mut().push(WindowEvent::Quit);
}
}
(CMD_OR_ALT, None, Key::Right) => {
self.event_queue.borrow_mut().push(WindowEvent::Navigation(TraversalDirection::Forward(1)));
}
(CMD_OR_ALT, None, Key::Left) => {
self.event_queue.borrow_mut().push(WindowEvent::Navigation(TraversalDirection::Back(1)));
}
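            // Page keys scroll by one viewport height minus two text lines, so a little
            // context stays visible across the jump (assumed intent of the offsets below).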
(NONE, None, Key::PageDown) => {
let scroll_location = ScrollLocation::Delta(TypedVector2D::new(0.0,
-self.framebuffer_size()
.to_f32()
.to_untyped()
.height + 2.0 * LINE_HEIGHT));
self.scroll_window(scroll_location,
TouchEventType::Move);
}
(NONE, None, Key::PageUp) => {
let scroll_location = ScrollLocation::Delta(TypedVector2D::new(0.0,
self.framebuffer_size()
.to_f32()
.to_untyped()
.height - 2.0 * LINE_HEIGHT));
self.scroll_window(scroll_location,
TouchEventType::Move);
}
(NONE, None, Key::Home) => {
self.scroll_window(ScrollLocation::Start, TouchEventType::Move);
}
(NONE, None, Key::End) => {
self.scroll_window(ScrollLocation::End, TouchEventType::Move);
}
(NONE, None, Key::Up) => {
self.scroll_window(ScrollLocation::Delta(TypedVector2D::new(0.0, 3.0 * LINE_HEIGHT)),
TouchEventType::Move);
}
(NONE, None, Key::Down) => {
self.scroll_window(ScrollLocation::Delta(TypedVector2D::new(0.0, -3.0 * LINE_HEIGHT)),
TouchEventType::Move);
}
(NONE, None, Key::Left) => {
self.scroll_window(ScrollLocation::Delta(TypedVector2D::new(LINE_HEIGHT, 0.0)), TouchEventType::Move);
}
(NONE, None, Key::Right) => {
self.scroll_window(ScrollLocation::Delta(TypedVector2D::new(-LINE_HEIGHT, 0.0)), TouchEventType::Move);
}
(CMD_OR_CONTROL, Some('r'), _) => {
if let Some(true) = PREFS.get("shell.builtin-key-shortcuts.enabled").as_boolean() {
self.event_queue.borrow_mut().push(WindowEvent::Reload);
}
}
(CMD_OR_CONTROL, Some('q'), _) => {
if let Some(true) = PREFS.get("shell.builtin-key-shortcuts.enabled").as_boolean() {
self.event_queue.borrow_mut().push(WindowEvent::Quit);
}
}
(CONTROL, None, Key::F12) => {
self.event_queue.borrow_mut().push(WindowEvent::ToggleWebRenderProfiler);
}
_ => {
self.platform_handle_key(key, mods);
}
}
}
fn allow_navigation(&self, _: ServoUrl, response_chan: IpcSender<bool>) {
if let Err(e) = response_chan.send(true) {
warn!("Failed to send allow_navigation() response: {}", e);
};
}
fn supports_clipboard(&self) -> bool {
true
}
}
fn glutin_phase_to_touch_event_type(phase: TouchPhase) -> TouchEventType {
match phase {
TouchPhase::Started => TouchEventType::Down,
TouchPhase::Moved => TouchEventType::Move,
TouchPhase::Ended => TouchEventType::Up,
TouchPhase::Cancelled => TouchEventType::Cancel,
}
}
fn glutin_pressure_stage_to_touchpad_pressure_phase(stage: i64) -> TouchpadPressurePhase {
if stage < 1 {
TouchpadPressurePhase::BeforeClick
} else if stage < 2 {
TouchpadPressurePhase::AfterFirstClick
} else {
TouchpadPressurePhase::AfterSecondClick
}
}
fn is_printable(key_code: VirtualKeyCode) -> bool {
use glutin::VirtualKeyCode::*;
match key_code {
Escape |
F1 |
F2 |
F3 |
F4 |
F5 |
F6 |
F7 |
F8 |
F9 |
F10 |
F11 |
F12 |
F13 |
F14 |
F15 |
Snapshot |
Scroll |
Pause |
Insert |
Home |
Delete |
End |
PageDown |
PageUp |
Left |
Up |
Right |
Down |
Back |
LAlt |
LControl |
LMenu |
LShift |
LWin |
Mail |
MediaSelect |
MediaStop |
Mute |
MyComputer |
NavigateForward |
NavigateBackward |
NextTrack |
NoConvert |
PlayPause |
Power |
PrevTrack |
RAlt |
RControl |
RMenu |
RShift |
RWin |
Sleep |
Stop |
VolumeDown |
VolumeUp |
Wake |
WebBack |
WebFavorites |
WebForward |
WebHome |
WebRefresh |
WebSearch |
WebStop => false,
_ => true,
}
}
#[cfg(not(target_os = "windows"))]
fn filter_nonprintable(ch: char, key_code: VirtualKeyCode) -> Option<char> {
if is_printable(key_code) {
Some(ch)
} else {
None
}
}
// These functions aren't actually called. They are here as a link
// hack because Skia references them.
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn glBindVertexArrayOES(_array: usize)
{
unimplemented!()
}
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn glDeleteVertexArraysOES(_n: isize, _arrays: *const ())
{
unimplemented!()
}
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn glGenVertexArraysOES(_n: isize, _arrays: *const ())
{
unimplemented!()
}
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn glRenderbufferStorageMultisampleIMG(_: isize, _: isize, _: isize, _: isize, _: isize)
{
unimplemented!()
}
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn glFramebufferTexture2DMultisampleIMG(_: isize, _: isize, _: isize, _: isize, _: isize, _: isize)
{
unimplemented!()
}
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn glDiscardFramebufferEXT(_: isize, _: isize, _: *const ())
{
unimplemented!()
}<|fim▁end|>
|
}
|
<|file_name|>easy-249.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
'''
given a list of stock price ticks for the day, can you tell me what
trades I should make to maximize my gain within the constraints of the
market? Remember - buy low, sell high, and you can't sell before you
buy.
Sample Input
19.35 19.30 18.88 18.93 18.95 19.03 19.00 18.97 18.97 18.98
'''
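# For reference, the brute-force search below reports the following for the sample input:
#   max profit: 0.15 from buy on day 2 at 18.88 sell on day 5 at 19.03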
import argparse
def parse_args():
parser = argparse.ArgumentParser(description='easy 249')
parser.add_argument('stock_prices', action='store', nargs='+',<|fim▁hole|>def stock(stock_prices):
buy_day = 0
max_profit = 0
max_buy = 0
max_sell = 0
for buy_day in range(len(stock_prices) - 2):
# maybe do a max(here)
for sell_day in range(buy_day + 2, len(stock_prices)):
profit = stock_prices[sell_day] - stock_prices[buy_day]
if profit > max_profit:
max_profit = profit
max_buy = buy_day
max_sell = sell_day
print("max profit: %.2f from buy on day %d at %.2f sell on day %d at %.2f" %
(max_profit, max_buy, stock_prices[max_buy], max_sell, stock_prices[max_sell]))
if __name__ == '__main__':
args = parse_args()
stock([float(price) for price in args.stock_prices])<|fim▁end|>
|
help='prices of a given stock')
return parser.parse_args()
|
<|file_name|>Level.js<|end_file_name|><|fim▁begin|>/*
* Copyright © 2012 Pedro Agullo Soliveres.
*
* This file is part of Log4js-ext.
*
* Log4js-ext is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* Commercial use is permitted to the extent that the code/component(s)
* do NOT become part of another Open Source or Commercially developed
* licensed development library or toolkit without explicit permission.
*
* Log4js-ext is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Log4js-ext. If not, see <http://www.gnu.org/licenses/>.
*
* This software uses the ExtJs library (http://extjs.com), which is
* distributed under the GPL v3 license (see http://extjs.com/license).
*/
(function() {
"use strict"; //$NON-NLS-1$
/**
* The level is the importance or priority of a certain logging operation.
*
* Predefined levels are FATAL, ERROR, WARN, INFO, DEBUG, and TRACE.
*
* NONE and ALL are useful to control logging, but they can't be used
* in a logging operation, as they make no sense in that context.
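     *
     * A usage sketch (illustrative only, using the API defined in this file):
     *
     *     var level = Sm.log.Level.getLevel('debug');
     *     level.le(Sm.log.Level.INFO); // true: DEBUG (200) <= INFO (300)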
*/
Ext.define('Sm.log.Level', { //$NON-NLS-1$
uses: ['Sm.log.util.Debug',
'Sm.log.util.Assert'],
statics : {
/**
* Initializes logging levels.
*
* @private
*
* @returns {void}
*
*/
initStatics : function() {
this.NONE = Ext.create( 'Sm.log.Level',
{name:'NONE', level: Math.pow(2,31)-1} );
this.FATAL = Ext.create( 'Sm.log.Level',
{name:'FATAL', level:600});
this.ERROR = Ext.create( 'Sm.log.Level',
{name:'ERROR', level:500});
this.WARN = Ext.create( 'Sm.log.Level',
{name:'WARN', level:400});
this.INFO = Ext.create( 'Sm.log.Level',
{name:'INFO', level:300});
this.DEBUG = Ext.create( 'Sm.log.Level',
{name:'DEBUG', level:200});
this.TRACE = Ext.create( 'Sm.log.Level',
{name:'TRACE', level:100});
this.ALL = Ext.create( 'Sm.log.Level',
{name:'ALL', level:0});
},
/**
* Returns a level, given its name.
*
* This can be very useful to get a level given a user-specified
* text via a combo, etc.
*
* @param {String} levelName The level name.
*
* @returns {Sm.log.Level} The level with the specified name.
*/
getLevel : function( levelName ) {
switch(levelName.toUpperCase()) {
case this.ALL.getName() : return this.ALL;
case this.NONE.getName() : return this.NONE;
case this.FATAL.getName() : return this.FATAL;
case this.ERROR.getName() : return this.ERROR;
case this.WARN.getName() : return this.WARN;
case this.INFO.getName() : return this.INFO;
case this.DEBUG.getName() : return this.DEBUG;
case this.TRACE.getName() : return this.TRACE;
default:
return null;
}
},
/**
* Returns a level's level, given the level name.
*
* @param {String} levelName The level name.
*
* @returns {Number}
*/
getLevelLevel :function( levelName ) {
switch(levelName.toUpperCase()) {
case this.ALL.getName() : return this.ALL.getLevel();
case this.NONE.getName() : return this.NONE.getLevel();
case this.FATAL.getName() : return this.FATAL.getLevel();
case this.ERROR.getName() : return this.ERROR.getLevel();
case this.WARN.getName() : return this.WARN.getLevel();
case this.INFO.getName() : return this.INFO.getLevel();
case this.DEBUG.getName() : return this.DEBUG.getLevel();
case this.TRACE.getName() : return this.TRACE.getLevel();
default:
Sm.log.util.Debug.abort( "This code should never execute");
return;
}
},
/**
* Represents 'no level', useful in some contexts to
* specify that no level should be logged.
*
* Do not use as a log operation level.
*
* @property {Sm.log.Level}
* @readonly
*/
NONE : undefined,
/**
* Represents a fatal error.
*
         * The difference between error and fatal error depends on the
* context, and might or might not exist in some contexts. How to
* interpret that depends on the context, and has to be defined
* by the application
*
* @property {Sm.log.Level}
* @readonly
*/
FATAL : undefined,
/**
* Represents an error.
*
         * The difference between error and fatal error depends on the
* context, and might or might not exist in some contexts. How to
* interpret that depends on the context, and has to be defined
* by the application
*
* @property {Sm.log.Level}
* @readonly
*/
ERROR : undefined,
/**
* Represents a warning.
*
* @property {Sm.log.Level}
* @readonly
*/
WARN : undefined,
/**
* Represents an informative log.
*
* @property {Sm.log.Level}
* @readonly
*/
INFO : undefined,
/**
* Represents a debug log.
*
* We will probably be interested in debug logs only while debugging.
*
* @property {Sm.log.Level}
* @readonly
*/
DEBUG : undefined,
/**
* Represents a low level debug log.
*
* We will probably be interested in trace logs only while heavily
* debugging.
*
* @property {Sm.log.Level}
* @readonly
*/
TRACE : undefined,
/**
* Represents 'all level', useful in some contexts to
         * specify that all levels should be logged.
*
* Do not use as a log operation level.
*
* @property {Sm.log.Level}
* @readonly
*/
ALL : undefined
},
config : {
/**
* @accessor
* @cfg [=value provided in constructor] (required)
*
* The level name.
*
* @readonly
*/
name : '',
/**
* @accessor
* @cfg [=value provided in constructor] (required)
*
* The level value.
*
* @readonly
*/
level : 0
},
/**
* Creates a new level.
*
* You should not create your own levels. The library has not been created
* with the idea of allowing user defined levels. Therefore, it might or
* might not work if you do so.
*
* @private
*
* @param cfg
*/
constructor : function (cfg) {
// Avoid this check because Assert might not be loaded, as
// this is called indirectly by initStatics, which is called
// at the end of this file
/*
Sm.log.util.Assert.assert(cfg.name);
Sm.log.util.Assert.assert(cfg.level);
*/
<|fim▁hole|> /**
* Compares two levels, return true if this ones is lesser or equal
* than the one received by the function.
*
* @param {Sm.log.Level} level The level to compare with this level.
* @returns {Boolean}
*/
le : function( level ) {
return this.getLevel() <= level.getLevel();
}
},
// Initialize statics: this function receives the class as 'this'
function () { this.initStatics(); } );
}());<|fim▁end|>
|
this.initConfig(cfg);
},
|
<|file_name|>blob.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::BlobBinding;
use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
use dom::bindings::codegen::UnionTypes::BlobOrString;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use encoding::all::UTF_8;
use encoding::types::{EncoderTrap, Encoding};
use ipc_channel::ipc;
use net_traits::IpcSend;
use net_traits::blob_url_store::{BlobBuf, get_blob_origin};
use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId, RelativePos};
use std::cell::Cell;
use std::ops::Index;
use std::path::PathBuf;
use uuid::Uuid;
/// File-based blob
#[derive(JSTraceable)]
pub struct FileBlob {
id: SelectedFileId,
name: Option<PathBuf>,
cache: DOMRefCell<Option<Vec<u8>>>,
size: u64,
}
/// Blob backend implementation
#[must_root]
#[derive(JSTraceable)]
pub enum BlobImpl {
/// File-based blob
File(FileBlob),
/// Memory-based blob
Memory(Vec<u8>),
/// Sliced blob, including parent blob and
/// relative positions representing current slicing range,
    /// it is a leaf of a two-layer fat tree
Sliced(JS<Blob>, RelativePos),
}
impl BlobImpl {
/// Construct memory-backed BlobImpl
#[allow(unrooted_must_root)]
pub fn new_from_bytes(bytes: Vec<u8>) -> BlobImpl {
BlobImpl::Memory(bytes)
}
/// Construct file-backed BlobImpl from File ID
pub fn new_from_file(file_id: SelectedFileId, name: PathBuf, size: u64) -> BlobImpl {
BlobImpl::File(FileBlob {
id: file_id,
name: Some(name),
cache: DOMRefCell::new(None),
size: size,
})
}
}
// https://w3c.github.io/FileAPI/#blob
#[dom_struct]
pub struct Blob {
reflector_: Reflector,
#[ignore_heap_size_of = "No clear owner"]
blob_impl: DOMRefCell<BlobImpl>,
typeString: String,
isClosed_: Cell<bool>,
}
impl Blob {
#[allow(unrooted_must_root)]
pub fn new(global: GlobalRef, blob_impl: BlobImpl, typeString: String) -> Root<Blob> {
let boxed_blob = box Blob::new_inherited(blob_impl, typeString);
reflect_dom_object(boxed_blob, global, BlobBinding::Wrap)
}
#[allow(unrooted_must_root)]
pub fn new_inherited(blob_impl: BlobImpl, typeString: String) -> Blob {
Blob {
reflector_: Reflector::new(),
blob_impl: DOMRefCell::new(blob_impl),
// NOTE: Guarding the format correctness here,
// https://w3c.github.io/FileAPI/#dfn-type
typeString: normalize_type_string(&typeString),
isClosed_: Cell::new(false),
}
}
#[allow(unrooted_must_root)]
fn new_sliced(parent: &Blob, rel_pos: RelativePos,
relativeContentType: DOMString) -> Root<Blob> {
let global = parent.global();
let blob_impl = match *parent.blob_impl.borrow() {
BlobImpl::File(_) => {
// Create new parent node
BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
}
BlobImpl::Memory(_) => {
// Create new parent node
BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
}
BlobImpl::Sliced(ref grandparent, ref old_rel_pos) => {
// Adjust the slicing position, using same parent
BlobImpl::Sliced(grandparent.clone(), old_rel_pos.slice_inner(&rel_pos))
}
};
Blob::new(global.r(), blob_impl, relativeContentType.into())
}
// https://w3c.github.io/FileAPI/#constructorBlob
pub fn Constructor(global: GlobalRef,
blobParts: Option<Vec<BlobOrString>>,
blobPropertyBag: &BlobBinding::BlobPropertyBag)
-> Fallible<Root<Blob>> {
// TODO: accept other blobParts types - ArrayBuffer or ArrayBufferView
let bytes: Vec<u8> = match blobParts {
None => Vec::new(),
Some(blobparts) => match blob_parts_to_bytes(blobparts) {
Ok(bytes) => bytes,
Err(_) => return Err(Error::InvalidCharacter),
}
};
Ok(Blob::new(global, BlobImpl::new_from_bytes(bytes), blobPropertyBag.type_.to_string()))
}
    /// Get a slice of the inner data; this might incur a synchronous read and caching
pub fn get_bytes(&self) -> Result<Vec<u8>, ()> {
// Sanity check
if self.IsClosed() {
return Err(());
}
match *self.blob_impl.borrow() {
BlobImpl::File(ref f) => {
let (buffer, is_new_buffer) = match *f.cache.borrow() {
Some(ref bytes) => (bytes.clone(), false),
None => {
let global = self.global();
let bytes = read_file(global.r(), f.id.clone())?;
(bytes, true)
}
};
// Cache
if is_new_buffer {
*f.cache.borrow_mut() = Some(buffer.clone());
}
Ok(buffer)
}
BlobImpl::Memory(ref s) => Ok(s.clone()),
BlobImpl::Sliced(ref parent, ref rel_pos) => {
parent.get_bytes().map(|v| {
let range = rel_pos.to_abs_range(v.len());
v.index(range).to_vec()
})
}
}
}
/// Get a FileID representing the Blob content,
/// used by URL.createObjectURL
pub fn get_blob_url_id(&self) -> SelectedFileId {
match *self.blob_impl.borrow() {
BlobImpl::File(ref f) => {
let global = self.global();
let origin = get_blob_origin(&global.r().get_url());
let filemanager = global.r().resource_threads().sender();
let (tx, rx) = ipc::channel().unwrap();
let _ = filemanager.send(FileManagerThreadMsg::ActivateBlobURL(f.id.clone(), tx, origin.clone()));
match rx.recv().unwrap() {
Ok(_) => f.id.clone(),
Err(_) => SelectedFileId(Uuid::new_v4().simple().to_string()) // Return a dummy id on error
}
}
BlobImpl::Memory(ref slice) => {
self.promote(slice, /* set_valid is */ true)
}
BlobImpl::Sliced(ref parent, ref rel_pos) => {
match *parent.blob_impl.borrow() {
BlobImpl::Sliced(_, _) => {
debug!("Sliced can't have a sliced parent");
// Return dummy id
SelectedFileId(Uuid::new_v4().simple().to_string())
}
BlobImpl::File(ref f) =>
self.create_sliced_url_id(&f.id, rel_pos, f.size),
BlobImpl::Memory(ref bytes) => {
let parent_id = parent.promote(bytes, /* set_valid is */ false);
self.create_sliced_url_id(&parent_id, rel_pos, bytes.len() as u64)
}
}
}
}
}
    /// Promote a memory-based Blob to a file-based one.
    /// The bytes in the data slice will be transferred to the file manager thread.
    /// Depending on set_valid, the returned FileID can be part of
    /// a valid or invalid Blob URL.
fn promote(&self, bytes: &[u8], set_valid: bool) -> SelectedFileId {
let global = self.global();
let origin = get_blob_origin(&global.r().get_url());
let filemanager = global.r().resource_threads().sender();
let blob_buf = BlobBuf {
filename: None,
type_string: self.typeString.clone(),
size: bytes.len() as u64,
bytes: bytes.to_vec(),
};
let (tx, rx) = ipc::channel().unwrap();
let _ = filemanager.send(FileManagerThreadMsg::PromoteMemory(blob_buf, set_valid, tx, origin.clone()));
match rx.recv().unwrap() {
Ok(id) => {
let id = SelectedFileId(id.0);
*self.blob_impl.borrow_mut() = BlobImpl::File(FileBlob {
id: id.clone(),
name: None,
cache: DOMRefCell::new(Some(bytes.to_vec())),
size: bytes.len() as u64,
});
id
}
// Dummy id
Err(_) => SelectedFileId(Uuid::new_v4().simple().to_string()),
}
}
/// Get a FileID representing sliced parent-blob content
fn create_sliced_url_id(&self, parent_id: &SelectedFileId,
rel_pos: &RelativePos, parent_len: u64) -> SelectedFileId {
let global = self.global();
let origin = get_blob_origin(&global.r().get_url());
let filemanager = global.r().resource_threads().sender();
let (tx, rx) = ipc::channel().unwrap();
let msg = FileManagerThreadMsg::AddSlicedURLEntry(parent_id.clone(),
rel_pos.clone(),
tx, origin.clone());
let _ = filemanager.send(msg);
let new_id = rx.recv().unwrap().unwrap(); // XX: unwrap
let new_id = SelectedFileId(new_id.0);
*self.blob_impl.borrow_mut() = BlobImpl::File(FileBlob {
id: new_id.clone(),
name: None,
cache: DOMRefCell::new(None),
size: rel_pos.to_abs_range(parent_len as usize).len() as u64,
});
// Return the indirect id reference
new_id
}
/// Cleanups at the time of destruction/closing
fn clean_up_file_resource(&self) {
if let BlobImpl::File(ref f) = *self.blob_impl.borrow() {
let global = self.global();
let origin = get_blob_origin(&global.r().get_url());
let filemanager = global.r().resource_threads().sender();
let (tx, rx) = ipc::channel().unwrap();
let msg = FileManagerThreadMsg::DecRef(f.id.clone(), origin, tx);
let _ = filemanager.send(msg);
let _ = rx.recv().unwrap();
}
}
}
impl Drop for Blob {
fn drop(&mut self) {
if !self.IsClosed() {
self.clean_up_file_resource();
}
}
}
fn read_file(global: GlobalRef, id: SelectedFileId) -> Result<Vec<u8>, ()> {
let file_manager = global.filemanager_thread();
let (chan, recv) = ipc::channel().map_err(|_|())?;
let origin = get_blob_origin(&global.get_url());
let check_url_validity = false;<|fim▁hole|>
match recv.recv().unwrap() {
Ok(blob_buf) => Ok(blob_buf.bytes),
Err(_) => Err(()),
}
}
/// Extract bytes from BlobParts, used by Blob and File constructor
/// https://w3c.github.io/FileAPI/#constructorBlob
pub fn blob_parts_to_bytes(blobparts: Vec<BlobOrString>) -> Result<Vec<u8>, ()> {
let mut ret = vec![];
for blobpart in &blobparts {
match blobpart {
&BlobOrString::String(ref s) => {
let mut bytes = UTF_8.encode(s, EncoderTrap::Replace).map_err(|_|())?;
ret.append(&mut bytes);
},
&BlobOrString::Blob(ref b) => {
let mut bytes = b.get_bytes().unwrap_or(vec![]);
ret.append(&mut bytes);
},
}
}
Ok(ret)
}
impl BlobMethods for Blob {
// https://w3c.github.io/FileAPI/#dfn-size
fn Size(&self) -> u64 {
// XXX: This will incur reading if file-based
match self.get_bytes() {
Ok(s) => s.len() as u64,
_ => 0,
}
}
// https://w3c.github.io/FileAPI/#dfn-type
fn Type(&self) -> DOMString {
DOMString::from(self.typeString.clone())
}
// https://w3c.github.io/FileAPI/#slice-method-algo
fn Slice(&self,
start: Option<i64>,
end: Option<i64>,
contentType: Option<DOMString>)
-> Root<Blob> {
let rel_pos = RelativePos::from_opts(start, end);
Blob::new_sliced(self, rel_pos, contentType.unwrap_or(DOMString::from("")))
}
// https://w3c.github.io/FileAPI/#dfn-isClosed
fn IsClosed(&self) -> bool {
self.isClosed_.get()
}
// https://w3c.github.io/FileAPI/#dfn-close
fn Close(&self) {
// Step 1
if self.isClosed_.get() {
return;
}
// Step 2
self.isClosed_.set(true);
// Step 3
self.clean_up_file_resource();
}
}
/// Get the normalized, MIME-parsable type string
/// https://w3c.github.io/FileAPI/#dfn-type
/// XXX: We will relax the restriction here,
/// since the spec has some problem over this part.
/// see https://github.com/w3c/FileAPI/issues/43
fn normalize_type_string(s: &str) -> String {
if is_ascii_printable(s) {
let s_lower = s.to_lowercase();
// match s_lower.parse() as Result<Mime, ()> {
// Ok(_) => s_lower,
// Err(_) => "".to_string()
s_lower
} else {
"".to_string()
}
}
fn is_ascii_printable(string: &str) -> bool {
// Step 5.1 in Sec 5.1 of File API spec
// https://w3c.github.io/FileAPI/#constructorBlob
string.chars().all(|c| c >= '\x20' && c <= '\x7E')
}<|fim▁end|>
|
let msg = FileManagerThreadMsg::ReadFile(chan, id, check_url_validity, origin);
let _ = file_manager.send(msg);
|
<|file_name|>string_type.cpp<|end_file_name|><|fim▁begin|>// -*- Mode: C++; c-basic-offset: 4; tab-width: 4; indent-tabs-mode: nil; -*-
#define YBUTIL_SOURCE
#include "util/string_type.h"
#if defined(YB_USE_WX)
#include <wx/strconv.h>
#elif defined(YB_USE_QT)
#include <QTextCodec>
#endif
#include <cstring>
#include <locale>
#include <stdexcept>
namespace Yb {
static size_t do_fast_narrow(const std::wstring &wide, std::string &narrow)
{
const wchar_t *src = wide.c_str();
const wchar_t *src0 = src;
char *dst = &narrow[0];
do {
wchar_t c = *src;
if (!c || (c & ~(wchar_t)0x7f))
break;
*dst = (char)c;
++dst;
++src;
} while (1);
size_t processed = src - src0;
if (processed == wide.size())
narrow.resize(dst - &narrow[0]);
return processed;
}
static size_t do_fast_widen(const std::string &narrow, std::wstring &wide)
{
const char *src = narrow.c_str();
const char *src0 = src;
wchar_t *dst = &wide[0];
do {
char c = *src;
if (!c || (c & ~(char)0x7f))
break;
*dst = (wchar_t)c;
++dst;
++src;
} while (1);
size_t processed = src - src0;
if (processed == narrow.size())
wide.resize(dst - &wide[0]);
return processed;
}
YBUTIL_DECL const std::string fast_narrow(const std::wstring &wide)
{
if (wide.empty())
return std::string();
std::string narrow(4 * wide.size(), '\0'); // max character length in UTF-8
size_t processed = do_fast_narrow(wide, narrow);
if (processed == wide.size())
return narrow;
throw std::runtime_error("non ascii detected, fast_narrow failed");
}
YBUTIL_DECL const std::wstring fast_widen(const std::string &narrow)
{
if (narrow.empty())
return std::wstring();
std::wstring wide(narrow.size(), L'\0');
size_t processed = do_fast_widen(narrow, wide);
if (processed == narrow.size())
return wide;
throw std::runtime_error("non ascii detected, fast_widen failed");
}
static const std::string do_narrow(
const std::wstring &wide, const std::locale &loc)
{
if (wide.empty())
return std::string();
std::string narrow(4*wide.size(), '\0'); // max character length in UTF-8
size_t processed = do_fast_narrow(wide, narrow);
if (processed == wide.size())
return narrow;
typedef std::wstring::traits_type::state_type state_type;
typedef std::codecvt<wchar_t, char, state_type> CVT;
const CVT& cvt = std::use_facet<CVT>(loc);
//std::string narrow(cvt.max_length()*wide.size(), '\0');
state_type state = state_type();
const wchar_t* from_beg = &wide[0];
const wchar_t* from_end = from_beg + wide.size();
const wchar_t* from_nxt;
char* to_beg = &narrow[0];
char* to_end = to_beg + narrow.size();
char* to_nxt;
std::string::size_type sz = 0;
std::codecvt_base::result r;
do {
r = cvt.out(state, from_beg, from_end, from_nxt,
to_beg, to_end, to_nxt);
switch (r)
{
case std::codecvt_base::error:
throw std::runtime_error("error converting wstring to string");
case std::codecvt_base::partial:
sz += to_nxt - to_beg;
narrow.resize(2*narrow.size());
to_beg = &narrow[sz];
to_end = &narrow[0] + narrow.size();
break;
case std::codecvt_base::noconv:
narrow.resize(sz + (from_end-from_beg)*sizeof(wchar_t));
std::memcpy(&narrow[sz], from_beg,(from_end-from_beg)*sizeof(wchar_t));
r = std::codecvt_base::ok;
break;
case std::codecvt_base::ok:
sz += to_nxt - to_beg;
narrow.resize(sz);
break;
}
} while (r != std::codecvt_base::ok);
return narrow;
}
static const std::wstring do_widen(
const std::string &narrow, const std::locale &loc)
{
if (narrow.empty())
return std::wstring();
std::wstring wide(narrow.size(), L'\0');
size_t processed = do_fast_widen(narrow, wide);
if (processed == narrow.size())
return wide;
typedef std::string::traits_type::state_type state_type;
typedef std::codecvt<wchar_t, char, state_type> CVT;
const CVT& cvt = std::use_facet<CVT>(loc);
state_type state = state_type();
const char* from_beg = &narrow[0];
const char* from_end = from_beg + narrow.size();<|fim▁hole|> const char* from_nxt;
wchar_t* to_beg = &wide[0];
wchar_t* to_end = to_beg + wide.size();
wchar_t* to_nxt;
std::wstring::size_type sz = 0;
std::codecvt_base::result r;
do {
r = cvt.in(state, from_beg, from_end, from_nxt,
to_beg, to_end, to_nxt);
switch (r)
{
case std::codecvt_base::error:
throw std::runtime_error("error converting string to wstring");
case std::codecvt_base::partial:
sz += to_nxt - to_beg;
wide.resize(2*wide.size());
to_beg = &wide[sz];
to_end = &wide[0] + wide.size();
break;
case std::codecvt_base::noconv:
wide.resize(sz + (from_end-from_beg));
std::memcpy(&wide[sz], from_beg, (std::size_t)(from_end-from_beg));
r = std::codecvt_base::ok;
break;
case std::codecvt_base::ok:
sz += to_nxt - to_beg;
wide.resize(sz);
break;
}
} while (r != std::codecvt_base::ok);
return wide;
}
YBUTIL_DECL const std::string get_locale(const std::string &enc_name = "")
{
if (enc_name.empty())
return
#ifdef YBUTIL_WINDOWS
"rus_rus.866"
#else
"ru_RU.UTF-8"
#endif
;
return enc_name;
}
YBUTIL_DECL const std::string str2std(const String &s, const std::string &enc_name)
{
#if defined(YB_USE_WX)
if (enc_name.empty())
return std::string(s.mb_str(wxConvUTF8));
wxCSConv conv(wxString(enc_name.c_str(), wxConvUTF8).GetData());
return std::string(s.mb_str(conv));
#elif defined(YB_USE_QT)
if (enc_name.empty())
return std::string(s.toLocal8Bit().constData());
QTextCodec *codec = QTextCodec::codecForName(enc_name.c_str());
return std::string(codec->fromUnicode(s).constData());
#elif defined(YB_USE_UNICODE)
std::locale loc(get_locale(enc_name).c_str());
return do_narrow(s, loc);
#else
return s;
#endif
}
YBUTIL_DECL const String std2str(const std::string &s, const std::string &enc_name)
{
#if defined(YB_USE_WX)
if (enc_name.empty())
return wxString(s.c_str(), wxConvUTF8);
wxCSConv conv(wxString(enc_name.c_str(), wxConvUTF8).GetData());
return wxString(s.c_str(), conv);
#elif defined(YB_USE_QT)
if (enc_name.empty())
return QString::fromLocal8Bit(s.c_str());
QTextCodec *codec = QTextCodec::codecForName(enc_name.c_str());
return codec->toUnicode(s.c_str());
#elif defined(YB_USE_UNICODE)
std::locale loc(get_locale(enc_name).c_str());
return do_widen(s, loc);
#else
return s;
#endif
}
YBUTIL_DECL const std::string str_narrow(
const std::wstring &wide, const std::string &enc_name)
{
std::locale loc(get_locale(enc_name).c_str());
return do_narrow(wide, loc);
}
YBUTIL_DECL const std::wstring str_widen(
const std::string &narrow, const std::string &enc_name)
{
std::locale loc(get_locale(enc_name).c_str());
return do_widen(narrow, loc);
}
YBUTIL_DECL const std::string get_locale_enc()
{
std::string loc_name = get_locale();
int pos = loc_name.find('.');
if (pos != std::string::npos)
return loc_name.substr(pos + 1);
return loc_name;
}
} // namespace Yb
// vim:ts=4:sts=4:sw=4:et:<|fim▁end|>
| |
<|file_name|>shell.py<|end_file_name|><|fim▁begin|>"""Contains utility functions for working with the shell"""
from contextlib import contextmanager
import datetime
from decimal import Decimal
import json
import pprint
import sys
import time
import traceback
SHELL_CONTROL_SEQUENCES = {
'BLUE': '\033[34m',
'LTBLUE': '\033[94m',
'GREEN': '\033[32m',
'LTGREEN': '\033[92m',
'YELLOW': '\033[33m',
'LTYELLOW': '\033[93m',
'RED': '\033[31m',
'LTRED': '\033[91m',
'CYAN': '\033[36m',
'LTCYAN': '\033[96m',
'MAGENTA': '\033[35m',
'LTMAGENTA': '\033[95m',
'ENDC': '\033[0m',
'BOLD': '\033[1m',
'UNDERLINE': '\033[4m',
}
BLUE = "{BLUE}"
LTBLUE = "{LTBLUE}"
GREEN = "{GREEN}"
LTGREEN = "{LTGREEN}"
YELLOW = "{YELLOW}"
LTYELLOW = "{LTYELLOW}"
RED = "{RED}"
LTRED = "{LTRED}"
CYAN = "{CYAN}"
LTCYAN = "{LTCYAN}"
MAGENTA = "{MAGENTA}"
LTMAGENTA = "{LTMAGENTA}"
ENDC = "{ENDC}"
BOLD = "{BOLD}"
UNDERLINE = "{UNDERLINE}"
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Decimal):
return float(o)
elif isinstance(o, (datetime.datetime, datetime.date, datetime.time)):
return str(o)
return super(JSONEncoder, self).default(o)
def read_json(timeout=0):
"""Read json data from stdin"""
data = read()
if data:
return json.loads(data)
def write_output(writer, *output, **kwargs):
"""Write the output to the writer, used for printing to stdout/stderr"""
to_print = kwargs.get("sep", " ").join(output) + kwargs.get("end", "\n")
if isinstance(writer, list):
writer.append(to_print)
else:
writer.write(to_print)
if kwargs.get("flush"):
writer.flush()
def write_json(output, end='', raw=False, file=None, flush=False):
file = file or sys.stdout
if len(output) == 1:
output = output[0]
if raw:
json.dump(output, file, separators=(',', ':'), cls=JSONEncoder)
else:
json.dump(output, file, indent=4, sort_keys=True, cls=JSONEncoder)
if flush:
file.flush()
if end:
write_output(file, '', end=end, sep='', flush=flush)
def read():
"""Read from stdin"""
return sys.stdin.read()
def choice(choices, msg='Enter your choice: ', color=True, default=None, **kwargs):
if isinstance(choices, dict):
choices_dict = choices
choices = sorted(choices_dict.keys())
elif isinstance(choices, (tuple, list)):
choices_dict = None
choice_msg = ['']
validate = []
for idx, item in enumerate(choices):
if color:
choice_msg.append("\t{LTYELLOW}%d{LTMAGENTA}: %s" % (idx, str(item)))
else:
choice_msg.append("\t%d: %s" % (idx, str(item)))
validate.append(str(idx))
choice_msg.append("")
if color:
choice_msg.append("{LTMAGENTA}{BOLD}"+msg+"{ENDC}")
else:
choice_msg.append(msg)
output = ask("\n".join(choice_msg), validate=validate, default=default, color=None, **kwargs)
if choices_dict:
key = choices[int(output)]
return choices_dict[key]
else:
return choices[int(output)]
def ask(*args, **kwargs):
"""Ask for input"""
if not sys.stdin.isatty():
error("Cannot ask user for input, no tty exists")
sys.exit(1)
print_args = list(args)
print_args.append(kwargs.get("end", "\n"))
if kwargs["color"]:
print_args.insert(0, "{" + kwargs["color"] + "}")
print_args.append(ENDC)
while True:
stderr(*print_args, end='', **kwargs)
in_ = input()
if in_:
if not kwargs["validate"]:
return in_
if isinstance(kwargs["validate"], (tuple, list)) and in_ in kwargs["validate"]:
return in_
if callable(kwargs["validate"]) and kwargs["validate"](in_):
return in_
if kwargs["default"] is not None:
return kwargs["default"]
if kwargs["error_msg"] is not None:
error("\n" + kwargs["error_msg"] + "\n")
else:
error("\nYou didn't enter a valid choice!\n")
time.sleep(1)
def pretty(output):
"""Pretty format for shell output"""
return pprint.pformat(output, indent=2, width=100)
def _shell_format(output, **kwargs):
"""Formats the output for printing to a shell"""
kwargs.update(SHELL_CONTROL_SEQUENCES)
for idx, item in enumerate(output):
try:
output[idx] = item.format(**kwargs)
except KeyError:
pass # Can happen if some item is not in the kwargs dict
return output
def _convert_print(*args):
"""Convert the given arguments to a string for printing. Concantenate them together"""
output = []
for arg in args:
if not isinstance(arg, str):
arg = pretty(arg)
output.append(arg)
return output
def stdout_to_stderr():
"""Temporarily redirects stdout to stderr. Returns no-arg function to turn it back on."""
stdout = sys.stdout
sys.stdout = sys.stderr
def restore_stdout():
sys.stdout = stdout
return restore_stdout
def write_info_output(writer, *output, **kwargs):
if kwargs.get("json"):
return write_json(output, **kwargs)
if not kwargs.get("raw", False):
output = _convert_print(*output)
output = _shell_format(output, **kwargs)
write_output(writer, *output, **kwargs)
def stdout(*output, **kwargs):
"""Print to stdout. Supports colors"""
write_info_output(sys.stdout, *output, **kwargs)
def stderr(*output, **kwargs):
"""Print to stderr. Supports colors"""
write_info_output(sys.stderr, *output, **kwargs)
def print_color(color, *output, **kwargs):
"""Print message to stderr in the given color"""
print_args = list(output)
print_args.append(ENDC)
if "file" in kwargs:
write_output(kwargs["file"], *output, **kwargs)
else:
stderr(color, *print_args, **kwargs)
def debug(*output, **kwargs):
"""Print debug message to stderr"""
print_color(BLUE, *output, **kwargs)
def info(*output, **kwargs):
"""Print info message to stderr"""
print_color(GREEN, *output, **kwargs)
def warning(*output, **kwargs):
"""Print warning message to stderr"""
print_color(YELLOW, *output, **kwargs)
def error(*output, **kwargs):<|fim▁hole|> """Print error message to stderr"""
print_color(RED, *output, **kwargs)
def exception(*output, **kwargs):
"""Print error message to stderr with last exception info"""
exc = traceback.format_exc()
print_args = list(output)
print_args.append("\nAn exception occurred:\n{exc}".format(exc=exc))
print_color(RED, *print_args, **kwargs)
def timestamp():
return int(time.time())
@contextmanager
def elapsed(output, **kwargs):
"""Context Manager that prints to stderr how long a process took"""
start = timestamp()
info("Starting: ", output, **kwargs)
yield
info("Completed: " + output + " {MAGENTA}(Elapsed Time: {elapsed}s){ENDC}", elapsed=timestamp()-start, **kwargs)
def elapsed_decorator(output):
"""Decorator that prints to stderr how long a process took"""
def wrapper(fn):
def wrapped_fn(*args, **kwargs):
with elapsed(output, **kwargs):
fn(*args, **kwargs)
return wrapped_fn
return wrapper
def print_section(color, *output, **kwargs):
"""Prints a section title header"""
output = ["\n\n", 60 * "#", "\n", "# "] + list(output) + ["\n", 60 * "#", "\n"]
print_color(color, *output, end="\n", **kwargs)
def print_table(headers, *table_data, **kwargs):
if not table_data:
return
if isinstance(table_data[0], dict):
all_data = []
for d in table_data:
new_output = []
for header in headers:
new_output.append(d[header])
all_data.append(new_output)
else:
all_data = table_data
print(all_data)
all_data.insert(0, headers)
widths = [max(len(d[idx]) for d in all_data) for idx, _ in enumerate(headers)]
output = []
for row_idx, data in enumerate(all_data):
line = []
pad = "<" if row_idx == 0 else ">"
for idx, item in enumerate(data):
print(item)
print(idx)
formatter = "{item: " + pad + str(widths[idx]) + "}"
line.append(formatter.format(item=item))
output.append("| " + " | ".join(line) + " |")
write_output(kwargs.get("file", sys.stderr), *output, **kwargs)<|fim▁end|>
| |
<|file_name|>gitutil.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import command
import re
import os
import series
import subprocess
import sys
import terminal
import checkpatch
import settings
def CountCommitsToBranch():
"""Returns number of commits between HEAD and the tracking branch.
This looks back to the tracking branch and works out the number of commits
since then.
Return:
Number of patches that exist on top of the branch
"""
pipe = [['git', 'log', '--no-color', '--oneline', '--no-decorate',
'@{upstream}..'],
['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
return patch_count
def GetUpstream(git_dir, branch):
"""Returns the name of the upstream for a branch
Args:
git_dir: Git directory containing repo
branch: Name of branch
Returns:
Name of upstream branch (e.g. 'upstream/master') or None if none
"""
try:
remote = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
'branch.%s.remote' % branch)
merge = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
'branch.%s.merge' % branch)
except:
return None
if remote == '.':
return merge
elif remote and merge:
leaf = merge.split('/')[-1]
return '%s/%s' % (remote, leaf)
else:
raise ValueError, ("Cannot determine upstream branch for branch "
"'%s' remote='%s', merge='%s'" % (branch, remote, merge))
def GetRangeInBranch(git_dir, branch, include_upstream=False):
"""Returns an expression for the commits in the given branch.
Args:
git_dir: Directory containing git repo
branch: Name of branch
Return:
Expression in the form 'upstream..branch' which can be used to
access the commits. If the branch does not exist, returns None.
"""
upstream = GetUpstream(git_dir, branch)
if not upstream:
return None
return '%s%s..%s' % (upstream, '~' if include_upstream else '', branch)
def CountCommitsInBranch(git_dir, branch, include_upstream=False):
"""Returns the number of commits in the given branch.
Args:
git_dir: Directory containing git repo
branch: Name of branch
Return:
Number of patches that exist on top of the branch, or None if the
branch does not exist.
"""
range_expr = GetRangeInBranch(git_dir, branch, include_upstream)
if not range_expr:
return None
pipe = [['git', '--git-dir', git_dir, 'log', '--oneline', '--no-decorate',
range_expr],
['wc', '-l']]
result = command.RunPipe(pipe, capture=True, oneline=True)
patch_count = int(result.stdout)
return patch_count
def CountCommits(commit_range):
"""Returns the number of commits in the given range.
Args:
commit_range: Range of commits to count (e.g. 'HEAD..base')
Return:
Number of patches that exist on top of the branch
"""
pipe = [['git', 'log', '--oneline', '--no-decorate', commit_range],
['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
return patch_count
def Checkout(commit_hash, git_dir=None, work_tree=None, force=False):
"""Checkout the selected commit for this build
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git']
if git_dir:
pipe.extend(['--git-dir', git_dir])
if work_tree:
pipe.extend(['--work-tree', work_tree])
pipe.append('checkout')
if force:
pipe.append('-f')
pipe.append(commit_hash)
result = command.RunPipe([pipe], capture=True, raise_on_error=False)
if result.return_code != 0:
raise OSError, 'git checkout (%s): %s' % (pipe, result.stderr)
def Clone(git_dir, output_dir):
"""Checkout the selected commit for this build
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git', 'clone', git_dir, '.']
result = command.RunPipe([pipe], capture=True, cwd=output_dir)
if result.return_code != 0:
raise OSError, 'git clone: %s' % result.stderr
def Fetch(git_dir=None, work_tree=None):
"""Fetch from the origin repo
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git']
if git_dir:
pipe.extend(['--git-dir', git_dir])
if work_tree:
pipe.extend(['--work-tree', work_tree])
pipe.append('fetch')
result = command.RunPipe([pipe], capture=True)
if result.return_code != 0:
raise OSError, 'git fetch: %s' % result.stderr
def CreatePatches(start, count, series):
"""Create a series of patches from the top of the current branch.
The patch files are written to the current directory using
git format-patch.
Args:
start: Commit to start from: 0=HEAD, 1=next one, etc.
count: number of commits to include
Return:
Filename of cover letter
List of filenames of patch files
"""
if series.get('version'):
version = '%s ' % series['version']
cmd = ['git', 'format-patch', '-M', '--signoff']
if series.get('cover'):
cmd.append('--cover-letter')
prefix = series.GetPatchPrefix()
if prefix:
cmd += ['--subject-prefix=%s' % prefix]
cmd += ['HEAD~%d..HEAD~%d' % (start + count, start)]
stdout = command.RunList(cmd)
files = stdout.splitlines()
# We have an extra file if there is a cover letter
if series.get('cover'):
return files[0], files[1:]
else:
return None, files
def ApplyPatch(verbose, fname):
"""Apply a patch with git am to test it
TODO: Convert these to use command, with stderr option
Args:
fname: filename of patch file to apply
"""
col = terminal.Color()
cmd = ['git', 'am', fname]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
re_error = re.compile('^error: patch failed: (.+):(\d+)')
for line in stderr.splitlines():
if verbose:
print line
match = re_error.match(line)
if match:
print checkpatch.GetWarningMsg(col, 'warning', match.group(1),
int(match.group(2)), 'Patch failed')
return pipe.returncode == 0, stdout
def ApplyPatches(verbose, args, start_point):
"""Apply the patches with git am to make sure all is well
Args:
verbose: Print out 'git am' output verbatim
args: List of patch files to apply
start_point: Number of commits back from HEAD to start applying.
Normally this is len(args), but it can be larger if a start
offset was given.
"""
error_count = 0
col = terminal.Color()
# Figure out our current position
cmd = ['git', 'name-rev', 'HEAD', '--name-only']
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
str = 'Could not find current commit name'
print col.Color(col.RED, str)
print stdout
return False
old_head = stdout.splitlines()[0]
# Checkout the required start point
cmd = ['git', 'checkout', 'HEAD~%d' % start_point]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
str = 'Could not move to commit before patch series'
print col.Color(col.RED, str)
print stdout, stderr
return False
# Apply all the patches
for fname in args:
ok, stdout = ApplyPatch(verbose, fname)
if not ok:
print col.Color(col.RED, 'git am returned errors for %s: will '
'skip this patch' % fname)
if verbose:
print stdout
error_count += 1
cmd = ['git', 'am', '--skip']
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode != 0:
print col.Color(col.RED, 'Unable to skip patch! Aborting...')
print stdout
break
# Return to our previous position
cmd = ['git', 'checkout', old_head]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
print col.Color(col.RED, 'Could not move back to head commit')
print stdout, stderr<|fim▁hole|> return error_count == 0
def BuildEmailList(in_list, tag=None, alias=None, raise_on_error=True):
"""Build a list of email addresses based on an input list.
Takes a list of email addresses and aliases, and turns this into a list
of only email address, by resolving any aliases that are present.
If the tag is given, then each email address is prepended with this
tag and a space. If the tag starts with a minus sign (indicating a
command line parameter) then the email address is quoted.
Args:
in_list: List of aliases/email addresses
tag: Text to put before each address
alias: Alias dictionary
raise_on_error: True to raise an error when an alias fails to match,
False to just print a message.
Returns:
List of email addresses
>>> alias = {}
>>> alias['fred'] = ['[email protected]']
>>> alias['john'] = ['[email protected]']
>>> alias['mary'] = ['Mary Poppins <[email protected]>']
>>> alias['boys'] = ['fred', ' john']
>>> alias['all'] = ['fred ', 'john', ' mary ']
>>> BuildEmailList(['john', 'mary'], None, alias)
['[email protected]', 'Mary Poppins <[email protected]>']
>>> BuildEmailList(['john', 'mary'], '--to', alias)
['--to "[email protected]"', \
'--to "Mary Poppins <[email protected]>"']
>>> BuildEmailList(['john', 'mary'], 'Cc', alias)
['Cc [email protected]', 'Cc Mary Poppins <[email protected]>']
"""
quote = '"' if tag and tag[0] == '-' else ''
raw = []
for item in in_list:
raw += LookupEmail(item, alias, raise_on_error=raise_on_error)
result = []
for item in raw:
if not item in result:
result.append(item)
if tag:
return ['%s %s%s%s' % (tag, quote, email, quote) for email in result]
return result
def EmailPatches(series, cover_fname, args, dry_run, raise_on_error, cc_fname,
self_only=False, alias=None, in_reply_to=None):
"""Email a patch series.
Args:
series: Series object containing destination info
cover_fname: filename of cover letter
args: list of filenames of patch files
dry_run: Just return the command that would be run
raise_on_error: True to raise an error when an alias fails to match,
False to just print a message.
cc_fname: Filename of Cc file for per-commit Cc
self_only: True to just email to yourself as a test
in_reply_to: If set we'll pass this to git as --in-reply-to.
Should be a message ID that this is in reply to.
Returns:
Git command that was/would be run
# For the duration of this doctest pretend that we ran patman with ./patman
>>> _old_argv0 = sys.argv[0]
>>> sys.argv[0] = './patman'
>>> alias = {}
>>> alias['fred'] = ['[email protected]']
>>> alias['john'] = ['[email protected]']
>>> alias['mary'] = ['[email protected]']
>>> alias['boys'] = ['fred', ' john']
>>> alias['all'] = ['fred ', 'john', ' mary ']
>>> alias[os.getenv('USER')] = ['[email protected]']
>>> series = series.Series()
>>> series.to = ['fred']
>>> series.cc = ['mary']
>>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
False, alias)
'git send-email --annotate --to "[email protected]" --cc \
"[email protected]" --cc-cmd "./patman --cc-cmd cc-fname" cover p1 p2'
>>> EmailPatches(series, None, ['p1'], True, True, 'cc-fname', False, \
alias)
'git send-email --annotate --to "[email protected]" --cc \
"[email protected]" --cc-cmd "./patman --cc-cmd cc-fname" p1'
>>> series.cc = ['all']
>>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
True, alias)
'git send-email --annotate --to "[email protected]" --cc-cmd "./patman \
--cc-cmd cc-fname" cover p1 p2'
>>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
False, alias)
'git send-email --annotate --to "[email protected]" --cc \
"[email protected]" --cc "[email protected]" --cc \
"[email protected]" --cc-cmd "./patman --cc-cmd cc-fname" cover p1 p2'
# Restore argv[0] since we clobbered it.
>>> sys.argv[0] = _old_argv0
"""
to = BuildEmailList(series.get('to'), '--to', alias, raise_on_error)
if not to:
print ("No recipient, please add something like this to a commit\n"
"Series-to: Fred Bloggs <[email protected]>")
return
cc = BuildEmailList(series.get('cc'), '--cc', alias, raise_on_error)
if self_only:
to = BuildEmailList([os.getenv('USER')], '--to', alias, raise_on_error)
cc = []
cmd = ['git', 'send-email', '--annotate']
if in_reply_to:
cmd.append('--in-reply-to="%s"' % in_reply_to)
cmd += to
cmd += cc
cmd += ['--cc-cmd', '"%s --cc-cmd %s"' % (sys.argv[0], cc_fname)]
if cover_fname:
cmd.append(cover_fname)
cmd += args
str = ' '.join(cmd)
if not dry_run:
os.system(str)
return str
def LookupEmail(lookup_name, alias=None, raise_on_error=True, level=0):
"""If an email address is an alias, look it up and return the full name
TODO: Why not just use git's own alias feature?
Args:
lookup_name: Alias or email address to look up
alias: Dictionary containing aliases (None to use settings default)
raise_on_error: True to raise an error when an alias fails to match,
False to just print a message.
Returns:
tuple:
list containing a list of email addresses
Raises:
OSError if a recursive alias reference was found
ValueError if an alias was not found
>>> alias = {}
>>> alias['fred'] = ['[email protected]']
>>> alias['john'] = ['[email protected]']
>>> alias['mary'] = ['[email protected]']
>>> alias['boys'] = ['fred', ' john', '[email protected]']
>>> alias['all'] = ['fred ', 'john', ' mary ']
>>> alias['loop'] = ['other', 'john', ' mary ']
>>> alias['other'] = ['loop', 'john', ' mary ']
>>> LookupEmail('mary', alias)
['[email protected]']
>>> LookupEmail('[email protected]', alias)
['[email protected]']
>>> LookupEmail('boys', alias)
['[email protected]', '[email protected]']
>>> LookupEmail('all', alias)
['[email protected]', '[email protected]', '[email protected]']
>>> LookupEmail('odd', alias)
Traceback (most recent call last):
...
ValueError: Alias 'odd' not found
>>> LookupEmail('loop', alias)
Traceback (most recent call last):
...
OSError: Recursive email alias at 'other'
>>> LookupEmail('odd', alias, raise_on_error=False)
\033[1;31mAlias 'odd' not found\033[0m
[]
>>> # In this case the loop part will effectively be ignored.
>>> LookupEmail('loop', alias, raise_on_error=False)
\033[1;31mRecursive email alias at 'other'\033[0m
\033[1;31mRecursive email alias at 'john'\033[0m
\033[1;31mRecursive email alias at 'mary'\033[0m
['[email protected]', '[email protected]']
"""
if not alias:
alias = settings.alias
lookup_name = lookup_name.strip()
if '@' in lookup_name: # Perhaps a real email address
return [lookup_name]
lookup_name = lookup_name.lower()
col = terminal.Color()
out_list = []
if level > 10:
msg = "Recursive email alias at '%s'" % lookup_name
if raise_on_error:
raise OSError, msg
else:
print col.Color(col.RED, msg)
return out_list
if lookup_name:
if not lookup_name in alias:
msg = "Alias '%s' not found" % lookup_name
if raise_on_error:
raise ValueError, msg
else:
print col.Color(col.RED, msg)
return out_list
for item in alias[lookup_name]:
todo = LookupEmail(item, alias, raise_on_error, level + 1)
for new_item in todo:
if not new_item in out_list:
out_list.append(new_item)
#print "No match for alias '%s'" % lookup_name
return out_list
def GetTopLevel():
"""Return name of top-level directory for this git repo.
Returns:
Full path to git top-level directory
This test makes sure that we are running tests in the right subdir
>>> os.path.realpath(os.path.dirname(__file__)) == \
os.path.join(GetTopLevel(), 'tools', 'patman')
True
"""
return command.OutputOneLine('git', 'rev-parse', '--show-toplevel')
def GetAliasFile():
"""Gets the name of the git alias file.
Returns:
Filename of git alias file, or None if none
"""
fname = command.OutputOneLine('git', 'config', 'sendemail.aliasesfile',
raise_on_error=False)
if fname:
fname = os.path.join(GetTopLevel(), fname.strip())
return fname
def GetDefaultUserName():
"""Gets the user.name from .gitconfig file.
Returns:
User name found in .gitconfig file, or None if none
"""
uname = command.OutputOneLine('git', 'config', '--global', 'user.name')
return uname
def GetDefaultUserEmail():
"""Gets the user.email from the global .gitconfig file.
Returns:
User's email found in .gitconfig file, or None if none
"""
uemail = command.OutputOneLine('git', 'config', '--global', 'user.email')
return uemail
def Setup():
"""Set up git utils, by reading the alias files."""
# Check for a git alias file also
alias_fname = GetAliasFile()
if alias_fname:
settings.ReadGitAliases(alias_fname)
def GetHead():
"""Get the hash of the current HEAD
Returns:
Hash of HEAD
"""
return command.OutputOneLine('git', 'show', '-s', '--pretty=format:%H')
if __name__ == "__main__":
import doctest
doctest.testmod()<|fim▁end|>
| |
<|file_name|>Linux.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013-2017 Chris Lalancette <[email protected]>
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 2.1 of the License.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
Linux installation
"""
import os
import re
import time
import libvirt
import oz.Guest
import oz.OzException
class LinuxCDGuest(oz.Guest.CDGuest):
"""
Class for Linux installation.
"""
def __init__(self, tdl, config, auto, output_disk, nicmodel, diskbus,
iso_allowed, url_allowed, macaddress, useuefi):
oz.Guest.CDGuest.__init__(self, tdl, config, auto, output_disk,
nicmodel, None, None, diskbus, iso_allowed,
url_allowed, macaddress, useuefi)
def _test_ssh_connection(self, guestaddr):
"""
Internal method to test out the ssh connection before we try to use it.
Under systemd, the IP address of a guest can come up and reportip can
run before the ssh key is generated and sshd starts up. This check
makes sure that we allow an additional 30 seconds (1 second per ssh
attempt) for sshd to finish initializing.
"""
count = 30
success = False
while count > 0:
try:
self.log.debug("Testing ssh connection, try %d", count)
start = time.time()
self.guest_execute_command(guestaddr, 'ls', timeout=1)
self.log.debug("Succeeded")
success = True
break
except oz.ozutil.SubprocessException:
# ensure that we spent at least one second before trying again
end = time.time()
if (end - start) < 1:
time.sleep(1 - (end - start))
count -= 1
if not success:
self.log.debug("Failed to connect to ssh on running guest")
raise oz.OzException.OzException("Failed to connect to ssh on running guest")
def get_default_runlevel(self, g_handle):
"""
Function to determine the default runlevel based on the /etc/inittab.
"""
runlevel = "3"
if g_handle.exists('/etc/inittab'):
lines = g_handle.cat('/etc/inittab').split("\n")
for line in lines:
if re.match('id:', line):
runlevel = line.split(':')[1]
break
return runlevel
def guest_execute_command(self, guestaddr, command, timeout=10):
"""
Method to execute a command on the guest and return the output.
"""
# ServerAliveInterval protects against NAT firewall timeouts
# on long-running commands with no output
#
# PasswordAuthentication=no prevents us from falling back to
# keyboard-interactive password prompting
#
# -F /dev/null makes sure that we don't use the global or per-user
# configuration files
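# Illustrative invocation (placeholders, not literal values from the source):
#   ssh -i <sshprivkey> -F /dev/null -o ServerAliveInterval=30 ... root@<guestaddr> <command>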
return oz.ozutil.subprocess_check_output(["ssh", "-i", self.sshprivkey,
"-F", "/dev/null",
"-o", "ServerAliveInterval=30",
"-o", "StrictHostKeyChecking=no",
"-o", "ConnectTimeout=" + str(timeout),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "PasswordAuthentication=no",
"-o", "IdentitiesOnly yes",
"root@" + guestaddr, command],
printfn=self.log.debug)
def guest_live_upload(self, guestaddr, file_to_upload, destination,
timeout=10):
"""
Method to copy a file to the live guest.
"""
self.guest_execute_command(guestaddr,
"mkdir -p " + os.path.dirname(destination),
timeout)
# ServerAliveInterval protects against NAT firewall timeouts
# on long-running commands with no output
#
# PasswordAuthentication=no prevents us from falling back to
# keyboard-interactive password prompting
#
# -F /dev/null makes sure that we don't use the global or per-user
# configuration files
return oz.ozutil.subprocess_check_output(["scp", "-i", self.sshprivkey,
"-F", "/dev/null",
"-o", "ServerAliveInterval=30",
"-o", "StrictHostKeyChecking=no",
"-o", "ConnectTimeout=" + str(timeout),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "PasswordAuthentication=no",
"-o", "IdentitiesOnly yes",
file_to_upload,
"root@" + guestaddr + ":" + destination],
printfn=self.log.debug)
def _customize_files(self, guestaddr):
"""
Method to upload the custom files specified in the TDL to the guest.
"""
self.log.info("Uploading custom files")
for name, fp in list(self.tdl.files.items()):
# all of the self.tdl.files are named temporary files; we just need
# to fetch the name out and have scp upload it
self.guest_live_upload(guestaddr, fp.name, name)
def _shutdown_guest(self, guestaddr, libvirt_dom):
"""
Method to shutdown the guest (gracefully at first, then with prejudice).
"""
if guestaddr is not None:
# sometimes the ssh process gets disconnected before it can return
# cleanly (particularly when the guest is running systemd). If that
# happens, ssh returns 255, guest_execute_command throws an
# exception, and the guest is forcibly destroyed. While this
# isn't the end of the world, it isn't desirable. To avoid
# this, we catch any exception thrown by ssh during the shutdown
# command and throw them away. In the (rare) worst case, the
# shutdown will not have made it to the guest and we'll have to wait
# 90 seconds for wait_for_guest_shutdown to time out and forcibly
# kill the guest.
try:
self.guest_execute_command(guestaddr, 'shutdown -h now')
except Exception:
pass
try:
if not self._wait_for_guest_shutdown(libvirt_dom):
self.log.warning("Guest did not shutdown in time, going to kill")
else:
libvirt_dom = None
except Exception:
self.log.warning("Failed shutting down guest, forcibly killing")
if libvirt_dom is not None:
try:
libvirt_dom.destroy()
except libvirt.libvirtError:
# the destroy failed for some reason. This can happen if
# _wait_for_guest_shutdown times out, but the domain shuts
# down before we get to destroy. Check to make sure that the
# domain is gone from the list of running domains; if so, just
# continue on; if not, re-raise the error.
for domid in self.libvirt_conn.listDomainsID():
if domid == libvirt_dom.ID():
raise
def _collect_setup(self, libvirt_xml): # pylint: disable=unused-argument
"""
Default method to set the guest up for remote access.
"""
raise oz.OzException.OzException("ICICLE generation and customization is not implemented for guest %s" % (self.tdl.distro))
def _collect_teardown(self, libvirt_xml): # pylint: disable=unused-argument
"""
Method to reverse the changes done in _collect_setup.
"""
raise oz.OzException.OzException("ICICLE generation and customization is not implemented for guest %s" % (self.tdl.distro))
def _install_packages(self, guestaddr, packstr): # pylint: disable=unused-argument
"""
Internal method to install packages; expected to be overridden by
child classes.
"""
raise oz.OzException.OzException("Customization is not implemented for guest %s" % (self.tdl.distro))
def _customize_repos(self, guestaddr): # pylint: disable=unused-argument
"""
Internal method to customize repositories; expected to be overridden by
child classes.
"""
raise oz.OzException.OzException("Customization is not implemented for guest %s" % (self.tdl.distro))
def _remove_repos(self, guestaddr): # pylint: disable=unused-argument
"""
Internal method to remove repositories; expected to be overridden by
child classes.
"""
raise oz.OzException.OzException("Repository removal not implemented for guest %s" % (self.tdl.distro))
def do_customize(self, guestaddr):
"""
Method to customize by installing additional packages and files.
"""
if not self.tdl.packages and not self.tdl.files and not self.tdl.commands:
# no work to do, just return
return
self._customize_repos(guestaddr)
for cmd in self.tdl.precommands:
self.guest_execute_command(guestaddr, cmd.read())
self.log.debug("Installing custom packages")
packstr = ''
for package in self.tdl.packages:
packstr += '"' + package.name + '" '
if packstr != '':
self._install_packages(guestaddr, packstr)
self._customize_files(guestaddr)
self.log.debug("Running custom commands")
for cmd in self.tdl.commands:
self.guest_execute_command(guestaddr, cmd.read())
self.log.debug("Removing non-persisted repos")
self._remove_repos(guestaddr)
self.log.debug("Syncing")
self.guest_execute_command(guestaddr, 'sync')
def do_icicle(self, guestaddr):
"""
Default method to collect the package information and generate the
ICICLE XML.
"""
raise oz.OzException.OzException("ICICLE generation is not implemented for this guest type")
def _internal_customize(self, libvirt_xml, action):
"""
Internal method to customize and optionally generate an ICICLE for the
operating system after initial installation.
"""
# the "action" input is actually a tri-state:
# action = "gen_and_mod" means to generate the icicle and to
# potentially make modifications
# action = "gen_only" means to generate the icicle only, and not
# look at any modifications
# action = "mod_only" means to not generate the icicle, but still
# potentially make modifications
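# (For reference: the public wrappers below use these values:
#  customize() -> "mod_only", generate_icicle() -> "gen_only",
#  customize_and_generate_icicle() -> "gen_and_mod".)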
self.log.info("Customizing image")
if not self.tdl.packages and not self.tdl.files and not self.tdl.commands:
if action == "mod_only":
self.log.info("No additional packages, files, or commands to install, and icicle generation not requested, skipping customization")
return
elif action == "gen_and_mod":
# It is actually possible to get here with a "gen_and_mod"
# action but a TDL that contains no real customizations.
# In the "safe ICICLE" code below it is important to know
# when we are truly in a "gen_only" state so we modify
# the action here if we detect that ICICLE generation is the
# only task to be done.
self.log.debug("Asked to gen_and_mod but no mods are present - changing action to gen_only")
action = "gen_only"
# when doing an oz-install with -g, this isn't necessary as it will
# just replace the port with the same port. However, it is very
# necessary when doing an oz-customize since the serial port might
# not match what is specified in the libvirt XML
modified_xml = self._modify_libvirt_xml_for_serial(libvirt_xml)
if action == "gen_only" and self.safe_icicle_gen:
# We are only generating ICICLE and the user has asked us to do
# this without modifying the completed image by booting it.
# Create a copy on write snapshot to use for ICICLE
# generation - discard when finished
cow_diskimage = self.diskimage + "-icicle-snap.qcow2"
self._internal_generate_diskimage(force=True,
backing_filename=self.diskimage,
image_filename=cow_diskimage)
modified_xml = self._modify_libvirt_xml_diskimage(modified_xml, cow_diskimage, 'qcow2')
self._collect_setup(modified_xml)
icicle = None
try:
libvirt_dom = self.libvirt_conn.createXML(modified_xml, 0)
try:
guestaddr = None
guestaddr = self._wait_for_guest_boot(libvirt_dom)
self._test_ssh_connection(guestaddr)
if action == "gen_and_mod":
self.do_customize(guestaddr)
icicle = self.do_icicle(guestaddr)
elif action == "gen_only":
icicle = self.do_icicle(guestaddr)
elif action == "mod_only":
self.do_customize(guestaddr)
else:
raise oz.OzException.OzException("Invalid customize action %s; this is a programming error" % (action))
finally:
if action == "gen_only" and self.safe_icicle_gen:
# if this is a gen_only and safe_icicle_gen, there is no
# reason to wait around for the guest to shutdown; we'll
# be removing the overlay file anyway. Just destroy it
libvirt_dom.destroy()
else:
self._shutdown_guest(guestaddr, libvirt_dom)
finally:<|fim▁hole|> else:
self._collect_teardown(modified_xml)
return icicle
def customize(self, libvirt_xml):
"""
Method to customize the operating system after installation.
"""
return self._internal_customize(libvirt_xml, "mod_only")
def customize_and_generate_icicle(self, libvirt_xml):
"""
Method to customize and generate the ICICLE for an operating system
after installation. This is equivalent to calling customize() and
generate_icicle() back-to-back, but is faster.
"""
return self._internal_customize(libvirt_xml, "gen_and_mod")
def generate_icicle(self, libvirt_xml):
"""
Method to generate the ICICLE from an operating system after
installation. The ICICLE contains information about packages and
other configuration on the diskimage.
"""
return self._internal_customize(libvirt_xml, "gen_only")<|fim▁end|>
|
if action == "gen_only" and self.safe_icicle_gen:
# no need to teardown because we simply discard the file
# containing those changes
os.unlink(cow_diskimage)
|
<|file_name|>testing.py<|end_file_name|><|fim▁begin|>from contextlib import contextmanager
import logging
import unittest
from .context import Context
log = logging.getLogger(__name__)
class EphemeralContextTestCase(unittest.TestCase):
def setUp(self):
self.context = Context()
log.debug('XXX Starting context')
self.context.start()
def tearDown(self):
log.debug('XXX Stopping context')
self.context.stop()
@contextmanager
def ephemeral_context(**kw):<|fim▁hole|> context = Context(**kw)
context.start()
yield context
context.stop()<|fim▁end|>
| |
<|file_name|>BreadcrumbItem.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { ReactLIAttr } from "../../../typings/shared";
import { LinkProps } from "../Link";
interface InheritedProps extends ReactLIAttr {
href?: LinkProps["href"],
}
export interface BreadcrumbItemProps extends InheritedProps {<|fim▁hole|>
declare const BreadcrumbItem: React.FC<BreadcrumbItemProps>;
export default BreadcrumbItem;<|fim▁end|>
|
isCurrentPage?: boolean,
}
|
<|file_name|>opts.go<|end_file_name|><|fim▁begin|>package httpmux
import "net/http"
// ConfigOption is the interface for updating config options.
type ConfigOption interface {
Set(c *Config)
}
// ConfigOptionFunc is an adapter for config option functions.
type ConfigOptionFunc func(c *Config)
// Set implements the ConfigOption interface.
func (f ConfigOptionFunc) Set(c *Config) { f(c) }
// WithPrefix returns a ConfigOption that uptates the Config.
func WithPrefix(prefix string) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.Prefix = prefix })
}
// WithMiddleware returns a ConfigOption that uptates the Config.
func WithMiddleware(mw ...Middleware) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.Middleware = mw })
}
// WithMiddlewareFunc returns a ConfigOption that uptates the Config.
func WithMiddlewareFunc(mw ...MiddlewareFunc) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.UseFunc(mw...) })
}<|fim▁hole|>}
// WithRedirectFixedPath returns a ConfigOption that uptates the Config.
func WithRedirectFixedPath(v bool) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.RedirectFixedPath = v })
}
// WithHandleMethodNotAllowed returns a ConfigOption that uptates the Config.
func WithHandleMethodNotAllowed(v bool) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.HandleMethodNotAllowed = v })
}
// WithNotFound returns a ConfigOption that uptates the Config.
func WithNotFound(f http.Handler) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.NotFound = f })
}
// WithMethodNotAllowed returns a ConfigOption that uptates the Config.
func WithMethodNotAllowed(f http.Handler) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.MethodNotAllowed = f })
}
// WithPanicHandler returns a ConfigOption that uptates the Config.
func WithPanicHandler(f func(http.ResponseWriter, *http.Request, interface{})) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.PanicHandler = f })
}<|fim▁end|>
|
// WithRedirectTrailingSlash returns a ConfigOption that uptates the Config.
func WithRedirectTrailingSlash(v bool) ConfigOption {
return ConfigOptionFunc(func(c *Config) { c.RedirectTrailingSlash = v })
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>declare module "svgpath" {
interface SvgPath {
(path: string): SvgPath;
new (path: string): SvgPath;
abs(): SvgPath;<|fim▁hole|> rotate(angle: number, rx?: number, ry?: number): SvgPath;
skewX(degrees: number): SvgPath;
skewY(degrees: number): SvgPath;
matrix(m1: number, m2: number, m3: number, m4: number, m5: number, m6: number): SvgPath;
transform(str: string): SvgPath;
unshort(): SvgPath;
unarc(): SvgPath;
toString(): String;
round(precision: number): SvgPath;
iterate(iterator: (segment: any[], index: number, x: number, y: number) => void, keepLazyStack?: boolean): SvgPath;
}
const svgPath: SvgPath;
export = svgPath;
}<|fim▁end|>
|
scale(sx: number, sy?: number): SvgPath;
translate(x: number, y?: number): SvgPath;
|
<|file_name|>tailsitter.cpp<|end_file_name|><|fim▁begin|>/*
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
control code for tailsitters. Enabled by setting Q_FRAME_CLASS=10
*/
#include "Plane.h"
/*
return true when flying a tailsitter
*/
bool QuadPlane::is_tailsitter(void)
{
return available() && frame_class == AP_Motors::MOTOR_FRAME_TAILSITTER;
}
/*
check if we are flying as a tailsitter
*/
bool QuadPlane::tailsitter_active(void)
{
return is_tailsitter() && in_vtol_mode();
}
/*
run output for tailsitters
*/
void QuadPlane::tailsitter_output(void)
{
if (!is_tailsitter()) {
return;
}
if (!tailsitter_active()) {
if (tailsitter.vectored_forward_gain > 0) {
// thrust vectoring in fixed wing flight
float aileron = SRV_Channels::get_output_scaled(SRV_Channel::k_aileron);
float elevator = SRV_Channels::get_output_scaled(SRV_Channel::k_elevator);
float tilt_left = (elevator + aileron) * tailsitter.vectored_forward_gain;
float tilt_right = (elevator - aileron) * tailsitter.vectored_forward_gain;
SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorLeft, tilt_left);
SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorRight, tilt_right);
} else {
SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorLeft, 0);<|fim▁hole|> return;
}
motors_output();
plane.pitchController.reset_I();
plane.rollController.reset_I();
if (tailsitter.vectored_hover_gain > 0) {
// thrust vectoring VTOL modes
float aileron = SRV_Channels::get_output_scaled(SRV_Channel::k_aileron);
float elevator = SRV_Channels::get_output_scaled(SRV_Channel::k_elevator);
float tilt_left = (elevator + aileron) * tailsitter.vectored_hover_gain;
float tilt_right = (elevator - aileron) * tailsitter.vectored_hover_gain;
SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorLeft, tilt_left);
SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorRight, tilt_right);
}
if (tailsitter.input_mask_chan > 0 &&
tailsitter.input_mask > 0 &&
hal.rcin->read(tailsitter.input_mask_chan-1) > 1700) {
// the user is learning to prop-hang
if (tailsitter.input_mask & TAILSITTER_MASK_AILERON) {
SRV_Channels::set_output_scaled(SRV_Channel::k_aileron, plane.channel_roll->get_control_in_zero_dz());
}
if (tailsitter.input_mask & TAILSITTER_MASK_ELEVATOR) {
SRV_Channels::set_output_scaled(SRV_Channel::k_elevator, plane.channel_pitch->get_control_in_zero_dz());
}
if (tailsitter.input_mask & TAILSITTER_MASK_THROTTLE) {
SRV_Channels::set_output_scaled(SRV_Channel::k_throttle, plane.channel_throttle->get_control_in_zero_dz());
}
if (tailsitter.input_mask & TAILSITTER_MASK_RUDDER) {
SRV_Channels::set_output_scaled(SRV_Channel::k_rudder, plane.channel_rudder->get_control_in_zero_dz());
}
}
}
/*
return true when we have completed enough of a transition to switch to fixed wing control
*/
bool QuadPlane::tailsitter_transition_complete(void)
{
if (plane.fly_inverted()) {
// transition immediately
return true;
}
if (labs(ahrs_view->pitch_sensor) > tailsitter.transition_angle*100 ||
labs(ahrs_view->roll_sensor) > tailsitter.transition_angle*100 ||
AP_HAL::millis() - transition_start_ms > 2000) {
return true;
}
// still waiting
return false;
}
// handle different tailsitter input types
void QuadPlane::tailsitter_check_input(void)
{
if (tailsitter_active() &&
tailsitter.input_type == TAILSITTER_INPUT_PLANE) {
// the user has asked for body frame controls when tailsitter
// is active. We switch around the control_in value for the
// channels to do this, as that ensures the value is
// consistent throughout the code
int16_t roll_in = plane.channel_roll->get_control_in();
int16_t yaw_in = plane.channel_rudder->get_control_in();
plane.channel_roll->set_control_in(yaw_in);
plane.channel_rudder->set_control_in(-roll_in);
}
}<|fim▁end|>
|
SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorRight, 0);
}
|
<|file_name|>ContainerValidationIntegrationTest.java<|end_file_name|><|fim▁begin|>package org.baeldung;
import static org.junit.Assert.assertEquals;
import java.util.Collections;
import java.util.OptionalInt;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import org.baeldung.valueextractors.ProfileValueExtractor;
import org.junit.Before;
import org.junit.Test;
public class ContainerValidationIntegrationTest {
private Validator validator;
@Before
public void setup() {
ValidatorFactory factory = Validation.byDefaultProvider().configure()
.addValueExtractor(new ProfileValueExtractor()).buildValidatorFactory();
validator = factory.getValidator();
}
@Test
public void whenEmptyAddress_thenValidationFails() {
Customer customer = new Customer();
customer.setName("John");
customer.setAddresses(Collections.singletonList(" "));
Set<ConstraintViolation<Customer>> violations = validator.validate(customer);
assertEquals(1, violations.size());
assertEquals("Address must not be blank", violations.iterator()
.next()
.getMessage());
}
@Test
public void whenInvalidEmail_thenValidationFails() {
CustomerMap map = new CustomerMap();
map.setCustomers(Collections.singletonMap("john", new Customer()));
Set<ConstraintViolation<CustomerMap>> violations = validator.validate(map);
assertEquals(1, violations.size());
assertEquals("Must be a valid email", violations.iterator()
.next()
.getMessage());
}
@Test
public void whenAgeTooLow_thenValidationFails() {
Customer customer = new Customer();
customer.setName("John");
customer.setAge(15);
Set<ConstraintViolation<Customer>> violations = validator.validate(customer);
assertEquals(1, violations.size());
}
@Test
public void whenAgeNull_thenValidationSucceeds() {
Customer customer = new Customer();
customer.setName("John");
Set<ConstraintViolation<Customer>> violations = validator.validate(customer);
assertEquals(0, violations.size());
}
@Test
public void whenNumberOrdersValid_thenValidationSucceeds() {
Customer customer = new Customer();<|fim▁hole|> customer.setNumberOfOrders(OptionalInt.of(1));
Set<ConstraintViolation<Customer>> violations = validator.validate(customer);
assertEquals(0, violations.size());
}
//@Test
public void whenProfileCompanyNameBlank_thenValidationFails() {
Customer customer = new Customer();
customer.setName("John");
Profile profile = new Profile();
profile.setCompanyName(" ");
customer.setProfile(profile);
Set<ConstraintViolation<Customer>> violations = validator.validate(customer);
assertEquals(1, violations.size());
}
}<|fim▁end|>
|
customer.setName("John");
|
<|file_name|>overloaded-index-in-field.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test using overloaded indexing when the "map" is stored in a
// field. This caused problems at some point.
use std::ops::Index;
struct Foo {
x: int,
y: int,
}
struct Bar {
foo: Foo
}
impl Index<int> for Foo {
type Output = int;
fn index(&self, z: &int) -> &int {
if *z == 0 {
&self.x
} else {
&self.y
}
}
}
trait Int {
fn get(self) -> int;
fn get_from_ref(&self) -> int;
fn inc(&mut self);
}
impl Int for int {
fn get(self) -> int { self }<|fim▁hole|>
fn main() {
let f = Bar { foo: Foo {
x: 1,
y: 2,
} };
assert_eq!(f.foo[1].get(), 2);
}<|fim▁end|>
|
fn get_from_ref(&self) -> int { *self }
fn inc(&mut self) { *self += 1; }
}
|
<|file_name|>OverrideImplementsAnnotationsHandler.java<|end_file_name|><|fim▁begin|>// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.generation;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.intention.AddAnnotationPsiFix;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_EXTERNAL;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_TYPE;
/**
* @author anna
*/
public interface OverrideImplementsAnnotationsHandler {
ExtensionPointName<OverrideImplementsAnnotationsHandler> EP_NAME = ExtensionPointName.create("com.intellij.overrideImplementsAnnotationsHandler");
/**
* Returns annotations which should be copied from a source to an implementation (by default, no annotations are copied).
*/
default String[] getAnnotations(@NotNull PsiFile file) {
//noinspection deprecation
return getAnnotations(file.getProject());
}
/**
* @deprecated Use {@link #getAnnotations(PsiFile)}
*/
@Deprecated
String[] getAnnotations(Project project);
@Deprecated
@NotNull
default String[] annotationsToRemove(Project project, @NotNull String fqName) {
return ArrayUtil.EMPTY_STRING_ARRAY;
}
/** Perform post processing on the annotations, such as deleting or renaming or otherwise updating annotations in the override */
default void cleanup(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
}
static void repeatAnnotationsFromSource(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
Module module = ModuleUtilCore.findModuleForPsiElement(targetClass != null ? targetClass : target);<|fim▁hole|> for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
for (String annotation : each.getAnnotations(target.getContainingFile())) {
if (moduleScope != null && facade.findClass(annotation, moduleScope) == null) continue;
int flags = CHECK_EXTERNAL | CHECK_TYPE;
if (AnnotationUtil.isAnnotated(source, annotation, flags) && !AnnotationUtil.isAnnotated(target, annotation, flags)) {
each.transferToTarget(annotation, source, target);
}
}
}
for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
each.cleanup(source, targetClass, target);
}
}
default void transferToTarget(String annotation, PsiModifierListOwner source, PsiModifierListOwner target) {
PsiModifierList modifierList = target.getModifierList();
assert modifierList != null : target;
PsiAnnotation srcAnnotation = AnnotationUtil.findAnnotation(source, annotation);
PsiNameValuePair[] valuePairs = srcAnnotation != null ? srcAnnotation.getParameterList().getAttributes() : PsiNameValuePair.EMPTY_ARRAY;
AddAnnotationPsiFix.addPhysicalAnnotation(annotation, valuePairs, modifierList);
}
}<|fim▁end|>
|
GlobalSearchScope moduleScope = module != null ? GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module) : null;
Project project = target.getProject();
JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
|
<|file_name|>native_tree_tag.hpp<|end_file_name|><|fim▁begin|>// -*- C++ -*-
// Copyright (C) 2005-2017 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the terms
// of the GNU General Public License as published by the Free Software
// Foundation; either version 3, or (at your option) any later
// version.
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL.
// Permission to use, copy, modify, sell, and distribute this software
// is hereby granted without fee, provided that the above copyright
// notice appears in all copies, and that both that copyright notice
// and this permission notice appear in supporting documentation. None
// of the above authors, nor IBM Haifa Research Laboratories, make any
// representation about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
/**
* @file native_tree_tag.hpp
* Contains a tag for native tree-based containers
*/
#ifndef PB_DS_NATIVE_TREE_DS_TAG_HPP
#define PB_DS_NATIVE_TREE_DS_TAG_HPP
namespace __gnu_pbds
{
namespace test
{
struct native_tree_tag
{ };<|fim▁hole|><|fim▁end|>
|
} // namespace test
} // namespace __gnu_pbds
#endif
|
<|file_name|>0005_remove_position_incumbent.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('nomcom', '0004_auto_20151027_0829'),<|fim▁hole|> model_name='position',
name='incumbent',
),
]<|fim▁end|>
|
]
operations = [
migrations.RemoveField(
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod block_on_serial_directory_create;
pub use block_on_serial_directory_create::block_on_serial_directory_create;
mod watch_device_directory;
pub use watch_device_directory::watch_device_directory;
use xactor::Actor;
use eyre::{
// eyre,
Result,
// Context as _,
};
#[xactor::message(result = "()")]
struct WatchDevices;
pub struct DevSerialWatcher {
pub device_manager: crate::DeviceManagerAddr,
}
impl DevSerialWatcher {
pub async fn watch_dev_serial(&self) -> Result<()> {
// Normally serial ports are created at /dev/serial/by-id/
loop {
// /dev/serial is only created when a serial port is connected to the computer
block_on_serial_directory_create().await?;
// Once /dev/serial exists watch for new device files in it
watch_device_directory(
"/dev/serial/by-id/",
None,
self.device_manager.clone(),
).await?
}
}
}
#[async_trait::async_trait]
impl Actor for DevSerialWatcher {
#[instrument(skip(self, ctx))]
async fn started(&mut self, ctx: &mut xactor::Context<Self>) -> Result<()> {
ctx.address().send(WatchDevices)?;
Ok(())
}
}
#[async_trait::async_trait]
impl xactor::Handler<WatchDevices> for DevSerialWatcher {
#[instrument(skip(self, ctx, _msg))]
async fn handle(&mut self, ctx: &mut xactor::Context<Self>, _msg: WatchDevices) -> () {
let result = self.watch_dev_serial().await;
if let Err(err) = result {
warn!("Error watching /dev/serial/by-id/ device files: {:?}", err);
ctx.stop(Some(err));
};
}
}
pub struct KlipperWatcher {
pub device_manager: crate::DeviceManagerAddr,
}
#[async_trait::async_trait]
impl Actor for KlipperWatcher {
#[instrument(skip(self, ctx))]
async fn started(&mut self, ctx: &mut xactor::Context<Self>) -> Result<()> {
ctx.address().send(WatchDevices)?;
Ok(())
}
}
#[async_trait::async_trait]
impl xactor::Handler<WatchDevices> for KlipperWatcher {
#[instrument(skip(self, ctx, _msg))]
async fn handle(&mut self, ctx: &mut xactor::Context<Self>, _msg: WatchDevices) -> () {
// The Klipper serial port is created in /tmp/printer so it needs a seperate
// watcher.
// If you are configuring multiple klipper printer (is that's even possible?)
// you MUST start each printer's path with /tmp/printer eg. /tmp/printer3
let result = watch_device_directory(
"/tmp/",
Some("printer"),
self.device_manager.clone(),<|fim▁hole|> if let Err(err) = result {
warn!("Error watching klipper device files: {:?}", err);
ctx.stop(Some(err));
};
}
}<|fim▁end|>
|
).await;
|
<|file_name|>map.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <map>
#include <stdexcept>
using namespace std;
int main(int argc, char* argv[]){
map<string, int> m;<|fim▁hole|> m["alice"] = 89;
m["billy"] = 3;
// print it out
map<string,int>::iterator i;
for(i = m.begin(); i != m.end(); i++){
cout << i->first << ": " << i->second << endl;
}
cout << "size: " << m.size() << endl << endl;
i = m.find("billy");
if(i == m.end()){
cout << "No billy!\n";
}else{
cout << i->first << ": " << i->second << endl;
}
return 0;
}<|fim▁end|>
|
m["bob"] = 56;
|
<|file_name|>bus.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import SUPERUSER_ID<|fim▁hole|>
class MailChatController(BusController):
def _default_request_uid(self):
""" For Anonymous people, they receive the access right of SUPERUSER_ID since they have NO access (auth=none)
!!! Each time a method from this controller is call, there is a check if the user (who can be anonymous and Sudo access)
can access to the resource.
"""
return request.session.uid and request.session.uid or SUPERUSER_ID
# --------------------------
# Extends BUS Controller Poll
# --------------------------
def _poll(self, dbname, channels, last, options):
if request.session.uid:
partner_id = request.env.user.partner_id.id
if partner_id:
channels = list(channels) # do not alter original list
for mail_channel in request.env['mail.channel'].search([('channel_partner_ids', 'in', [partner_id])]):
channels.append((request.db, 'mail.channel', mail_channel.id))
# personal and needaction channel
channels.append((request.db, 'res.partner', partner_id))
channels.append((request.db, 'ir.needaction', partner_id))
return super(MailChatController, self)._poll(dbname, channels, last, options)
# --------------------------
# Anonymous routes (Common Methods)
# --------------------------
@route('/mail/chat_post', type="json", auth="none")
def mail_chat_post(self, uuid, message_content, **kwargs):
request_uid = self._default_request_uid()
# find the author from the user session, which can be None
author_id = False # message_post accept 'False' author_id, but not 'None'
if request.session.uid:
author_id = request.env['res.users'].sudo().browse(request.session.uid).partner_id.id
# post a message without adding followers to the channel. email_from=False avoid to get author from email data
mail_channel = request.env["mail.channel"].sudo(request_uid).search([('uuid', '=', uuid)], limit=1)
message = mail_channel.sudo(request_uid).with_context(mail_create_nosubscribe=True).message_post(author_id=author_id, email_from=False, body=message_content, message_type='comment', subtype='mail.mt_comment', content_subtype='plaintext', **kwargs)
return message and message.id or False
@route(['/mail/chat_history'], type="json", auth="none")
def mail_chat_history(self, uuid, last_id=False, limit=20):
request_uid = self._default_request_uid()
channel = request.env["mail.channel"].sudo(request_uid).search([('uuid', '=', uuid)], limit=1)
if not channel:
return []
else:
return channel.sudo(request_uid).channel_fetch_message(last_id, limit)<|fim▁end|>
|
from odoo.http import request, route
from odoo.addons.bus.controllers.main import BusController
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pupa.scrape import Jurisdiction, Organization
from .bills import MNBillScraper
from .committees import MNCommitteeScraper
from .people import MNPersonScraper<|fim▁hole|>from .common import url_xpath
"""
Minnesota legislative data can be found at the Office of the Revisor
of Statutes:
https://www.revisor.mn.gov/
Votes:
There are not detailed vote data for Senate votes, simply yes and no counts.
Bill pages have vote counts and links to House details, so it makes more
sense to get vote data from the bill pages.
"""
class Minnesota(Jurisdiction):
division_id = "ocd-division/country:us/state:mn"
classification = "government"
name = "Minnesota"
url = "http://state.mn.us/"
check_sessions = True
scrapers = {
"bills": MNBillScraper,
"committees": MNCommitteeScraper,
"people": MNPersonScraper,
"vote_events": MNVoteScraper,
"events": MNEventScraper,
}
parties = [{'name': 'Republican'},
{'name': 'Democratic-Farmer-Labor'}]
legislative_sessions = [
{
'_scraped_name': '86th Legislature, 2009-2010',
'classification': 'primary',
'identifier': '2009-2010',
'name': '2009-2010 Regular Session'
},
{
'_scraped_name': '86th Legislature, 2010 1st Special Session',
'classification': 'special',
'identifier': '2010 1st Special Session',
'name': '2010, 1st Special Session'
},
{
'_scraped_name': '86th Legislature, 2010 2nd Special Session',
'classification': 'special',
'identifier': '2010 2nd Special Session',
'name': '2010, 2nd Special Session'
},
{
'_scraped_name': '87th Legislature, 2011-2012',
'classification': 'primary',
'identifier': '2011-2012',
'name': '2011-2012 Regular Session'
},
{
'_scraped_name': '87th Legislature, 2011 1st Special Session',
'classification': 'special',
'identifier': '2011s1',
'name': '2011, 1st Special Session'
},
{
'_scraped_name': '87th Legislature, 2012 1st Special Session',
'classification': 'special',
'identifier': '2012s1',
'name': '2012, 1st Special Session'
},
{
'_scraped_name': '88th Legislature, 2013-2014',
'classification': 'primary',
'identifier': '2013-2014',
'name': '2013-2014 Regular Session'
},
{
'_scraped_name': '88th Legislature, 2013 1st Special Session',
'classification': 'special',
'identifier': '2013s1',
'name': '2013, 1st Special Session'
},
{
'_scraped_name': '89th Legislature, 2015-2016',
'classification': 'primary',
'identifier': '2015-2016',
'name': '2015-2016 Regular Session'
},
{
'_scraped_name': '89th Legislature, 2015 1st Special Session',
'classification': 'special',
'identifier': '2015s1',
'name': '2015, 1st Special Session'
},
{
'_scraped_name': '90th Legislature, 2017-2018',
'classification': 'primary',
'identifier': '2017-2018',
'name': '2017-2018 Regular Session'
},
]
ignored_scraped_sessions = [
'85th Legislature, 2007-2008',
'85th Legislature, 2007 1st Special Session',
'84th Legislature, 2005-2006',
'84th Legislature, 2005 1st Special Session',
'83rd Legislature, 2003-2004',
'83rd Legislature, 2003 1st Special Session',
'82nd Legislature, 2001-2002',
'82nd Legislature, 2002 1st Special Session',
'82nd Legislature, 2001 1st Special Session',
'81st Legislature, 1999-2000',
'80th Legislature, 1997-1998',
'80th Legislature, 1998 1st Special Session',
'80th Legislature, 1997 3rd Special Session',
'80th Legislature, 1997 2nd Special Session',
'80th Legislature, 1997 1st Special Session',
'79th Legislature, 1995-1996',
'79th Legislature, 1995 1st Special Session',
'89th Legislature, 2015-2016',
]
def get_organizations(self):
legis = Organization('Minnesota Legislature', classification='legislature')
upper = Organization('Minnesota Senate', classification='upper',
parent_id=legis._id)
lower = Organization('Minnesota House of Representatives',
classification='lower', parent_id=legis._id)
for n in range(1, 68):
upper.add_post(label=str(n), role='Senator',
division_id='ocd-division/country:us/state:mn/sldu:{}'.format(n))
lower.add_post(label=str(n) + 'A', role='Representative',
division_id='ocd-division/country:us/state:mn/sldl:{}a'.format(n))
lower.add_post(label=str(n) + 'B', role='Representative',
division_id='ocd-division/country:us/state:mn/sldl:{}b'.format(n))
yield legis
yield upper
yield lower
def get_session_list(self):
return url_xpath('https://www.revisor.mn.gov/revisor/pages/'
'search_status/status_search.php?body=House',
'//select[@name="session"]/option/text()')<|fim▁end|>
|
from .vote_events import MNVoteScraper
from .events import MNEventScraper
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
from safeurl.core import getRealURL
class MainTestCase(TestCase):
def test_decodeUrl(self):
self.assertEqual(getRealURL('http://bit.ly/1gaiW96'),
'https://www.yandex.ru/')
def test_decodeUrlArray(self):
self.assertEqual(
getRealURL(['http://bit.ly/1gaiW96', 'http://bit.ly/1gaiW96']),
['https://www.yandex.ru/', 'https://www.yandex.ru/'])
def test_errorDecodeUrl(self):
self.assertEqual(getRealURL('http://bit.ly.wrong/wrong'),
'Failed')
def test_errorDecodeUrlArray(self):
self.assertEqual(
getRealURL(
['http://bit.ly.wrong/wrong', 'http://bit.ly.wrong/wrong']),
['Failed', 'Failed'])
def test_errorWithOkDecodeUrlArray(self):
self.assertEqual(
getRealURL(['http://bit.ly.wrong/wrong', 'http://bit.ly/1gaiW96',
'http://bit.ly.wrong/wrong']),<|fim▁hole|><|fim▁end|>
|
['Failed', 'https://www.yandex.ru/', 'Failed'])
|
<|file_name|>hamster-cli.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# - coding: utf-8 -
# Copyright (C) 2010 Matías Ribecky <matias at mribecky.com.ar>
# Copyright (C) 2010-2012 Toms Bauģis <[email protected]>
# Copyright (C) 2012 Ted Smith <tedks at cs.umd.edu>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
'''A script to control the applet from the command line.'''
import sys, os
import argparse
import re
import gi
gi.require_version('Gdk', '3.0') # noqa: E402
gi.require_version('Gtk', '3.0') # noqa: E402
from gi.repository import GLib as glib
from gi.repository import Gdk as gdk
from gi.repository import Gtk as gtk
from gi.repository import Gio as gio
from gi.repository import GLib as glib
import hamster
from hamster import client, reports
from hamster import logger as hamster_logger
from hamster.about import About
from hamster.edit_activity import CustomFactController
from hamster.overview import Overview
from hamster.preferences import PreferencesEditor
from hamster.lib import default_logger, stuff
from hamster.lib import datetime as dt
from hamster.lib.fact import Fact
logger = default_logger(__file__)
def word_wrap(line, max_len):
"""primitive word wrapper"""
lines = []
cur_line, cur_len = "", 0
for word in line.split():
if len("%s %s" % (cur_line, word)) < max_len:
cur_line = ("%s %s" % (cur_line, word)).strip()
else:
if cur_line:
lines.append(cur_line)
cur_line = word
if cur_line:
lines.append(cur_line)
return lines
def fact_dict(fact_data, with_date):
fact = {}
if with_date:
fmt = '%Y-%m-%d %H:%M'
else:
fmt = '%H:%M'
fact['start'] = fact_data.start_time.strftime(fmt)
if fact_data.end_time:
fact['end'] = fact_data.end_time.strftime(fmt)
else:
end_date = dt.datetime.now()
fact['end'] = ''
fact['duration'] = fact_data.delta.format()
fact['activity'] = fact_data.activity
fact['category'] = fact_data.category
if fact_data.tags:
fact['tags'] = ' '.join('#%s' % tag for tag in fact_data.tags)
else:
fact['tags'] = ''
fact['description'] = fact_data.description
return fact
class Hamster(gtk.Application):
"""Hamster gui.
Actions should eventually be accessible via Gio.DBusActionGroup
with the 'org.gnome.Hamster.GUI' id.
but that is still experimental, the actions API is subject to change.
Discussion with "external" developers welcome !
The separate dbus org.gnome.Hamster.WindowServer
is still the stable recommended way to show windows for now.
"""
def __init__(self):
# inactivity_timeout: How long (ms) the service should stay alive
# after all windows have been closed.
gtk.Application.__init__(self,
application_id="org.gnome.Hamster.GUI",
#inactivity_timeout=10000,
register_session=True)
self.about_controller = None # 'about' window controller
self.fact_controller = None # fact window controller
self.overview_controller = None # overview window controller
self.preferences_controller = None # settings window controller
self.connect("startup", self.on_startup)
self.connect("activate", self.on_activate)
# we need them before the startup phase
# so register/activate_action work before the app is ran.
# cf. https://gitlab.gnome.org/GNOME/glib/blob/master/gio/tests/gapplication-example-actions.c
self.add_actions()
def add_actions(self):
# most actions have no parameters
# for type "i", use Variant.new_int32() and .get_int32() to pack/unpack
for name in ("about", "add", "clone", "edit", "overview", "preferences"):
data_type = glib.VariantType("i") if name in ("edit", "clone") else None
action = gio.SimpleAction.new(name, data_type)
action.connect("activate", self.on_activate_window)
self.add_action(action)
action = gio.SimpleAction.new("quit", None)
action.connect("activate", self.on_activate_quit)
self.add_action(action)
def on_activate(self, data=None):
logger.debug("activate")
if not self.get_windows():
self.activate_action("overview")
def on_activate_window(self, action=None, data=None):<|fim▁hole|> self.on_activate_quit()
def on_startup(self, data=None):
logger.debug("startup")
# Must be the same as application_id. Won't be required with gtk4.
glib.set_prgname(self.get_application_id())
# localized name, but let's keep it simple.
glib.set_application_name("Hamster")
def _open_window(self, name, data=None):
logger.debug("opening '{}'".format(name))
if name == "about":
if not self.about_controller:
# silence warning "GtkDialog mapped without a transient parent"
# https://stackoverflow.com/a/38408127/3565696
_dummy = gtk.Window()
self.about_controller = About(parent=_dummy)
logger.debug("new About")
controller = self.about_controller
elif name in ("add", "clone", "edit"):
if self.fact_controller:
# Something is already going on, with other arguments, present it.
# Or should we just discard the forgotten one ?
logger.warning("Fact controller already active. Please close first.")
else:
fact_id = data.get_int32() if data else None
self.fact_controller = CustomFactController(name, fact_id=fact_id)
logger.debug("new CustomFactController")
controller = self.fact_controller
elif name == "overview":
if not self.overview_controller:
self.overview_controller = Overview()
logger.debug("new Overview")
controller = self.overview_controller
elif name == "preferences":
if not self.preferences_controller:
self.preferences_controller = PreferencesEditor()
logger.debug("new PreferencesEditor")
controller = self.preferences_controller
window = controller.window
if window not in self.get_windows():
self.add_window(window)
logger.debug("window added")
# Essential for positioning on wayland.
# This should also select the correct window type if unset yet.
# https://specifications.freedesktop.org/wm-spec/wm-spec-1.3.html
if name != "overview" and self.overview_controller:
window.set_transient_for(self.overview_controller.window)
# so the dialog appears on top of the transient-for:
window.set_type_hint(gdk.WindowTypeHint.DIALOG)
else:
# toplevel
window.set_transient_for(None)
controller.present()
logger.debug("window presented")
def present_fact_controller(self, action, fact_id=0):
"""Present the fact controller window to add, clone or edit a fact.
Args:
action (str): "add", "clone" or "edit"
"""
assert action in ("add", "clone", "edit")
if action in ("clone", "edit"):
action_data = glib.Variant.new_int32(int(fact_id))
else:
action_data = None
# always open dialogs through actions,
# both for consistency, and to reduce the paths to test.
app.activate_action(action, action_data)
class HamsterCli(object):
"""Command line interface."""
def __init__(self):
self.storage = client.Storage()
def assist(self, *args):
assist_command = args[0] if args else ""
if assist_command == "start":
hamster_client._activities(sys.argv[-1])
elif assist_command == "export":
formats = "html tsv xml ical".split()
chosen = sys.argv[-1]
formats = [f for f in formats if not chosen or f.startswith(chosen)]
print("\n".join(formats))
def toggle(self):
self.storage.toggle()
def start(self, *args):
'''Start a new activity.'''
if not args:
print("Error: please specify activity")
return 0
fact = Fact.parse(" ".join(args), range_pos="tail")
if fact.start_time is None:
fact.start_time = dt.datetime.now()
self.storage.check_fact(fact, default_day=dt.hday.today())
id_ = self.storage.add_fact(fact)
return id_
def stop(self, *args):
'''Stop tracking the current activity.'''
self.storage.stop_tracking()
def export(self, *args):
args = args or []
export_format, start_time, end_time = "html", None, None
if args:
export_format = args[0]
(start_time, end_time), __ = dt.Range.parse(" ".join(args[1:]))
start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time())
end_time = end_time or start_time.replace(hour=23, minute=59, second=59)
facts = self.storage.get_facts(start_time, end_time)
writer = reports.simple(facts, start_time.date(), end_time.date(), export_format)
def _activities(self, search=""):
'''Print the names of all the activities.'''
if "@" in search:
activity, category = search.split("@")
for cat in self.storage.get_categories():
if not category or cat['name'].lower().startswith(category.lower()):
print("{}@{}".format(activity, cat['name']))
else:
for activity in self.storage.get_activities(search):
print(activity['name'])
if activity['category']:
print("{}@{}".format(activity['name'], activity['category']))
def activities(self, *args):
'''Print the names of all the activities.'''
search = args[0] if args else ""
for activity in self.storage.get_activities(search):
print("{}@{}".format(activity['name'], activity['category']))
def categories(self, *args):
'''Print the names of all the categories.'''
for category in self.storage.get_categories():
print(category['name'])
def list(self, *times):
"""list facts within a date range"""
(start_time, end_time), __ = dt.Range.parse(" ".join(times or []))
start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time())
end_time = end_time or start_time.replace(hour=23, minute=59, second=59)
self._list(start_time, end_time)
def current(self, *args):
"""prints current activity. kinda minimal right now"""
facts = self.storage.get_todays_facts()
if facts and not facts[-1].end_time:
print("{} {}".format(str(facts[-1]).strip(),
facts[-1].delta.format(fmt="HH:MM")))
else:
print((_("No activity")))
def search(self, *args):
"""search for activities by name and optionally within a date range"""
args = args or []
search = ""
if args:
search = args[0]
(start_time, end_time), __ = dt.Range.parse(" ".join(args[1:]))
start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time())
end_time = end_time or start_time.replace(hour=23, minute=59, second=59)
self._list(start_time, end_time, search)
def _list(self, start_time, end_time, search=""):
"""Print a listing of activities"""
facts = self.storage.get_facts(start_time, end_time, search)
headers = {'activity': _("Activity"),
'category': _("Category"),
'tags': _("Tags"),
'description': _("Description"),
'start': _("Start"),
'end': _("End"),
'duration': _("Duration")}
# print date if it is not the same day
print_with_date = start_time.date() != end_time.date()
cols = 'start', 'end', 'duration', 'activity', 'category'
widths = dict([(col, len(headers[col])) for col in cols])
for fact in facts:
fact = fact_dict(fact, print_with_date)
for col in cols:
widths[col] = max(widths[col], len(fact[col]))
cols = ["{{{col}: <{len}}}".format(col=col, len=widths[col]) for col in cols]
fact_line = " | ".join(cols)
row_width = sum(val + 3 for val in list(widths.values()))
print()
print(fact_line.format(**headers))
print("-" * min(row_width, 80))
by_cat = {}
for fact in facts:
cat = fact.category or _("Unsorted")
by_cat.setdefault(cat, dt.timedelta(0))
by_cat[cat] += fact.delta
pretty_fact = fact_dict(fact, print_with_date)
print(fact_line.format(**pretty_fact))
if pretty_fact['description']:
for line in word_wrap(pretty_fact['description'], 76):
print(" {}".format(line))
if pretty_fact['tags']:
for line in word_wrap(pretty_fact['tags'], 76):
print(" {}".format(line))
print("-" * min(row_width, 80))
cats = []
total_duration = dt.timedelta()
for cat, duration in sorted(by_cat.items(), key=lambda x: x[1], reverse=True):
cats.append("{}: {}".format(cat, duration.format()))
total_duration += duration
for line in word_wrap(", ".join(cats), 80):
print(line)
print("Total: ", total_duration.format())
print()
def version(self):
print(hamster.__version__)
if __name__ == '__main__':
from hamster.lib import i18n
i18n.setup_i18n()
usage = _(
"""
Actions:
* add [activity [start-time [end-time]]]: Add an activity
* stop: Stop tracking current activity.
* list [start-date [end-date]]: List activities
* search [terms] [start-date [end-date]]: List activities matching a search
term
* export [html|tsv|ical|xml] [start-date [end-date]]: Export activities with
the specified format
* current: Print current activity
* activities: List all the activities names, one per line.
* categories: List all the categories names, one per line.
* overview / preferences / add / about: launch specific window
* version: Show the Hamster version
Time formats:
* 'YYYY-MM-DD hh:mm': If start-date is missing, it will default to today.
If end-date is missing, it will default to start-date.
* '-minutes': Relative time in minutes from the current date and time.
Note:
* For list/search/export a "hamster day" starts at the time set in the
      preferences (default 05:00) and ends one minute before that time on the next day.
Activities are reported for each "hamster day" in the interval.
Example usage:
hamster start bananas -20
start activity 'bananas' with start time 20 minutes ago
hamster search pancakes 2012-08-01 2012-08-30
        look for an activity matching terms 'pancakes' between 1st and 30th
August 2012. Will check against activity, category, description and tags
""")
hamster_client = HamsterCli()
app = Hamster()
logger.debug("app instanciated")
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL) # gtk3 screws up ctrl+c
parser = argparse.ArgumentParser(
description="Time tracking utility",
epilog=usage,
formatter_class=argparse.RawDescriptionHelpFormatter)
# cf. https://stackoverflow.com/a/28611921/3565696
parser.add_argument("--log", dest="log_level",
choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),
default='WARNING',
help="Set the logging level (default: %(default)s)")
parser.add_argument("action", nargs="?", default="overview")
parser.add_argument('action_args', nargs=argparse.REMAINDER, default=[])
args, unknown_args = parser.parse_known_args()
# logger for current script
logger.setLevel(args.log_level)
# hamster_logger for the rest
hamster_logger.setLevel(args.log_level)
if not hamster.installed:
logger.info("Running in devel mode")
if args.action in ("start", "track"):
action = "add" # alias
elif args.action == "prefs":
# for backward compatibility
action = "preferences"
else:
action = args.action
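    # Window actions are routed through the Gtk application (an "add" with
    # arguments is handled directly); any other known action is dispatched
    # to the matching HamsterCli method.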
if action in ("about", "add", "edit", "overview", "preferences"):
if action == "add" and args.action_args:
assert not unknown_args, "unknown options: {}".format(unknown_args)
# directly add fact from arguments
id_ = hamster_client.start(*args.action_args)
assert id_ > 0, "failed to add fact"
sys.exit(0)
else:
app.register()
if action == "edit":
assert len(args.action_args) == 1, (
"edit requires exactly one argument, got {}"
.format(args.action_args))
id_ = int(args.action_args[0])
assert id_ > 0, "received non-positive id : {}".format(id_)
action_data = glib.Variant.new_int32(id_)
else:
action_data = None
app.activate_action(action, action_data)
run_args = [sys.argv[0]] + unknown_args
logger.debug("run {}".format(run_args))
status = app.run(run_args)
logger.debug("app exited")
sys.exit(status)
elif hasattr(hamster_client, action):
getattr(hamster_client, action)(*args.action_args)
else:
sys.exit(usage % {'prog': sys.argv[0]})<|fim▁end|>
|
self._open_window(action.get_name(), data)
def on_activate_quit(self, data=None):
|
<|file_name|>sample.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# sample module
from jira.client import JIRA
def main():
jira = JIRA()
JIRA(options={'server': 'http://localhost:8100'})<|fim▁hole|> print projects
for project in projects:
print project.key
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()<|fim▁end|>
|
projects = jira.projects()
|
<|file_name|>DeleteAuthenticationProfileRequest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DeleteAuthenticationProfile"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteAuthenticationProfileRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the authentication profile to delete.
* </p><|fim▁hole|> * <p>
* The name of the authentication profile to delete.
* </p>
*
* @param authenticationProfileName
* The name of the authentication profile to delete.
*/
public void setAuthenticationProfileName(String authenticationProfileName) {
this.authenticationProfileName = authenticationProfileName;
}
/**
* <p>
* The name of the authentication profile to delete.
* </p>
*
* @return The name of the authentication profile to delete.
*/
public String getAuthenticationProfileName() {
return this.authenticationProfileName;
}
/**
* <p>
* The name of the authentication profile to delete.
* </p>
*
* @param authenticationProfileName
* The name of the authentication profile to delete.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DeleteAuthenticationProfileRequest withAuthenticationProfileName(String authenticationProfileName) {
setAuthenticationProfileName(authenticationProfileName);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAuthenticationProfileName() != null)
sb.append("AuthenticationProfileName: ").append(getAuthenticationProfileName());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DeleteAuthenticationProfileRequest == false)
return false;
DeleteAuthenticationProfileRequest other = (DeleteAuthenticationProfileRequest) obj;
if (other.getAuthenticationProfileName() == null ^ this.getAuthenticationProfileName() == null)
return false;
if (other.getAuthenticationProfileName() != null && other.getAuthenticationProfileName().equals(this.getAuthenticationProfileName()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAuthenticationProfileName() == null) ? 0 : getAuthenticationProfileName().hashCode());
return hashCode;
}
@Override
public DeleteAuthenticationProfileRequest clone() {
return (DeleteAuthenticationProfileRequest) super.clone();
}
}<|fim▁end|>
|
*/
private String authenticationProfileName;
/**
|
<|file_name|>test_run.py<|end_file_name|><|fim▁begin|>import tempfile
import shutil
import sys
from unittest import mock
import pytest
from tools.wpt import run
from tools import localpaths # noqa: F401
from wptrunner.browsers import product_list
<|fim▁hole|>
class Virtualenv(virtualenv.Virtualenv):
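        # Stub that turns environment creation, activation and installation
        # into no-ops so setup_wptrunner can be exercised without building a
        # real virtualenv.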
def __init__(self):
self.path = tempfile.mkdtemp()
self.skip_virtualenv_setup = False
def create(self):
return
def activate(self):
return
def start(self):
return
def install(self, *requirements):
return
def install_requirements(self, requirements_path):
return
venv = Virtualenv()
yield venv
shutil.rmtree(venv.path)
@pytest.fixture(scope="module")
def logger():
run.setup_logging({})
@pytest.mark.parametrize("platform", ["Windows", "Linux", "Darwin"])
def test_check_environ_fail(platform):
m_open = mock.mock_open(read_data=b"")
with mock.patch.object(run, "open", m_open):
with mock.patch.object(run.platform, "uname",
return_value=(platform, "", "", "", "", "")):
with pytest.raises(run.WptrunError) as excinfo:
run.check_environ("foo")
assert "wpt make-hosts-file" in str(excinfo.value)
@pytest.mark.parametrize("product", product_list)
def test_setup_wptrunner(venv, logger, product):
if product == "firefox_android":
pytest.skip("Android emulator doesn't work on docker")
parser = run.create_parser()
kwargs = vars(parser.parse_args(["--channel=nightly", product]))
kwargs["prompt"] = False
# Hack to get a real existing path
kwargs["binary"] = sys.argv[0]
kwargs["webdriver_binary"] = sys.argv[0]
if kwargs["product"] == "sauce":
kwargs["sauce_browser"] = "firefox"
kwargs["sauce_version"] = "63"
run.setup_wptrunner(venv, **kwargs)<|fim▁end|>
|
@pytest.fixture(scope="module")
def venv():
from tools.wpt import virtualenv
|
<|file_name|>test_job_submission.py<|end_file_name|><|fim▁begin|>import logging
import os
import sys
import time
import json
import jsonschema
import pprint
import pytest
import requests
from ray._private.test_utils import (
format_web_url,
wait_for_condition,
wait_until_server_available,
)
from ray.dashboard import dashboard
from ray.dashboard.tests.conftest import * # noqa
from ray.job_submission import JobSubmissionClient
logger = logging.getLogger(__name__)
def _get_snapshot(address: str):
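    """Fetch the dashboard's /api/snapshot payload and validate it against the bundled JSON schema."""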
response = requests.get(f"{address}/api/snapshot")
response.raise_for_status()
data = response.json()
schema_path = os.path.join(
os.path.dirname(dashboard.__file__), "modules/snapshot/snapshot_schema.json"
)
pprint.pprint(data)
jsonschema.validate(instance=data, schema=json.load(open(schema_path)))
return data
def test_successful_job_status(
ray_start_with_dashboard, disable_aiohttp_cache, enable_test_module
):
address = ray_start_with_dashboard.address_info["webui_url"]
assert wait_until_server_available(address)
address = format_web_url(address)
job_sleep_time_s = 5
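    # Inline "python -c" entrypoint that initialises Ray and sleeps for a few
    # seconds before exiting successfully.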
entrypoint_cmd = (
'python -c"'
"import ray;"
"ray.init();"
"import time;"
f"time.sleep({job_sleep_time_s});"
'"'
)
client = JobSubmissionClient(address)
start_time_s = int(time.time())
runtime_env = {"env_vars": {"RAY_TEST_123": "123"}}
metadata = {"ray_test_456": "456"}
job_id = client.submit_job(
entrypoint=entrypoint_cmd, metadata=metadata, runtime_env=runtime_env
)
def wait_for_job_to_succeed():
data = _get_snapshot(address)
legacy_job_succeeded = False
job_succeeded = False
# Test legacy job snapshot (one driver per job).
for job_entry in data["data"]["snapshot"]["jobs"].values():
if job_entry["status"] is not None:
assert job_entry["config"]["metadata"]["jobSubmissionId"] == job_id
assert job_entry["status"] in {"PENDING", "RUNNING", "SUCCEEDED"}
assert job_entry["statusMessage"] is not None
legacy_job_succeeded = job_entry["status"] == "SUCCEEDED"
# Test new jobs snapshot (0 to N drivers per job).
for job_submission_id, entry in data["data"]["snapshot"][
"jobSubmission"
].items():
if entry["status"] is not None:
assert entry["status"] in {"PENDING", "RUNNING", "SUCCEEDED"}
assert entry["message"] is not None
# TODO(architkulkarni): Disable automatic camelcase.
assert entry["runtimeEnv"] == {"envVars": {"RAYTest123": "123"}}
assert entry["metadata"] == {"rayTest456": "456"}
assert entry["errorType"] is None
assert abs(entry["startTime"] - start_time_s) <= 2
if entry["status"] == "SUCCEEDED":
job_succeeded = True
assert entry["endTime"] >= entry["startTime"] + job_sleep_time_s
return legacy_job_succeeded and job_succeeded
wait_for_condition(wait_for_job_to_succeed, timeout=30)
def test_failed_job_status(
ray_start_with_dashboard, disable_aiohttp_cache, enable_test_module
):
address = ray_start_with_dashboard.address_info["webui_url"]
assert wait_until_server_available(address)
address = format_web_url(address)
job_sleep_time_s = 5
entrypoint_cmd = (
'python -c"'
"import ray;"
"ray.init();"
"import time;"
f"time.sleep({job_sleep_time_s});"
"import sys;"
"sys.exit(1);"
'"'
)
start_time_s = int(time.time())
client = JobSubmissionClient(address)
runtime_env = {"env_vars": {"RAY_TEST_456": "456"}}
metadata = {"ray_test_789": "789"}
job_id = client.submit_job(
entrypoint=entrypoint_cmd, metadata=metadata, runtime_env=runtime_env
)
def wait_for_job_to_fail():
data = _get_snapshot(address)
legacy_job_failed = False
job_failed = False
# Test legacy job snapshot (one driver per job).
for job_entry in data["data"]["snapshot"]["jobs"].values():
if job_entry["status"] is not None:
assert job_entry["config"]["metadata"]["jobSubmissionId"] == job_id
assert job_entry["status"] in {"PENDING", "RUNNING", "FAILED"}
assert job_entry["statusMessage"] is not None
legacy_job_failed = job_entry["status"] == "FAILED"
# Test new jobs snapshot (0 to N drivers per job).
for job_submission_id, entry in data["data"]["snapshot"][
"jobSubmission"
].items():
if entry["status"] is not None:
assert entry["status"] in {"PENDING", "RUNNING", "FAILED"}
assert entry["message"] is not None<|fim▁hole|> assert entry["metadata"] == {"rayTest789": "789"}
assert entry["errorType"] is None
assert abs(entry["startTime"] - start_time_s) <= 2
if entry["status"] == "FAILED":
job_failed = True
assert entry["endTime"] >= entry["startTime"] + job_sleep_time_s
return legacy_job_failed and job_failed
wait_for_condition(wait_for_job_to_fail, timeout=25)
if __name__ == "__main__":
sys.exit(pytest.main(["-v", __file__]))<|fim▁end|>
|
# TODO(architkulkarni): Disable automatic camelcase.
assert entry["runtimeEnv"] == {"envVars": {"RAYTest456": "456"}}
|
<|file_name|>struct-namespace.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-gdb
// compile-flags:-g
// min-lldb-version: 310
// Check that structs get placed in the correct namespace
// lldb-command:run
// lldb-command:p struct1
// lldbg-check:(struct_namespace::Struct1) $0 = [...]
// lldbr-check:(struct_namespace::Struct1) struct1 = Struct1 { a: 0, b: 1 }
// lldb-command:p struct2
// lldbg-check:(struct_namespace::Struct2) $1 = [...]
// lldbr-check:(struct_namespace::Struct2) struct2 = { = 2 }
// lldb-command:p mod1_struct1
// lldbg-check:(struct_namespace::mod1::Struct1) $2 = [...]
// lldbr-check:(struct_namespace::mod1::Struct1) mod1_struct1 = Struct1 { a: 3, b: 4 }
// lldb-command:p mod1_struct2
// lldbg-check:(struct_namespace::mod1::Struct2) $3 = [...]
// lldbr-check:(struct_namespace::mod1::Struct2) mod1_struct2 = { = 5 }
#![allow(unused_variables)]
#![allow(dead_code)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
struct Struct1 {
a: u32,
b: u64,
}
struct Struct2(u32);
mod mod1 {
pub struct Struct1 {
pub a: u32,
pub b: u64,
}
pub struct Struct2(pub u32);
}
fn main() {
let struct1 = Struct1 {
a: 0,
b: 1,
};
let struct2 = Struct2(2);
let mod1_struct1 = mod1::Struct1 {
a: 3,
b: 4,
};
let mod1_struct2 = mod1::Struct2(5);
zzz(); // #break
}
#[inline(never)]
fn zzz() {()}<|fim▁end|>
| |
<|file_name|>rdf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
pygments.lexers.rdf
~~~~~~~~~~~~~~~~~~~
Lexers for semantic web and RDF query languages and markup.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
Whitespace, Name, Literal, Comment, Text
__all__ = ['SparqlLexer', 'TurtleLexer']
class SparqlLexer(RegexLexer):
"""
Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language.
.. versionadded:: 2.0
"""
name = 'SPARQL'
aliases = ['sparql']
filenames = ['*.rq', '*.sparql']
mimetypes = ['application/sparql-query']
# character group definitions ::
PN_CHARS_BASE_GRP = (u'a-zA-Z'
u'\u00c0-\u00d6'
u'\u00d8-\u00f6'
u'\u00f8-\u02ff'
u'\u0370-\u037d'
u'\u037f-\u1fff'
u'\u200c-\u200d'
u'\u2070-\u218f'
u'\u2c00-\u2fef'
u'\u3001-\ud7ff'
u'\uf900-\ufdcf'
u'\ufdf0-\ufffd')
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
PN_CHARS_GRP = (PN_CHARS_U_GRP +
r'\-' +
r'0-9' +
u'\u00b7' +
u'\u0300-\u036f' +
u'\u203f-\u2040')
HEX_GRP = '0-9A-Fa-f'
PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
# terminal productions ::
PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
PN_CHARS = '[' + PN_CHARS_GRP + ']'
HEX = '[' + HEX_GRP + ']'
PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
'.]*' + PN_CHARS + ')?'
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
VARNAME = u'[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
u'0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
PERCENT = '%' + HEX + HEX
PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
EXPONENT = r'[eE][+-]?\d+'
# Lexer token definitions ::
tokens = {
'root': [
(r'\s+', Text),
# keywords ::
(r'((?i)select|construct|describe|ask|where|filter|group\s+by|minus|'
r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
r'offset|bindings|load|clear|drop|create|add|move|copy|'
r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
r'using\s+named|using|graph|default|named|all|optional|service|'
r'silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
(r'(a)\b', Keyword),
# IRIs ::
('(' + IRIREF + ')', Name.Label),
# blank nodes ::
('(' + BLANK_NODE_LABEL + ')', Name.Label),
# # variables ::
('[?$]' + VARNAME, Name.Variable),
# prefixed names ::
(r'(' + PN_PREFIX + ')?(\:)(' + PN_LOCAL + ')?',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
# function names ::
(r'((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
r'count|sum|min|max|avg|sample|group_concat|separator)\b',
Name.Function),
# boolean literals ::
(r'(true|false)', Keyword.Constant),
# double literals ::
(r'[+\-]?(\d+\.\d*' + EXPONENT + '|\.?\d+' + EXPONENT + ')', Number.Float),
# decimal literals ::
(r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
# integer literals ::
(r'[+\-]?\d+', Number.Integer),
# operators ::
(r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
# punctuation characters ::
(r'[(){}.;,:^\[\]]', Punctuation),
# line comments ::
(r'#[^\n]*', Comment),
# strings ::
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String.Escape, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(r'u' + HEX + '{4}', String.Escape, '#pop'),
(r'U' + HEX + '{8}', String.Escape, '#pop'),<|fim▁hole|> bygroups(Operator, Name.Function), '#pop:2'),
(r'\^\^', Operator, '#pop:2'),
default('#pop:2'),
],
}
class TurtleLexer(RegexLexer):
"""
Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
.. versionadded:: 2.1
"""
name = 'Turtle'
aliases = ['turtle']
filenames = ['*.ttl']
mimetypes = ['text/turtle', 'application/x-turtle']
flags = re.IGNORECASE
patterns = {
'PNAME_NS': r'((?:[a-z][\w-]*)?\:)', # Simplified character range
'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
}
# PNAME_NS PN_LOCAL (with simplified character range)
patterns['PrefixedName'] = r'%(PNAME_NS)s([a-z][\w-]*)' % patterns
tokens = {
'root': [
(r'\s+', Whitespace),
# Base / prefix
(r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
Punctuation)),
(r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
Name.Variable, Whitespace, Punctuation)),
# The shorthand predicate 'a'
(r'(?<=\s)a(?=\s)', Keyword.Type),
# IRIREF
(r'%(IRIREF)s' % patterns, Name.Variable),
# PrefixedName
(r'%(PrefixedName)s' % patterns,
bygroups(Name.Namespace, Name.Tag)),
# Comment
(r'#[^\n]+', Comment),
(r'\b(true|false)\b', Literal),
(r'[+\-]?\d*\.\d+', Number.Float),
(r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
(r'[+\-]?\d+', Number.Integer),
(r'[\[\](){}.;,:^]', Punctuation),
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(r'.', String, '#pop'),
],
'end-of-string': [
(r'(@)([a-z]+(:?-[a-z0-9]+)*)',
bygroups(Operator, Generic.Emph), '#pop:2'),
(r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
(r'(\^\^)%(PrefixedName)s' % patterns,
bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'),
default('#pop:2'),
],
}<|fim▁end|>
|
(r'.', String.Escape, '#pop'),
],
'end-of-string': [
(r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
|
<|file_name|>ContextAware.java<|end_file_name|><|fim▁begin|>package main.origo.core.actions;
import main.origo.core.event.NodeContext;
import play.mvc.Action;
import play.mvc.Http;
import play.mvc.Result;
import play.mvc.With;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@With(ContextAware.ContextAction.class)
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface ContextAware {
public static class ContextAction extends Action.Simple {
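        // Wraps the delegated action so a fresh NodeContext is available for
        // the duration of the request and is always cleared afterwards, even
        // if the delegate throws.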
@Override
public Result call(Http.Context context) throws Throwable {
try {
NodeContext.set();
return delegate.call(context);
} finally {
NodeContext.clear();<|fim▁hole|> }
}
}<|fim▁end|>
|
}
|
<|file_name|>20180515155000-app-features.js<|end_file_name|><|fim▁begin|>'use strict';
const migrate = require('../scripts/migrate-sql');
<|fim▁hole|><|fim▁end|>
|
exports.up = db => migrate.migrate(db, '20180515155000-app-features.sql');
|
<|file_name|>basic_definitions.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module with basic entity definitions for testing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import with_statement # An extra future import for testing.
def simple_function(x):
"""Docstring."""
return x # comment
def nested_functions(x):
"""Docstring."""<|fim▁hole|> return y
return inner_fn(x)
def function_with_print():
print('foo')
simple_lambda = lambda: None
class SimpleClass(object):
def simple_method(self):
return self
def method_with_print(self):
print('foo')
def function_with_multiline_call(x):
"""Docstring."""
return range(
x,
x + 1,
)
def basic_decorator(f):
return f
@basic_decorator
@basic_decorator
def decorated_function(x):
if x > 0:
return 1
return 2<|fim▁end|>
|
def inner_fn(y):
|
<|file_name|>inheritedClassAttrAssignmentAndOwnWithAttrAndInheritedSlots.py<|end_file_name|><|fim▁begin|>class B(object):
attr = 'baz'
__slots__ = ['f', 'b']
class C(B):
__slots__ = ['attr', 'bar']
C.attr = 'spam'
print(C.attr)
c = C()
<warning descr="'C' object attribute 'attr' is read-only">c.attr</warning> = 'spam'<|fim▁hole|><|fim▁end|>
|
print(c.attr)
|
<|file_name|>basic.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import urllib2
from BeautifulSoup import BeautifulSoup
# Download the HTML
request = urllib2.Request('http://www.uci.edu')
response = urllib2.urlopen(request)
print '\r\n\r\n'
# Verify that everything went ok.
# Error codes: 200 == good, 404, 500 == bad
print 'The error code is:', response.code
print '\r\n\r\n'
html = response.read()<|fim▁hole|>dom = BeautifulSoup(html)
# Extract out the <div> tag containing our news.
news_tag = dom.find('div', {'id': 'news'})
# See what the extracted HTML looks like.
print 'The extracted news div HTML looks like:'
print '===================================='
print news_tag
print '\r\n\r\n'
# Further extract out a list of the actual news titles.
news_li_tags = news_tag.findAll('li')
titles = [tag.text for tag in news_li_tags]
links = [tag.a['href'] for tag in news_li_tags]
print 'The top news titles on www.uci.edu are currently:'
print '===================================='
for title in titles:
print title
print 'The top news links on www.uci.edu are currently:'
print '===================================='
for link in links:
print link
print '\r\n\r\n'<|fim▁end|>
|
# Parse the HTML into a dom object via our BS library.
|
<|file_name|>family_filter.py<|end_file_name|><|fim▁begin|>from .sample_filter import SampleFilter, GtFilter
from .sv_gt_filter import SvGtFilter
import logging
from collections import OrderedDict, defaultdict
class FamilyFilter(object):
'''
Determine whether variants/alleles fit given inheritance
patterns for families.
'''
def __init__(self, ped, vcf, infer_inheritance=True, g2p=None,
check_g2p_consequence=None, force_inheritance=None,
logging_level=logging.WARNING):
'''
Initialize with Family object from ped_file.py and a
VcfReader object from vcf_reader.py. You may also specify an
inheritance pattern (either 'recessive' or 'dominant'). If
inheritance_pattern is not specified an attempt is made to
infer an appropriate inheritance pattern based on the family
structure and affecteds.
Args:
ped: A PedFile object from ped_file.py. Must contain
at least one affected individual.
vcf: A VcfReader object containing data from at least
some of the affected individuals in the given
family.
infer_inheritance:
If True, infer possible inheritance patterns
for each family in the PedFile. Inferred patterns
are stored in self.inheritance_patterns dict
(keys are families, values are lists of
inheritance patterns).
g2p: G2P object from vase.g2p for filtering on
presence and inheritance requirements from a G2P
file.
check_g2p_consequence:
If using a G2P object for gene filtering, also
filter on consequence type as described for each
gene. Note that the mapping of mutation
consequence to consequence type is quite crude
and should be used with caution (see the
mutation_to_csq dict in vase/g2p.py for the
mappings used).
force_inheritance:
Optionally specify an inheritance pattern to
test for each family - either 'dominant' or
'recessive' is allowed. If infer_inheritance is
True, these patterns will be tested in addition
to inferred patterns.
logging_level:
The level at which logging messages are
displayed. Defaults to logging.WARNING
'''
self.logger = self._get_logger(logging_level)
self.affected = tuple(ped.get_affected())
self.unaffected = tuple(ped.get_unaffected())
self.obligate_carriers = dict()
self.ped = ped
self.vcf = vcf
self.g2p = g2p
self.check_g2p_consequence = check_g2p_consequence
if not self.affected:
raise RuntimeError("No affected individuals found in PED file '{}'"
.format(ped.filename))
self.vcf_affected = list(x for x in self.affected
if x in self.vcf.header.samples)
if not self.vcf_affected:
raise RuntimeError("No affected individuals in PED file '{}'"
.format(ped.filename) + " found in VCF " +
"'{}'".format(vcf.filename))
self.vcf_unaffected = list(x for x in self.unaffected
if x in self.vcf.header.samples)
self.vcf_samples = self.vcf_affected + self.vcf_unaffected
self.inheritance_patterns = defaultdict(list)
if infer_inheritance:
self._infer_inheritance()
if force_inheritance:
if force_inheritance not in ('dominant', 'recessive'):
raise RuntimeError("Unrecognised inheritance pattern " +
"specified with 'force_inheritance' " +
"argument. Valid options are 'dominant' " +
"or 'recessive'.")
for fid in self.ped.families:
self.inheritance_patterns[fid].append(force_inheritance)
def _infer_inheritance(self):
'''
Simplistic method for determining likely relevant
inheritance pattern. For affected individuals in a family
a check is made whether parents or grandparents are also
affected. Currently only dominant or recessive inheritance
is inferred, no attempt to infer X-linked or mitochondrial
inheritance is made and it will not spot pseudodominance.
'''
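        # For example, a trio with one affected child and two unaffected
        # parents (all present in the VCF) is assigned both 'recessive' and
        # 'de_novo'; 'dominant' is only added when vertical transmission is
        # apparent or parental information is incomplete.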
for fid, fam in self.ped.families.items():
n_affected = 0
no_parents = True
both_pars_unaffected = False
dominant = False
denovo = False
recessive = False
self.logger.info("Assessing inheritance pattern of family {}"
.format(fid))
f_aff = tuple(fam.get_affected())
obligate_carriers = set()
if not f_aff:
continue
for iid in f_aff:
self.logger.info("Checking affected individual {}".format(iid))
n_affected += 1
indv = fam.individuals[iid]
if not indv.parents:
self.logger.info("No parents for affected individual {}"
.format(iid))
continue
no_parents = False
p_unaff = 0
for par in indv.parents:
# is parent affected
if par not in fam.individuals:
if par in self.vcf.header.samples:
self.logger.warn("Family '{}' parent '{}' ".format(
fid, par) + "not specified in " +
"PED, but present in VCF - " +
"assuming unaffected")
self.vcf_samples.append(par)
self.vcf_unaffected.append(par)
p_unaff += 1
continue
parent = fam.individuals[par]
par_to_child = False
gpar_to_child = False
if parent.is_affected():
self.logger.info("Apparent vertical transmission " +
"from {} -> {}" .format(par, iid))
par_to_child = True
else:
p_unaff += 1
for gpar in parent.parents:
if fam.individuals[gpar].is_affected():
gpar_to_child = True
msg = "Apparent vertical transmission "
if par_to_child:
msg += ("from {} -> {} -> {}"
.format(gpar, par, iid))
else:
msg += ("with partial penetrance from " +
"{} -> ({}) -> {}"
.format(gpar, par, iid))
obligate_carriers.add(par)
self.logger.info(msg)
if par_to_child or gpar_to_child:
dominant = True
if p_unaff == 2:
both_pars_unaffected = True
if not dominant:
recessive = True
if no_parents or not both_pars_unaffected:
# missing information on one/both parents - could be dominant
dominant = True
if recessive and n_affected == 1 and not no_parents:
f_par = fam.individuals[f_aff[0]].parents
if len(f_par) != 2:
self.logger.info("Can not analyze {} under ".format(fid) +
"a de novo model due to missing parents" +
" in ped")
dominant = True
elif (f_par[0] not in self.vcf.header.samples or
f_par[1] not in self.vcf.header.samples):
self.logger.info("Can not analyze {} under ".format(fid) +
"a de novo model due to missing parents" +
" in VCF")
else:
denovo = True
elif recessive and n_affected > 1:
# we can entertain apparent de novos due to somatic mosaicism
# if all affecteds share a parent
pars = fam.individuals[f_aff[0]].parents
shared_pars = None
if len(pars) != 2:
self.logger.info("Can not analyze {} under ".format(fid) +
"a de novo model due to missing parents" +
" in ped")
dominant = True
else:
shared_pars = set(pars)
for i in range(1, len(f_aff)):
ipars = self.ped.individuals[f_aff[i]].parents
if ipars is None:
break
shared_pars = shared_pars.intersection(ipars)
if not shared_pars:
break
if shared_pars:
denovo = True
for par in shared_pars:
if par not in self.vcf_samples:
self.logger.info("Can not analyze {}".format(fid) +
"under a de novo model due to " +
"missing parents in VCF")
denovo = False
break
self.inheritance_patterns[fid] = []
if recessive:
self.logger.info("Family '{}' " .format(fid) + "can be " +
"analysed under a recessive model")
self.inheritance_patterns[fid].append('recessive')
if denovo:
dmodel = "de novo"
if n_affected > 1:
dmodel += " (with germline mosaicism)"
self.logger.info("Family '{}' " .format(fid) + "can be " +
"analysed under a {} model" .format(dmodel))
self.inheritance_patterns[fid].append('de_novo')
if dominant:
self.logger.info("Family '{}' " .format(fid) + "can be " +
"analysed under a dominant model")
self.inheritance_patterns[fid].append('dominant')
self.obligate_carriers[fid] = tuple(obligate_carriers)
def _get_logger(self, logging_level):
logger = logging.getLogger(__name__)
if not logger.hasHandlers():
logger.setLevel(logging_level)
formatter = logging.Formatter(
'[%(asctime)s] %(name)s - %(levelname)s - %(message)s')
ch = logging.StreamHandler()
ch.setLevel(logger.level)
ch.setFormatter(formatter)
logger.addHandler(ch)
return logger
class InheritanceFilter(object):
'''
Parent class for RecessiveFilter/DominantFilter/DeNovoFilter
object.
'''
def __init__(self, family_filter, gt_args, min_families=1,
report_file=None, snpeff_mode=False):
'''
Create genotype filter objects and initialise family filtering
arguments.
Args:
family_filter:
Parent FamilyFilter object, initialized with
VCF and PED files.
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have
qualifying alleles in a feature before
outputting. Default=1.
report_file:
                    Deprecated. Use vase_reporter after
inheritance filtering to process VCFs instead.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
'''
self.family_filter = family_filter
self.min_families = min_families
self.ped = family_filter.ped
self.samples = family_filter.vcf_samples
self.unaffected = family_filter.vcf_unaffected
self.gt_filter = GtFilter(family_filter.vcf,
gq=gt_args.get('gq'),
dp=gt_args.get('dp'),
max_dp=gt_args.get('max_dp'),
het_ab=gt_args.get('het_ab'),
hom_ab=gt_args.get('hom_ab'))
self._gt_fields = set(self.gt_filter.fields)
if gt_args.get('min_control_gq') is None:
gt_args['min_control_gq'] = gt_args.get('gq')
if gt_args.get('min_control_dp') is None:
gt_args['min_control_dp'] = gt_args.get('dp')
if gt_args.get('max_control_dp') is None:
gt_args['max_control_dp'] = gt_args.get('max_dp')
if gt_args.get('control_het_ab') is None:
gt_args['control_het_ab'] = gt_args.get('het_ab')
if gt_args.get('control_hom_ab') is None:
gt_args['control_hom_ab'] = gt_args.get('hom_ab')
self.con_gt_filter = GtFilter(family_filter.vcf,
gq=gt_args.get('min_control_gq'),
dp=gt_args.get('min_control_dp'),
max_dp=gt_args.get('max_control_dp'),
het_ab=gt_args.get('control_het_ab'),
hom_ab=gt_args.get('control_hom_ab'),
ref_ab_filter=gt_args.get('con_ref_ab'))
self._gt_fields.update(self.con_gt_filter.fields)
if gt_args.get('sv_min_control_gq') is None:
gt_args['sv_min_control_gq'] = gt_args.get('sv_gq')
if gt_args.get('sv_min_control_dp') is None:
gt_args['sv_min_control_dp'] = gt_args.get('sv_dp')
if gt_args.get('sv_max_control_dp') is None:
gt_args['sv_max_control_dp'] = gt_args.get('sv_max_dp')
if gt_args.get('sv_control_het_ab') is None:
gt_args['sv_control_het_ab'] = gt_args.get('sv_het_ab')
if gt_args.get('sv_control_hom_ab') is None:
gt_args['sv_control_hom_ab'] = gt_args.get('sv_hom_ab')
if gt_args.get('control_del_dhffc') is None:
gt_args['control_del_dhffc'] = gt_args.get('del_dhffc')
if gt_args.get('control_dup_dhbfc') is None:
gt_args['control_dup_dhbfc'] = gt_args.get('dup_dhbfc')
self.sv_gt_filter = SvGtFilter(family_filter.vcf,
gq=gt_args.get('sv_gq'),
dp=gt_args.get('sv_dp'),
max_dp=gt_args.get('sv_max_dp'),
het_ab=gt_args.get('sv_het_ab'),
hom_ab=gt_args.get('sv_hom_ab'),
del_dhffc=gt_args.get('del_dhffc'),
dup_dhbfc=gt_args.get('dup_dhbfc'))
self._sv_gt_fields = set(self.sv_gt_filter.fields)
self.sv_con_gt_filter = SvGtFilter(
family_filter.vcf,
gq=gt_args.get('sv_min_control_gq'),
dp=gt_args.get('sv_min_control_dp'),
max_dp=gt_args.get('sv_max_control_dp'),
het_ab=gt_args.get('sv_control_het_ab'),
hom_ab=gt_args.get('sv_control_hom_ab'),
ref_ab_filter=gt_args.get('sv_con_ref_ab'),
del_dhffc=gt_args.get('control_del_dhffc'),
dup_dhbfc=gt_args.get('control_dup_dhbfc'))
self._sv_gt_fields.update(self.sv_con_gt_filter.fields)
self._prev_coordinate = (None, None) # to ensure records are processed
self._processed_contigs = set() # in coordinate order
if snpeff_mode:
try:
self._csq_header = self.family_filter.vcf.header.ann_fields
except KeyError:
self._csq_header = None # only required for report file
self.csq_attribute = 'ANN'
self.feature_label = 'Feature_ID'
else:
try:
self._csq_header = self.family_filter.vcf.header.csq_fields
except KeyError:
self._csq_header = None # only required for report file
self.csq_attribute = 'CSQ'
self.feature_label = 'Feature'
if self.report_file:
self._write_report_header()
def get_header_fields(self):
'''
Return dict of dicts with INFO header field names as keys
and dicts of features as values. These are suitable for
handing to VcfHeader class's add_header_field() method.
Each INFO field must be defined in self.header_fields in
the child class, which should be a list of tuples where
        each tuple consists of the name and description of the
field.
'''
hf = dict()
for f in self.header_fields:
hf[f[0]] = {'Number': 'A',
'Type': 'String',
'Description': f[1]}
return hf
def confirm_heterozygous(self, record, samples):
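        '''Return True only if every given sample carries two distinct alleles.'''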
for s in samples:
if len(set(record.samples[s]['GT'])) != 2:
return False
return True
def _get_allele_counts(self, allele, rec):
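        '''
        Map each sample to its count of the given allele (0, 1 or 2), or
        None where the genotype call fails the relevant genotype filter.
        Unaffected hom-ref samples whose ALT allele balance exceeds the
        control threshold are counted as carriers.
        '''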
a_counts = dict()
gt_filter_args = dict()
if rec.IS_SV:
gt_filter = self.sv_gt_filter
control_filter = self.sv_con_gt_filter
gt_filter_args['svtype'] = rec.record.info.get('SVTYPE', '')
else:
gt_filter = self.gt_filter
control_filter = self.con_gt_filter
for samp in self.unaffected:
if control_filter.gt_is_ok(rec.record.samples, samp, allele,
**gt_filter_args):
a_counts[samp] = rec.record.samples[samp]['GT'].count(allele)
else:
a_counts[samp] = None
if (rec.record.samples[samp]['GT'] == (0, 0) and
control_filter.ad_over_threshold is not None):
if control_filter.ad_over_threshold(rec.record.samples, samp,
allele):
a_counts[samp] = 1
for samp in self.affected:
if gt_filter.gt_is_ok(rec.record.samples, samp, allele,
**gt_filter_args):
a_counts[samp] = rec.record.samples[samp]['GT'].count(allele)
else:
a_counts[samp] = None
return a_counts
def _check_sorted(self, record):
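        '''Raise a RuntimeError if records are not in coordinate-sorted order.'''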
if self._prev_coordinate[0] != record.chrom:
if record.chrom in self._processed_contigs:
raise RuntimeError("Input must be sorted by chromosome and " +
"position for recessive filtering. " +
"Contig '{}' " .format(record.chrom) +
"encountered before and after contig " +
"'{}'." .format(self._prev_coordinate[0]))
if self._prev_coordinate[0] is not None:
self._processed_contigs.add(self._prev_coordinate[0])
elif record.pos < self._prev_coordinate[1]:
raise RuntimeError("Input must be sorted by chromosome and " +
"position for inheritance filtering. " +
"Encountered position {}:{} after {}:{}"
.format(record.chrom, record.pos,
self._prev_coordinate[0],
self._prev_coordinate[1]))
self._prev_coordinate = (record.chrom, record.pos)
def process_record(self, record):
'''Return True if record should be printed/kept'''
return NotImplementedError("process_record method should be " +
"overriden by child class!")
def _write_report_header(self):
if self._csq_header is not None:
header = str.join("\t", (x for x in self._csq_header if x !=
'Allele'))
header += "\tALT_No.\t" + str.join("\t", self.annot_fields)
header += "\tCHROM\tPOS\tID\tREF\tALT\tALLELE\tQUAL\tFILTER"
self.report_file.write(header + "\n")
def check_g2p(self, record, ignore_csq, inheritance, csqs=None):
if self.family_filter.g2p:
if csqs is None:
csqs = getattr(record, self.csq_attribute)
if self.family_filter.check_g2p_consequence:
fail = (not x for x in
self.family_filter.g2p.csq_and_allelic_requirement_met(
csqs, inheritance))
else:
fail = (not x for x in
self.family_filter.g2p.allelic_requirement_met(
csqs, inheritance))
if ignore_csq:
ignore_csq = [x or y for x, y in zip(ignore_csq, fail)]
else:
ignore_csq = list(fail)
return ignore_csq
class RecessiveFilter(InheritanceFilter):
'''
This class assumes that each family has a shared biallelic
genetic cause of disease. It will not cope with phenocopies,
pseudodominance or other more complicated inheritance patterns.
'''
def __init__(self, family_filter, gt_args, min_families=1,
snpeff_mode=False, strict=False, exclude_denovo=False,
report_file=None):
'''
Args:
family_filter:
FamilyFilter object
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have a
qualifying biallelic combination of alleles in
a feature before outputting. Default=1.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
strict: If True, for any affected sample with
parents, require confirmation of parental
genotypes. If either parent genotype is a
no-call for a record, then the record will
be ignored. Default=False.
exclude_denovo:
If True, where there is data available from
both parents for an affected individual
ignore apparent de novo occuring alleles.
Default=False.
report_file:
Output filehandle for writing summaries of
segregating variants to. Default=None.
'''
self.prefix = "VASE_biallelic"
self.header_fields = [
("VASE_biallelic_homozygous",
'Samples that carry homozygous biallelic changes ' +
' parsed by {}' .format(type(self).__name__)),
("VASE_biallelic_compound_het",
'Samples that carry compound heterozygous biallelic changes ' +
'parsed by {}'.format(type(self).__name__)),
("VASE_biallelic_de_novo",
'Samples that carry biallelic alleles that appear to have ' +
'arisen de novo'),
('VASE_biallelic_families',
'Family IDs for VASE_biallelic alleles'),
("VASE_biallelic_features",
'Features (e.g. transcripts) that contain qualifying ' +
'biallelic variants parsed by {}' .format(
type(self).__name__))]
self.annot_fields = ('homozygous', 'compound_het', 'de_novo',
'families', 'features')
self.report_file = report_file
super().__init__(family_filter, gt_args, min_families=min_families,
snpeff_mode=snpeff_mode, report_file=report_file)
self.families = tuple(x for x in
self.family_filter.inheritance_patterns
if 'recessive' in
self.family_filter.inheritance_patterns[x])
self.affected = tuple(x for x in family_filter.vcf_affected if
self.ped.individuals[x].fid in self.families)
self._fam_to_aff = dict()
for fid in self.families:
self._fam_to_aff[fid] = set(x for x in
self.ped.families[fid].get_affected()
if x in self.affected)
self.family_filter.logger.info("Analysing family {} ".format(fid) +
"under a recessive model")
self.strict = strict
self.exclude_denovo = exclude_denovo
self._potential_recessives = dict()
self._current_features = set()
self._processed_features = set()
def process_record(self, record, ignore_alleles=[], ignore_csq=[]):
'''
Returns True if record should be stored for checking against
other records overlapping the same features to see if they
constitute biallelic variation.
Stores potential recessive records per allele for
segregation checking once overlapping features have been
traversed.
Args:
record: VaseRecord
ignore_alleles:
List of booleans indicating for each ALT in
order whether it should be ignored in relation
to possible recessive variation (e.g. if MAF is
too high, no likely pathogenic consequence
etc.). This will normally have been generated
by VaseRunner via VcfFilter and/or VepFilter
classes.
ignore_csq:
List of booleans indicating for each CSQ in
order whether it should be ignored in relation
to possible recessive variation. This should
normally have been generated by a corresponding
VepFilter object.
'''
stored = False
self._check_sorted(record.record)
record_csqs = getattr(record, self.csq_attribute)
self._current_features = set(c[self.feature_label] for c in record_csqs
if c[self.feature_label] != '')
ignore_csq = self.check_g2p(record, ignore_csq, 'recessive',
csqs=record_csqs)
if ignore_csq and all(ignore_csq):
return False
gt_filter_args = dict()
if record.IS_SV:
gt_filter = self.sv_gt_filter
control_filter = self.sv_con_gt_filter
gt_filter_args['svtype'] = record.info.get('SVTYPE', '')
else:
gt_filter = self.gt_filter
control_filter = self.con_gt_filter
skip_fam = set()
added_prs = OrderedDict()
for i in range(len(record.alts)):
if ignore_alleles and ignore_alleles[i]:
continue
alt = i + 1
skip_allele = False
fams_with_allele = []
for un in self.unaffected:
if record.samples[un]['GT'] == (alt, alt):
if control_filter.gt_is_ok(record.samples, un, alt,
**gt_filter_args):
# hom in a control - skip allele
skip_allele = True
break
if skip_allele:
continue
for fid in self.families:
if fid in skip_fam:
continue
have_allele = set() # affecteds carrying this allele
for aff in self._fam_to_aff[fid]:
# check all affecteds carry this allele
if (alt in record.samples[aff]['GT'] and
gt_filter.gt_is_ok(record.samples, aff, alt,
**gt_filter_args)):
have_allele.add(aff)
else:
break
if have_allele == self._fam_to_aff[fid]:
# all affecteds in family carry allele
fams_with_allele.append(fid)
if fams_with_allele:
# store record and consequences
try:
csqs = []
for j in range(len(record_csqs)):
if ignore_csq and ignore_csq[j]:
continue
if record_csqs[j]['alt_index'] == alt:
# store record and csq details
csqs.append(record_csqs[j])
if csqs:
stored = True
alt_counts = self._get_allele_counts(alt, record)
pr = PotentialSegregant(
record=record, allele=alt, csqs=csqs,
allele_counts=alt_counts,
families=fams_with_allele,
feature_label=self.feature_label)
for feat in pr.features:
if feat in added_prs:
added_prs[feat][pr.alt_id] = pr
else:
added_prs[feat] = OrderedDict(
[(pr.alt_id, pr)])
if feat in self._potential_recessives:
self._potential_recessives[feat][pr.alt_id] = pr
else:
self._potential_recessives[feat] = OrderedDict(
[(pr.alt_id, pr)])
except KeyError:
raise RuntimeError("Could not identify CSQ or ANN " +
"fields in VCF header. Please ensure " +
"your input is annotated with " +
"Ensembl's VEP to perform recessive " +
"filtering")
return stored
def process_potential_recessives(self, final=False):
'''
Check whether stored PotentialSegregant alleles make up
biallelic variation in the same transcript for affected
individuals/families. Adds labels to INFO fields of VCF
records and returns an OrderedDict of 'var_ids' to
lists of PotentialSegregant objects that appear to
segregate consistent with recessive inheritance.
Clears the cache of stored PotentialSegregant alleles.
'''
segregating = OrderedDict() # key=alt_id, val=SegregatingBiallelic
for feat, prs in self._potential_recessives.items():
if not final and feat in self._current_features:
continue
feat_segregating = [] # list of tuples of values for creating SegregatingBiallelic
un_hets = defaultdict(list) # store het alleles carried by each unaffected
aff_hets = defaultdict(list) # store het alleles carried by each affected
biallelics = defaultdict(list) # store biallelic combinations for affecteds
for pid, p in prs.items():
for un in self.unaffected:
if p.allele_counts[un] == 1: # already checked for homs when adding
# store allele carried in this unaffected
un_hets[un].append(pid)
for aff in (x for x in self.affected
if self.ped.fid_from_iid(x) in p.families):
if p.allele_counts[aff] == 1:
aff_hets[aff].append(pid)
elif p.allele_counts[aff] == 2:
biallelics[aff].append(tuple([pid]))
incompatibles = [] # create a list of sets of incompatible hets
for hets in un_hets.values():
if len(hets):
incompatibles.append(set(hets))
for aff, hets in aff_hets.items():
for i in range(len(hets)):
for j in range(i+1, len(hets)):
incomp = False
for iset in incompatibles:
if iset.issuperset([hets[i], hets[j]]):
incomp = True
break
if not incomp:
if not prs[hets[i]].record.in_cis_with(sample=aff,
allele=prs[hets[i]].allele,
other=prs[hets[j]].record,
other_allele=prs[hets[j]].allele):
# check phase groups in case alleles in cis
biallelics[aff].append(
tuple([hets[i], hets[j]]))
if not biallelics:
continue
# see if all affecteds in the same family share the same biallelics
for fid, affs in self._fam_to_aff.items():
b_affs = set(x for x in affs if x in biallelics)
if len(b_affs) == 0 or b_affs != affs:
continue
affs = list(affs)
absent_in_aff = False
for i in range(len(affs)):
for bi in biallelics[affs[i]]:
for j in range(i+1, len(affs)):
if bi not in biallelics[affs[j]]:
absent_in_aff = True
break
if not absent_in_aff:
segs, de_novo = self._check_parents(feat, bi, affs)
if not segs:
continue
if len(bi) == 1:
model = 'homozygous'
else:
model = 'compound_het'
for bi_pr in (prs[x] for x in bi):
feat_segregating.append((bi_pr, affs, [fid],
model, [feat],
de_novo[bi_pr.alt_id],
self.prefix))
fam_count = len(set([fam for tup in feat_segregating for fam in
tup[2]]))
if fam_count >= self.min_families:
for tp in feat_segregating:
if tp[0] in segregating:
segregating[tp[0]].add_samples(*tp[1:6])
else:
segregating[tp[0]] = SegregatingVariant(*tp)
var_to_segregants = OrderedDict()
for sb in segregating.values():
sb.annotate_record(self.report_file, self.annot_fields)
if sb.segregant.var_id in var_to_segregants:<|fim▁hole|> else:
var_to_segregants[sb.segregant.var_id] = [sb.segregant]
# clear the cache except for the last entry which will be a new gene
# self._potential_recessives = self._last_added
self._potential_recessives = OrderedDict(
(k, v) for k, v in self._potential_recessives.items() if k in
self._current_features)
return var_to_segregants
def _check_parents(self, feat, alleles, samples):
'''
Check transmission of alleles (i.e. one from each parent)
if parents available. Should have already checked that
alleles are not present in this combination in any
unaffected individual.
Returns a tuple of booleans - first value is True if
parental genotypes do not contradict recessive inheritance
while the second value is a dict of alleles to lists of
samples in which the allele allele appears to have arisen
de novo.
'''
dns = defaultdict(list)
counts = []
for al in alleles:
counts.append(self._potential_recessives[feat][al].allele_counts)
if len(counts) == 1: # homozygous
counts.append(counts[0])
for samp in samples:
parents = self.ped.individuals[samp].parents
par = list(x for x in parents if x in self.samples)
if len(par) == 0:
continue
if self.strict:
for p in par:
if None in (counts[i][p] for i in range(len(counts))):
# require both parental genotypes if self.strict
return (False, dns)
if len(par) == 2: # can check for de novos
for i in range(len(counts)):
if counts[i][par[0]] == 0 and counts[i][par[1]] == 0:
# apparent de novo
self.family_filter.logger.debug(
"Apparent de novo allele " +
"{} for sample {} (parents = {} + {}) ".format(
alleles[-i], samp, par[0], par[1]) +
"for recessive combination {}|{}".format(
alleles[0], alleles[-1]))
dns[alleles[-i]].append(samp)
if self.exclude_denovo:
return (False, dns)
elif len(par) == 1:
# if only one parent and both alleles are absent it is more
# likely that the two alleles are in cis from other parent
if counts[0][par[0]] == 0 and counts[1][par[0]] == 0:
return(False, dns)
# NOTE: we could do a check here to ensure that any non-affected
# parent does not carry both alleles, but this *SHOULD* have
# already been done earlier in process_potential_recessives
# function for ALL unaffecteds anyway
return (True, dns)
class DominantFilter(InheritanceFilter):
'''
Identify variants that fit a dominant pattern in
given families.
'''
def __init__(self, family_filter, gt_args, min_families=1,
snpeff_mode=False, report_file=None):
'''
Initialize with parent IDs, children IDs and VcfReader
object.
Args:
family_filter:
FamilyFilter object
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have a
qualifying variant in a feature before
outputting. Default=1.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
'''
self.prefix = "VASE_dominant"
self.header_fields = [
("VASE_dominant_samples",
'Sample IDs for alleles that segregate according to a ' +
'dominant inheritance pattern in an affected sample as' +
' parsed by {}' .format(type(self).__name__)),
('VASE_dominant_unaffected_carrier',
'Sample IDs for unaffected carriers of ' +
'VASE_dominant alleles'),
('VASE_dominant_families',
'Family IDs for VASE_dominant alleles'),
("VASE_dominant_features",
'Features (e.g. transcripts) that contain qualifying ' +
'dominant variants parsed by {}' .format(
type(self).__name__))]
self.annot_fields = ('samples', 'unaffected_carrier', 'families',
'features')
self.report_file = report_file
super().__init__(family_filter, gt_args, min_families=min_families,
snpeff_mode=snpeff_mode, report_file=report_file,)
self.families = tuple(x for x in
self.family_filter.inheritance_patterns
if 'dominant' in
self.family_filter.inheritance_patterns[x])
self.affected = tuple(x for x in family_filter.vcf_affected if
self.ped.individuals[x].fid in self.families)
self.filters = dict()
self._potential_dominants = dict()
self._last_added = OrderedDict()
self._current_features = set()
for fam in self.families:
f_aff = tuple(x for x in self.ped.families[fam].get_affected()
if (x in self.affected or
x in self.family_filter.obligate_carriers[fam]))
f_unaff = tuple(x for x in self.ped.families[fam].get_unaffected()
if (x in self.unaffected and x not in
self.family_filter.obligate_carriers[fam]))
if fam in self.family_filter.obligate_carriers:
self.obligate_carriers = tuple(
x for x in f_aff if x in
self.family_filter.obligate_carriers[fam])
else:
self.obligate_carriers = ()
dom_filter = SampleFilter(family_filter.vcf, cases=f_aff,
controls=f_unaff, confirm_missing=True,
**gt_args)
self.filters[fam] = dom_filter
self.family_filter.logger.info("Analysing family {} ".format(fam) +
"under a dominant model")
def process_record(self, record, ignore_alleles=[], ignore_csq=[]):
'''
Returns True if an allele segregates consistent with
dominant inheritance.
Args:
record: VaseRecord
ignore_alleles:
List of booleans indicating for each ALT in
order whether it should be ignored in relation
to possible dominant variation (e.g. if MAF is
too high, no likely pathogenic consequence
etc.). This will normally have been generated
by VaseRunner via VcfFilter and/or VepFilter
classes.
'''
dom_alleles = ([[] for i in range(len(record.record.alts))])
fam_alleles = ([[] for i in range(len(record.record.alts))])
ignore_csq = self.check_g2p(record, ignore_csq, 'dominant')
if ignore_csq and all(ignore_csq):
return False
if self.min_families > 1:
self._check_sorted(record.record)
for i in range(len(record.record.alts)):
if ignore_alleles[i]:
continue
allele = i + 1
for fam, dfilter in self.filters.items():
# looking for (potentially shared) de novos in a single family
is_dom = not dfilter.filter(record, allele)
if is_dom:
if self.confirm_heterozygous(record.record, dfilter.cases):
dom_alleles[i].extend(dfilter.cases)
fam_alleles[i].append(fam)
self.family_filter.logger.debug(
"Apparent dominant allele {}:{}-{}/{} ".format(
record.record.chrom, record.record.pos,
record.record.ref,
record.record.alleles[allele]) +
"present in {} ".format(dfilter.cases) +
"and absent in {}".format(dfilter.controls))
segs = []
for i in range(len(dom_alleles)):
if not dom_alleles[i]:
continue
allele = i + 1
csqs = []
record_csqs = getattr(record, self.csq_attribute)
try:
for j in range(len(record_csqs)):
if ignore_csq and ignore_csq[j]:
continue
if record_csqs[j]['alt_index'] == allele:
# store record and csq details
csqs.append(record_csqs[j])
except KeyError:
if self.min_families > 1:
raise RuntimeError("Could not identify CSQ or ANN fields" +
" in VCF header. Please ensure your " +
"input is annotated with Ensembl's " +
"VEP to perform dominant filtering.")
if self.min_families <= 1 or csqs:
a_counts = self._get_allele_counts(allele, record)
pd = PotentialSegregant(record=record, allele=allele,
csqs=csqs, allele_counts=a_counts,
families=fam_alleles[i],
feature_label=self.feature_label)
segs.append(pd)
if self.min_families > 1:
for feat, od in self._last_added.items():
if feat in self._potential_dominants:
self._potential_dominants[feat].update(od)
else:
self._potential_dominants[feat] = od
self._last_added = OrderedDict()
for seg in segs:
for feat in seg.features:
self._last_added[feat] = OrderedDict([(seg.alt_id, seg)])
else:
for seg in segs:
affs = (x for x in self.affected
if x not in self.obligate_carriers and
self.ped.fid_from_iid(x) in seg.families)
sv = SegregatingVariant(seg, affs, seg.families, 'samples',
seg.features, [], self.prefix)
obcs = tuple(x for x in self.obligate_carriers if
self.ped.fid_from_iid(x) in seg.families)
if obcs:
obfs = set(self.ped.fid_from_iid(x) for x in obcs)
sv.add_samples(obcs, obfs, 'unaffected_carrier',
seg.features, [])
sv.annotate_record(self.report_file, self.annot_fields)
return len(segs) > 0
def process_dominants(self, final=False):
'''
Check whether stored PotentialSegregant alleles make up
dominant variation in the same transcript for the minimum
number of families. Adds labels to INFO fields of VCF
records and returns an OrderedDict of 'var_ids' to
lists of PotentialSegregant objects that appear to
constitute dominant variation.
Clears the cache of stored PotentialSegregant alleles.
'''
sds = OrderedDict()
feat_processed = []
if not self._potential_dominants:
            # if cache is empty, we never encountered the next set of features
self._potential_dominants = self._last_added
self._last_added = OrderedDict()
elif final:
for feat in self._last_added:
if feat in self._potential_dominants:
self._potential_dominants[feat].update(
self._last_added[feat])
else:
self._potential_dominants[feat] = self._last_added[feat]
self._last_added = OrderedDict()
for feat, pds in self._potential_dominants.items():
if feat in self._current_features: # still processing this feature
continue
feat_fams = set()
feat_processed.append(feat)
for pid, p in pds.items():
feat_fams.update(p.families)
if len(feat_fams) >= self.min_families:
for p in pds.values():
samps = (x for x in self.affected
if self.ped.fid_from_iid(x) in p.families)
if p.alt_id in sds:
sds[p.alt_id].add_samples(samps, p.families,
'samples', [feat], [])
else:
sv = SegregatingVariant(p, samps, p.families,
'samples', [feat], [],
self.prefix)
sds[p.alt_id] = sv
var_to_segregants = OrderedDict()
for sv in sds.values():
sv.annotate_record(self.report_file, self.annot_fields)
if sv.segregant.var_id in var_to_segregants:
var_to_segregants[sv.segregant.var_id].append(sv.segregant)
else:
var_to_segregants[sv.segregant.var_id] = [sv.segregant]
# clear the cache of processed features
for feat in feat_processed:
del self._potential_dominants[feat]
return var_to_segregants
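# Illustrative usage sketch (added; not part of the original module). The real
# driver lives in VaseRunner, but a hypothetical caller would look roughly like:
#   for record in vcf:
#       dom_filter.process_record(record, ignore_alleles, ignore_csq)
#   hits = dom_filter.process_dominants(final=True)
#   # hits is an OrderedDict mapping var_id strings to lists of
#   # PotentialSegregant objects that appear to segregate dominantly
# The variable names above are hypothetical and shown only for orientation.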
class DeNovoFilter(InheritanceFilter):
'''
    Identify and output variants occurring in a child and absent from
the parents.
'''
def __init__(self, family_filter, gt_args, min_families=1,
confirm_het=False, snpeff_mode=False, report_file=None):
'''
Initialize with parent IDs, children IDs and VcfReader
object.
Args:
family_filter:
FamilyFilter object
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have a
qualifying variant in a feature before
outputting. Default=1.
confirm_het:
If True, apparent de novos are required to be
called as heterozygous. Default=False.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
'''
self.prefix = "VASE_de_novo"
self.header_fields = [("VASE_de_novo_samples",
'Samples that carry alleles occurring de novo parsed by ' +
'{}' .format(type(self).__name__)),
('VASE_de_novo_families',
'Family IDs for VASE_de_novo alleles'),
("VASE_de_novo_features",
'Features (e.g. transcripts) that contain qualifying ' +
'de novo variants parsed by {}' .format(
type(self).__name__)),]
self.annot_fields = ('samples', 'families', 'features')
self.report_file = report_file
super().__init__(family_filter, gt_args, min_families=min_families,
snpeff_mode=snpeff_mode, report_file=report_file)
self.families = tuple(x for x in
self.family_filter.inheritance_patterns if
'de_novo' in
self.family_filter.inheritance_patterns[x])
self.affected = tuple(x for x in family_filter.vcf_affected if
self.ped.individuals[x].fid in self.families)
self._potential_denovos = dict()
self._last_added = OrderedDict()
self._current_features = set()
self.confirm_het = confirm_het
self.filters = defaultdict(list)
self.prefix = "VASE_de_novo"
for fam in self.families:
f_aff = tuple(x for x in self.ped.families[fam].get_affected()
if x in self.affected)
par_child_combos = defaultdict(list)
for aff in f_aff:
pars = tuple(x for x in
self.ped.families[fam].individuals[aff].parents
if x in self.samples)
if len(pars) == 2:
par_child_combos[pars].append(aff)
for parents, children in par_child_combos.items():
par_filter = SampleFilter(family_filter.vcf, cases=children,
controls=parents,
confirm_missing=True, **gt_args)
self.filters[fam].append(par_filter)
self.family_filter.logger.info(
"Analysing family {} parents ({}) and children ({})"
.format(fam, str.join(", ", parents),
str.join(", ", children)) +
" combinations under a de novo dominant model")
def process_record(self, record, ignore_alleles=[], ignore_csq=[]):
'''
Returns True if allele is an apparent de novo variant.
Args:
record: VaseRecord
ignore_alleles:
List of booleans indicating for each ALT in
order whether it should be ignored in relation
to possible de novo variation (e.g. if MAF is
too high, no likely pathogenic consequence
etc.). This will normally have been generated
by VaseRunner via VcfFilter and/or VepFilter
classes.
'''
if self.min_families > 1:
self._check_sorted(record.record)
ignore_csq = self.check_g2p(record, ignore_csq, 'de novo')
if ignore_csq and all(ignore_csq):
return False
denovo_alleles = ([[] for i in range(len(record.record.alts))])
fam_alleles = ([[] for i in range(len(record.record.alts))])
for i in range(len(record.alts)):
if ignore_alleles[i]:
continue
allele = i + 1
for fam, filters in self.filters.items():
# looking for (potentially shared) de novos in a single family
dns = []
for dfilter in filters:
is_denovo = not dfilter.filter(record, allele)
if is_denovo:
if (not self.confirm_het or self.confirm_heterozygous(
record.record, dfilter.cases)):
dns.append(dfilter.cases)
self.family_filter.logger.debug(
"Apparent de novo allele {}:{}-{}/{} ".format(
record.record.chrom, record.record.pos,
record.record.ref,
record.record.alleles[allele]) +
"present in {} ".format(dfilter.cases) +
"and absent in {}".format(dfilter.controls))
if len(dns) == len(filters): # all affecteds in fam have dnm
([denovo_alleles[i].extend(x) for x in dns])
fam_alleles[i].append(fam)
segs = []
for i in range(len(denovo_alleles)):
if not denovo_alleles[i]:
continue
allele = i + 1
csqs = []
try:
record_csqs = getattr(record, self.csq_attribute)
for j in range(len(record_csqs)):
if ignore_csq and ignore_csq[j]:
continue
if record_csqs[j]['alt_index'] == allele:
# store record and csq details
csqs.append(record_csqs[j])
except KeyError:
if self.min_families > 1:
raise RuntimeError("Could not identify CSQ or ANN fields" +
" in VCF header. Please ensure your " +
"input is annotated with Ensembl's " +
"VEP to perform de novo filtering.")
if self.min_families <= 1 or csqs:
a_counts = self._get_allele_counts(allele, record)
pd = PotentialSegregant(record=record, allele=allele,
csqs=csqs, allele_counts=a_counts,
families=fam_alleles[i],
feature_label=self.feature_label)
segs.append(pd)
if self.min_families > 1:
for feat, od in self._last_added.items():
if feat in self._potential_denovos:
self._potential_denovos[feat].update(od)
else:
self._potential_denovos[feat] = od
self._last_added = OrderedDict()
for seg in segs:
for feat in seg.features:
self._last_added[feat] = OrderedDict([(seg.alt_id, seg)])
else:
for seg in segs:
affs = (x for x in self.affected if self.ped.fid_from_iid(x)
in seg.families)
sv = SegregatingVariant(seg, affs, seg.families, 'samples',
seg.features, [], self.prefix)
sv.annotate_record(self.report_file, self.annot_fields)
return len(segs) > 0
def process_de_novos(self, final=False):
'''
Check whether stored PotentialSegregant alleles make up
de novo dominant variation in the same transcript for the
minimum number of families. Adds labels to INFO fields of
VCF records and returns an OrderedDict of 'var_ids' to
lists of PotentialSegregant objects that appear to
constitute de novo dominant variation.
Clears the cache of stored PotentialSegregant alleles.
'''
sds = OrderedDict()
feat_processed = []
if not self._potential_denovos:
            # if cache is empty, we never encountered the next set of features
self._potential_denovos = self._last_added
self._last_added = OrderedDict()
elif final:
for feat in self._last_added:
if feat in self._potential_denovos:
self._potential_denovos[feat].update(
self._last_added[feat])
else:
self._potential_denovos[feat] = self._last_added[feat]
self._last_added = OrderedDict()
for feat, pds in self._potential_denovos.items():
if feat in self._current_features: # still processing this feature
continue
feat_fams = set()
feat_processed.append(feat)
for pid, p in pds.items():
feat_fams.update(p.families)
if len(feat_fams) >= self.min_families:
for p in pds.values():
samps = (x for x in self.affected
if self.ped.fid_from_iid(x) in p.families)
if p.alt_id in sds:
sds[p.alt_id].add_samples(samps, p.families,
'samples', [feat], [])
else:
sv = SegregatingVariant(p, samps, p.families,
'samples', [feat], [],
self.prefix)
sds[p.alt_id] = sv
var_to_segregants = OrderedDict()
for sv in sds.values():
sv.annotate_record(self.report_file, self.annot_fields)
if sv.segregant.var_id in var_to_segregants:
var_to_segregants[sv.segregant.var_id].append(sv.segregant)
else:
var_to_segregants[sv.segregant.var_id] = [sv.segregant]
# clear the cache of processed features
for feat in feat_processed:
del self._potential_denovos[feat]
return var_to_segregants
class ControlFilter(SampleFilter):
''' Filter variants if they are present in a control sample. '''
def __init__(self, vcf, family_filter, gt_args, n_controls=0):
'''
Args:
vcf: Input VcfReader object.
family_filter:
FamilyFilter object containing information on
which samples are controls in the input VCF.
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
n_controls:
Minimum number of controls required to carry an
ALT allele for it to be filtered. Alleles will
only be filtered if carried by this number of
controls or more. Default=0.
'''
if n_controls and n_controls > len(family_filter.vcf_unaffected):
n_controls = len(family_filter.vcf_unaffected)
super().__init__(vcf, controls=family_filter.vcf_unaffected,
n_controls=n_controls, confirm_missing=False,
**gt_args)
class SegregatingVariant(object):
'''
Stores details of alleles that segregate in a manner consistent
with inheritance pattern.
'''
__slots__ = ['recessive', 'samples', 'families', 'model', 'features',
'segregant', 'prefix', 'de_novos']
def __init__(self, segregant, samples, families, model, features,
de_novos=(), prefix='VASE_segregant'):
'''
Initialize with a PotentialSegregant object, an iterable of
sample IDs carrying the PotentialSegregant a string
indicating the model of inheritance (e.g. 'compound_het'),
the name of the associated features (e.g. transcript IDs),
prefix for INFO fields and a list of individuals for whom
the allele appears to have arisen de novo.
'''
self.segregant = segregant
self.samples = list(samples)
self.families = set(families)
self.model = [model] * len(self.samples)
self.features = set(features)
self.prefix = prefix
self.de_novos = set(de_novos)
def __eq__(self, other):
return self.segregant == other.segregant
def __hash__(self):
return hash(self.segregant)
def add_samples(self, samples, families, model, features, de_novos):
''' Add samples with corresponding model of inheritance '''
self.samples.extend(samples)
self.families.update(families)
self.model.extend([model] * (len(self.samples) - len(self.model)))
self.features.update(features)
self.de_novos.update(de_novos)
def annotate_record(self, report_file=None, annot_order=[]):
''' Add INFO field annotations for VcfRecords '''
annots = defaultdict(set)
for i in range(len(self.model)):
k = self.prefix
if self.model[i]:
k += "_" + self.model[i]
annots[k].add(self.samples[i])
for k in annots:
annots[k] = str.join("|", sorted(annots[k]))
annots[self.prefix + '_families'] = str.join("|",
sorted(self.families))
annots[self.prefix + '_features'] = str.join("|",
sorted(self.features))
if self.de_novos:
annots[self.prefix + '_de_novo'] = str.join("|",
sorted(self.de_novos))
converted = self._convert_annotations(annots)
for k, v in converted.items():
self.segregant.record.info[k] = v
if report_file:
report_file.write(self._annot_to_string(annots, annot_order)
+ "\n")
def _annot_to_string(self, annots, annot_order):
s = ''
csq_to_join = []
for k in (x for x in self.segregant.csqs[0] if x != 'Allele'):
csq_to_join.append(str.join("|", (str(self.segregant.csqs[i][k])
if self.segregant.csqs[i][k]
else '.' for i in range(
len(self.segregant.csqs)))))
s = str.join("\t", csq_to_join)
if annot_order:
annot_order = [self.prefix + "_" + x for x in annot_order]
s += "\t" + str.join("\t", (annots[k] if isinstance(annots[k], str)
else '.' for k in annot_order))
else:
s += "\t" + str.join("\t", (annots[k] if isinstance(annots[k], str)
else '.' for k in sorted(annots)))
r = self.segregant.record
allele = r.alleles[self.segregant.allele]
s += "\t" + str.join("\t", (str(x) for x in (r.chrom, r.pos, r.id,
r.ref, r.alt, allele,
r.qual, r.filter_string)))
return s
def _convert_annotations(self, annots):
''' Convert to per-allele (Number=A) format for INFO field '''
converted_annots = dict()
for k, v in annots.items():
if k in self.segregant.record.info:
allele_fields = list(self.segregant.record.info[k])
else:
allele_fields = ['.'] * len(self.segregant.record.alts)
i = self.segregant.allele - 1
allele_fields[i] = v
converted_annots[k] = allele_fields
return converted_annots
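# Illustrative note (added; not in the original VASE source): for a record with
# two ALT alleles where this segregant's allele index is 2, a hypothetical
# annotation of {'VASE_dominant_samples': 'SampleA|SampleB'} would be converted
# by _convert_annotations() into the per-allele (Number=A) list
# ['.', 'SampleA|SampleB'] - the value is written at index allele - 1, while the
# other ALT positions are left untouched (defaulting to '.' when the field is new).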
class PotentialSegregant(object):
'''
Class for storing variant details for records that might make up
biallelic variants in affected samples.
'''
__slots__ = ['allele', 'allele_counts', 'features', 'families', 'alt_id',
'var_id', 'record', 'csqs']
def __init__(self, record, allele, csqs, allele_counts, families,
feature_label='Feature'):
self.allele = allele
self.allele_counts = allele_counts
self.families = families
self.var_id = "{}:{}-{}/{}".format(record.chrom, record.pos,
record.ref, record.alt)
self.alt_id = "{}:{}-{}/{}".format(record.chrom, record.pos,
record.ref, record.alleles[allele])
self.features = set(x[feature_label] for x in csqs if
x[feature_label] != '')
if not self.features:
# if is intergenic and there is no Feature ID, use var ID
# this way we can capture variants at same site if looking for n>1
# in several families, but won't classify all intergenic variants
# as the same "Feature"
self.features.add(self.var_id.replace(',', '_'))
self.csqs = csqs
self.record = record
def __eq__(self, other):
return self.alt_id == other.alt_id
def __hash__(self):
return hash(self.alt_id)<|fim▁end|>
|
var_to_segregants[sb.segregant.var_id].append(sb.segregant)
|
<|file_name|>rules_iterator.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! An iterator over a list of rules.
use context::QuirksMode;
use media_queries::Device;
use shared_lock::SharedRwLockReadGuard;
use smallvec::SmallVec;
use std::slice;
use stylesheets::StylesheetInDocument;
use stylesheets::{CssRule, DocumentRule, ImportRule, MediaRule, SupportsRule};
/// An iterator over a list of rules.
pub struct RulesIterator<'a, 'b, C>
where
'b: 'a,
C: NestedRuleIterationCondition + 'static,
{
device: &'a Device,
quirks_mode: QuirksMode,
guard: &'a SharedRwLockReadGuard<'b>,
stack: SmallVec<[slice::Iter<'a, CssRule>; 3]>,
_phantom: ::std::marker::PhantomData<C>,
}
impl<'a, 'b, C> RulesIterator<'a, 'b, C>
where
'b: 'a,
C: NestedRuleIterationCondition + 'static,
{
/// Creates a new `RulesIterator` to iterate over `rules`.
pub fn new(
device: &'a Device,
quirks_mode: QuirksMode,
guard: &'a SharedRwLockReadGuard<'b>,
rules: &'a [CssRule],
) -> Self {
let mut stack = SmallVec::new();
stack.push(rules.iter());
Self {
device: device,
quirks_mode: quirks_mode,
guard: guard,
stack: stack,
_phantom: ::std::marker::PhantomData,
}
}
/// Skips all the remaining children of the last nested rule processed.
pub fn skip_children(&mut self) {
self.stack.pop();
}
}
impl<'a, 'b, C> Iterator for RulesIterator<'a, 'b, C>
where
'b: 'a,
C: NestedRuleIterationCondition + 'static,
{
type Item = &'a CssRule;
fn next(&mut self) -> Option<Self::Item> {
let mut nested_iter_finished = false;
while !self.stack.is_empty() {
if nested_iter_finished {
self.stack.pop();
nested_iter_finished = false;
continue;
}
let rule;
let sub_iter = {
let nested_iter = self.stack.last_mut().unwrap();
rule = match nested_iter.next() {
Some(r) => r,
None => {
nested_iter_finished = true;
continue;
},
};
match *rule {
CssRule::Namespace(_) |
CssRule::Style(_) |
CssRule::FontFace(_) |
CssRule::CounterStyle(_) |
CssRule::Viewport(_) |
CssRule::Keyframes(_) |
CssRule::Page(_) |
CssRule::FontFeatureValues(_) => return Some(rule),
CssRule::Import(ref import_rule) => {
let import_rule = import_rule.read_with(self.guard);
if !C::process_import(
self.guard,
self.device,
self.quirks_mode,
import_rule,
) {
continue;
}
import_rule.stylesheet.rules(self.guard).iter()
},
CssRule::Document(ref doc_rule) => {
let doc_rule = doc_rule.read_with(self.guard);
if !C::process_document(self.guard, self.device, self.quirks_mode, doc_rule)
{
continue;
}
doc_rule.rules.read_with(self.guard).0.iter()
},
CssRule::Media(ref lock) => {
let media_rule = lock.read_with(self.guard);
if !C::process_media(self.guard, self.device, self.quirks_mode, media_rule)
{
continue;
}
media_rule.rules.read_with(self.guard).0.iter()
},
CssRule::Supports(ref lock) => {
let supports_rule = lock.read_with(self.guard);
if !C::process_supports(
self.guard,
self.device,
self.quirks_mode,
supports_rule,
) {
continue;<|fim▁hole|> }
};
self.stack.push(sub_iter);
return Some(rule);
}
None
}
}
/// A condition used by `RulesIterator` to decide whether to recurse into the
/// rules nested inside `@import`, `@media`, `@-moz-document` and `@supports` rules.
pub trait NestedRuleIterationCondition {
/// Whether we should process the nested rules in a given `@import` rule.
fn process_import(
guard: &SharedRwLockReadGuard,
device: &Device,
quirks_mode: QuirksMode,
rule: &ImportRule,
) -> bool;
/// Whether we should process the nested rules in a given `@media` rule.
fn process_media(
guard: &SharedRwLockReadGuard,
device: &Device,
quirks_mode: QuirksMode,
rule: &MediaRule,
) -> bool;
/// Whether we should process the nested rules in a given `@-moz-document`
/// rule.
fn process_document(
guard: &SharedRwLockReadGuard,
device: &Device,
quirks_mode: QuirksMode,
rule: &DocumentRule,
) -> bool;
/// Whether we should process the nested rules in a given `@supports` rule.
fn process_supports(
guard: &SharedRwLockReadGuard,
device: &Device,
quirks_mode: QuirksMode,
rule: &SupportsRule,
) -> bool;
}
/// A struct that represents the condition that a rule applies to the document.
pub struct EffectiveRules;
impl NestedRuleIterationCondition for EffectiveRules {
fn process_import(
guard: &SharedRwLockReadGuard,
device: &Device,
_quirks_mode: QuirksMode,
rule: &ImportRule,
) -> bool {
rule.stylesheet.is_effective_for_device(device, guard)
}
fn process_media(
guard: &SharedRwLockReadGuard,
device: &Device,
quirks_mode: QuirksMode,
rule: &MediaRule,
) -> bool {
rule.media_queries
.read_with(guard)
.evaluate(device, quirks_mode)
}
fn process_document(
_: &SharedRwLockReadGuard,
device: &Device,
_: QuirksMode,
rule: &DocumentRule,
) -> bool {
rule.condition.evaluate(device)
}
fn process_supports(
_: &SharedRwLockReadGuard,
_: &Device,
_: QuirksMode,
rule: &SupportsRule,
) -> bool {
rule.enabled
}
}
/// A filter that processes all the rules in a rule list.
pub struct AllRules;
impl NestedRuleIterationCondition for AllRules {
fn process_import(
_: &SharedRwLockReadGuard,
_: &Device,
_: QuirksMode,
_: &ImportRule,
) -> bool {
true
}
fn process_media(_: &SharedRwLockReadGuard, _: &Device, _: QuirksMode, _: &MediaRule) -> bool {
true
}
fn process_document(
_: &SharedRwLockReadGuard,
_: &Device,
_: QuirksMode,
_: &DocumentRule,
) -> bool {
true
}
fn process_supports(
_: &SharedRwLockReadGuard,
_: &Device,
_: QuirksMode,
_: &SupportsRule,
) -> bool {
true
}
}
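// Illustrative sketch (not part of the original file): a custom condition can
// implement the same trait; for example, one that never descends into
// `@supports` rules could look roughly like this:
//
//     pub struct SkipSupports;
//     impl NestedRuleIterationCondition for SkipSupports {
//         fn process_import(_: &SharedRwLockReadGuard, _: &Device, _: QuirksMode, _: &ImportRule) -> bool { true }
//         fn process_media(_: &SharedRwLockReadGuard, _: &Device, _: QuirksMode, _: &MediaRule) -> bool { true }
//         fn process_document(_: &SharedRwLockReadGuard, _: &Device, _: QuirksMode, _: &DocumentRule) -> bool { true }
//         fn process_supports(_: &SharedRwLockReadGuard, _: &Device, _: QuirksMode, _: &SupportsRule) -> bool { false }
//     }
//
// A `RulesIterator` parameterized with `SkipSupports` would then skip
// `@supports` rules (and everything nested inside them) entirely.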
/// An iterator over all the effective rules of a stylesheet.
///
/// NOTE: This iterator recurses into `@import` rules.
pub type EffectiveRulesIterator<'a, 'b> = RulesIterator<'a, 'b, EffectiveRules>;<|fim▁end|>
|
}
supports_rule.rules.read_with(self.guard).0.iter()
},
|
<|file_name|>stream.tsx<|end_file_name|><|fim▁begin|>/**
* Converts a stream query to an object representation, with
* keys representing tag names, and the magic __text key
* representing the text component of the search.
*
* Example:
*
* "python is:unresolved assigned:[email protected]"
* => {
* __text: "python",
* is: "unresolved",
* assigned: "[email protected]"
* }
*/
export type QueryObj = Record<string, string>;
export function queryToObj(queryStr = ''): QueryObj {
const text: string[] = [];
const queryItems = queryStr.match(/\S+:"[^"]*"?|\S+/g);
const queryObj: QueryObj = (queryItems || []).reduce((obj, item) => {
const index = item.indexOf(':');
if (index === -1) {
text.push(item);
} else {
const tagKey = item.slice(0, index);
const value = item.slice(index + 1).replace(/^"|"$/g, '');
obj[tagKey] = value;
}
return obj;
}, {});
queryObj.__text = '';<|fim▁hole|> return queryObj;
}
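// Illustrative example (added; not part of the original module). Assuming the
// behaviour described in the doc comment above, a hypothetical round-trip is:
//   queryToObj('python is:unresolved assigned:"foo bar"')
//     => {__text: 'python', is: 'unresolved', assigned: 'foo bar'}
//   objToQuery({__text: 'python', is: 'unresolved', assigned: 'foo bar'})
//     => 'is:unresolved assigned:"foo bar" python'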
/**
* Converts an object representation of a stream query to a string
* (consumable by the Sentry stream HTTP API).
*/
export function objToQuery(queryObj: QueryObj): string {
const {__text, ...tags} = queryObj;
const parts = Object.entries(tags).map(([tagKey, value]) => {
if (value.indexOf(' ') > -1) {
value = `"${value}"`;
}
return `${tagKey}:${value}`;
});
if (queryObj.__text) {
parts.push(queryObj.__text);
}
return parts.join(' ');
}<|fim▁end|>
|
if (text.length) {
queryObj.__text = text.join(' ');
}
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
pub mod data;<|fim▁end|>
| |
<|file_name|>hash.go<|end_file_name|><|fim▁begin|>/*
A hashtable class, supports a more efficient lookup than standard dynamic relations
(c) 2001-2006 F.G. McCabe
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact: Francis McCabe <[email protected]>
*/
go.hash{
import go.io.
hash[Ky,Vl] <~ {
insert:[Ky,Vl]*.
find:[Ky]=>Vl.
present:[Ky+,Vl]{}.
delete:[Ky]*.
ext:[]=>list[(Ky,Vl)].
keys:[]=>list[Ky].
values:[]=>list[Vl].
count:[]=>integer
}.
hash:[list[(Ktp,Vtp)],integer]@>hash[Ktp,Vtp].
hash(I,Size)..{
table:opaque := __newhash(max(Size,listlen(I)*2)).
${<|fim▁hole|> insert(Ky,Vl) -> sync{ table:=__hashinsert(table,Ky,Vl)}
onerror(
error(_,Code) ->
raise error("insert",Code)
).
find(Ky) => valof{
sync{
__hashsearch(table,Ky,Value) ?
valis Value
| raise error("find",'eNOTFND')
}
}.
present(Ky,Value) :-
action{ sync{ __hashsearch(table,Ky,Value) ? valis true | valis false}}.
count() => valof{
sync{
valis __hashcount(table);
}
}.
delete(Ky) -> sync{
__hashdelete(table,Ky)
} onerror(
error(_,Code) ->
raise error("delete",Code)
).
ext() => valof{
sync{
valis __hashcontents(table)
} onerror(
error(_,Code) ->
raise error("ext",Code)
)
}.
keys() => valof{
sync{
valis __hashkeys(table)
} onerror(
error(_,Code) ->
raise error("keys",Code)
)
}.
values() => valof{
sync{
valis __hashvalues(table)
} onerror(
error(_,Code) ->
raise error("ext",Code)
)
}.
}.
}<|fim▁end|>
|
((H,V) in I *> table:=__hashinsert(table,H,V));
}.
|
<|file_name|>1.cc<|end_file_name|><|fim▁begin|>// 2001-06-14 Benjamin Kosnik <[email protected]>
<|fim▁hole|>//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// 20.4.1.1 allocator members
#include <memory>
#include <stdexcept>
#include <cstdlib>
#include <testsuite_hooks.h>
struct gnu { };
bool check_new = false;
bool check_delete = false;
void*
operator new(std::size_t n) throw(std::bad_alloc)
{
check_new = true;
return std::malloc(n);
}
void operator delete(void *v) throw()
{
check_delete = true;
return std::free(v);
}
void test01()
{
bool test __attribute__((unused)) = true;
std::allocator<gnu> obj;
  // NB: These should work for various sized allocations and
  // deallocations. Currently, they only work as expected for sizes >
  // _MAX_BYTES as defined in stl_alloc.h, which happens to be 128.
gnu* pobj = obj.allocate(256);
VERIFY( check_new );
obj.deallocate(pobj, 256);
VERIFY( check_delete );
}
int main()
{
test01();
return 0;
}<|fim▁end|>
|
// Copyright (C) 2001, 2002, 2004, 2005, 2009 Free Software Foundation, Inc.
|
<|file_name|>TagRequestResponseHandlerTest.java<|end_file_name|><|fim▁begin|>package com.mdsgpp.cidadedemocratica.requester;
import android.test.AndroidTestCase;
import com.mdsgpp.cidadedemocratica.model.Tag;
import org.json.JSONArray;
import org.json.JSONException;
import org.junit.Test;
import java.util.ArrayList;
/**<|fim▁hole|> * Created by andreanmasiro on 04/11/16.
*/
public class TagRequestResponseHandlerTest extends AndroidTestCase implements RequestUpdateListener {
TagRequestResponseHandler handler = new TagRequestResponseHandler();
ArrayList<Tag> response = null;
Tag tag;
int id = 1;
String tagName = "internet";
String errorMessage = null;
@Override
protected void setUp() {
handler.setRequestUpdateListener(this);
}
@Override
protected void tearDown() throws Exception {
response = null;
errorMessage = null;
}
@Test
public void testOnSuccess() throws JSONException {
JSONArray jsonTag = new JSONArray("[{\"id\":" + id + ",\"name\":\"" + tagName + "\",\"relevancia\":982100}]");
handler.onSuccess(200, null, jsonTag);
assertEquals(id, tag.getId());
assertEquals(tagName, tag.getName());
}
@Test
public void testOnFailure() {
int errorCode = 500;
handler.onFailure(errorCode, null, null);
assertEquals(errorMessage, String.valueOf(errorCode));
}
@Test
public void testCompareTags() {
Tag t1 = new Tag(0, "", 0, 0);
Tag t2 = new Tag(0, "", 0, 0);
assertEquals(t1.compareTo(t2), handler.compare(t1, t2));
}
@Override
public void afterSuccess(RequestResponseHandler handler, Object response) {
ArrayList<Tag> tags = (ArrayList<Tag>) response;
tag = tags.get(0);
}
@Override
public void afterError(RequestResponseHandler handler, String message) {
errorMessage = message;
}
}<|fim▁end|>
| |
<|file_name|>test_extension.py<|end_file_name|><|fim▁begin|>import unittest
from mopidy_tunein import Extension
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn("[tunein]", config)
self.assertIn("enabled = true", config)
def test_get_config_schema(self):
ext = Extension()
<|fim▁hole|>
self.assertIn("timeout", schema)
self.assertIn("filter", schema)<|fim▁end|>
|
schema = ext.get_config_schema()
|
<|file_name|>morautils.py<|end_file_name|><|fim▁begin|># Utility functions for OpenMORA scripts
#
# Part of OpenMora - https://github.com/OpenMORA
import os, sys, string
import platform
import yaml
def get_mora_paths():
""" Returns a list of paths with MORA modules, from the env var MORA_PATH
"""
if not 'MORA_PATH' in os.environ:
print('**ERROR** Environment variable MORA_PATH not set')
sys.exit(1)
sMoraPaths=os.environ['MORA_PATH'];
if platform.system()=="Windows":
sPathDelim = ";"
else:
sPathDelim = ":"
morabase_dir="";
return sMoraPaths.split(sPathDelim)
def get_morabase_dir():
""" Returns the path of "mora-base" pkg
"""
mora_paths = get_mora_paths() # Get env vars
for p in mora_paths:
tstPath = os.path.normpath(p + "/mora-base")
if os.path.exists(tstPath):
morabase_dir = tstPath
if (len(morabase_dir)==0) or (not os.path.exists(morabase_dir)):
print("Couldn't detect mora-base in MORA_PATH!!")
sys.exit(1)
return morabase_dir
import sys, math
def progress(percent):
''' source: http://gunslingerc0de.wordpress.com/2010/08/13/python-command-line-progress-bar/ '''
width = 74
marks = math.floor(width * (percent / 100.0))
spaces = math.floor(width - marks)
loader = '[' + ('=' * int(marks)) + (' ' * int(spaces)) + ']'
if percent >= 100:
percent = 100
sys.stdout.write("%s %d%%\r" % (loader, percent))
if percent >= 100:
pass
sys.stdout.write("\n")
sys.stdout.flush()
<|fim▁hole|>def get_pkgs_root():
'''Returns the path to the parent directory of mora-base'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
return pkgs_root
def read_distro_file():
'''Returns the yaml contents of the distro file'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
sDistroFile = os.path.normpath( morabase_dir + "/distro/openmora-pkgs.yaml")
assert os.path.exists(sDistroFile)
assert os.path.exists(pkgs_root + "/mora-base")
# Parse distro file:
fil = open(sDistroFile, 'r')
distro = yaml.load(fil)
fil.close()
#print distro
return distro<|fim▁end|>
| |
<|file_name|>basic.go<|end_file_name|><|fim▁begin|>package main
import (
"github.com/gopher-net/gnet-ctl/Godeps/_workspace/src/github.com/Sirupsen/logrus"
)
var log = logrus.New()
func init() {
log.Formatter = new(logrus.JSONFormatter)
log.Formatter = new(logrus.TextFormatter) // default
}
func main() {
defer func() {
err := recover()
if err != nil {
log.WithFields(logrus.Fields{
"omg": true,
"err": err,
"number": 100,<|fim▁hole|> }).Fatal("The ice breaks!")
}
}()
log.WithFields(logrus.Fields{
"animal": "walrus",
"size": 10,
}).Info("A group of walrus emerges from the ocean")
log.WithFields(logrus.Fields{
"omg": true,
"number": 122,
}).Warn("The group's number increased tremendously!")
log.WithFields(logrus.Fields{
"animal": "orca",
"size": 9009,
}).Panic("It's over 9000!")
}<|fim▁end|>
| |
<|file_name|>help.js<|end_file_name|><|fim▁begin|>"use strict";
var os = require("os");
var fs = require('fs');
var settings = require("../config.js").settings;
exports.module = function() {
this.onCommand_help = function(nick, command) {
var chan = this.channel;
fs.readFile('./package.json', 'utf-8', function(err, data) {
if (!err) {
chan.say(settings.globalNick + " v" + JSON.parse(data).version + " by Dirbaio, Nina, LifeMushroom, and AlphaTech. Running on Node.js " + process.versions.node + " (" + os.type() + " " + os.release() + " " + os.arch() + ").");
chan.say("For a list of available commands, check http://v.gd/TheBotCommands");
} else {
                console.error("Error opening ./package.json... Did you delete it?");
}<|fim▁hole|><|fim▁end|>
|
});
};
};
|
<|file_name|>synth.py<|end_file_name|><|fim▁begin|># Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
import logging
logging.basicConfig(level=logging.DEBUG)
gapic = gcp.GAPICGenerator()
v1_library = gapic.ruby_library(
'datastore', 'v1',
config_path='/google/datastore/artman_datastore.yaml',
artman_output_name='google-cloud-ruby/google-cloud-datastore'
)
s.copy(v1_library / 'lib/google/cloud/datastore/v1')
s.copy(v1_library / 'lib/google/datastore/v1')
# Omitting lib/google/cloud/datastore/v1.rb for now because we are not exposing
# the low-level API.
# Support for service_address
s.replace(
'lib/google/cloud/datastore/v*/*_client.rb',
'\n(\\s+)#(\\s+)@param exception_transformer',
'\n\\1#\\2@param service_address [String]\n' +<|fim▁hole|> '\\1#\\2@param service_port [Integer]\n' +
'\\1#\\2 Override for the service port, or `nil` to leave as the default.\n' +
'\\1#\\2@param exception_transformer'
)
s.replace(
'lib/google/cloud/datastore/v*/*_client.rb',
'\n(\\s+)metadata: nil,\n\\s+exception_transformer: nil,\n',
'\n\\1metadata: nil,\n\\1service_address: nil,\n\\1service_port: nil,\n\\1exception_transformer: nil,\n'
)
s.replace(
'lib/google/cloud/datastore/v*/*_client.rb',
'service_path = self\\.class::SERVICE_ADDRESS',
'service_path = service_address || self.class::SERVICE_ADDRESS'
)
s.replace(
'lib/google/cloud/datastore/v*/*_client.rb',
'port = self\\.class::DEFAULT_SERVICE_PORT',
'port = service_port || self.class::DEFAULT_SERVICE_PORT'
)
# https://github.com/googleapis/gapic-generator/issues/2124
s.replace(
'lib/google/cloud/datastore/v1/credentials.rb',
'SCOPE = \[[^\]]+\]\.freeze',
'SCOPE = ["https://www.googleapis.com/auth/datastore"].freeze')
# https://github.com/googleapis/gapic-generator/issues/2243
s.replace(
'lib/google/cloud/datastore/v1/*_client.rb',
'(\n\\s+class \\w+Client\n)(\\s+)(attr_reader :\\w+_stub)',
'\\1\\2# @private\n\\2\\3')
# https://github.com/googleapis/gapic-generator/issues/2279
s.replace(
'lib/**/*.rb',
'\\A(((#[^\n]*)?\n)*# (Copyright \\d+|Generated by the protocol buffer compiler)[^\n]+\n(#[^\n]*\n)*\n)([^\n])',
'\\1\n\\6')
# https://github.com/googleapis/google-cloud-ruby/issues/3058
s.replace(
'lib/google/cloud/datastore/v1/*_client.rb',
'(require \".*credentials\"\n)\n',
'\\1require "google/cloud/datastore/version"\n\n'
)
s.replace(
'lib/google/cloud/datastore/v1/*_client.rb',
'Gem.loaded_specs\[.*\]\.version\.version',
'Google::Cloud::Datastore::VERSION'
)<|fim▁end|>
|
'\\1#\\2 Override for the service hostname, or `nil` to leave as the default.\n' +
|
<|file_name|>abs.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::arithmetic::traits::{Abs, AbsAssign, UnsignedAbs};
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::runner::Runner;
use malachite_nz_test_util::bench::bucketers::{
integer_bit_bucketer, triple_1_integer_bit_bucketer, triple_3_integer_bit_bucketer,
};
use malachite_nz_test_util::generators::{
integer_gen, integer_gen_nrm, integer_integer_natural_triple_gen,
};
use num::Signed;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_integer_abs);
register_demo!(runner, demo_integer_abs_ref);
register_demo!(runner, demo_integer_abs_assign);
register_demo!(runner, demo_integer_unsigned_abs);
register_demo!(runner, demo_integer_unsigned_abs_ref);
register_demo!(runner, demo_integer_unsigned_abs_ref_out);
register_demo!(runner, demo_integer_mutate_unsigned_abs);
register_bench!(runner, benchmark_integer_abs_library_comparison);
register_bench!(runner, benchmark_integer_abs_evaluation_strategy);
register_bench!(runner, benchmark_integer_abs_assign);
register_bench!(runner, benchmark_integer_unsigned_abs_evaluation_strategy);
register_bench!(runner, benchmark_integer_mutate_unsigned_abs);
}
fn demo_integer_abs(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("|{}| = {}", n.clone(), n.abs());
}
}
fn demo_integer_abs_ref(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("|&{}| = {}", n, (&n).abs());
}
}
fn demo_integer_abs_assign(gm: GenMode, config: GenConfig, limit: usize) {
for mut n in integer_gen().get(gm, &config).take(limit) {
let n_old = n.clone();
n.abs_assign();
println!("n := {}; n.abs_assign(); n = {}", n_old, n);
}
}
fn demo_integer_unsigned_abs(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("unsigned_abs({}) = {}", n.clone(), n.unsigned_abs());
}
}
fn demo_integer_unsigned_abs_ref(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("unsigned_abs(&{}) = {}", n, (&n).unsigned_abs());
}
}
fn demo_integer_unsigned_abs_ref_out(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("{}.unsigned_abs_ref() = {}", n, n.unsigned_abs_ref());
}
}
fn demo_integer_mutate_unsigned_abs(gm: GenMode, config: GenConfig, limit: usize) {
for (mut n, out, new_abs) in integer_integer_natural_triple_gen()
.get(gm, &config)
.take(limit)
{
let old_n = n.clone();
let old_out = out.clone();
let old_new_abs = new_abs.clone();
let actual_out = n.mutate_unsigned_abs(|x| {
*x = new_abs;
out
});
println!(
"n := {}; n.mutate_unsigned_abs(|x| {{ *x = {}; {} }}) = {}; n = {}",
old_n, old_new_abs, old_out, actual_out, n
);
}
}
fn benchmark_integer_abs_library_comparison(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.abs()",
BenchmarkType::LibraryComparison,
integer_gen_nrm().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_3_integer_bit_bucketer("x"),
&mut [
("Malachite", &mut |(_, _, n)| no_out!(n.abs())),
("num", &mut |(n, _, _)| no_out!(n.abs())),
("rug", &mut |(_, n, _)| no_out!(n.abs().cmp0())),
],
);
}
fn benchmark_integer_abs_evaluation_strategy(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.abs()",
BenchmarkType::EvaluationStrategy,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("x"),
&mut [
("Integer.abs()", &mut |n| no_out!(n.abs())),
("(&Integer).abs()", &mut |n| no_out!((&n).abs())),
],
);
}
fn benchmark_integer_abs_assign(gm: GenMode, config: GenConfig, limit: usize, file_name: &str) {
run_benchmark(
"Integer.abs_assign()",
BenchmarkType::Single,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("x"),
&mut [("Malachite", &mut |mut n| n.abs_assign())],
);
}
fn benchmark_integer_unsigned_abs_evaluation_strategy(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.unsigned_abs()",
BenchmarkType::EvaluationStrategy,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("x"),
&mut [
("Integer.unsigned_abs()", &mut |n| no_out!(n.unsigned_abs())),
("(&Integer).unsigned_abs()", &mut |n| {
no_out!((&n).unsigned_abs())
}),
("Integer.unsigned_abs_ref()", &mut |n| {
no_out!(n.unsigned_abs_ref())
}),
],
);
}
fn benchmark_integer_mutate_unsigned_abs(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,<|fim▁hole|> "Integer.mutate_unsigned_abs(FnOnce(&mut Natural) -> T)",
BenchmarkType::Single,
integer_integer_natural_triple_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_1_integer_bit_bucketer("x"),
&mut [("Malachite", &mut |(mut n, out, new_abs)| {
no_out!(n.mutate_unsigned_abs(|x| {
*x = new_abs;
out
}))
})],
);
}<|fim▁end|>
|
) {
run_benchmark(
|
<|file_name|>SegmentReader.java<|end_file_name|><|fim▁begin|>package org.apache.lucene.index;
/**
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.*;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.search.DefaultSimilarity;
/**
* @version $Id: SegmentReader.java 329523 2005-10-30 05:37:11Z yonik $
*/
class SegmentReader extends IndexReader {
private String segment;
FieldInfos fieldInfos;
private FieldsReader fieldsReader;
TermInfosReader tis;
TermVectorsReader termVectorsReaderOrig = null;
ThreadLocal termVectorsLocal = new ThreadLocal();
BitVector deletedDocs = null;
private boolean deletedDocsDirty = false;
private boolean normsDirty = false;
private boolean undeleteAll = false;
IndexInput freqStream;
IndexInput proxStream;
// Compound File Reader when based on a compound file segment
CompoundFileReader cfsReader = null;
private class Norm {
public Norm(IndexInput in, int number)
{
this.in = in;
this.number = number;
}
private IndexInput in;
private byte[] bytes;
private boolean dirty;
private int number;
private void reWrite() throws IOException {
// NOTE: norms are re-written in regular directory, not cfs
IndexOutput out = directory().createOutput(segment + ".tmp");
try {
out.writeBytes(bytes, maxDoc());
} finally {
out.close();
}
String fileName;
if(cfsReader == null)
fileName = segment + ".f" + number;
else{
// use a different file name if we have compound format
fileName = segment + ".s" + number;
}
directory().renameFile(segment + ".tmp", fileName);
this.dirty = false;
}
}
private Hashtable norms = new Hashtable();
/** The class which implements SegmentReader. */
private static Class IMPL;
static {
try {
String name =
System.getProperty("org.apache.lucene.SegmentReader.class",
SegmentReader.class.getName());
IMPL = Class.forName(name);
} catch (ClassNotFoundException e) {
throw new RuntimeException("cannot load SegmentReader class: " + e);
} catch (SecurityException se) {
try {
IMPL = Class.forName(SegmentReader.class.getName());
} catch (ClassNotFoundException e) {
throw new RuntimeException("cannot load default SegmentReader class: " + e);
}
}
}
protected SegmentReader() { super(null); }
public static SegmentReader get(SegmentInfo si) throws IOException {
return get(si.dir, si, null, false, false);
}
public static SegmentReader get(SegmentInfos sis, SegmentInfo si,
boolean closeDir) throws IOException {
return get(si.dir, si, sis, closeDir, true);
}
public static SegmentReader get(Directory dir, SegmentInfo si,
SegmentInfos sis,
boolean closeDir, boolean ownDir)
throws IOException {
SegmentReader instance;
try {
instance = (SegmentReader)IMPL.newInstance();
} catch (Exception e) {
throw new RuntimeException("cannot load SegmentReader class: " + e);
}
instance.init(dir, sis, closeDir, ownDir);
instance.initialize(si);
return instance;
}
private void initialize(SegmentInfo si) throws IOException {
segment = si.name;
// Use compound file directory for some files, if it exists
Directory cfsDir = directory();
if (directory().fileExists(segment + ".cfs")) {
cfsReader = new CompoundFileReader(directory(), segment + ".cfs");
cfsDir = cfsReader;
}
// No compound file exists - use the multi-file format
fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
fieldsReader = new FieldsReader(cfsDir, segment, fieldInfos);
tis = new TermInfosReader(cfsDir, segment, fieldInfos);
// NOTE: the bitvector is stored using the regular directory, not cfs
if (hasDeletions(si))
deletedDocs = new BitVector(directory(), segment + ".del");
// make sure that all index files have been read or are kept open
// so that if an index update removes them we'll still have them
freqStream = cfsDir.openInput(segment + ".frq");
proxStream = cfsDir.openInput(segment + ".prx");
openNorms(cfsDir);
if (fieldInfos.hasVectors()) { // open term vector files only as needed
termVectorsReaderOrig = new TermVectorsReader(cfsDir, segment, fieldInfos);
}
}
protected void finalize() {
// patch for pre-1.4.2 JVMs, whose ThreadLocals leak
termVectorsLocal.set(null);
super.finalize();
}
protected void doCommit() throws IOException {
if (deletedDocsDirty) { // re-write deleted
deletedDocs.write(directory(), segment + ".tmp");
directory().renameFile(segment + ".tmp", segment + ".del");
}
if(undeleteAll && directory().fileExists(segment + ".del")){
directory().deleteFile(segment + ".del");
}
if (normsDirty) { // re-write norms
Enumeration values = norms.elements();
while (values.hasMoreElements()) {
Norm norm = (Norm) values.nextElement();
if (norm.dirty) {
norm.reWrite();
}
}
}
deletedDocsDirty = false;
normsDirty = false;
undeleteAll = false;
}
protected void doClose() throws IOException {
fieldsReader.close();
tis.close();
if (freqStream != null)
freqStream.close();
if (proxStream != null)
proxStream.close();
closeNorms();
if (termVectorsReaderOrig != null)
termVectorsReaderOrig.close();
if (cfsReader != null)
cfsReader.close();
}
static boolean hasDeletions(SegmentInfo si) throws IOException {
return si.dir.fileExists(si.name + ".del");
}
public boolean hasDeletions() {
return deletedDocs != null;
}
static boolean usesCompoundFile(SegmentInfo si) throws IOException {
return si.dir.fileExists(si.name + ".cfs");
}
static boolean hasSeparateNorms(SegmentInfo si) throws IOException {
String[] result = si.dir.list();
String pattern = si.name + ".s";
int patternLength = pattern.length();
for(int i = 0; i < result.length; i++){
if(result[i].startsWith(pattern) && Character.isDigit(result[i].charAt(patternLength)))
return true;
}
return false;
}
protected void doDelete(int docNum) {
if (deletedDocs == null)
deletedDocs = new BitVector(maxDoc());
deletedDocsDirty = true;
undeleteAll = false;
deletedDocs.set(docNum);
}
protected void doUndeleteAll() {
deletedDocs = null;
deletedDocsDirty = false;
undeleteAll = true;
}
Vector files() throws IOException {
Vector files = new Vector(16);
for (int i = 0; i < IndexFileNames.INDEX_EXTENSIONS.length; i++) {
String name = segment + "." + IndexFileNames.INDEX_EXTENSIONS[i];
if (directory().fileExists(name))
files.addElement(name);
}
for (int i = 0; i < fieldInfos.size(); i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fi.isIndexed && !fi.omitNorms){
String name;
if(cfsReader == null)
name = segment + ".f" + i;
else
name = segment + ".s" + i;
if (directory().fileExists(name))
files.addElement(name);
}
}
return files;
}
public TermEnum terms() {
return tis.terms();
}
public TermEnum terms(Term t) throws IOException {
return tis.terms(t);
}
public synchronized Document document(int n) throws IOException {
if (isDeleted(n))
throw new IllegalArgumentException
("attempt to access a deleted document");
return fieldsReader.doc(n);
}
public synchronized boolean isDeleted(int n) {
return (deletedDocs != null && deletedDocs.get(n));
}
public TermDocs termDocs() throws IOException {
return new SegmentTermDocs(this);
}
public TermPositions termPositions() throws IOException {
return new SegmentTermPositions(this);
}
public int docFreq(Term t) throws IOException {
TermInfo ti = tis.get(t);
if (ti != null)
return ti.docFreq;
else
return 0;
}
public int numDocs() {
int n = maxDoc();
if (deletedDocs != null)
n -= deletedDocs.count();
return n;
}
public int maxDoc() {
return fieldsReader.size();
}
/**
* @see IndexReader#getFieldNames()
* @deprecated Replaced by {@link #getFieldNames (IndexReader.FieldOption fldOption)}
*/
public Collection getFieldNames() {
// maintain a unique set of field names
Set fieldSet = new HashSet();
for (int i = 0; i < fieldInfos.size(); i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
fieldSet.add(fi.name);
}
return fieldSet;
}
/**
* @see IndexReader#getFieldNames(boolean)
* @deprecated Replaced by {@link #getFieldNames (IndexReader.FieldOption fldOption)}
*/
public Collection getFieldNames(boolean indexed) {
// maintain a unique set of field names
Set fieldSet = new HashSet();
for (int i = 0; i < fieldInfos.size(); i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fi.isIndexed == indexed)
fieldSet.add(fi.name);
}
return fieldSet;
}
/**
* @see IndexReader#getIndexedFieldNames(Field.TermVector tvSpec)
* @deprecated Replaced by {@link #getFieldNames (IndexReader.FieldOption fldOption)}
*/
public Collection getIndexedFieldNames (Field.TermVector tvSpec){
boolean storedTermVector;
boolean storePositionWithTermVector;
boolean storeOffsetWithTermVector;
if(tvSpec == Field.TermVector.NO){
storedTermVector = false;
storePositionWithTermVector = false;
storeOffsetWithTermVector = false;
}
else if(tvSpec == Field.TermVector.YES){
storedTermVector = true;
storePositionWithTermVector = false;
storeOffsetWithTermVector = false;
}
else if(tvSpec == Field.TermVector.WITH_POSITIONS){
storedTermVector = true;
storePositionWithTermVector = true;
storeOffsetWithTermVector = false;
}
else if(tvSpec == Field.TermVector.WITH_OFFSETS){
storedTermVector = true;
storePositionWithTermVector = false;
storeOffsetWithTermVector = true;
}
else if(tvSpec == Field.TermVector.WITH_POSITIONS_OFFSETS){
storedTermVector = true;
storePositionWithTermVector = true;
storeOffsetWithTermVector = true;
}
else{
throw new IllegalArgumentException("unknown termVector parameter " + tvSpec);
}
// maintain a unique set of field names
Set fieldSet = new HashSet();
for (int i = 0; i < fieldInfos.size(); i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fi.isIndexed && fi.storeTermVector == storedTermVector &&
fi.storePositionWithTermVector == storePositionWithTermVector &&
fi.storeOffsetWithTermVector == storeOffsetWithTermVector){
fieldSet.add(fi.name);
}
}
return fieldSet;
}
/**
* @see IndexReader#getFieldNames(IndexReader.FieldOption fldOption)
*/
public Collection getFieldNames(IndexReader.FieldOption fieldOption) {
Set fieldSet = new HashSet();
for (int i = 0; i < fieldInfos.size(); i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fieldOption == IndexReader.FieldOption.ALL) {
fieldSet.add(fi.name);
}
else if (!fi.isIndexed && fieldOption == IndexReader.FieldOption.UNINDEXED) {
fieldSet.add(fi.name);
}
else if (fi.isIndexed && fieldOption == IndexReader.FieldOption.INDEXED) {
fieldSet.add(fi.name);
}
else if (fi.isIndexed && fi.storeTermVector == false && fieldOption == IndexReader.FieldOption.INDEXED_NO_TERMVECTOR) {
fieldSet.add(fi.name);
}
else if (fi.storeTermVector == true &&
fi.storePositionWithTermVector == false &&
fi.storeOffsetWithTermVector == false &&
fieldOption == IndexReader.FieldOption.TERMVECTOR) {
fieldSet.add(fi.name);
}
else if (fi.isIndexed && fi.storeTermVector && fieldOption == IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR) {
fieldSet.add(fi.name);
}
else if (fi.storePositionWithTermVector && fi.storeOffsetWithTermVector == false && fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_POSITION) {
fieldSet.add(fi.name);<|fim▁hole|> fieldSet.add(fi.name);
}
else if ((fi.storeOffsetWithTermVector && fi.storePositionWithTermVector) &&
fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET) {
fieldSet.add(fi.name);
}
}
return fieldSet;
}
public synchronized boolean hasNorms(String field) {
return norms.containsKey(field);
}
static byte[] createFakeNorms(int size) {
byte[] ones = new byte[size];
Arrays.fill(ones, DefaultSimilarity.encodeNorm(1.0f));
return ones;
}
private byte[] ones;
private byte[] fakeNorms() {
if (ones==null) ones=createFakeNorms(maxDoc());
return ones;
}
// can return null if norms aren't stored
protected synchronized byte[] getNorms(String field) throws IOException {
Norm norm = (Norm) norms.get(field);
if (norm == null) return null; // not indexed, or norms not stored
if (norm.bytes == null) { // value not yet read
byte[] bytes = new byte[maxDoc()];
norms(field, bytes, 0);
norm.bytes = bytes; // cache it
}
return norm.bytes;
}
// returns fake norms if norms aren't available
public synchronized byte[] norms(String field) throws IOException {
byte[] bytes = getNorms(field);
if (bytes==null) bytes=fakeNorms();
return bytes;
}
protected void doSetNorm(int doc, String field, byte value)
throws IOException {
Norm norm = (Norm) norms.get(field);
if (norm == null) // not an indexed field
return;
norm.dirty = true; // mark it dirty
normsDirty = true;
norms(field)[doc] = value; // set the value
}
/** Read norms into a pre-allocated array. */
public synchronized void norms(String field, byte[] bytes, int offset)
throws IOException {
Norm norm = (Norm) norms.get(field);
if (norm == null) {
System.arraycopy(fakeNorms(), 0, bytes, offset, maxDoc());
return;
}
if (norm.bytes != null) { // can copy from cache
System.arraycopy(norm.bytes, 0, bytes, offset, maxDoc());
return;
}
IndexInput normStream = (IndexInput) norm.in.clone();
try { // read from disk
normStream.seek(0);
normStream.readBytes(bytes, offset, maxDoc());
} finally {
normStream.close();
}
}
private void openNorms(Directory cfsDir) throws IOException {
for (int i = 0; i < fieldInfos.size(); i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fi.isIndexed && !fi.omitNorms) {
// look first if there are separate norms in compound format
String fileName = segment + ".s" + fi.number;
Directory d = directory();
if(!d.fileExists(fileName)){
fileName = segment + ".f" + fi.number;
d = cfsDir;
}
norms.put(fi.name, new Norm(d.openInput(fileName), fi.number));
}
}
}
private void closeNorms() throws IOException {
synchronized (norms) {
Enumeration enumerator = norms.elements();
while (enumerator.hasMoreElements()) {
Norm norm = (Norm) enumerator.nextElement();
norm.in.close();
}
}
}
/**
* Create a clone from the initial TermVectorsReader and store it in the ThreadLocal.
* @return TermVectorsReader
*/
private TermVectorsReader getTermVectorsReader() {
TermVectorsReader tvReader = (TermVectorsReader)termVectorsLocal.get();
if (tvReader == null) {
tvReader = (TermVectorsReader)termVectorsReaderOrig.clone();
termVectorsLocal.set(tvReader);
}
return tvReader;
}
/** Return a term frequency vector for the specified document and field. The
* vector returned contains term numbers and frequencies for all terms in
* the specified field of this document, if the field had storeTermVector
* flag set. If the flag was not set, the method returns null.
* @throws IOException
*/
public TermFreqVector getTermFreqVector(int docNumber, String field) throws IOException {
// Check if this field is invalid or has no stored term vector
FieldInfo fi = fieldInfos.fieldInfo(field);
if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null)
return null;
TermVectorsReader termVectorsReader = getTermVectorsReader();
if (termVectorsReader == null)
return null;
return termVectorsReader.get(docNumber, field);
}
/** Return an array of term frequency vectors for the specified document.
* The array contains a vector for each vectorized field in the document.
* Each vector vector contains term numbers and frequencies for all terms
* in a given vectorized field.
* If no such fields existed, the method returns null.
* @throws IOException
*/
public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
if (termVectorsReaderOrig == null)
return null;
TermVectorsReader termVectorsReader = getTermVectorsReader();
if (termVectorsReader == null)
return null;
return termVectorsReader.get(docNumber);
}
}<|fim▁end|>
|
}
else if (fi.storeOffsetWithTermVector && fi.storePositionWithTermVector == false && fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET) {
|
<|file_name|>LoadingFragment.java<|end_file_name|><|fim▁begin|>// vim: et sw=4 sts=4 tabstop=4
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.issc.widget;
import com.issc.R;
import android.app.DialogFragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
public class LoadingFragment extends DialogFragment {
public LoadingFragment() {
super();
setStyle(DialogFragment.STYLE_NO_FRAME, 0);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.progressdialog, container, false);
return v;
}
}<|fim▁end|>
|
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
<|file_name|>git_db.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.utils.translation import ugettext
from django.utils import simplejson as json
from django.conf import settings
from StringIO import StringIO
from gitdb import IStream
from git import *
from git.exc import InvalidGitRepositoryError
from collections import defaultdict
from datetime import datetime
import os
_hook = """#!/bin/sh
cd ..
env -i git reset --hard > /dev/null 2>&1
env -i git update-index > /dev/null 2>&1
"""
def _do_commit(repo, path, content, commit_msg=None):
""" Do a commit """
# Create the blob object
stream = StringIO(content.encode('utf-8'))
stream.seek(0, 2)
streamlen = stream.tell()
stream.seek(0)
istream = IStream('blob', streamlen, stream)
# Add it to the repository object database
repo.odb.store(istream)
# Create the corresponding Blob object
blob = Blob(repo, istream.binsha, Blob.file_mode, path.encode('utf-8'))
# Add blob to the index
repo.index.add([IndexEntry.from_blob(blob)])
if not commit_msg:
commit_msg = ugettext(u'Update Wiki: {0}').format(path).encode('utf-8')
repo.index.commit(commit_msg)
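# Illustrative usage sketch for the helper above (hedged: the repo handle and the page path are
# assumptions, not taken from this module). Given an initialized git.Repo, a call such as
#   _do_commit(repo, u'docs/page.md', u'# Hello', commit_msg='Add docs page')
# stores the blob in the object database, stages it in the index and records the commit.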
class Repository(object):
""" Repository object. """
@classmethod
def new(cls, gitdir):
""" Initialize a repository and create the root commit """
# Create repository
if os.path.exists(gitdir.encode('utf-8')):
return cls(gitdir)
repo = Repo.init(gitdir.encode('utf-8'))
repo.config_writer().set_value('receive', 'denyCurrentBranch', 'ignore')
# Create hook to automatically update when we receive commits from clients
post_receive = os.path.join(gitdir, '.git', 'hooks', 'post-receive')
with open(post_receive, 'w') as f:
f.write(_hook)
os.chmod(post_receive, 0775)
<|fim▁hole|> _do_commit(repo, u'{0}.md'.format(settings.WIKI_INDEX), '# Home', commit_msg=ugettext(u'Initialize'))
return cls(gitdir)
def __init__(self, gitdir):
""" Initialize repository. """
self.repo = Repo(gitdir.encode('utf-8'))
self.gitdir = gitdir
self.parse()
@property
def git(self):
return self.repo.git
@property
def head(self):
return self.repo.head
def parse(self):
""" Parse Tree and Blob objects. """
# Do git reset --hard and git update-index
self.repo.head.reset(index=True, working_tree=True)
self.repo.git.update_index()
self.repo_tree = self.repo.tree()
self.entries = [e for e in self.repo_tree.traverse()]
self.blobs = [b for b in self.entries if isinstance(b, Blob)]
self.trees = [self.repo_tree] + [t for t in self.entries if isinstance(t, Tree)]
def exists(self, path):
""" Check if path exists in repository. """
if path == self.repo_tree.path:
return True
for e in self.entries:
if path == e.path:
return True
return False
def is_dir(self, path):
""" Check if path is a directory. """
for t in self.trees:
if path == t.path:
return True
return False
def get_file_mimetype(self, path):
""" Get mimetype of file stored in ``path``. """
if self.is_dir(path):
return 'inode/directory'
for blob in self.blobs:
if blob.path == path:
return blob.mime_type
def set_content(self, path, content, commit_msg=None):
""" Add new content in ``path``. """
_do_commit(self.repo, path, content, commit_msg)
# Update internal informations
self.parse()
def put_uploaded_file(self, path, ufile, commit_msg=None):
""" Put an uploaded file to the repository. """
# Re-parse to be sure
self.parse()
# Get absolute path to the file
abspath = os.path.join(self.gitdir, path)
# Make directory for the file
try:
os.makedirs(os.path.dirname(abspath))
except OSError:
pass
# Write the file
with open(abspath, 'wb') as f:
for chunk in ufile.chunks():
f.write(chunk)
# Add it to the repository
import sys
print >>sys.stderr, type(path), path
self.repo.index.add([path.encode('utf-8')])
# And commit
if not commit_msg:
commit_msg = ugettext(u'Upload document: {0}').format(path).encode('utf-8')
self.repo.index.commit(commit_msg)
# Update internal informations
self.parse()
def get_content(self, path):
""" Get content of file stored in ``path``. """
for blob in self.blobs:
if blob.path == path:
return blob.data_stream.read(), blob.name, blob.mime_type
def rm_content(self, path):
""" Remove file located at ``path``. """
self.repo.index.remove([path.encode('utf-8')])
self.repo.index.commit(ugettext(u'Update Wiki: {0} deleted'.format(path)).encode('utf-8'))
self.parse()
def commit(self, message):
""" Create an empty commit """
c = Commit.create_from_tree(self.repo, self.repo.tree(), message, head=True)
def get_folder_tree(self, path):
""" Get list of files contained in ``path``. """
for tree in self.trees:
if tree.path == path:
ret = []
ret = ret + [{'path': b.path, 'name': b.name, 'type': b.mime_type} for b in tree.blobs]
ret = ret + [{'path': t.path, 'name': t.name, 'type': 'inode/directory'} for t in tree.trees]
return ret
def get_file_history(self, path):
""" Get history for a file """
return [self.repo.commit(line.split(' ', 1)[0]) for line in self.repo.git.log('--pretty=oneline', '--', path.encode('utf-8')).splitlines()]
def get_history(self, limit=None):
""" Get repository's history """
if limit:
return [self.repo.commit(line.split(' ', 1)[0]) for line in self.repo.git.log('--pretty=oneline', '-{0}'.format(limit)).splitlines()]
return [self.repo.commit(line.split(' ', 1)[0]) for line in self.repo.git.log('--pretty=oneline').splitlines()]
def get_file_diffs(self, path):
""" Get diffs for a file """
diffs = {'diffs': []}
if self.exists(path):
commits = self.get_file_history(path)
for c in commits:
diff = {
'msg': c.message,
'date': datetime.fromtimestamp(c.authored_date),
'author': c.author.name,
'sha': c.hexsha,
'path': path,
}
if c.parents:
diff['parent_sha'] = c.parents[0].hexsha
diffs['diffs'].append(diff)
return diffs
def get_diffs(self, limit=None):
""" Return repository's diffs. """
commits = self.get_history(limit=limit)
diffs = {'diffs': []}
for c in commits:
diff = {
'msg': c.message,
'date': datetime.fromtimestamp(c.authored_date),
'author': c.author.name,
'sha': c.hexsha
}
if c.parents:
diff['parent_sha'] = c.parents[0].hexsha
diffs['diffs'].append(diff)
return diffs
def get_tree(self):
""" Get full tree of repository as json. """
ret = {'node': {
'name': '/',
'path': '/',
'type': 'tree',
'children': []
}}
# Get all paths from the repository
for e in self.entries:
spath = e.path.split('/')
# We do not want the __media__ in our tree
if spath[0] == '__media__':
continue
node = ret['node']
# Build tree before inserting node
for d in spath[:-1]:
new_node = {'node': {
'name': d,
'path': e.path,
'type': 'tree',
'children': []
}}
# Search if the node is already in the tree
for n in node['children']:
if d == n['node']['name']:
new_node = n
break
# If not, just add it
else:
node['children'].append(new_node)
# Up level
node = new_node['node']
if isinstance(e, Tree):
new_node = {'node': {
'name': e.name,
'path': e.path,
'type': 'tree',
'children': []
}}
else:
new_node = {'node': {
'name': e.name,
'path': e.path,
'type': 'file'
}}
node['children'].append(new_node)
return ret
def get_json_tree(self):
return json.dumps(self.get_tree())
def search(self, pattern):
""" Search for a pattern inside the repository and returns the list of results. """
results = []
# Do the search
try:
out = self.git.grep('-i', '-I', '--cached', pattern.encode('utf-8'))
except GitCommandError:
# No results found
return []
for line in out.splitlines():
# Exclude __media__
if not line.startswith('__media__'):
sep = line.find(':')
url = line[:sep]
match = line[sep + 1:]
# Remove markdown extension
if url.endswith('.md'):
url = url[:url.rfind('.md')]
# Append to the results
results.append ((url, match))
# Group results
groups = defaultdict(list)
for result in results:
groups[result[0]].append(result[1])
results = groups.items()
return results<|fim▁end|>
|
# Create the initial commit
|
<|file_name|>vtree.cpp<|end_file_name|><|fim▁begin|>// Copyright 2017 Dan Ristic
#include "chimera/virtual/vtree.h"
<|fim▁hole|>namespace Chimera {
namespace Virtual {
VirtualElement::VirtualElement(
std::string _name,
std::vector<Attribute> _attributes)
: name{_name}
{
for (auto& attribute : _attributes) {
std::string key = attribute.getKey();
attributes[key] = attribute;
}
}
Element* VirtualElement::create(Document& document)
{
auto element = document.createElement(name);
if (attributes.count("id") == 1)
{
auto id = attributes.at("id");
element->id = id.asString();
}
if (attributes.count("class") == 1)
{
auto className = attributes.at("class");
element->className = className.asString();
}
if (attributes.count("src") == 1)
{
auto src = attributes.at("src");
auto img = dynamic_cast<Img*>(element);
if (img)
{
img->setSrc(src.asString());
}
}
if (attributes.count("onChange") == 1)
{
auto func = attributes.at("onChange");
element->on(EventType::Change, func.asCallback());
}
if (attributes.count("onMouseDown") == 1)
{
auto func = attributes.at("onMouseDown");
element->on(EventType::MouseDown, func.asCallback());
}
if (attributes.count("onCustom") == 1)
{
auto func = attributes.at("onCustom");
element->on(EventType::Custom, func.asCallback());
}
for (auto& attribute : attributes)
{
element->attributeChangedCallback(
attribute.first, "", attribute.second.asString());
}
element->textContent = textContent;
CHIMERA_DEBUG(
printf("[VElement] %s %s\n",
element->tagName.c_str(),
element->id.c_str());
)
for (auto& child : children)
{
element->append(child.create(document));
}
return element;
}
} // namespace Virtual
} // namespace Chimera<|fim▁end|>
| |
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>#[test]
fn ui() {
let t = trybuild::TestCases::new();<|fim▁hole|> // https://github.com/rust-lang/rust/issues/55779
// there is a workaround in the file.
t.pass("tests/ui/04-event-sanity-check.rs");
t.compile_fail("tests/ui/05-named-fields.rs");
t.compile_fail("tests/ui/06-no-content-field.rs");
}<|fim▁end|>
|
// rustc overflows when compiling this see:
|
<|file_name|>modify_body_inject_iframe.py<|end_file_name|><|fim▁begin|># Usage: mitmdump -s "iframe_injector.py url"
# (this script works best with --anticache)
import sys
from bs4 import BeautifulSoup
class Injector:
def __init__(self, iframe_url):
self.iframe_url = iframe_url
def response(self, flow):
if flow.request.host in self.iframe_url:
return
html = BeautifulSoup(flow.response.content, "html.parser")
if html.body:
iframe = html.new_tag(
"iframe",
src=self.iframe_url,
frameborder=0,
height=0,
width=0)<|fim▁hole|> flow.response.content = str(html).encode("utf8")
def start():
if len(sys.argv) != 2:
raise ValueError('Usage: -s "iframe_injector.py url"')
return Injector(sys.argv[1])<|fim▁end|>
|
html.body.insert(0, iframe)
|
<|file_name|>const-float-classify.rs<|end_file_name|><|fim▁begin|>// compile-flags: -Zmir-opt-level=0
// run-pass
#![feature(const_float_bits_conv)]
#![feature(const_float_classify)]
#![feature(const_trait_impl)]
// Don't promote
const fn nop<T>(x: T) -> T { x }
macro_rules! const_assert {
($a:expr, $b:expr) => {
{
const _: () = assert!($a == $b);
assert_eq!(nop($a), nop($b));
}
};
}
macro_rules! suite {
( $( $tt:tt )* ) => {
fn f32() {
suite_inner!(f32 $($tt)*);<|fim▁hole|> }
}
}
macro_rules! suite_inner {
(
$ty:ident [$( $fn:ident ),*]
$val:expr => [$($out:ident),*]
$( $tail:tt )*
) => {
$( const_assert!($ty::$fn($val), $out); )*
suite_inner!($ty [$($fn),*] $($tail)*)
};
( $ty:ident [$( $fn:ident ),*]) => {};
}
#[derive(Debug)]
struct NonDet;
impl const PartialEq<NonDet> for bool {
fn eq(&self, _: &NonDet) -> bool {
true
}
fn ne(&self, _: &NonDet) -> bool {
false
}
}
// The results of the `is_sign` methods are not checked for correctness, since LLVM does not
// guarantee anything about the signedness of NaNs. See
// https://github.com/rust-lang/rust/issues/55131.
suite! {
[is_nan, is_infinite, is_finite, is_normal, is_sign_positive, is_sign_negative]
-0.0 / 0.0 => [ true, false, false, false, NonDet, NonDet]
0.0 / 0.0 => [ true, false, false, false, NonDet, NonDet]
1.0 => [ false, false, true, true, true, false]
-1.0 => [ false, false, true, true, false, true]
0.0 => [ false, false, true, false, true, false]
-0.0 => [ false, false, true, false, false, true]
1.0 / 0.0 => [ false, true, false, false, true, false]
-1.0 / 0.0 => [ false, true, false, false, false, true]
}
fn main() {
f32();
f64();
}<|fim▁end|>
|
}
fn f64() {
suite_inner!(f64 $($tt)*);
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
""" Additional extras go here.
"""
|
<|file_name|>articles.client.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('articles').controller('ArticlesController', ['$scope', '$stateParams', '$location', 'Authentication', 'Articles',
function($scope, $stateParams, $location, Authentication, Articles) {
$scope.authentication = Authentication;
$scope.create = function() {
var article = new Articles({
title: this.title,
content: this.content
});
article.$save(function(response) {
$location.path('articles/' + response._id);
}, function(errorResponse) {
$scope.error = errorResponse.data.message;
});
this.title = '';
this.content = '';
};
$scope.remove = function(article) {
if (article) {
article.$remove();
for (var i in $scope.articles) {
if ($scope.articles[i] === article) {
$scope.articles.splice(i, 1);
}
}
} else {
$scope.article.$remove(function() {
$location.path('articles');
});<|fim▁hole|> };
$scope.update = function() {
var article = $scope.article;
article.$update(function() {
$location.path('articles/' + article._id);
}, function(errorResponse) {
$scope.error = errorResponse.data.message;
});
};
$scope.find = function() {
$scope.articles = Articles.query();
};
$scope.findOne = function () {
$scope.article = Articles.get({
articleId: $stateParams.articleId
});
};
}
]);<|fim▁end|>
|
}
|
<|file_name|>lrcserv.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
#auther mengskysama
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.httpclient
import lcs
from urllib import quote
from urllib import unquote
from tornado import gen
import ttlrcdump
listen_port = 38439
def ChooiseItem(xml, artist):
#print '==============================================='
#print xml.decode('utf-8').encode('gbk')
n = xml.find('<?xml')
if n == -1:
return False
artist = ttlrcdump.FilterSearchStr(artist)
#remove item if artist != artist
n = 0
pos = 0
t = xml.count('id=')
for n in range(0, t):
begin = xml.find('artist="', pos)
end = xml.find('" title', begin)
_artist = ttlrcdump.FilterSearchStr(xml[begin+8:end])
pos = end
n += 1
arith = lcs.arithmetic()
samelen = len(arith.lcs(_artist,artist))
#print samelen
if samelen < 5 and samelen < len(artist)/3 :
begin = xml.rfind('<lrc',0 ,pos)
end = xml.find('lrc>', pos)
xml = xml[:begin] + xml[end + 4:]
pos = begin
n -= 1
t -= 1
#print xml.decode('utf-8').encode('gbk')
#print '==============================================='
n = xml.find('id=')
if n == -1:
return False
#remove item if artist != artist
n = 0
begin = xml.find('artist="', n)
end = xml.find('" title', n)
n = end
_artist = ttlrcdump.FilterSearchStr(xml[begin+10:end])
strs = ('动新','動新','动基','对照','對照','中日','中英','修正','假名')
for _str in strs:
n = xml.find(_str)
if n != -1:
break
if n == -1:
n = xml.find('<lrc')
else:
n = xml.rfind('<lrc', 0, n)
if n > -1:
begin = xml.find('id="', n) + 4
end = xml.find('"', begin)
#print xml[begin:end]
id = xml[begin:end]
begin = xml.find('artist="', n) + 8
end = xml.find('"', begin )
#print quote(xml[begin:end])
        artist = xml[begin:end].replace('&amp;','&').replace('&apos;',"'").replace('&quot;','"').replace('&lt;','<').replace('&gt;','>')
begin = xml.find('title="', n) + 7
end = xml.find('"', begin)
#print quote(xml[begin + 7:end])
        title = xml[begin:end].replace('&amp;','&').replace('&apos;',"'").replace('&quot;','"').replace('&lt;','<').replace('&gt;','>')
#ret = "id=%s&artist=%s&title=%s" % (id, quote(artist), quote(title))
#print ret
data = {'id':id, 'artist':artist, 'title':title}
return data
return False
def get_arg(req, arg):
begin = req.find('%s=' % arg)
if begin != -1:
begin += len(arg) + 1
end = req.find('&', begin)
if end != -1:
return req[begin:end]
else:
return req[begin:]
@gen.coroutine
def handle_request(request):
if request.uri.startswith('/lrc'):
try:
id = get_arg(request.uri, 'id')
artist = unquote(get_arg(request.uri, 'artist'))
title = unquote(get_arg(request.uri, 'title'))
ttservernum = int(get_arg(request.uri, 'ttservernum'))
#print id.decode('utf-8').encode('gbk')
#print artist.decode('utf-8').encode('gbk')
#print title.decode('utf-8').encode('gbk')
print str(ttservernum)
http_client = tornado.httpclient.AsyncHTTPClient()
#print ttlrcdump.GetDownloadLrcReq(id, artist, title)
req = tornado.httpclient.HTTPRequest(ttlrcdump.GetDownloadLrcReq(ttservernum, id, artist, title))
res = yield http_client.fetch(req)
lrc = res.body.replace('>', '】')
lrc = lrc.replace('<', '【')
lrc = lrc.replace('\r\n', '<br />')
lrc = lrc.replace('\n', '<br />')
lrc = lrc.replace('\r', '<br />')
context = '<script type="text/javascript" src="/templates/ddjs/lrc_content_inner_1.js"></script></div>%s</li>'
context = context.replace('%s',lrc, 1)
#print context
request.write('HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n%s' % (len(context), context))
except tornado.httpclient.HTTPError, code:
print 'HTTPError except Code' + str(code)
except Exception,e:
print e
finally:
request.finish()
elif (request.uri.find('/?keyword=') != -1):
uri = request.uri.decode('gbk').replace('%20',' ')
if uri.find('&') != -1:
keyword = uri[10:uri.find('&')]
else:keyword = uri[10:]<|fim▁hole|> keyword = keyword.decode('utf-8')
#print repr(keyword)
keyword = eval(repr(keyword)[1:])
#print repr(keyword)
keyword = keyword.decode('gbk').encode('utf-8')
#print keyword.decode('utf-8').encode('gbk')
#print repr(keyword)
try:
if keyword.count(' ') == 0:
keyword += ' '
n = 0
ttservernum = 0
cnt = keyword.count(' ')
for i in range(0, cnt):
#try to prase art and title
n = keyword.find(' ', n) + 1
artist = keyword[0:n-1]
title = keyword[n:]
#print 'title %s' % title
if title.startswith( '(') and i < cnt - 1:
                    # A song title normally would not start with an opening parenthesis
continue
#print 'guess art=%s' % artist.decode('utf-8').encode('gbk')
#print 'guess tit=%s' % title.decode('utf-8').encode('gbk')
trycnt = 0
if artist.find('and') == -1 and title.find('and') == -1:
trycnt = 1
while True:
reqartist = ''
reqtitle = ''
if trycnt == 0:
reqartist = artist.replace('and', '')
reqtitle = title.replace('and', '')
elif trycnt == 1:
reqartist = artist
reqtitle = title
http_client = tornado.httpclient.AsyncHTTPClient()
#print ttlrcdump.GetSearchLrcReq(ttservernum, artist, title)
ttservernum = ttlrcdump.GetServerNum()
req = tornado.httpclient.HTTPRequest(ttlrcdump.GetSearchLrcReq(ttservernum, reqartist, reqtitle))
res = yield http_client.fetch(req)
ret = ChooiseItem(res.body, artist)
if ret != False or trycnt > 0:
break
trycnt += 1
if ret != False:
break
if ret != False:
context = '<div class="newscont mb15" style="line-height:160%;margin-top:10px">' \
'歌手:<a class="mr">%s</a><br>' \
'专辑:<a class="mr"></a>' \
'歌曲:<a class="mr ">%s<span class="highlighter">a</span></a><br>' \
'查看:<a class="mr"href="%s" target="_blank">LRC' \
'<div style="clear:both;"></div>' \
'<div class="page wid f14">'
context = context.replace('%s', artist, 1)
uni_title = title.decode('utf-8')
strrep = ''
for i in range(0, len(uni_title)):
strrep += '<span class="highlighter">%s</span>' % uni_title[i].encode('utf-8')
context = context.replace('%s', strrep, 1)
context = context.replace('%s', "/lrc/?id=%s&artist=%s&title=%s&ttservernum=%s" % (str(ret['id']), quote(str(ret['artist'])), quote(str(ret['title'])), str(ttservernum)))
#print context.decode('utf-8').encode('gbk')
else:
context = 'Lrc Not Found'
request.write('HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n%s' % (len(context), context))
except tornado.httpclient.HTTPError, code:
print 'HTTPError except Code' + str(code)
except Exception,e:
print e
finally:
request.finish()
else:
#print 'Unknow Request:%s' % request.uri
context = '<head><meta http-equiv="refresh" content="0;url=http://foottlrc.mengsky.net/"></head>'
request.write('HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n%s' % (len(context), context))
request.finish()
def main():
http_server = tornado.httpserver.HTTPServer(handle_request)
http_server.listen(listen_port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()<|fim▁end|>
|
#print repr(keyword)
keyword = keyword.encode('gbk')
#print repr(keyword)
|
<|file_name|>run_pynacl_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be<|fim▁hole|>import sys
import unittest
MODULES = [
'directory_storage_test',
'gsd_storage_test',
'hashing_tools_test',
'local_storage_cache_test',
]
# We use absolute imports for Py3 compatibility.
# This means for imports to resolve when testing we need to add the pynacl
# directory to the module search path.
sys.path.insert(0, './')
suite = unittest.TestLoader().loadTestsFromNames(MODULES)
result = unittest.TextTestRunner(verbosity=2).run(suite)
if result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)<|fim▁end|>
|
# found in the LICENSE file.
"""Run all python tests in this directory."""
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|>from cStringIO import StringIO
from datetime import datetime
from unidecode import unidecode
from handler import Patobj, PatentHandler<|fim▁hole|>import xml_util
import xml_driver
xml_string = 'ipg050104.xml'
xh = xml_driver.XMLHandler()
parser = xml_driver.make_parser()
parser.setContentHandler(xh)
parser.setFeature(xml_driver.handler.feature_external_ges, False)
l = xml.sax.xmlreader.Locator()
xh.setDocumentLocator(l)
#parser.parse(StringIO(xml_string))
parser.parse(xml_string)
print "parsing done"
#print type(xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=False))
print xh.root.claims.contents_of('claim', '', as_string=True, upper=False)
#print type(xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=True))
#print xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=True)<|fim▁end|>
|
import re
import uuid
import xml.sax
|
<|file_name|>virtualmachineimages.go<|end_file_name|><|fim▁begin|>package compute
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software<|fim▁hole|>// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.11.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
import (
"github.com/Azure/azure-sdk-for-go/Godeps/_workspace/src/github.com/Azure/go-autorest/autorest"
"net/http"
"net/url"
)
// VirtualMachineImagesClient is the client for the VirtualMachineImages
// methods of the Compute service.
type VirtualMachineImagesClient struct {
ManagementClient
}
// NewVirtualMachineImagesClient creates an instance of the
// VirtualMachineImagesClient client.
func NewVirtualMachineImagesClient(subscriptionID string) VirtualMachineImagesClient {
return NewVirtualMachineImagesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewVirtualMachineImagesClientWithBaseURI creates an instance of the
// VirtualMachineImagesClient client.
func NewVirtualMachineImagesClientWithBaseURI(baseURI string, subscriptionID string) VirtualMachineImagesClient {
return VirtualMachineImagesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Get gets a virtual machine image.
//
func (client VirtualMachineImagesClient) Get(location string, publisherName string, offer string, skus string, version string) (result VirtualMachineImage, ae error) {
req, err := client.GetPreparer(location, publisherName, offer, skus, version)
if err != nil {
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "Get", "Failure preparing request")
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "Get", "Failure sending request")
}
result, err = client.GetResponder(resp)
if err != nil {
ae = autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "Get", "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client VirtualMachineImagesClient) GetPreparer(location string, publisherName string, offer string, skus string, version string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"location": url.QueryEscape(location),
"offer": url.QueryEscape(offer),
"publisherName": url.QueryEscape(publisherName),
"skus": url.QueryEscape(skus),
"subscriptionId": url.QueryEscape(client.SubscriptionID),
"version": url.QueryEscape(version),
}
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
return autorest.Prepare(&http.Request{},
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}"),
autorest.WithPathParameters(pathParameters),
autorest.WithQueryParameters(queryParameters))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, http.StatusOK)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) GetResponder(resp *http.Response) (result VirtualMachineImage, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
autorest.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List gets a list of virtual machine images.
//
// filter is the filter to apply on the operation.
func (client VirtualMachineImagesClient) List(location string, publisherName string, offer string, skus string, filter string, top int, orderby string) (result VirtualMachineImageResourceList, ae error) {
req, err := client.ListPreparer(location, publisherName, offer, skus, filter, top, orderby)
if err != nil {
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "List", "Failure preparing request")
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "List", "Failure sending request")
}
result, err = client.ListResponder(resp)
if err != nil {
ae = autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "List", "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client VirtualMachineImagesClient) ListPreparer(location string, publisherName string, offer string, skus string, filter string, top int, orderby string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"location": url.QueryEscape(location),
"offer": url.QueryEscape(offer),
"publisherName": url.QueryEscape(publisherName),
"skus": url.QueryEscape(skus),
"subscriptionId": url.QueryEscape(client.SubscriptionID),
}
queryParameters := map[string]interface{}{
"$filter": filter,
"$orderby": orderby,
"$top": top,
"api-version": APIVersion,
}
return autorest.Prepare(&http.Request{},
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions"),
autorest.WithPathParameters(pathParameters),
autorest.WithQueryParameters(queryParameters))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListSender(req *http.Request) (*http.Response, error) {
return client.Send(req, http.StatusOK)
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
autorest.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListOffers gets a list of virtual machine image offers.
//
func (client VirtualMachineImagesClient) ListOffers(location string, publisherName string) (result VirtualMachineImageResourceList, ae error) {
req, err := client.ListOffersPreparer(location, publisherName)
if err != nil {
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListOffers", "Failure preparing request")
}
resp, err := client.ListOffersSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListOffers", "Failure sending request")
}
result, err = client.ListOffersResponder(resp)
if err != nil {
ae = autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListOffers", "Failure responding to request")
}
return
}
// ListOffersPreparer prepares the ListOffers request.
func (client VirtualMachineImagesClient) ListOffersPreparer(location string, publisherName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"location": url.QueryEscape(location),
"publisherName": url.QueryEscape(publisherName),
"subscriptionId": url.QueryEscape(client.SubscriptionID),
}
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
return autorest.Prepare(&http.Request{},
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers"),
autorest.WithPathParameters(pathParameters),
autorest.WithQueryParameters(queryParameters))
}
// ListOffersSender sends the ListOffers request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListOffersSender(req *http.Request) (*http.Response, error) {
return client.Send(req, http.StatusOK)
}
// ListOffersResponder handles the response to the ListOffers request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListOffersResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
autorest.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListPublishers gets a list of virtual machine image publishers.
//
func (client VirtualMachineImagesClient) ListPublishers(location string) (result VirtualMachineImageResourceList, ae error) {
req, err := client.ListPublishersPreparer(location)
if err != nil {
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListPublishers", "Failure preparing request")
}
resp, err := client.ListPublishersSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListPublishers", "Failure sending request")
}
result, err = client.ListPublishersResponder(resp)
if err != nil {
ae = autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListPublishers", "Failure responding to request")
}
return
}
// ListPublishersPreparer prepares the ListPublishers request.
func (client VirtualMachineImagesClient) ListPublishersPreparer(location string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"location": url.QueryEscape(location),
"subscriptionId": url.QueryEscape(client.SubscriptionID),
}
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
return autorest.Prepare(&http.Request{},
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers"),
autorest.WithPathParameters(pathParameters),
autorest.WithQueryParameters(queryParameters))
}
// ListPublishersSender sends the ListPublishers request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListPublishersSender(req *http.Request) (*http.Response, error) {
return client.Send(req, http.StatusOK)
}
// ListPublishersResponder handles the response to the ListPublishers request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListPublishersResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
autorest.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListSkus gets a list of virtual machine image skus.
//
func (client VirtualMachineImagesClient) ListSkus(location string, publisherName string, offer string) (result VirtualMachineImageResourceList, ae error) {
req, err := client.ListSkusPreparer(location, publisherName, offer)
if err != nil {
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListSkus", "Failure preparing request")
}
resp, err := client.ListSkusSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListSkus", "Failure sending request")
}
result, err = client.ListSkusResponder(resp)
if err != nil {
ae = autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "ListSkus", "Failure responding to request")
}
return
}
// ListSkusPreparer prepares the ListSkus request.
func (client VirtualMachineImagesClient) ListSkusPreparer(location string, publisherName string, offer string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"location": url.QueryEscape(location),
"offer": url.QueryEscape(offer),
"publisherName": url.QueryEscape(publisherName),
"subscriptionId": url.QueryEscape(client.SubscriptionID),
}
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
return autorest.Prepare(&http.Request{},
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus"),
autorest.WithPathParameters(pathParameters),
autorest.WithQueryParameters(queryParameters))
}
// ListSkusSender sends the ListSkus request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListSkusSender(req *http.Request) (*http.Response, error) {
return client.Send(req, http.StatusOK)
}
// ListSkusResponder handles the response to the ListSkus request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListSkusResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
autorest.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|>
| |
<|file_name|>introspection.py<|end_file_name|><|fim▁begin|>import warnings
from collections import namedtuple
from MySQLdb.constants import FIELD_TYPE
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo, TableInfo,
)
from django.db.models.indexes import Index
from django.utils.datastructures import OrderedSet
from django.utils.deprecation import RemovedInDjango21Warning
from django.utils.encoding import force_text
FieldInfo = namedtuple('FieldInfo', FieldInfo._fields + ('extra',))
InfoLine = namedtuple('InfoLine', 'col_name data_type max_len num_prec num_scale extra column_default')
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = {
FIELD_TYPE.BLOB: 'TextField',
FIELD_TYPE.CHAR: 'CharField',
FIELD_TYPE.DECIMAL: 'DecimalField',
FIELD_TYPE.NEWDECIMAL: 'DecimalField',
FIELD_TYPE.DATE: 'DateField',
FIELD_TYPE.DATETIME: 'DateTimeField',
FIELD_TYPE.DOUBLE: 'FloatField',
FIELD_TYPE.FLOAT: 'FloatField',
FIELD_TYPE.INT24: 'IntegerField',
FIELD_TYPE.LONG: 'IntegerField',
FIELD_TYPE.LONGLONG: 'BigIntegerField',
FIELD_TYPE.SHORT: 'SmallIntegerField',
FIELD_TYPE.STRING: 'CharField',
FIELD_TYPE.TIME: 'TimeField',
FIELD_TYPE.TIMESTAMP: 'DateTimeField',
FIELD_TYPE.TINY: 'IntegerField',
FIELD_TYPE.TINY_BLOB: 'TextField',
FIELD_TYPE.MEDIUM_BLOB: 'TextField',
FIELD_TYPE.LONG_BLOB: 'TextField',
FIELD_TYPE.VAR_STRING: 'CharField',
}
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if 'auto_increment' in description.extra:
if field_type == 'IntegerField':
return 'AutoField'
elif field_type == 'BigIntegerField':
return 'BigAutoField'
return field_type
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
cursor.execute("SHOW FULL TABLES")
return [TableInfo(row[0], {'BASE TABLE': 't', 'VIEW': 'v'}.get(row[1]))
for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
        interface.
"""
# information_schema database gives more accurate results for some figures:
# - varchar length returned by cursor.description is an internal length,
# not visible length (#5725)
# - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description
cursor.execute("""
SELECT column_name, data_type, character_maximum_length, numeric_precision,
numeric_scale, extra, column_default
FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE()""", [table_name])
field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
def to_int(i):
return int(i) if i is not None else i
fields = []
for line in cursor.description:
col_name = force_text(line[0])
fields.append(
FieldInfo(*(
(col_name,) +
line[1:3] +
(
to_int(field_info[col_name].max_len) or line[3],
to_int(field_info[col_name].num_prec) or line[4],
to_int(field_info[col_name].num_scale) or line[5],
line[6],
field_info[col_name].column_default,
field_info[col_name].extra,
)
))
)
return fields
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
constraints = self.get_key_columns(cursor, table_name)
relations = {}
for my_fieldname, other_table, other_field in constraints:
relations[my_fieldname] = (other_field, other_table)
return relations
def get_key_columns(self, cursor, table_name):
"""
Return a list of (column_name, referenced_table_name, referenced_column_name)
for all key columns in the given table.
"""
key_columns = []
cursor.execute("""
SELECT column_name, referenced_table_name, referenced_column_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL""", [table_name])
key_columns.extend(cursor.fetchall())
return key_columns
def get_indexes(self, cursor, table_name):
warnings.warn(
"get_indexes() is deprecated in favor of get_constraints().",
RemovedInDjango21Warning, stacklevel=2
)
cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
# Do a two-pass search for indexes: on first pass check which indexes
# are multicolumn, on second pass check which single-column indexes
# are present.
rows = list(cursor.fetchall())
multicol_indexes = set()
for row in rows:
if row[3] > 1:
multicol_indexes.add(row[2])
indexes = {}
for row in rows:
if row[2] in multicol_indexes:
continue
if row[4] not in indexes:
indexes[row[4]] = {'primary_key': False, 'unique': False}
# It's possible to have the unique and PK constraints in separate indexes.
if row[2] == 'PRIMARY':
indexes[row[4]]['primary_key'] = True
if not row[1]:
indexes[row[4]]['unique'] = True
return indexes
def get_storage_engine(self, cursor, table_name):
"""
Retrieve the storage engine for a given table. Return the default
storage engine if the table doesn't exist.
"""
cursor.execute(
"SELECT engine "
"FROM information_schema.tables "
"WHERE table_name = %s", [table_name])
result = cursor.fetchone()
if not result:
return self.connection.features._mysql_storage_engine
return result[0]
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Get the actual constraint names and columns
name_query = """
SELECT kc.`constraint_name`, kc.`column_name`,
kc.`referenced_table_name`, kc.`referenced_column_name`
FROM information_schema.key_column_usage AS kc
WHERE
kc.table_schema = DATABASE() AND
kc.table_name = %s
"""<|fim▁hole|> constraints[constraint] = {
'columns': OrderedSet(),
'primary_key': False,
'unique': False,
'index': False,
'check': False,
'foreign_key': (ref_table, ref_column) if ref_column else None,
}
constraints[constraint]['columns'].add(column)
# Now get the constraint types
type_query = """
SELECT c.constraint_name, c.constraint_type
FROM information_schema.table_constraints AS c
WHERE
c.table_schema = DATABASE() AND
c.table_name = %s
"""
cursor.execute(type_query, [table_name])
for constraint, kind in cursor.fetchall():
if kind.lower() == "primary key":
constraints[constraint]['primary_key'] = True
constraints[constraint]['unique'] = True
elif kind.lower() == "unique":
constraints[constraint]['unique'] = True
# Now add in the indexes
cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
for table, non_unique, index, colseq, column, type_ in [x[:5] + (x[10],) for x in cursor.fetchall()]:
if index not in constraints:
constraints[index] = {
'columns': OrderedSet(),
'primary_key': False,
'unique': False,
'check': False,
'foreign_key': None,
}
constraints[index]['index'] = True
constraints[index]['type'] = Index.suffix if type_ == 'BTREE' else type_.lower()
constraints[index]['columns'].add(column)
# Convert the sorted sets to lists
for constraint in constraints.values():
constraint['columns'] = list(constraint['columns'])
return constraints<|fim▁end|>
|
cursor.execute(name_query, [table_name])
for constraint, column, ref_table, ref_column in cursor.fetchall():
if constraint not in constraints:
|
<|file_name|>Komponist.java<|end_file_name|><|fim▁begin|>package de.choesel.blechwiki.model;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
import java.util.UUID;
/**
* Created by christian on 05.05.16.
*/
@DatabaseTable(tableName = "komponist")
public class Komponist {
@DatabaseField(generatedId = true)
private UUID id;
@DatabaseField(canBeNull = true, uniqueCombo = true)
private String name;
@DatabaseField(canBeNull = true)
private String kurzname;
@DatabaseField(canBeNull = true, uniqueCombo = true)
private Integer geboren;
@DatabaseField(canBeNull = true)
private Integer gestorben;
public UUID getId() {
return id;
}
public String getName() {
return name;
}
public String getKurzname() {
return kurzname;
}
public Integer getGeboren() {
return geboren;
}
public Integer getGestorben() {
return gestorben;
}
public void setId(UUID id) {
this.id = id;
}
public void setName(String name) {
this.name = name;
}
public void setKurzname(String kurzname) {
this.kurzname = kurzname;
}
public void setGeboren(Integer geboren) {
this.geboren = geboren;<|fim▁hole|> }
public void setGestorben(Integer gestorben) {
this.gestorben = gestorben;
}
}<|fim▁end|>
| |
<|file_name|>test_lease_read.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
//! A module contains test cases for lease read on Raft leader.
use std::sync::atomic::*;
use std::sync::{mpsc, Arc, Mutex};
use std::time::*;
use std::{mem, thread};
use kvproto::metapb;
use kvproto::raft_serverpb::RaftLocalState;
use raft::eraftpb::{ConfChangeType, MessageType};
use engine_rocks::{Compat, RocksSnapshot};
use engine_traits::Peekable;
use pd_client::PdClient;
use raftstore::store::{Callback, RegionSnapshot};
use test_raftstore::*;
use tikv_util::config::*;
use tikv_util::HandyRwLock;
// A helper function for testing the lease reads and lease renewing.
// The leader keeps a record of its leader lease, and uses the system's
// monotonic raw clocktime to check whether its lease has expired.
// If the leader lease has not expired, when the leader receives a read request
// 1. with `read_quorum == false`, the leader will serve it by reading local data.
// This way of handling request is called "lease read".
// 2. with `read_quorum == true`, the leader will serve it by doing index read (see raft's doc).
// This way of handling request is called "index read".
// If the leader lease has expired, leader will serve both kinds of requests by index read, and
// propose an no-op entry to raft quorum to renew the lease.
// No matter what status the leader lease is, a write request is always served by writing a Raft
// log to the Raft quorum. It is called "consistent write". All writes are consistent writes.
// Every time the leader performs a consistent read/write, it will try to renew its lease.
fn test_renew_lease<T: Simulator>(cluster: &mut Cluster<T>) {
// Avoid triggering the log compaction in this test case.
cluster.cfg.raft_store.raft_log_gc_threshold = 100;
// Increase the Raft tick interval to make this test case running reliably.
// Use large election timeout to make leadership stable.
configure_for_lease_read(cluster, Some(50), Some(10_000));
// Override max leader lease to 2 seconds.
let max_lease = Duration::from_secs(2);
cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration(max_lease);
let node_id = 1u64;
let store_id = 1u64;
let peer = new_peer(store_id, node_id);
cluster.pd_client.disable_default_operator();
let region_id = cluster.run_conf_change();
let key = b"k";
cluster.must_put(key, b"v0");
for id in 2..=cluster.engines.len() as u64 {
cluster.pd_client.must_add_peer(region_id, new_peer(id, id));
must_get_equal(&cluster.get_engine(id), key, b"v0");
}
// Write the initial value for a key.
let key = b"k";
cluster.must_put(key, b"v1");
// Force `peer` to become leader.
let region = cluster.get_region(key);
let region_id = region.get_id();
cluster.must_transfer_leader(region_id, peer.clone());
let engine = cluster.get_raft_engine(store_id);
let state_key = keys::raft_state_key(region_id);
let state: RaftLocalState = engine.c().get_msg(&state_key).unwrap().unwrap();
let last_index = state.get_last_index();
let detector = LeaseReadFilter::default();
cluster.add_send_filter(CloneFilterFactory(detector.clone()));
// Issue a read request and check the value on response.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
assert_eq!(detector.ctx.rl().len(), 0);
let mut expect_lease_read = 0;
if cluster.engines.len() > 1 {
// Wait for the leader lease to expire.
thread::sleep(max_lease);
// Issue a read request and check the value on response.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
// Check if the leader does a index read and renewed its lease.
assert_eq!(cluster.leader_of_region(region_id), Some(peer.clone()));
expect_lease_read += 1;
assert_eq!(detector.ctx.rl().len(), expect_lease_read);
}
// Wait for the leader lease to expire.
thread::sleep(max_lease);
// Issue a write request.
cluster.must_put(key, b"v2");
// Check if the leader has renewed its lease so that it can do lease read.
assert_eq!(cluster.leader_of_region(region_id), Some(peer.clone()));
let state: RaftLocalState = engine.c().get_msg(&state_key).unwrap().unwrap();
assert_eq!(state.get_last_index(), last_index + 1);
// Issue a read request and check the value on response.
must_read_on_peer(cluster, peer, region, key, b"v2");
// Check if the leader does a local read.
assert_eq!(detector.ctx.rl().len(), expect_lease_read);
}
#[test]
fn test_one_node_renew_lease() {
let count = 1;
let mut cluster = new_node_cluster(0, count);
test_renew_lease(&mut cluster);
}
#[test]
fn test_node_renew_lease() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
test_renew_lease(&mut cluster);
}
// A helper function for testing the lease reads when the lease has expired.
// If the leader lease has expired, a new leader may be elected and
// the old leader will fail to renew its lease.
fn test_lease_expired<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
// Disable default max peer number check.
pd_client.disable_default_operator();
// Avoid triggering the log compaction in this test case.
cluster.cfg.raft_store.raft_log_gc_threshold = 100;
// Increase the Raft tick interval to make this test case running reliably.
let election_timeout = configure_for_lease_read(cluster, Some(50), None);
let node_id = 3u64;
let store_id = 3u64;
let peer = new_peer(store_id, node_id);
cluster.run();
// Write the initial value for a key.
let key = b"k";
cluster.must_put(key, b"v1");
// Force `peer` to become leader.
let region = cluster.get_region(key);
let region_id = region.get_id();
cluster.must_transfer_leader(region_id, peer.clone());
// Isolate the leader `peer` from other peers.
cluster.add_send_filter(IsolationFilterFactory::new(store_id));
// Wait for the leader lease to expire and a new leader is elected.
thread::sleep(election_timeout * 2);
// Issue a read request and check the value on response.<|fim▁hole|>fn test_node_lease_expired() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
test_lease_expired(&mut cluster);
}
// A helper function for testing the leader holds unsafe lease during the leader transfer
// procedure, so it will not do lease read.
// Since raft will not propose any request during leader transfer procedure, consistent read/write
// could not be performed neither.
// When leader transfer procedure aborts later, the leader would use and update the lease as usual.
fn test_lease_unsafe_during_leader_transfers<T: Simulator>(cluster: &mut Cluster<T>) {
// Avoid triggering the log compaction in this test case.
cluster.cfg.raft_store.raft_log_gc_threshold = 100;
// Increase the Raft tick interval to make this test case running reliably.
let election_timeout = configure_for_lease_read(cluster, Some(500), None);
let store_id = 1u64;
let peer = new_peer(store_id, 1);
let peer3_store_id = 3u64;
let peer3 = new_peer(peer3_store_id, 3);
cluster.pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
cluster.must_put(b"k0", b"v0");
cluster.pd_client.must_add_peer(r1, new_peer(2, 2));
must_get_equal(&cluster.get_engine(2), b"k0", b"v0");
cluster.pd_client.must_add_peer(r1, new_peer(3, 3));
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
let detector = LeaseReadFilter::default();
cluster.add_send_filter(CloneFilterFactory(detector.clone()));
// write the initial value for a key.
let key = b"k";
cluster.must_put(key, b"v1");
// Force `peer1` to became leader.
let region = cluster.get_region(key);
let region_id = region.get_id();
cluster.must_transfer_leader(region_id, peer.clone());
// Issue a read request and check the value on response.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
let engine = cluster.get_raft_engine(store_id);
let state_key = keys::raft_state_key(region_id);
let state: RaftLocalState = engine.c().get_msg(&state_key).unwrap().unwrap();
let last_index = state.get_last_index();
// Check if the leader does a local read.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
let state: RaftLocalState = engine.c().get_msg(&state_key).unwrap().unwrap();
assert_eq!(state.get_last_index(), last_index);
assert_eq!(detector.ctx.rl().len(), 0);
// Ensure peer 3 is ready to transfer leader.
must_get_equal(&cluster.get_engine(3), key, b"v1");
// Drop MsgTimeoutNow to `peer3` so that the leader transfer procedure would abort later.
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(region_id, peer3_store_id)
.msg_type(MessageType::MsgTimeoutNow)
.direction(Direction::Recv),
));
// Issue a transfer leader request to transfer leader from `peer` to `peer3`.
cluster.transfer_leader(region_id, peer3);
// Delay a while to ensure transfer leader procedure is triggered inside raft module.
thread::sleep(election_timeout / 2);
// Issue a read request and it will fall back to read index.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
assert_eq!(detector.ctx.rl().len(), 1);
// And read index should not update lease.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
assert_eq!(detector.ctx.rl().len(), 2);
// Make sure the leader transfer procedure timeouts.
thread::sleep(election_timeout * 2);
// Then the leader transfer procedure aborts, now the leader could do lease read or consistent
// read/write and renew/reuse the lease as usual.
// Issue a read request and check the value on response.
must_read_on_peer(cluster, peer.clone(), region.clone(), key, b"v1");
assert_eq!(detector.ctx.rl().len(), 3);
// Check if the leader also propose an entry to renew its lease.
let state: RaftLocalState = engine.c().get_msg(&state_key).unwrap().unwrap();
assert_eq!(state.get_last_index(), last_index + 1);
// wait some time for the proposal to be applied.
thread::sleep(election_timeout / 2);
// Check if the leader does a local read.
must_read_on_peer(cluster, peer, region, key, b"v1");
let state: RaftLocalState = engine.c().get_msg(&state_key).unwrap().unwrap();
assert_eq!(state.get_last_index(), last_index + 1);
assert_eq!(detector.ctx.rl().len(), 3);
}
#[test]
fn test_node_lease_unsafe_during_leader_transfers() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
test_lease_unsafe_during_leader_transfers(&mut cluster);
}
#[test]
fn test_node_batch_id_in_lease() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
test_batch_id_in_lease(&mut cluster);
}
fn test_batch_id_in_lease<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
// Disable default max peer number check.
pd_client.disable_default_operator();
// Avoid triggering the log compaction in this test case.
cluster.cfg.raft_store.raft_log_gc_threshold = 100;
// Increase the Raft tick interval to make this test case run reliably.
let election_timeout = configure_for_lease_read(cluster, Some(100), None);
cluster.run();
let (split_key1, split_key2) = (b"k22", b"k44");
let keys = vec![b"k11", b"k33", b"k55"];
for key in &keys {
cluster.must_put(*key, b"v1");
}
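// Splitting at k22 and k44 yields three regions, each holding one of k11, k33 and k55.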
let region = pd_client.get_region(keys[0]).unwrap();
cluster.must_split(®ion, split_key1);
let region = pd_client.get_region(keys[1]).unwrap();
cluster.must_split(®ion, split_key2);
let mut peers = vec![];
// Transfer all leaders to store 1 so that their reads can share a batched snapshot.
for i in 0..3 {
let r = pd_client.get_region(keys[i]).unwrap();
let peer = cluster.leader_of_region(r.get_id()).unwrap();
if peer.get_store_id() != 1 {
for p in r.get_peers() {
if p.get_store_id() == 1 {
cluster.must_transfer_leader(r.get_id(), p.clone());
let peer = cluster.leader_of_region(r.get_id()).unwrap();
assert_eq!(peer.get_store_id(), 1);
peers.push(peer);
break;
}
}
} else {
peers.push(peer.clone());
}
}
// Sleep to make sure the leases have expired.
thread::sleep(election_timeout + Duration::from_millis(200));
// Send requests to regions 0 and 1 to renew their leases.
cluster.must_put(b"k11", b"v2");
cluster.must_put(b"k33", b"v2");
assert_eq!(b"v2".to_vec(), cluster.must_get(b"k33").unwrap());
assert_eq!(b"v2".to_vec(), cluster.must_get(b"k11").unwrap());
let regions: Vec<_> = keys
.into_iter()
.map(|key| pd_client.get_region(key).unwrap())
.collect();
let requests: Vec<(metapb::Peer, metapb::Region)> = peers
.iter()
.zip(regions)
.map(|(p, r)| (p.clone(), r))
.collect();
let responses = batch_read_on_peer(cluster, &requests);
let snaps: Vec<RegionSnapshot<RocksSnapshot>> = responses
.into_iter()
.map(|response| {
assert!(!response.response.get_header().has_error());
response.snapshot.unwrap()
})
.collect();
// Snapshots 0 and 1 will use one RocksSnapshot because we have renewed their leases.
assert!(std::ptr::eq(
snaps[0].get_snapshot(),
snaps[1].get_snapshot()
));
assert!(!std::ptr::eq(
snaps[0].get_snapshot(),
snaps[2].get_snapshot()
));
// Make sure that region 2 can renew its lease.
cluster.must_put(b"k55", b"v2");
let responses = batch_read_on_peer(cluster, &requests);
let snaps2: Vec<RegionSnapshot<RocksSnapshot>> = responses
.into_iter()
.map(|response| {
assert!(!response.response.get_header().has_error());
response.snapshot.unwrap()
})
.collect();
assert_eq!(3, snaps2.len());
assert!(!std::ptr::eq(
snaps[0].get_snapshot(),
snaps2[0].get_snapshot()
));
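// With all three leases valid again, the second batch read shares a single snapshot.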
assert!(std::ptr::eq(
snaps2[0].get_snapshot(),
snaps2[1].get_snapshot()
));
assert!(std::ptr::eq(
snaps2[0].get_snapshot(),
snaps2[2].get_snapshot()
));
}
/// Test whether the read index callback will be handled when a region is destroyed.
/// If it's not handled properly, it will cause a deadlock in the transaction scheduler.
#[test]
fn test_node_callback_when_destroyed() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
// Increase the election tick to make this test case run reliably.
configure_for_lease_read(&mut cluster, None, Some(50));
cluster.run();
cluster.must_put(b"k1", b"v1");
let leader = cluster.leader_of_region(1).unwrap();
let cc = new_change_peer_request(ConfChangeType::RemoveNode, leader.clone());
let epoch = cluster.get_region_epoch(1);
let req = new_admin_request(1, &epoch, cc);
// Block append responses so the leader can't commit the conf change yet.
let block = Arc::new(AtomicBool::new(true));
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(1, leader.get_store_id())
.msg_type(MessageType::MsgAppendResponse)
.direction(Direction::Recv)
.when(Arc::clone(&block)),
));
let mut filter = LeaseReadFilter::default();
filter.take = true;
// Take away read index contexts so the leader can't perform read index.
cluster.add_send_filter(CloneFilterFactory(filter.clone()));
// It always times out, no need to wait.
let _ = cluster.call_command_on_leader(req, Duration::from_millis(500));
// To make sure `get` is handled before the leader is destroyed, we must issue
// `get` and then unblock the append responses.
let leader_node_id = leader.get_store_id();
let get = new_get_cmd(b"k1");
let mut req = new_request(1, epoch, vec![get], true);
req.mut_header().set_peer(leader);
let (cb, rx) = make_cb(&req);
cluster
.sim
.rl()
.async_command_on_node(leader_node_id, req, cb)
.unwrap();
// Unblock append responses after we issue the req.
block.store(false, Ordering::SeqCst);
let resp = rx.recv_timeout(Duration::from_secs(3)).unwrap();
assert!(
!filter.ctx.rl().is_empty(),
"read index should be performed"
);
assert!(
resp.get_header().get_error().has_region_not_found(),
"{:?}",
resp
);
}
/// Test if the callback proposed by read index is cleared correctly.
#[test]
fn test_lease_read_callback_destroy() {
// Only server cluster can fake sending message successfully in raftstore layer.
let mut cluster = new_server_cluster(0, 3);
// Increase the Raft tick interval to make this test case run reliably.
let election_timeout = configure_for_lease_read(&mut cluster, Some(50), None);
cluster.run();
cluster.must_transfer_leader(1, new_peer(1, 1));
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
// Isolate the target peer to make transfer leader fail.
cluster.add_send_filter(IsolationFilterFactory::new(3));
cluster.transfer_leader(1, new_peer(3, 3));
thread::sleep(election_timeout * 2);
// Trigger ReadIndex on the leader.
assert_eq!(cluster.must_get(b"k1"), Some(b"v1".to_vec()));
cluster.must_put(b"k2", b"v2");
}
/// A read index request will be appended to the waiting list when there is an on-going request,
/// in order to reduce heartbeat messages. But when the leader is in a suspect lease, requests should
/// not be batched because the lease can expire at any time.
#[test]
fn test_read_index_stale_in_suspect_lease() {
let mut cluster = new_node_cluster(0, 3);
// Increase the election tick to make this test case run reliably.
configure_for_lease_read(&mut cluster, Some(50), Some(10_000));
let max_lease = Duration::from_secs(2);
// Stop log compaction so that transferring leader with a filter is easier.
configure_for_request_snapshot(&mut cluster);
cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration(max_lease);
cluster.pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
cluster.pd_client.must_add_peer(r1, new_peer(2, 2));
cluster.pd_client.must_add_peer(r1, new_peer(3, 3));
let r1 = cluster.get_region(b"k1");
// Put and check again to ensure that peer 3 gets the latest writes by message append
// instead of snapshot, so that transferring leader to peer 3 will always succeed.
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
cluster.must_put(b"k2", b"v2");
must_get_equal(&cluster.get_engine(3), b"k2", b"v2");
// Ensure peer 3 is ready to become leader.
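// A successful read on peer 3 confirms it has applied the latest entries.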
let rx = async_read_on_peer(&mut cluster, new_peer(3, 3), r1.clone(), b"k2", true, true);
let resp = rx.recv_timeout(Duration::from_secs(3)).unwrap();
assert!(!resp.get_header().has_error(), "{:?}", resp);
assert_eq!(
resp.get_responses()[0].get_get().get_value(),
b"v2",
"{:?}",
resp
);
let old_leader = cluster.leader_of_region(r1.get_id()).unwrap();
// Use a macro instead of a closure to avoid any capture of local variables.
macro_rules! read_on_old_leader {
() => {{
let (tx, rx) = mpsc::sync_channel(1);
let mut read_request = new_request(
r1.get_id(),
r1.get_region_epoch().clone(),
vec![new_get_cmd(b"k1")],
true, // read quorum
);
read_request.mut_header().set_peer(new_peer(1, 1));
let sim = cluster.sim.wl();
sim.async_command_on_node(
old_leader.get_id(),
read_request,
Callback::Read(Box::new(move |resp| tx.send(resp.response).unwrap())),
)
.unwrap();
rx
}};
}
// Delay all raft messages to peer 1.
let dropped_msgs = Arc::new(Mutex::new(Vec::new()));
let filter = Box::new(
RegionPacketFilter::new(r1.id, old_leader.store_id)
.direction(Direction::Recv)
.skip(MessageType::MsgTransferLeader)
.reserve_dropped(Arc::clone(&dropped_msgs)),
);
cluster
.sim
.wl()
.add_recv_filter(old_leader.get_id(), filter);
let resp1 = read_on_old_leader!();
cluster.must_transfer_leader(r1.get_id(), new_peer(3, 3));
let resp2 = read_on_old_leader!();
// Replay the delayed heartbeat responses; once any other message type shows up, clear the filters and stop.
let router = cluster.sim.wl().get_router(old_leader.get_id()).unwrap();
'LOOP: loop {
for raft_msg in mem::replace(dropped_msgs.lock().unwrap().as_mut(), vec![]) {
let msg_type = raft_msg.get_message().get_msg_type();
if msg_type == MessageType::MsgHeartbeatResponse {
router.send_raft_message(raft_msg).unwrap();
continue;
}
cluster.sim.wl().clear_recv_filters(old_leader.get_id());
break 'LOOP;
}
}
let resp1 = resp1.recv().unwrap();
assert!(
resp1.get_header().get_error().has_stale_command()
|| resp1.get_responses()[0].get_get().get_value() == b"v1"
);
// Response 2 should contain an error.
let resp2 = resp2.recv().unwrap();
assert!(resp2.get_header().get_error().has_stale_command());
drop(cluster);
}
#[test]
fn test_local_read_cache() {
let mut cluster = new_node_cluster(0, 3);
configure_for_lease_read(&mut cluster, Some(50), None);
cluster.pd_client.disable_default_operator();
cluster.run();
let pd_client = Arc::clone(&cluster.pd_client);
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(1), b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
let r1 = cluster.get_region(b"k1");
let leader = cluster.leader_of_region(r1.get_id()).unwrap();
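// Pick a peer on a different store as the new leader; with the initial peers (1, 1), (2, 2)
// and (3, 3), (id + 1) % 3 + 1 never maps an id back to itself.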
let new_leader = new_peer((leader.get_id() + 1) % 3 + 1, (leader.get_id() + 1) % 3 + 1);
cluster.must_transfer_leader(r1.get_id(), new_leader);
// Remove the old leader peer, then add a new peer on the same store and make sure it catches up with the latest logs.
pd_client.must_remove_peer(r1.get_id(), leader.clone());
let replace_peer = new_peer(leader.get_store_id(), 10000);
pd_client.must_add_peer(r1.get_id(), replace_peer.clone());
cluster.must_put(b"k2", b"v2");
must_get_equal(&cluster.get_engine(leader.get_store_id()), b"k2", b"v2");
cluster.must_transfer_leader(r1.get_id(), replace_peer);
cluster.must_put(b"k3", b"v3");
must_get_equal(&cluster.get_engine(leader.get_store_id()), b"k3", b"v3");
}
/// Test latency changes when a leader becomes a follower right after it receives
/// a read_index heartbeat response.
#[test]
fn test_not_leader_read_lease() {
let mut cluster = new_node_cluster(0, 3);
// Avoid triggering the log compaction in this test case.
cluster.cfg.raft_store.raft_log_gc_threshold = 100;
// Increase the Raft tick interval to make this test case run reliably.
configure_for_lease_read(&mut cluster, Some(50), None);
let heartbeat_interval = cluster.cfg.raft_store.raft_heartbeat_interval();
cluster.run();
cluster.must_put(b"k1", b"v1");
cluster.must_transfer_leader(1, new_peer(1, 1));
cluster.must_put(b"k2", b"v2");
must_get_equal(&cluster.get_engine(3), b"k2", b"v2");
// Add a filter to delay heartbeat response until transfer leader begins.
cluster.sim.wl().add_recv_filter(
1,
Box::new(LeadingFilter::new(
MessageType::MsgHeartbeatResponse,
MessageType::MsgRequestVote,
)),
);
cluster.add_send_filter(CloneFilterFactory(
RegionPacketFilter::new(1, 2)
.direction(Direction::Recv)
.msg_type(MessageType::MsgAppend),
));
let mut region = cluster.get_region(b"k1");
let region_id = region.get_id();
let mut req = new_request(
region_id,
region.take_region_epoch(),
vec![new_get_cmd(b"k2")],
true,
);
req.mut_header().set_peer(new_peer(1, 1));
let (cb, rx) = make_cb(&req);
cluster.sim.rl().async_command_on_node(1, req, cb).unwrap();
cluster.must_transfer_leader(region_id, new_peer(3, 3));
// Even if the leader steps down, it should respond to the read index in time.
rx.recv_timeout(heartbeat_interval).unwrap();
}
/// Test whether read index is greater than applied index.
/// 1. Add a heartbeat msg filter.
/// 2. Propose a read index request.
/// 3. Put a key and get the latest applied index.
/// 4. Propose another read index request.
/// 5. Remove the filter and check whether the latter read index is greater than applied index.
///
/// In the previous implementation, these two read index requests would be batched and
/// would get the same read index, which breaks correctness because the latter one
/// is proposed after the applied index has increased and been replied to the client.
#[test]
fn test_read_index_after_write() {
let mut cluster = new_node_cluster(0, 3);
configure_for_lease_read(&mut cluster, Some(50), Some(10));
let heartbeat_interval = cluster.cfg.raft_store.raft_heartbeat_interval();
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run();
cluster.must_put(b"k1", b"v1");
let region = pd_client.get_region(b"k1").unwrap();
let region_on_store1 = find_peer(®ion, 1).unwrap().to_owned();
cluster.must_transfer_leader(region.get_id(), region_on_store1.clone());
cluster.add_send_filter(IsolationFilterFactory::new(3));
// Add a heartbeat msg filter to prevent the leader from replying to the read index request.
let filter = Box::new(
RegionPacketFilter::new(region.get_id(), 2)
.direction(Direction::Recv)
.msg_type(MessageType::MsgHeartbeat),
);
cluster.sim.wl().add_recv_filter(2, filter);
let mut req = new_request(
region.get_id(),
region.get_region_epoch().clone(),
vec![new_read_index_cmd()],
true,
);
req.mut_header()
.set_peer(new_peer(1, region_on_store1.get_id()));
// Don't care about the first one's read index
let (cb, _) = make_cb(&req);
cluster.sim.rl().async_command_on_node(1, req, cb).unwrap();
cluster.must_put(b"k2", b"v2");
let applied_index = cluster.apply_state(region.get_id(), 1).get_applied_index();
let mut req = new_request(
region.get_id(),
region.get_region_epoch().clone(),
vec![new_read_index_cmd()],
true,
);
req.mut_header()
.set_peer(new_peer(1, region_on_store1.get_id()));
let (cb, rx) = make_cb(&req);
cluster.sim.rl().async_command_on_node(1, req, cb).unwrap();
cluster.sim.wl().clear_recv_filters(2);
let response = rx.recv_timeout(heartbeat_interval).unwrap();
assert!(
response.get_responses()[0]
.get_read_index()
.get_read_index()
>= applied_index
);
}<|fim▁end|>
|
must_error_read_on_peer(cluster, peer, region, key, Duration::from_secs(1));
}
#[test]
|
<|file_name|>texttrackcue.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::TextTrackCueBinding::{self, TextTrackCueMethods};
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::eventtarget::EventTarget;
use crate::dom::texttrack::TextTrack;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use std::cell::Cell;
#[dom_struct]
pub struct TextTrackCue {
eventtarget: EventTarget,
id: DomRefCell<DOMString>,
track: Option<Dom<TextTrack>>,
start_time: Cell<f64>,<|fim▁hole|>impl TextTrackCue {
// FIXME(#22314, dlrobertson) implement VTTCue.
#[allow(dead_code)]
pub fn new_inherited(id: DOMString, track: Option<&TextTrack>) -> TextTrackCue {
TextTrackCue {
eventtarget: EventTarget::new_inherited(),
id: DomRefCell::new(id),
track: track.map(Dom::from_ref),
start_time: Cell::new(0.),
end_time: Cell::new(0.),
pause_on_exit: Cell::new(false),
}
}
// FIXME(#22314, dlrobertson) implement VTTCue.
#[allow(dead_code)]
pub fn new(window: &Window, id: DOMString, track: Option<&TextTrack>) -> DomRoot<TextTrackCue> {
reflect_dom_object(
Box::new(TextTrackCue::new_inherited(id, track)),
window,
TextTrackCueBinding::Wrap,
)
}
pub fn id(&self) -> DOMString {
self.id.borrow().clone()
}
pub fn get_track(&self) -> Option<DomRoot<TextTrack>> {
self.track.as_ref().map(|t| DomRoot::from_ref(&**t))
}
}
impl TextTrackCueMethods for TextTrackCue {
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-id
fn Id(&self) -> DOMString {
self.id()
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-id
fn SetId(&self, value: DOMString) {
*self.id.borrow_mut() = value;
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-track
fn GetTrack(&self) -> Option<DomRoot<TextTrack>> {
self.get_track()
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-starttime
fn StartTime(&self) -> Finite<f64> {
Finite::wrap(self.start_time.get())
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-starttime
fn SetStartTime(&self, value: Finite<f64>) {
self.start_time.set(*value);
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-endtime
fn EndTime(&self) -> Finite<f64> {
Finite::wrap(self.end_time.get())
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-endtime
fn SetEndTime(&self, value: Finite<f64>) {
self.end_time.set(*value);
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-pauseonexit
fn PauseOnExit(&self) -> bool {
self.pause_on_exit.get()
}
// https://html.spec.whatwg.org/multipage/#dom-texttrackcue-pauseonexit
fn SetPauseOnExit(&self, value: bool) {
self.pause_on_exit.set(value);
}
// https://html.spec.whatwg.org/multipage/#handler-texttrackcue-onenter
event_handler!(enter, GetOnenter, SetOnenter);
// https://html.spec.whatwg.org/multipage/#handler-texttrackcue-onexit
event_handler!(exit, GetOnexit, SetOnexit);
}<|fim▁end|>
|
end_time: Cell<f64>,
pause_on_exit: Cell<bool>,
}
|
<|file_name|>CodeGenTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.internal.test;
import io.realm.internal.DefineTable;
/**
* A helper class containing model(s) for simple code generation tests.
*/
class CodeGenTest {
@DefineTable // this is enabled only for occasional local tests
class someModel {
String name;
int age;
}
}<|fim▁end|>
|
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
|
<|file_name|>sequenced_worker_pool.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/threading/sequenced_worker_pool.h"
#include <list>
#include <map>
#include <set>
#include <utility>
#include <vector>
#include "base/atomic_sequence_num.h"
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/critical_closure.h"
#include "base/debug/trace_event.h"
#include "base/lazy_instance.h"
#include "base/logging.h"
#include "base/memory/linked_ptr.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
#include "base/threading/platform_thread.h"
#include "base/threading/simple_thread.h"
#include "base/threading/thread_local.h"
#include "base/threading/thread_restrictions.h"
#include "base/time/time.h"
#include "base/tracked_objects.h"
#if defined(OS_MACOSX)
#include "base/mac/scoped_nsautorelease_pool.h"
#elif defined(OS_WIN)
#include "base/win/scoped_com_initializer.h"
#endif
#if !defined(OS_NACL)
#include "base/metrics/histogram.h"
#endif
namespace base {
namespace {
struct SequencedTask : public TrackingInfo {
SequencedTask()
: sequence_token_id(0),
trace_id(0),
sequence_task_number(0),
shutdown_behavior(SequencedWorkerPool::BLOCK_SHUTDOWN) {}
explicit SequencedTask(const tracked_objects::Location& from_here)
: base::TrackingInfo(from_here, TimeTicks()),
sequence_token_id(0),
trace_id(0),
sequence_task_number(0),
shutdown_behavior(SequencedWorkerPool::BLOCK_SHUTDOWN) {}
~SequencedTask() {}
int sequence_token_id;
int trace_id;
int64 sequence_task_number;
SequencedWorkerPool::WorkerShutdown shutdown_behavior;
tracked_objects::Location posted_from;
Closure task;
// Non-delayed tasks and delayed tasks are managed together by time-to-run
// order. We calculate the time by adding the posted time and the given delay.
TimeTicks time_to_run;
};
struct SequencedTaskLessThan {
public:
bool operator()(const SequencedTask& lhs, const SequencedTask& rhs) const {
if (lhs.time_to_run < rhs.time_to_run)
return true;
if (lhs.time_to_run > rhs.time_to_run)
return false;
// If the time happen to match, then we use the sequence number to decide.
return lhs.sequence_task_number < rhs.sequence_task_number;
}
};
// SequencedWorkerPoolTaskRunner ---------------------------------------------
// A TaskRunner which posts tasks to a SequencedWorkerPool with a
// fixed ShutdownBehavior.
//
// Note that this class is RefCountedThreadSafe (inherited from TaskRunner).
class SequencedWorkerPoolTaskRunner : public TaskRunner {
public:
SequencedWorkerPoolTaskRunner(
const scoped_refptr<SequencedWorkerPool>& pool,
SequencedWorkerPool::WorkerShutdown shutdown_behavior);
// TaskRunner implementation
bool PostDelayedTask(const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) override;
bool RunsTasksOnCurrentThread() const override;
private:
~SequencedWorkerPoolTaskRunner() override;
const scoped_refptr<SequencedWorkerPool> pool_;
const SequencedWorkerPool::WorkerShutdown shutdown_behavior_;
DISALLOW_COPY_AND_ASSIGN(SequencedWorkerPoolTaskRunner);
};
SequencedWorkerPoolTaskRunner::SequencedWorkerPoolTaskRunner(
const scoped_refptr<SequencedWorkerPool>& pool,
SequencedWorkerPool::WorkerShutdown shutdown_behavior)
: pool_(pool),
shutdown_behavior_(shutdown_behavior) {
}
SequencedWorkerPoolTaskRunner::~SequencedWorkerPoolTaskRunner() {
}
bool SequencedWorkerPoolTaskRunner::PostDelayedTask(
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
if (delay == TimeDelta()) {
return pool_->PostWorkerTaskWithShutdownBehavior(
from_here, task, shutdown_behavior_);
}
return pool_->PostDelayedWorkerTask(from_here, task, delay);
}
bool SequencedWorkerPoolTaskRunner::RunsTasksOnCurrentThread() const {
return pool_->RunsTasksOnCurrentThread();
}
// SequencedWorkerPoolSequencedTaskRunner ------------------------------------
// A SequencedTaskRunner which posts tasks to a SequencedWorkerPool with a
// fixed sequence token.
//
// Note that this class is RefCountedThreadSafe (inherited from TaskRunner).
class SequencedWorkerPoolSequencedTaskRunner : public SequencedTaskRunner {
public:
SequencedWorkerPoolSequencedTaskRunner(
const scoped_refptr<SequencedWorkerPool>& pool,
SequencedWorkerPool::SequenceToken token,
SequencedWorkerPool::WorkerShutdown shutdown_behavior);
// patch start huangshaobin ================================================
virtual void Finalize() override;
// patch end huangshaobin ================================================
// TaskRunner implementation
bool PostDelayedTask(const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) override;
bool RunsTasksOnCurrentThread() const override;
// SequencedTaskRunner implementation
bool PostNonNestableDelayedTask(const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) override;
private:
~SequencedWorkerPoolSequencedTaskRunner() override;
const scoped_refptr<SequencedWorkerPool> pool_;
const SequencedWorkerPool::SequenceToken token_;
const SequencedWorkerPool::WorkerShutdown shutdown_behavior_;
DISALLOW_COPY_AND_ASSIGN(SequencedWorkerPoolSequencedTaskRunner);
};
SequencedWorkerPoolSequencedTaskRunner::SequencedWorkerPoolSequencedTaskRunner(
const scoped_refptr<SequencedWorkerPool>& pool,
SequencedWorkerPool::SequenceToken token,
SequencedWorkerPool::WorkerShutdown shutdown_behavior)
: pool_(pool),
token_(token),
shutdown_behavior_(shutdown_behavior) {
}
SequencedWorkerPoolSequencedTaskRunner::
~SequencedWorkerPoolSequencedTaskRunner() {
}
bool SequencedWorkerPoolSequencedTaskRunner::PostDelayedTask(
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
if (delay == TimeDelta()) {
return pool_->PostSequencedWorkerTaskWithShutdownBehavior(
token_, from_here, task, shutdown_behavior_);
}
return pool_->PostDelayedSequencedWorkerTask(token_, from_here, task, delay);
}
bool SequencedWorkerPoolSequencedTaskRunner::RunsTasksOnCurrentThread() const {
return pool_->IsRunningSequenceOnCurrentThread(token_);
}
bool SequencedWorkerPoolSequencedTaskRunner::PostNonNestableDelayedTask(
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
// There's no way to run nested tasks, so simply forward to
// PostDelayedTask.
return PostDelayedTask(from_here, task, delay);
}
// patch start huangshaobin ====================================================
void
SequencedWorkerPoolSequencedTaskRunner::Finalize() {
pool_->FinalizeToken(token_);
}
// patch end huangshaobin ====================================================
// Create a process-wide unique ID to represent this task in trace events. This
// will be mangled with a Process ID hash to reduce the likelihood of colliding
// with MessageLoop pointers on other processes.
uint64 GetTaskTraceID(const SequencedTask& task,
void* pool) {
return (static_cast<uint64>(task.trace_id) << 32) |
static_cast<uint64>(reinterpret_cast<intptr_t>(pool));
}
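// Thread-local pointer to the SequenceToken of the sequence currently being run by the
// calling worker thread; it is set in Worker::Run().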
base::LazyInstance<base::ThreadLocalPointer<
SequencedWorkerPool::SequenceToken> >::Leaky g_lazy_tls_ptr =
LAZY_INSTANCE_INITIALIZER;
} // namespace
// Worker ---------------------------------------------------------------------
class SequencedWorkerPool::Worker : public SimpleThread {
public:
// Hold a (cyclic) ref to |worker_pool|, since we want to keep it
// around as long as we are running.
Worker(const scoped_refptr<SequencedWorkerPool>& worker_pool,
int thread_number,
const std::string& thread_name_prefix);
~Worker() override;
// SimpleThread implementation. This actually runs the background thread.
void Run() override;
void set_running_task_info(SequenceToken token,
WorkerShutdown shutdown_behavior) {
running_sequence_ = token;
running_shutdown_behavior_ = shutdown_behavior;
}
SequenceToken running_sequence() const {
return running_sequence_;
}
WorkerShutdown running_shutdown_behavior() const {
return running_shutdown_behavior_;
}
private:
scoped_refptr<SequencedWorkerPool> worker_pool_;
SequenceToken running_sequence_;
WorkerShutdown running_shutdown_behavior_;
DISALLOW_COPY_AND_ASSIGN(Worker);
};
// Inner ----------------------------------------------------------------------
class SequencedWorkerPool::Inner {
public:
// Take a raw pointer to |worker| to avoid cycles (since we're owned
// by it).
Inner(SequencedWorkerPool* worker_pool, size_t max_threads,
const std::string& thread_name_prefix,
TestingObserver* observer);
~Inner();
SequenceToken GetSequenceToken();
SequenceToken GetNamedSequenceToken(const std::string& name);
// This function accepts a name and an ID. If the name is null, the
// token ID is used. This allows us to implement the optional name lookup
// from a single function without having to enter the lock a separate time.
bool PostTask(const std::string* optional_token_name,
SequenceToken sequence_token,
WorkerShutdown shutdown_behavior,
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay);
bool RunsTasksOnCurrentThread() const;
bool IsRunningSequenceOnCurrentThread(SequenceToken sequence_token) const;
void CleanupForTesting();
void SignalHasWorkForTesting();
int GetWorkSignalCountForTesting() const;
void Shutdown(int max_blocking_tasks_after_shutdown);
bool IsShutdownInProgress();
// Runs the worker loop on the background thread.
void ThreadLoop(Worker* this_worker);
// patch start tanjianwen ====================================================
void FinalizeToken(SequenceToken token);
void FinalizeNamedToken(const std::string& name);
// patch end tanjianwen ====================================================
private:
enum GetWorkStatus {
GET_WORK_FOUND,
GET_WORK_NOT_FOUND,
GET_WORK_WAIT,
};
enum CleanupState {
CLEANUP_REQUESTED,
CLEANUP_STARTING,
CLEANUP_RUNNING,
CLEANUP_FINISHING,
CLEANUP_DONE,
};
// patch start tanjianwen ====================================================
// Every runner's finalization includes two stages.
//
// In the first one, we discard all non-blocking tasks, and we will wait till
// the last blocking task is executed in the next stage.
//
// Distinguishing one stage from another is just a performance improvement.
enum FinalizationStage {
DISCARDING_NON_BLOCKING_TASKS,
WAITING_FOR_BLOCKING_TASKS
};
// patch end tanjianwen ====================================================
// Called from within the lock, this converts the given token name into a
// token ID, creating a new one if necessary.
int LockedGetNamedTokenID(const std::string& name);
// Called from within the lock, this returns the next sequence task number.
int64 LockedGetNextSequenceTaskNumber();
// Called from within the lock, returns the shutdown behavior of the task
// running on the currently executing worker thread. If invoked from a thread
// that is not one of the workers, returns CONTINUE_ON_SHUTDOWN.
WorkerShutdown LockedCurrentThreadShutdownBehavior() const;
// Gets new task. There are 3 cases depending on the return value:
//
// 1) If the return value is |GET_WORK_FOUND|, |task| is filled in and should
// be run immediately.
// 2) If the return value is |GET_WORK_NOT_FOUND|, there are no tasks to run,
// and |task| is not filled in. In this case, the caller should wait until
// a task is posted.
// 3) If the return value is |GET_WORK_WAIT|, there are no tasks to run
// immediately, and |task| is not filled in. Likewise, |wait_time| is
//    filled in with the time to wait until the next task is ready to run. In this
//    case, the caller should wait that long.
//
// In any case, the calling code should clear the given
// delete_these_outside_lock vector the next time the lock is released.
// See the implementation for a more detailed description.
GetWorkStatus GetWork(SequencedTask* task,
TimeDelta* wait_time,
std::vector<Closure>* delete_these_outside_lock);
void HandleCleanup();
// Performs init and cleanup around running the given task. WillRun...
// returns the value from PrepareToStartAdditionalThreadIfHelpful.
// The calling code should call FinishStartingAdditionalThread once the
// lock is released if the return value is nonzero.
int WillRunWorkerTask(const SequencedTask& task);
void DidRunWorkerTask(const SequencedTask& task);
// Returns true if there are no threads currently running the given
// sequence token.
bool IsSequenceTokenRunnable(int sequence_token_id) const;
// Checks if all threads are busy and the addition of one more could run an
// additional task waiting in the queue. This must be called from within
// the lock.
//
// If another thread is helpful, this will mark the thread as being in the
// process of starting and returns the index of the new thread which will be
// 0 or more. The caller should then call FinishStartingAdditionalThread to
// complete initialization once the lock is released.
//
// If another thread is not necessary, returns 0.
//
// See the implementation for more.
int PrepareToStartAdditionalThreadIfHelpful();
// The second part of thread creation after
// PrepareToStartAdditionalThreadIfHelpful with the thread number it
// generated. This actually creates the thread and should be called outside
// the lock to avoid blocking important work starting a thread in the lock.
void FinishStartingAdditionalThread(int thread_number);
// Signal |has_work_| and increment |has_work_signal_count_|.
void SignalHasWork();
// Checks whether there is work left that's blocking shutdown. Must be
// called inside the lock.
bool CanShutdown() const;
// patch start tanjianwen ====================================================
void DeprecateTokensIfNeeded(int last_task_token_id,
std::vector<Closure>* delete_these_outside_lock);
// patch end tanjianwen ====================================================
SequencedWorkerPool* const worker_pool_;
// The last sequence number used. Managed by GetSequenceToken, since this
// only does threadsafe increment operations, you do not need to hold the
// lock. This is class-static to make SequenceTokens issued by
// GetSequenceToken unique across SequencedWorkerPool instances.
static base::StaticAtomicSequenceNumber g_last_sequence_number_;
// This lock protects |everything in this class|. Do not read or modify
// anything without holding this lock. Do not block while holding this
// lock.
mutable Lock lock_;
// Condition variable that is waited on by worker threads until new
// tasks are posted or shutdown starts.
ConditionVariable has_work_cv_;
// Condition variable that is waited on by non-worker threads (in
// Shutdown()) until CanShutdown() goes to true.
ConditionVariable can_shutdown_cv_;
// The maximum number of worker threads we'll create.
const size_t max_threads_;
const std::string thread_name_prefix_;
// Associates all known sequence token names with their IDs.
std::map<std::string, int> named_sequence_tokens_;
// Owning pointers to all threads we've created so far, indexed by
// ID. Since we lazily create threads, this may be less than
// max_threads_ and will be initially empty.
typedef std::map<PlatformThreadId, linked_ptr<Worker> > ThreadMap;
ThreadMap threads_;
// Set to true when we're in the process of creating another thread.
// See PrepareToStartAdditionalThreadIfHelpful for more.
bool thread_being_created_;
// Number of threads currently waiting for work.
size_t waiting_thread_count_;
// Number of threads currently running tasks that have the BLOCK_SHUTDOWN
// or SKIP_ON_SHUTDOWN flag set.
size_t blocking_shutdown_thread_count_;
// A set of all pending tasks in time-to-run order. These are tasks that are
// either waiting for a thread to run on, waiting for their time to run,
// or blocked on a previous task in their sequence. We have to iterate over
// the tasks by time-to-run order, so we use the set instead of the
// traditional priority_queue.
typedef std::set<SequencedTask, SequencedTaskLessThan> PendingTaskSet;
PendingTaskSet pending_tasks_;
// The next sequence number for a new sequenced task.
int64 next_sequence_task_number_;
// Number of tasks in the pending_tasks_ list that are marked as blocking
// shutdown.
size_t blocking_shutdown_pending_task_count_;
// Lists all sequence tokens currently executing.
std::set<int> current_sequences_;
// An ID for each posted task to distinguish the task from others in traces.
int trace_id_;
// Set when Shutdown is called and no further tasks should be
// allowed, though we may still be running existing tasks.
bool shutdown_called_;
// The number of new BLOCK_SHUTDOWN tasks that may be posted after Shutdown()
// has been called.
int max_blocking_tasks_after_shutdown_;
// State used to cleanup for testing, all guarded by lock_.
CleanupState cleanup_state_;
size_t cleanup_idlers_;
ConditionVariable cleanup_cv_;
TestingObserver* const testing_observer_;
// patch start tanjianwen ====================================================
//
// A token is deprecated as soon as the finalization is started.
// Posting a task associated with deprecated tokens will fail.
std::set<int> deprecated_tokens_;
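// Maps a deprecated token ID to the event that FinalizeToken() is waiting on, together
// with that token's current finalization stage.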
typedef std::map<int, std::pair<base::WaitableEvent*, FinalizationStage>>
TokenFinalizing;
TokenFinalizing token_final_sync_;
// patch end tanjianwen ====================================================
DISALLOW_COPY_AND_ASSIGN(Inner);
};
// Worker definitions ---------------------------------------------------------
SequencedWorkerPool::Worker::Worker(
const scoped_refptr<SequencedWorkerPool>& worker_pool,
int thread_number,
const std::string& prefix)
: SimpleThread(prefix + StringPrintf("Worker%d", thread_number)),
worker_pool_(worker_pool),
running_shutdown_behavior_(CONTINUE_ON_SHUTDOWN) {
Start();
}
SequencedWorkerPool::Worker::~Worker() {
}
void SequencedWorkerPool::Worker::Run() {
#if defined(OS_WIN)
win::ScopedCOMInitializer com_initializer;
#endif
// Store a pointer to the running sequence in thread local storage for
// static function access.
g_lazy_tls_ptr.Get().Set(&running_sequence_);
// Just jump back to the Inner object to run the thread, since it has all the
// tracking information and queues. It might be more natural to implement
// using DelegateSimpleThread and have Inner implement the Delegate to avoid
// having these worker objects at all, but that method lacks the ability to
// send thread-specific information easily to the thread loop.
worker_pool_->inner_->ThreadLoop(this);
// Release our cyclic reference once we're done.
worker_pool_ = NULL;
}
// Inner definitions ---------------------------------------------------------
SequencedWorkerPool::Inner::Inner(
SequencedWorkerPool* worker_pool,
size_t max_threads,
const std::string& thread_name_prefix,
TestingObserver* observer)
: worker_pool_(worker_pool),
lock_(),
has_work_cv_(&lock_),
can_shutdown_cv_(&lock_),
max_threads_(max_threads),
thread_name_prefix_(thread_name_prefix),
thread_being_created_(false),
waiting_thread_count_(0),
blocking_shutdown_thread_count_(0),
next_sequence_task_number_(0),
blocking_shutdown_pending_task_count_(0),
trace_id_(0),
shutdown_called_(false),
max_blocking_tasks_after_shutdown_(0),
cleanup_state_(CLEANUP_DONE),
cleanup_idlers_(0),
cleanup_cv_(&lock_),
testing_observer_(observer) {}
SequencedWorkerPool::Inner::~Inner() {
// You must call Shutdown() before destroying the pool.
DCHECK(shutdown_called_);
// Need to explicitly join with the threads before they're destroyed or else
// they will be running when our object is half torn down.
for (ThreadMap::iterator it = threads_.begin(); it != threads_.end(); ++it)
it->second->Join();
threads_.clear();
if (testing_observer_)
testing_observer_->OnDestruct();
}
SequencedWorkerPool::SequenceToken
SequencedWorkerPool::Inner::GetSequenceToken() {
// Need to add one because StaticAtomicSequenceNumber starts at zero, which
// is used as a sentinel value in SequenceTokens.
return SequenceToken(g_last_sequence_number_.GetNext() + 1);
}
SequencedWorkerPool::SequenceToken
SequencedWorkerPool::Inner::GetNamedSequenceToken(const std::string& name) {
AutoLock lock(lock_);
return SequenceToken(LockedGetNamedTokenID(name));
}
bool SequencedWorkerPool::Inner::PostTask(
const std::string* optional_token_name,
SequenceToken sequence_token,
WorkerShutdown shutdown_behavior,
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
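// A delayed task cannot block shutdown, so a nonzero delay requires SKIP_ON_SHUTDOWN.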
DCHECK(delay == TimeDelta() || shutdown_behavior == SKIP_ON_SHUTDOWN);
SequencedTask sequenced(from_here);
sequenced.sequence_token_id = sequence_token.id_;
sequenced.shutdown_behavior = shutdown_behavior;
sequenced.posted_from = from_here;
sequenced.task =
shutdown_behavior == BLOCK_SHUTDOWN ?
base::MakeCriticalClosure(task) : task;
sequenced.time_to_run = TimeTicks::Now() + delay;
int create_thread_id = 0;
{
AutoLock lock(lock_);
// patch start tanjianwen ==================================================
if (!deprecated_tokens_.empty()) {
if (deprecated_tokens_.find(sequence_token.id_) !=
deprecated_tokens_.end())
return false;
}
// patch end tanjianwen ==================================================
if (shutdown_called_) {
if (shutdown_behavior != BLOCK_SHUTDOWN ||
LockedCurrentThreadShutdownBehavior() == CONTINUE_ON_SHUTDOWN) {
return false;
}
if (max_blocking_tasks_after_shutdown_ <= 0) {
DLOG(WARNING) << "BLOCK_SHUTDOWN task disallowed";
return false;
}
max_blocking_tasks_after_shutdown_ -= 1;
}
// The trace_id is used for identifying the task in about:tracing.
sequenced.trace_id = trace_id_++;
TRACE_EVENT_FLOW_BEGIN0(TRACE_DISABLED_BY_DEFAULT("toplevel.flow"),
"SequencedWorkerPool::PostTask",
TRACE_ID_MANGLE(GetTaskTraceID(sequenced, static_cast<void*>(this))));
sequenced.sequence_task_number = LockedGetNextSequenceTaskNumber();
// Now that we have the lock, apply the named token rules.
if (optional_token_name)
sequenced.sequence_token_id = LockedGetNamedTokenID(*optional_token_name);
pending_tasks_.insert(sequenced);
if (shutdown_behavior == BLOCK_SHUTDOWN)
blocking_shutdown_pending_task_count_++;
create_thread_id = PrepareToStartAdditionalThreadIfHelpful();
}
// Actually start the additional thread or signal an existing one now that
// we're outside the lock.
if (create_thread_id)
FinishStartingAdditionalThread(create_thread_id);
else
SignalHasWork();
return true;
}
bool SequencedWorkerPool::Inner::RunsTasksOnCurrentThread() const {
AutoLock lock(lock_);
return ContainsKey(threads_, PlatformThread::CurrentId());
}
bool SequencedWorkerPool::Inner::IsRunningSequenceOnCurrentThread(
SequenceToken sequence_token) const {
AutoLock lock(lock_);
ThreadMap::const_iterator found = threads_.find(PlatformThread::CurrentId());
if (found == threads_.end())
return false;
return sequence_token.Equals(found->second->running_sequence());
}
// See https://code.google.com/p/chromium/issues/detail?id=168415
void SequencedWorkerPool::Inner::CleanupForTesting() {
DCHECK(!RunsTasksOnCurrentThread());
base::ThreadRestrictions::ScopedAllowWait allow_wait;
AutoLock lock(lock_);
CHECK_EQ(CLEANUP_DONE, cleanup_state_);
if (shutdown_called_)
return;
if (pending_tasks_.empty() && waiting_thread_count_ == threads_.size())
return;
cleanup_state_ = CLEANUP_REQUESTED;
cleanup_idlers_ = 0;
has_work_cv_.Signal();
while (cleanup_state_ != CLEANUP_DONE)
cleanup_cv_.Wait();
}
void SequencedWorkerPool::Inner::SignalHasWorkForTesting() {
SignalHasWork();
}
void SequencedWorkerPool::Inner::Shutdown(
int max_new_blocking_tasks_after_shutdown) {
DCHECK_GE(max_new_blocking_tasks_after_shutdown, 0);
{
AutoLock lock(lock_);
// Cleanup and Shutdown should not be called concurrently.
CHECK_EQ(CLEANUP_DONE, cleanup_state_);
if (shutdown_called_)
return;
shutdown_called_ = true;
max_blocking_tasks_after_shutdown_ = max_new_blocking_tasks_after_shutdown;
// Tickle the threads. This will wake up a waiting one so it will know that
// it can exit, which in turn will wake up any other waiting ones.
SignalHasWork();
// There are no pending or running tasks blocking shutdown, we're done.
if (CanShutdown())
return;
}
// If we're here, then something is blocking shutdown. So wait for
// CanShutdown() to go to true.
if (testing_observer_)
testing_observer_->WillWaitForShutdown();
#if !defined(OS_NACL)
TimeTicks shutdown_wait_begin = TimeTicks::Now();
#endif
{
base::ThreadRestrictions::ScopedAllowWait allow_wait;
AutoLock lock(lock_);
while (!CanShutdown())
can_shutdown_cv_.Wait();
}
#if !defined(OS_NACL)
UMA_HISTOGRAM_TIMES("SequencedWorkerPool.ShutdownDelayTime",
TimeTicks::Now() - shutdown_wait_begin);
#endif
}
bool SequencedWorkerPool::Inner::IsShutdownInProgress() {
AutoLock lock(lock_);
return shutdown_called_;
}
void SequencedWorkerPool::Inner::ThreadLoop(Worker* this_worker) {
{
AutoLock lock(lock_);
DCHECK(thread_being_created_);
thread_being_created_ = false;
std::pair<ThreadMap::iterator, bool> result =
threads_.insert(
std::make_pair(this_worker->tid(), make_linked_ptr(this_worker)));
DCHECK(result.second);
while (true) {
#if defined(OS_MACOSX)
base::mac::ScopedNSAutoreleasePool autorelease_pool;
#endif
HandleCleanup();
// See GetWork for what delete_these_outside_lock is doing.
SequencedTask task;
TimeDelta wait_time;
std::vector<Closure> delete_these_outside_lock;
GetWorkStatus status =
GetWork(&task, &wait_time, &delete_these_outside_lock);
if (status == GET_WORK_FOUND) {
TRACE_EVENT_FLOW_END0(TRACE_DISABLED_BY_DEFAULT("toplevel.flow"),
"SequencedWorkerPool::PostTask",
TRACE_ID_MANGLE(GetTaskTraceID(task, static_cast<void*>(this))));
TRACE_EVENT2("toplevel", "SequencedWorkerPool::ThreadLoop",
"src_file", task.posted_from.file_name(),
"src_func", task.posted_from.function_name());
int new_thread_id = WillRunWorkerTask(task);
{
AutoUnlock unlock(lock_);
// There may be more work available, so wake up another
// worker thread. (Technically not required, since we
// already get a signal for each new task, but it doesn't
// hurt.)
SignalHasWork();
delete_these_outside_lock.clear();
// Complete thread creation outside the lock if necessary.
if (new_thread_id)
FinishStartingAdditionalThread(new_thread_id);
this_worker->set_running_task_info(
SequenceToken(task.sequence_token_id), task.shutdown_behavior);
tracked_objects::ThreadData::PrepareForStartOfRun(task.birth_tally);
tracked_objects::TaskStopwatch stopwatch;
stopwatch.Start();
task.task.Run();
stopwatch.Stop();
tracked_objects::ThreadData::TallyRunOnNamedThreadIfTracking(
task, stopwatch);
// Make sure our task is erased outside the lock for the
// same reason we do this with delete_these_outside_lock.
// Also, do it before calling set_running_task_info() so
// that sequence-checking from within the task's destructor
// still works.
task.task = Closure();
this_worker->set_running_task_info(
SequenceToken(), CONTINUE_ON_SHUTDOWN);
}
DidRunWorkerTask(task); // Must be done inside the lock.
// patch start tanjianwen ==================================================
DeprecateTokensIfNeeded(task.sequence_token_id,
&delete_these_outside_lock);
// patch end tanjianwen ==================================================
} else if (cleanup_state_ == CLEANUP_RUNNING) {
switch (status) {
case GET_WORK_WAIT: {
AutoUnlock unlock(lock_);
delete_these_outside_lock.clear();
}
break;
case GET_WORK_NOT_FOUND:
CHECK(delete_these_outside_lock.empty());
cleanup_state_ = CLEANUP_FINISHING;
cleanup_cv_.Broadcast();
break;
default:
NOTREACHED();
}
} else {
// When we're terminating and there's no more work, we can
// shut down, other workers can complete any pending or new tasks.
// We can get additional tasks posted after shutdown_called_ is set
// but only worker threads are allowed to post tasks at that time, and
// the workers responsible for posting those tasks will be available
// to run them. Also, there may be some tasks stuck behind running
// ones with the same sequence token, but additional threads won't
// help this case.
if (shutdown_called_ &&
blocking_shutdown_pending_task_count_ == 0)
break;
waiting_thread_count_++;
switch (status) {
case GET_WORK_NOT_FOUND:
has_work_cv_.Wait();
break;
case GET_WORK_WAIT:
has_work_cv_.TimedWait(wait_time);
break;
default:
NOTREACHED();
}
waiting_thread_count_--;
}
}
} // Release lock_.
// We noticed we should exit. Wake up the next worker so it knows it should
// exit as well (because the Shutdown() code only signals once).
SignalHasWork();
// Possibly unblock shutdown.
can_shutdown_cv_.Signal();
}
void SequencedWorkerPool::Inner::HandleCleanup() {
lock_.AssertAcquired();
if (cleanup_state_ == CLEANUP_DONE)
return;
if (cleanup_state_ == CLEANUP_REQUESTED) {
// We win, we get to do the cleanup as soon as the others wise up and idle.
cleanup_state_ = CLEANUP_STARTING;
while (thread_being_created_ ||
cleanup_idlers_ != threads_.size() - 1) {
has_work_cv_.Signal();
cleanup_cv_.Wait();
}
cleanup_state_ = CLEANUP_RUNNING;
return;
}
if (cleanup_state_ == CLEANUP_STARTING) {
// Another worker thread is cleaning up, we idle here until that's done.
++cleanup_idlers_;
cleanup_cv_.Broadcast();
while (cleanup_state_ != CLEANUP_FINISHING) {
cleanup_cv_.Wait();
}
--cleanup_idlers_;
cleanup_cv_.Broadcast();
return;
}
if (cleanup_state_ == CLEANUP_FINISHING) {
// We wait for all idlers to wake up prior to being DONE.
while (cleanup_idlers_ != 0) {
cleanup_cv_.Broadcast();
cleanup_cv_.Wait();
}
if (cleanup_state_ == CLEANUP_FINISHING) {
cleanup_state_ = CLEANUP_DONE;
cleanup_cv_.Signal();
}
return;
}
}
int SequencedWorkerPool::Inner::LockedGetNamedTokenID(
const std::string& name) {
lock_.AssertAcquired();
DCHECK(!name.empty());
std::map<std::string, int>::const_iterator found =
named_sequence_tokens_.find(name);
if (found != named_sequence_tokens_.end())
return found->second; // Got an existing one.
// Create a new one for this name.
SequenceToken result = GetSequenceToken();
named_sequence_tokens_.insert(std::make_pair(name, result.id_));
return result.id_;
}
int64 SequencedWorkerPool::Inner::LockedGetNextSequenceTaskNumber() {
lock_.AssertAcquired();
// We assume that we never create enough tasks to wrap around.
return next_sequence_task_number_++;
}
SequencedWorkerPool::WorkerShutdown
SequencedWorkerPool::Inner::LockedCurrentThreadShutdownBehavior() const {
lock_.AssertAcquired();
ThreadMap::const_iterator found = threads_.find(PlatformThread::CurrentId());
if (found == threads_.end())
return CONTINUE_ON_SHUTDOWN;
return found->second->running_shutdown_behavior();
}
SequencedWorkerPool::Inner::GetWorkStatus SequencedWorkerPool::Inner::GetWork(
SequencedTask* task,
TimeDelta* wait_time,
std::vector<Closure>* delete_these_outside_lock) {
lock_.AssertAcquired();
// Find the next task with a sequence token that's not currently in use.
// If the token is in use, that means another thread is running something
// in that sequence, and we can't run it without going out-of-order.
//
// This algorithm is simple and fair, but inefficient in some cases. For
// example, say somebody schedules 1000 slow tasks with the same sequence
// number. We'll have to go through all those tasks each time we feel like
// there might be work to schedule. If this proves to be a problem, we
// should make this more efficient.
//
// One possible enhancement would be to keep a map from sequence ID to a
// list of pending but currently blocked SequencedTasks for that ID.
// When a worker finishes a task of one sequence token, it can pick up the
// next one from that token right away.
//
// This may lead to starvation if there are sufficient numbers of sequences
// in use. To alleviate this, we could add an incrementing priority counter
// to each SequencedTask. Then maintain a priority_queue of all runnable
// tasks, sorted by priority counter. When a sequenced task is completed
// we would pop the head element off of that task's pending list and add it
// to the priority queue. Then we would run the first item in the priority
// queue.
GetWorkStatus status = GET_WORK_NOT_FOUND;
int unrunnable_tasks = 0;
PendingTaskSet::iterator i = pending_tasks_.begin();
// We assume that the loop below doesn't take too long and so we can just do
// a single call to TimeTicks::Now().
const TimeTicks current_time = TimeTicks::Now();
while (i != pending_tasks_.end()) {
if (!IsSequenceTokenRunnable(i->sequence_token_id)) {
unrunnable_tasks++;
++i;
continue;
}
if (shutdown_called_ && i->shutdown_behavior != BLOCK_SHUTDOWN) {
// We're shutting down and the task we just found isn't blocking
// shutdown. Delete it and get more work.
//
// Note that we do not want to delete unrunnable tasks. Deleting a task
// can have side effects (like freeing some objects) and deleting a
// task that's supposed to run after one that's currently running could
// cause an obscure crash.
//
// We really want to delete these tasks outside the lock in case the
// closures are holding refs to objects that want to post work from
// their destructors (which would deadlock). The closures are
// internally refcounted, so we just need to keep a copy of them alive
// until the lock is exited. The calling code can just clear() the
// vector they passed to us once the lock is exited to make this
// happen.
delete_these_outside_lock->push_back(i->task);
pending_tasks_.erase(i++);
continue;
}
if (i->time_to_run > current_time) {
// The time to run has not come yet.
*wait_time = i->time_to_run - current_time;
status = GET_WORK_WAIT;
if (cleanup_state_ == CLEANUP_RUNNING) {
// Deferred tasks are deleted when cleaning up, see Inner::ThreadLoop.
delete_these_outside_lock->push_back(i->task);
pending_tasks_.erase(i);
}
break;
}
// Found a runnable task.
*task = *i;
pending_tasks_.erase(i);
if (task->shutdown_behavior == BLOCK_SHUTDOWN) {
blocking_shutdown_pending_task_count_--;
}
status = GET_WORK_FOUND;
break;
}
return status;
}
int SequencedWorkerPool::Inner::WillRunWorkerTask(const SequencedTask& task) {
lock_.AssertAcquired();
// Mark the task's sequence number as in use.
if (task.sequence_token_id)
current_sequences_.insert(task.sequence_token_id);
// Ensure that threads running tasks posted with either SKIP_ON_SHUTDOWN
// or BLOCK_SHUTDOWN will prevent shutdown until that task or thread
// completes.
if (task.shutdown_behavior != CONTINUE_ON_SHUTDOWN)
blocking_shutdown_thread_count_++;
// We just picked up a task. Since StartAdditionalThreadIfHelpful only
// creates a new thread if there is no free one, there is a race when posting
// tasks that many tasks could have been posted before a thread started
// running them, so only one thread would have been created. So we also check
// whether we should create more threads after removing our task from the
// queue, which also has the nice side effect of creating the workers from
// background threads rather than the main thread of the app.
//
// If another thread wasn't created, we want to wake up an existing thread
// if there is one waiting to pick up the next task.
//
// Note that we really need to do this *before* running the task, not
// after. Otherwise, if more than one task is posted, the creation of the
// second thread (since we only create one at a time) will be blocked by
// the execution of the first task, which could be arbitrarily long.
return PrepareToStartAdditionalThreadIfHelpful();
}
void SequencedWorkerPool::Inner::DidRunWorkerTask(const SequencedTask& task) {
lock_.AssertAcquired();
if (task.shutdown_behavior != CONTINUE_ON_SHUTDOWN) {
DCHECK_GT(blocking_shutdown_thread_count_, 0u);
blocking_shutdown_thread_count_--;
}
if (task.sequence_token_id)
current_sequences_.erase(task.sequence_token_id);
}
bool SequencedWorkerPool::Inner::IsSequenceTokenRunnable(
int sequence_token_id) const {
lock_.AssertAcquired();
return !sequence_token_id ||
current_sequences_.find(sequence_token_id) ==
current_sequences_.end();
}
int SequencedWorkerPool::Inner::PrepareToStartAdditionalThreadIfHelpful() {
lock_.AssertAcquired();
// How thread creation works:
//
// We'd like to avoid creating threads with the lock held. However, we
// need to be sure that we have an accurate accounting of the threads for
// proper Joining and deletion on shutdown.
//
// We need to figure out if we need another thread with the lock held, which
// is what this function does. It then marks us as in the process of creating
// a thread. When we do shutdown, we wait until the thread_being_created_
// flag is cleared, which ensures that the new thread is properly added to
// all the data structures and we can't leak it. Once shutdown starts, we'll
// refuse to create more threads or they would be leaked.
//
// Note that this creates a mostly benign race condition on shutdown that
// will cause fewer workers to be created than one would expect. It isn't
// much of an issue in real life, but affects some tests. Since we only spawn
// one worker at a time, the following sequence of events can happen:
//
// 1. Main thread posts a bunch of unrelated tasks that would normally be
// run on separate threads.
// 2. The first task post causes us to start a worker. Other tasks do not
// cause a worker to start since one is pending.
// 3. Main thread initiates shutdown.
// 4. No more threads are created since the shutdown_called_ flag is set.
//
// The result is that one may expect max_threads_ workers to be created
// given the workload, but in reality fewer may be created because the
// sequence of thread creation on the background threads is racing with the
// shutdown call.
if (!shutdown_called_ &&
!thread_being_created_ &&
cleanup_state_ == CLEANUP_DONE &&
threads_.size() < max_threads_ &&
waiting_thread_count_ == 0) {
// We could use an additional thread if there's work to be done.
for (PendingTaskSet::const_iterator i = pending_tasks_.begin();
i != pending_tasks_.end(); ++i) {
if (IsSequenceTokenRunnable(i->sequence_token_id)) {
// Found a runnable task, mark the thread as being started.
thread_being_created_ = true;
return static_cast<int>(threads_.size() + 1);
}
}
}
return 0;
}
void SequencedWorkerPool::Inner::FinishStartingAdditionalThread(
int thread_number) {
// Called outside of the lock.
DCHECK(thread_number > 0);
// The worker is assigned to the list when the thread actually starts, which
// will manage the memory of the pointer.
new Worker(worker_pool_, thread_number, thread_name_prefix_);
}
void SequencedWorkerPool::Inner::SignalHasWork() {<|fim▁hole|> testing_observer_->OnHasWork();
}
}
bool SequencedWorkerPool::Inner::CanShutdown() const {
lock_.AssertAcquired();
// See PrepareToStartAdditionalThreadIfHelpful for how thread creation works.
return !thread_being_created_ &&
blocking_shutdown_thread_count_ == 0 &&
blocking_shutdown_pending_task_count_ == 0;
}
// patch start tanjianwen ======================================================
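// Marks |token| as deprecated, discards its pending non-BLOCK_SHUTDOWN tasks,
// and blocks until any remaining pending or currently running task associated
// with the token has finished (the wait is signaled from
// DeprecateTokensIfNeeded()).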
void SequencedWorkerPool::Inner::FinalizeToken(SequenceToken token) {
std::vector<Closure> delete_these_outside_lock;
{
AutoLock lock(lock_);
// Don't invoke FinalizeToken() with the same token more than once.
DCHECK(deprecated_tokens_.find(token.id_) == deprecated_tokens_.end());
deprecated_tokens_.insert(token.id_);
    // Consider the situation where there are no pending tasks in the thread
    // pool at all and the currently executing task is not associated with the
    // specified token; then we get no chance to signal the event. In that
    // case, we do nothing but deprecate the token.
bool no_related_task = true;
for (PendingTaskSet::iterator i = pending_tasks_.begin();
i != pending_tasks_.end();) {
if (i->sequence_token_id == token.id_) {
if (i->shutdown_behavior != BLOCK_SHUTDOWN) {
delete_these_outside_lock.push_back(i->task);
i = pending_tasks_.erase(i);
continue;
}
no_related_task = false;
break;
}
++i;
}
if (no_related_task) {
if (current_sequences_.find(token.id_) != current_sequences_.end())
no_related_task = false;
}
if (!no_related_task) {
base::WaitableEvent sync(true, false);
// Don't invoke FinalizeToken() with the same token more than once.
DCHECK(token_final_sync_.find(token.id_) == token_final_sync_.end());
token_final_sync_[token.id_] =
std::make_pair(&sync, DISCARDING_NON_BLOCKING_TASKS);
{
AutoUnlock unlock(lock_);
sync.Wait();
}
}
}
}
void SequencedWorkerPool::Inner::FinalizeNamedToken(const std::string& name) {
int id;
{
AutoLock lock(lock_);
id = LockedGetNamedTokenID(name);
}
FinalizeToken(SequenceToken(id));
}
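// Called with |lock_| held; |last_task_token_id| identifies the task that just
// ran. For each token being finalized, evicts its remaining non-blocking
// pending tasks and signals the waiting FinalizeToken() caller once no pending
// or running task for that token remains. Evicted closures are destroyed
// outside the lock.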
void SequencedWorkerPool::Inner::DeprecateTokensIfNeeded(
int last_task_token_id, std::vector<Closure>* delete_these_outside_lock) {
lock_.AssertAcquired();
  // Most of the time there will be 0 or 1 items inside |token_final_sync_|,
  // so the following iteration won't take much time.
for (TokenFinalizing::iterator i = token_final_sync_.begin();
i != token_final_sync_.end();) {
bool through_first_stage = false;
int blocking_task_count = 0;
if (i->second.second == DISCARDING_NON_BLOCKING_TASKS) {
through_first_stage = true;
// In this stage, we need to collect all of the non-blocking tasks
// associated with the specified token, and evict them.
for (PendingTaskSet::iterator j = pending_tasks_.begin();
j != pending_tasks_.end();) {
if (j->sequence_token_id == i->first) {
if (j->shutdown_behavior != BLOCK_SHUTDOWN) {
delete_these_outside_lock->push_back(j->task);
j = pending_tasks_.erase(j);
continue;
} else {
blocking_task_count++;
}
}
++j;
}
// Continue to the next stage.
i->second.second = WAITING_FOR_BLOCKING_TASKS;
}
// No blocking task found.
if (through_first_stage && !blocking_task_count) {
      // Before signaling the caller requesting finalization, we first have to
      // make sure that no task with the same token is running simultaneously
      // in another worker thread, in which case the event will be signaled
      // within that thread after the execution.
if (current_sequences_.find(i->first) == current_sequences_.end()) {
i->second.first->Signal();
i = token_final_sync_.erase(i);
}
continue;
}
    // Here we have reached the waiting stage. We will not signal the event
    // until the last task associated with the specified token has executed.
    //
    // A small optimization: we don't need to do the check if the last run
    // task's token doesn't match the token being processed, since we already
    // ensure that each token in |token_final_sync_| has at least one
    // corresponding task in the pending task queue (see FinalizeToken()).
if (last_task_token_id == i->first) {
bool found = false;
for (PendingTaskSet::iterator j = pending_tasks_.begin();
j != pending_tasks_.end(); ++j) {
if (j->sequence_token_id == i->first) {
found = true;
break;
}
}
if (!found) {
        // Make sure no task with the same token is running simultaneously in
        // another worker thread. See the comments above.
if (current_sequences_.find(i->first) == current_sequences_.end()) {
i->second.first->Signal();
i = token_final_sync_.erase(i);
}
continue;
}
}
++i;
}
if (!delete_these_outside_lock->empty()) {
AutoUnlock unlock(lock_);
delete_these_outside_lock->clear();
}
}
// patch end tanjianwen ======================================================
base::StaticAtomicSequenceNumber
SequencedWorkerPool::Inner::g_last_sequence_number_;
// SequencedWorkerPool --------------------------------------------------------
// static
SequencedWorkerPool::SequenceToken
SequencedWorkerPool::GetSequenceTokenForCurrentThread() {
// Don't construct lazy instance on check.
if (g_lazy_tls_ptr == NULL)
return SequenceToken();
SequencedWorkerPool::SequenceToken* token = g_lazy_tls_ptr.Get().Get();
if (!token)
return SequenceToken();
return *token;
}
SequencedWorkerPool::SequencedWorkerPool(
size_t max_threads,
const std::string& thread_name_prefix)
: constructor_message_loop_(MessageLoopProxy::current()),
inner_(new Inner(this, max_threads, thread_name_prefix, NULL)) {
}
SequencedWorkerPool::SequencedWorkerPool(
size_t max_threads,
const std::string& thread_name_prefix,
TestingObserver* observer)
: constructor_message_loop_(MessageLoopProxy::current()),
inner_(new Inner(this, max_threads, thread_name_prefix, observer)) {
}
SequencedWorkerPool::~SequencedWorkerPool() {}
void SequencedWorkerPool::OnDestruct() const {
DCHECK(constructor_message_loop_.get());
// Avoid deleting ourselves on a worker thread (which would
// deadlock).
if (RunsTasksOnCurrentThread()) {
constructor_message_loop_->DeleteSoon(FROM_HERE, this);
} else {
delete this;
}
}
SequencedWorkerPool::SequenceToken SequencedWorkerPool::GetSequenceToken() {
return inner_->GetSequenceToken();
}
SequencedWorkerPool::SequenceToken SequencedWorkerPool::GetNamedSequenceToken(
const std::string& name) {
return inner_->GetNamedSequenceToken(name);
}
scoped_refptr<SequencedTaskRunner> SequencedWorkerPool::GetSequencedTaskRunner(
SequenceToken token) {
return GetSequencedTaskRunnerWithShutdownBehavior(token, BLOCK_SHUTDOWN);
}
scoped_refptr<SequencedTaskRunner>
SequencedWorkerPool::GetSequencedTaskRunnerWithShutdownBehavior(
SequenceToken token, WorkerShutdown shutdown_behavior) {
return new SequencedWorkerPoolSequencedTaskRunner(
this, token, shutdown_behavior);
}
scoped_refptr<TaskRunner>
SequencedWorkerPool::GetTaskRunnerWithShutdownBehavior(
WorkerShutdown shutdown_behavior) {
return new SequencedWorkerPoolTaskRunner(this, shutdown_behavior);
}
bool SequencedWorkerPool::PostWorkerTask(
const tracked_objects::Location& from_here,
const Closure& task) {
return inner_->PostTask(NULL, SequenceToken(), BLOCK_SHUTDOWN,
from_here, task, TimeDelta());
}
bool SequencedWorkerPool::PostDelayedWorkerTask(
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
WorkerShutdown shutdown_behavior =
delay == TimeDelta() ? BLOCK_SHUTDOWN : SKIP_ON_SHUTDOWN;
return inner_->PostTask(NULL, SequenceToken(), shutdown_behavior,
from_here, task, delay);
}
bool SequencedWorkerPool::PostWorkerTaskWithShutdownBehavior(
const tracked_objects::Location& from_here,
const Closure& task,
WorkerShutdown shutdown_behavior) {
return inner_->PostTask(NULL, SequenceToken(), shutdown_behavior,
from_here, task, TimeDelta());
}
bool SequencedWorkerPool::PostSequencedWorkerTask(
SequenceToken sequence_token,
const tracked_objects::Location& from_here,
const Closure& task) {
return inner_->PostTask(NULL, sequence_token, BLOCK_SHUTDOWN,
from_here, task, TimeDelta());
}
bool SequencedWorkerPool::PostDelayedSequencedWorkerTask(
SequenceToken sequence_token,
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
WorkerShutdown shutdown_behavior =
delay == TimeDelta() ? BLOCK_SHUTDOWN : SKIP_ON_SHUTDOWN;
return inner_->PostTask(NULL, sequence_token, shutdown_behavior,
from_here, task, delay);
}
bool SequencedWorkerPool::PostNamedSequencedWorkerTask(
const std::string& token_name,
const tracked_objects::Location& from_here,
const Closure& task) {
DCHECK(!token_name.empty());
return inner_->PostTask(&token_name, SequenceToken(), BLOCK_SHUTDOWN,
from_here, task, TimeDelta());
}
bool SequencedWorkerPool::PostSequencedWorkerTaskWithShutdownBehavior(
SequenceToken sequence_token,
const tracked_objects::Location& from_here,
const Closure& task,
WorkerShutdown shutdown_behavior) {
return inner_->PostTask(NULL, sequence_token, shutdown_behavior,
from_here, task, TimeDelta());
}
bool SequencedWorkerPool::PostDelayedTask(
const tracked_objects::Location& from_here,
const Closure& task,
TimeDelta delay) {
return PostDelayedWorkerTask(from_here, task, delay);
}
bool SequencedWorkerPool::RunsTasksOnCurrentThread() const {
return inner_->RunsTasksOnCurrentThread();
}
bool SequencedWorkerPool::IsRunningSequenceOnCurrentThread(
SequenceToken sequence_token) const {
return inner_->IsRunningSequenceOnCurrentThread(sequence_token);
}
void SequencedWorkerPool::FlushForTesting() {
inner_->CleanupForTesting();
}
void SequencedWorkerPool::SignalHasWorkForTesting() {
inner_->SignalHasWorkForTesting();
}
void SequencedWorkerPool::Shutdown(int max_new_blocking_tasks_after_shutdown) {
DCHECK(constructor_message_loop_->BelongsToCurrentThread());
inner_->Shutdown(max_new_blocking_tasks_after_shutdown);
}
bool SequencedWorkerPool::IsShutdownInProgress() {
return inner_->IsShutdownInProgress();
}
// patch start tanjianwen ==================================================
void SequencedWorkerPool::FinalizeToken(SequenceToken token) {
inner_->FinalizeToken(token);
}
void SequencedWorkerPool::FinalizeNamedToken(const std::string& name) {
inner_->FinalizeNamedToken(name);
}
// patch end tanjianwen ==================================================
} // namespace base<|fim▁end|>
|
has_work_cv_.Signal();
if (testing_observer_) {
|
<|file_name|>LambdaListParser.java<|end_file_name|><|fim▁begin|>package jcl.compiler.sa.analyzer.lambdalistparser;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Predicate;
import jcl.compiler.environment.Environment;
import jcl.compiler.environment.binding.Binding;
import jcl.compiler.environment.binding.lambdalist.AuxParameter;
import jcl.compiler.environment.binding.lambdalist.BodyParameter;
import jcl.compiler.environment.binding.lambdalist.DestructuringLambdaList;
import jcl.compiler.environment.binding.lambdalist.EnvironmentParameter;
import jcl.compiler.environment.binding.lambdalist.KeyParameter;
import jcl.compiler.environment.binding.lambdalist.OptionalParameter;
import jcl.compiler.environment.binding.lambdalist.RequiredParameter;
import jcl.compiler.environment.binding.lambdalist.RestParameter;
import jcl.compiler.environment.binding.lambdalist.SuppliedPParameter;
import jcl.compiler.environment.binding.lambdalist.WholeParameter;
import jcl.compiler.sa.FormAnalyzer;
import jcl.compiler.struct.specialoperator.declare.DeclareStruct;
import jcl.compiler.struct.specialoperator.declare.SpecialDeclarationStruct;
import jcl.lang.KeywordStruct;
import jcl.lang.LispStruct;
import jcl.lang.ListStruct;
import jcl.lang.NILStruct;
import jcl.lang.PackageStruct;
import jcl.lang.PackageSymbolStruct;
import jcl.lang.SymbolStruct;
import jcl.lang.condition.exception.ProgramErrorException;
import jcl.lang.statics.CommonLispSymbols;
import jcl.lang.statics.CompilerConstants;
import jcl.lang.statics.GlobalPackageStruct;
import lombok.experimental.UtilityClass;
@UtilityClass
public final class LambdaListParser {
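	/**
	 * Parses the single &whole parameter, which must be a symbol, registering it as a dynamic or lexical binding in
	 * the provided environment depending on the special declarations.
	 */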
public static WholeParseResult parseWholeBinding(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement) {
final LispStruct currentElement = iterator.next();
if (!(currentElement instanceof SymbolStruct)) {
throw new ProgramErrorException("LambdaList &whole parameters must be a symbol: " + currentElement);
}
final SymbolStruct currentParam = (SymbolStruct) currentElement;
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final WholeParameter wholeBinding = new WholeParameter(currentParam, isSpecial);
return new WholeParseResult(wholeBinding);
}
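	/**
	 * Parses the single &environment parameter, which must be a symbol; it is always registered as a dynamic binding
	 * in the provided environment.
	 */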
public static EnvironmentParseResult parseEnvironmentBinding(final Environment environment,
final Iterator<LispStruct> iterator,
final boolean isAfterRequired) {
LispStruct currentElement = iterator.next();
if (!(currentElement instanceof SymbolStruct)) {
throw new ProgramErrorException("LambdaList &environment parameters must be a symbol: " + currentElement);
}
final SymbolStruct currentParam = (SymbolStruct) currentElement;
if (iterator.hasNext() && isAfterRequired) {
currentElement = iterator.next();
if (!isLambdaListKeyword(currentElement)) {
throw new ProgramErrorException("LambdaList &environment parameter must only have 1 parameter: " + currentElement);
}
}
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
environment.addDynamicBinding(binding);
final EnvironmentParameter environmentBinding = new EnvironmentParameter(currentParam);
return new EnvironmentParseResult(currentElement, environmentBinding);
}
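	/**
	 * Parses required parameters until a lambda-list keyword (or the dotted tail) is reached. Each parameter must be
	 * a symbol, or a destructuring list when destructuring is allowed.
	 */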
public static RequiredParseResult parseRequiredBindings(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement,
final boolean isDotted,
final boolean isDestructuringAllowed) {
final List<RequiredParameter> requiredBindings = new ArrayList<>();
LispStruct currentElement;
do {
currentElement = iterator.next();
if (!iterator.hasNext() && isDotted) {
return new RequiredParseResult(currentElement, requiredBindings);
}
if (isLambdaListKeyword(currentElement)) {
return new RequiredParseResult(currentElement, requiredBindings);
}
final SymbolStruct currentParam;
DestructuringLambdaList destructuringForm = null;
if (currentElement instanceof SymbolStruct) {
currentParam = (SymbolStruct) currentElement;
} else {
if (isDestructuringAllowed) {
if (currentElement instanceof ListStruct) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
currentParam = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) currentElement;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList required parameter must be a symbol or a destructuring list: " + currentElement);
}
} else {
throw new ProgramErrorException("LambdaList required parameters must be a symbol: " + currentElement);
}
}
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final RequiredParameter requiredBinding = new RequiredParameter(currentParam, destructuringForm, isSpecial);
requiredBindings.add(requiredBinding);
} while (iterator.hasNext());
return new RequiredParseResult(currentElement, requiredBindings);
}
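	/**
	 * Parses &optional parameters, each either a symbol or a (var init-form supplied-p) list; a supplied-p symbol is
	 * generated when one is not provided.
	 */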
public static OptionalParseResult parseOptionalBindings(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement,
final boolean isDotted,
final boolean isDestructuringAllowed) {
final List<OptionalParameter> optionalBindings = new ArrayList<>();
if (!iterator.hasNext()) {
return new OptionalParseResult(null, optionalBindings);
}
LispStruct currentElement;
do {
currentElement = iterator.next();
if (isLambdaListKeyword(currentElement)) {
return new OptionalParseResult(currentElement, optionalBindings);
}
if (!iterator.hasNext() && isDotted) {
return new OptionalParseResult(currentElement, optionalBindings);
}
if (currentElement instanceof SymbolStruct) {
final SymbolStruct currentParam = (SymbolStruct) currentElement;
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final String paramName = currentParam.getName();
final String customSuppliedPName = paramName + "-P-" + System.nanoTime();
final PackageStruct currentParamPackage = currentParam.getSymbolPackage();
final SymbolStruct customSuppliedPCurrent = currentParamPackage.intern(customSuppliedPName).getSymbol();
final boolean isSuppliedPSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(customSuppliedPCurrent));
binding = new Binding(customSuppliedPCurrent, CommonLispSymbols.T);
if (isSuppliedPSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final SuppliedPParameter suppliedPBinding = new SuppliedPParameter(customSuppliedPCurrent, isSuppliedPSpecial);
final OptionalParameter optionalBinding = new OptionalParameter(currentParam, null, NILStruct.INSTANCE, isSpecial, suppliedPBinding);
optionalBindings.add(optionalBinding);
} else if (currentElement instanceof ListStruct) {
final ListStruct currentParam = (ListStruct) currentElement;
final long currentParamLength = currentParam.length().toJavaPLong();
if ((currentParamLength < 1) || (currentParamLength > 3)) {
if (isDestructuringAllowed) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
final SymbolStruct varNameCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) currentElement;
final DestructuringLambdaList destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
final String customSuppliedPName = destructuringName + "-P-" + System.nanoTime();
final SymbolStruct customSuppliedPCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(customSuppliedPName).getSymbol();
final SuppliedPParameter suppliedPBinding = new SuppliedPParameter(customSuppliedPCurrent);
final OptionalParameter optionalBinding = new OptionalParameter(varNameCurrent, destructuringForm, NILStruct.INSTANCE, false, suppliedPBinding);<|fim▁hole|> throw new ProgramErrorException("LambdaList &optional parameters must have between 1 and 3 parameters: " + currentParam);
}
} else {
final Iterator<LispStruct> currentIterator = currentParam.iterator();
final LispStruct firstInCurrent = currentIterator.next();
final LispStruct secondInCurrent;
if (currentIterator.hasNext()) {
secondInCurrent = currentIterator.next();
} else {
secondInCurrent = NILStruct.INSTANCE;
}
final LispStruct thirdInCurrent;
if (currentIterator.hasNext()) {
thirdInCurrent = currentIterator.next();
} else {
thirdInCurrent = NILStruct.INSTANCE;
}
final SymbolStruct varNameCurrent;
DestructuringLambdaList destructuringForm = null;
if (firstInCurrent instanceof SymbolStruct) {
varNameCurrent = (SymbolStruct) firstInCurrent;
} else {
if (isDestructuringAllowed) {
if (firstInCurrent instanceof ListStruct) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
varNameCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) firstInCurrent;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList &optional var name parameter must be a symbol or a destructuring list: " + firstInCurrent);
}
} else {
throw new ProgramErrorException("LambdaList &optional var name parameters must be a symbol: " + firstInCurrent);
}
}
LispStruct initForm = NILStruct.INSTANCE;
if (!secondInCurrent.eq(NILStruct.INSTANCE)) {
initForm = secondInCurrent;
}
final LispStruct parameterValueInitForm = FormAnalyzer.analyze(initForm, environment);
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(varNameCurrent));
Binding binding = new Binding(varNameCurrent, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final SuppliedPParameter suppliedPBinding;
if (thirdInCurrent.eq(NILStruct.INSTANCE)) {
final String paramName = varNameCurrent.getName();
final String customSuppliedPName = paramName + "-P-" + System.nanoTime();
final PackageStruct currentParamPackage = varNameCurrent.getSymbolPackage();
final SymbolStruct customSuppliedPCurrent = currentParamPackage.intern(customSuppliedPName).getSymbol();
final boolean isSuppliedPSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(customSuppliedPCurrent));
binding = new Binding(customSuppliedPCurrent, CommonLispSymbols.T);
if (isSuppliedPSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
suppliedPBinding = new SuppliedPParameter(customSuppliedPCurrent, isSuppliedPSpecial);
} else {
if (!(thirdInCurrent instanceof SymbolStruct)) {
throw new ProgramErrorException("LambdaList &optional supplied-p parameters must be a symbol: " + thirdInCurrent);
}
final SymbolStruct suppliedPCurrent = (SymbolStruct) thirdInCurrent;
final boolean isSuppliedPSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(suppliedPCurrent));
binding = new Binding(suppliedPCurrent, CommonLispSymbols.T);
if (isSuppliedPSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
suppliedPBinding = new SuppliedPParameter(suppliedPCurrent, isSuppliedPSpecial);
}
final OptionalParameter optionalBinding = new OptionalParameter(varNameCurrent, destructuringForm, parameterValueInitForm, isSpecial, suppliedPBinding);
optionalBindings.add(optionalBinding);
}
} else {
throw new ProgramErrorException("LambdaList &optional parameters must be a symbol or a list: " + currentElement);
}
} while (iterator.hasNext());
return new OptionalParseResult(currentElement, optionalBindings);
}
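	/**
	 * Parses the single &rest parameter (a symbol, or a destructuring list when destructuring is allowed) and
	 * verifies that it is immediately followed by a lambda-list keyword or the end of the list.
	 */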
public static RestParseResult parseRestBinding(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement,
final boolean isDestructuringAllowed) {
if (!iterator.hasNext()) {
throw new ProgramErrorException("LambdaList &rest parameter must be provided.");
}
LispStruct currentElement = iterator.next();
final SymbolStruct currentParam;
DestructuringLambdaList destructuringForm = null;
if (currentElement instanceof SymbolStruct) {
currentParam = (SymbolStruct) currentElement;
} else {
if (isDestructuringAllowed) {
if (currentElement instanceof ListStruct) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
currentParam = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) currentElement;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList &rest parameters must be a symbol or a destructuring list: " + currentElement);
}
} else {
throw new ProgramErrorException("LambdaList &rest parameters must be a symbol: " + currentElement);
}
}
if (iterator.hasNext()) {
currentElement = iterator.next();
if (!isLambdaListKeyword(currentElement)) {
throw new ProgramErrorException("LambdaList &rest parameter must only have 1 parameter: " + currentElement);
}
}
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final RestParameter restBinding = new RestParameter(currentParam, destructuringForm, isSpecial);
return new RestParseResult(currentElement, restBinding);
}
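	/**
	 * Parses the rest parameter supplied as the dotted tail of a lambda list (a symbol, or a destructuring list when
	 * destructuring is allowed).
	 */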
public static RestParseResult parseDottedRestBinding(final Environment environment,
final LispStruct dottedRest,
final DeclareStruct declareElement,
final boolean isDestructuringAllowed) {
final SymbolStruct currentParam;
DestructuringLambdaList destructuringForm = null;
if (dottedRest instanceof SymbolStruct) {
currentParam = (SymbolStruct) dottedRest;
} else {
if (isDestructuringAllowed) {
if (dottedRest instanceof ListStruct) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
currentParam = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) dottedRest;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList &rest parameters must be a symbol or a destructuring list: " + dottedRest);
}
} else {
throw new ProgramErrorException("LambdaList &rest parameters must be a symbol: " + dottedRest);
}
}
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final RestParameter restBinding = new RestParameter(currentParam, destructuringForm, isSpecial);
return new RestParseResult(dottedRest, restBinding);
}
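	/**
	 * Parses the single &body parameter (a symbol, or a destructuring list when destructuring is allowed) and
	 * verifies that it is immediately followed by a lambda-list keyword or the end of the list.
	 */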
public static BodyParseResult parseBodyBinding(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement,
final boolean isDestructuringAllowed) {
if (!iterator.hasNext()) {
throw new ProgramErrorException("LambdaList &body parameter must be provided.");
}
LispStruct currentElement = iterator.next();
final SymbolStruct currentParam;
DestructuringLambdaList destructuringForm = null;
if (currentElement instanceof SymbolStruct) {
currentParam = (SymbolStruct) currentElement;
} else {
if (isDestructuringAllowed) {
if (currentElement instanceof ListStruct) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
currentParam = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) currentElement;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList &rest parameters must be a symbol or a destructuring list: " + currentElement);
}
} else {
throw new ProgramErrorException("LambdaList &rest parameters must be a symbol: " + currentElement);
}
}
if (iterator.hasNext()) {
currentElement = iterator.next();
if (!isLambdaListKeyword(currentElement)) {
throw new ProgramErrorException("LambdaList &body parameter must only have 1 parameter: " + currentElement);
}
}
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final BodyParameter bodyBinding = new BodyParameter(currentParam, destructuringForm, isSpecial);
return new BodyParseResult(currentElement, bodyBinding);
}
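	/**
	 * Parses &key parameters, each either a symbol or a list of the form (var init-form supplied-p) where var may
	 * itself be a (keyword-name var) list; keyword names are interned in the KEYWORD package when not given.
	 */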
public static KeyParseResult parseKeyBindings(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement,
final boolean isDestructuringAllowed) {
final List<KeyParameter> keyBindings = new ArrayList<>();
if (!iterator.hasNext()) {
return new KeyParseResult(null, keyBindings);
}
LispStruct currentElement;
do {
currentElement = iterator.next();
if (isLambdaListKeyword(currentElement)) {
return new KeyParseResult(currentElement, keyBindings);
}
if (currentElement instanceof SymbolStruct) {
final SymbolStruct currentParam = (SymbolStruct) currentElement;
final KeywordStruct keyName = getKeywordStruct(currentParam.getName());
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final String paramName = currentParam.getName();
final String customSuppliedPName = paramName + "-P-" + System.nanoTime();
final PackageStruct currentParamPackage = currentParam.getSymbolPackage();
final SymbolStruct customSuppliedPCurrent = currentParamPackage.intern(customSuppliedPName).getSymbol();
final boolean isSuppliedPSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(customSuppliedPCurrent));
binding = new Binding(customSuppliedPCurrent, CommonLispSymbols.T);
if (isSuppliedPSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final SuppliedPParameter suppliedPBinding = new SuppliedPParameter(customSuppliedPCurrent, isSuppliedPSpecial);
final KeyParameter keyBinding = new KeyParameter(currentParam, null, NILStruct.INSTANCE, isSpecial, keyName, suppliedPBinding);
keyBindings.add(keyBinding);
} else if (currentElement instanceof ListStruct) {
final ListStruct currentParam = (ListStruct) currentElement;
final long currentParamLength = currentParam.length().toJavaPLong();
if ((currentParamLength < 1) || (currentParamLength > 3)) {
if (isDestructuringAllowed) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
final SymbolStruct varNameCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final SymbolStruct varKeyNameCurrent = getKeywordStruct(varNameCurrent.getName());
final ListStruct destructuringFormList = (ListStruct) currentElement;
final DestructuringLambdaList destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
final String customSuppliedPName = destructuringName + "-P-" + System.nanoTime();
final SymbolStruct customSuppliedPCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(customSuppliedPName).getSymbol();
final SuppliedPParameter suppliedPBinding = new SuppliedPParameter(customSuppliedPCurrent);
final KeyParameter keyBinding = new KeyParameter(varNameCurrent, destructuringForm, NILStruct.INSTANCE, false, varKeyNameCurrent, suppliedPBinding);
keyBindings.add(keyBinding);
} else {
throw new ProgramErrorException("LambdaList &key parameters must have between 1 and 3 parameters: " + currentParam);
}
} else {
final Iterator<LispStruct> currentIterator = currentParam.iterator();
final LispStruct firstInCurrent = currentIterator.next();
final LispStruct secondInCurrent;
if (currentIterator.hasNext()) {
secondInCurrent = currentIterator.next();
} else {
secondInCurrent = NILStruct.INSTANCE;
}
final LispStruct thirdInCurrent;
if (currentIterator.hasNext()) {
thirdInCurrent = currentIterator.next();
} else {
thirdInCurrent = NILStruct.INSTANCE;
}
final SymbolStruct varNameCurrent;
final SymbolStruct varKeyNameCurrent;
DestructuringLambdaList destructuringForm = null;
if (firstInCurrent instanceof SymbolStruct) {
varNameCurrent = (SymbolStruct) firstInCurrent;
varKeyNameCurrent = getKeywordStruct(varNameCurrent.getName());
} else if (firstInCurrent instanceof ListStruct) {
final ListStruct currentVar = (ListStruct) firstInCurrent;
final long currentVarLength = currentVar.length().toJavaPLong();
if (currentVarLength == 2) {
final LispStruct firstInCurrentVar = currentVar.car();
if (firstInCurrentVar instanceof SymbolStruct) {
varKeyNameCurrent = (SymbolStruct) firstInCurrentVar;
} else {
throw new ProgramErrorException("LambdaList &key var name list key-name parameters must be a symbol: " + firstInCurrentVar);
}
final LispStruct secondInCurrentVar = ((ListStruct) currentVar.cdr()).car();
if (!(secondInCurrentVar instanceof SymbolStruct)) {
throw new ProgramErrorException("LambdaList &key var name list name parameters must be a symbol: " + secondInCurrentVar);
}
varNameCurrent = (SymbolStruct) secondInCurrentVar;
} else {
if (isDestructuringAllowed) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
varNameCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
varKeyNameCurrent = getKeywordStruct(varNameCurrent.getName());
final ListStruct destructuringFormList = (ListStruct) currentElement;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList &key var name list parameters must have 2 parameters: " + currentVar);
}
}
} else {
throw new ProgramErrorException("LambdaList &key var name parameters must be a symbol or a list: " + firstInCurrent);
}
LispStruct initForm = NILStruct.INSTANCE;
if (!secondInCurrent.eq(NILStruct.INSTANCE)) {
initForm = secondInCurrent;
}
final LispStruct parameterValueInitForm = FormAnalyzer.analyze(initForm, environment);
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(varNameCurrent));
Binding binding = new Binding(varNameCurrent, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final SuppliedPParameter suppliedPBinding;
if (thirdInCurrent.eq(NILStruct.INSTANCE)) {
final String paramName = varNameCurrent.getName();
final String customSuppliedPName = paramName + "-P-" + System.nanoTime();
final PackageStruct currentParamPackage = varNameCurrent.getSymbolPackage();
final SymbolStruct customSuppliedPCurrent = currentParamPackage.intern(customSuppliedPName).getSymbol();
final boolean isSuppliedPSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(customSuppliedPCurrent));
binding = new Binding(customSuppliedPCurrent, CommonLispSymbols.T);
if (isSuppliedPSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
suppliedPBinding = new SuppliedPParameter(customSuppliedPCurrent, isSuppliedPSpecial);
} else {
if (!(thirdInCurrent instanceof SymbolStruct)) {
throw new ProgramErrorException("LambdaList &key supplied-p parameters must be a symbol: " + thirdInCurrent);
}
final SymbolStruct suppliedPCurrent = (SymbolStruct) thirdInCurrent;
final boolean isSuppliedPSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(suppliedPCurrent));
binding = new Binding(suppliedPCurrent, CommonLispSymbols.T);
if (isSuppliedPSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
suppliedPBinding = new SuppliedPParameter(suppliedPCurrent, isSuppliedPSpecial);
}
final KeyParameter keyBinding = new KeyParameter(varNameCurrent, destructuringForm, parameterValueInitForm, isSpecial, varKeyNameCurrent, suppliedPBinding);
keyBindings.add(keyBinding);
}
} else {
throw new ProgramErrorException("LambdaList &key parameters must be a symbol or a list: " + currentElement);
}
} while (iterator.hasNext());
return new KeyParseResult(currentElement, keyBindings);
}
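	/**
	 * Parses &aux parameters, each either a symbol or a (var init-form) list; init forms are analyzed in the
	 * provided environment.
	 */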
public static AuxParseResult parseAuxBindings(final Environment environment,
final Iterator<LispStruct> iterator,
final DeclareStruct declareElement,
final boolean isDestructuringAllowed) {
final List<AuxParameter> auxBindings = new ArrayList<>();
if (!iterator.hasNext()) {
return new AuxParseResult(null, auxBindings);
}
LispStruct currentElement;
do {
currentElement = iterator.next();
if (isLambdaListKeyword(currentElement)) {
return new AuxParseResult(currentElement, auxBindings);
}
if (currentElement instanceof SymbolStruct) {
final SymbolStruct currentParam = (SymbolStruct) currentElement;
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(currentParam));
final Binding binding = new Binding(currentParam, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final AuxParameter auxBinding = new AuxParameter(currentParam, null, NILStruct.INSTANCE, isSpecial);
auxBindings.add(auxBinding);
} else if (currentElement instanceof ListStruct) {
final ListStruct currentParam = (ListStruct) currentElement;
final long currentParamLength = currentParam.length().toJavaPLong();
if ((currentParamLength < 1) || (currentParamLength > 2)) {
if (isDestructuringAllowed) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
final SymbolStruct varNameCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) currentElement;
final DestructuringLambdaList destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
final AuxParameter auxBinding = new AuxParameter(varNameCurrent, destructuringForm, NILStruct.INSTANCE);
auxBindings.add(auxBinding);
} else {
throw new ProgramErrorException("LambdaList &aux parameters must have between 1 and 2 parameters: " + currentParam);
}
} else {
final Iterator<LispStruct> currentIterator = currentParam.iterator();
final LispStruct firstInCurrent = currentIterator.next();
final LispStruct secondInCurrent;
if (currentIterator.hasNext()) {
secondInCurrent = currentIterator.next();
} else {
secondInCurrent = NILStruct.INSTANCE;
}
final SymbolStruct varNameCurrent;
DestructuringLambdaList destructuringForm = null;
if (firstInCurrent instanceof SymbolStruct) {
varNameCurrent = (SymbolStruct) firstInCurrent;
} else {
if (isDestructuringAllowed) {
if (firstInCurrent instanceof ListStruct) {
final String destructuringName = "DestructuringSymbolName-" + System.nanoTime();
varNameCurrent = GlobalPackageStruct.COMMON_LISP_USER.intern(destructuringName).getSymbol();
final ListStruct destructuringFormList = (ListStruct) firstInCurrent;
destructuringForm = DestructuringLambdaListParser.parseDestructuringLambdaList(environment, destructuringFormList, declareElement);
} else {
throw new ProgramErrorException("LambdaList &aux var name parameter must be a symbol or a destructuring list: " + firstInCurrent);
}
} else {
throw new ProgramErrorException("LambdaList &aux var name parameters must be a symbol: " + firstInCurrent);
}
}
LispStruct initForm = NILStruct.INSTANCE;
if (!secondInCurrent.eq(NILStruct.INSTANCE)) {
initForm = secondInCurrent;
}
final LispStruct parameterValueInitForm = FormAnalyzer.analyze(initForm, environment);
final boolean isSpecial = declareElement.getSpecialDeclarations()
.stream()
.map(SpecialDeclarationStruct::getVar)
.anyMatch(Predicate.isEqual(varNameCurrent));
final Binding binding = new Binding(varNameCurrent, CommonLispSymbols.T);
if (isSpecial) {
environment.addDynamicBinding(binding);
} else {
environment.addLexicalBinding(binding);
}
final AuxParameter auxBinding = new AuxParameter(varNameCurrent, destructuringForm, parameterValueInitForm, isSpecial);
auxBindings.add(auxBinding);
}
} else {
throw new ProgramErrorException("LambdaList &aux parameters must be a symbol or a list: " + currentElement);
}
} while (iterator.hasNext());
return new AuxParseResult(currentElement, auxBindings);
}
private static boolean isLambdaListKeyword(final LispStruct lispStruct) {
return lispStruct.eq(CompilerConstants.AUX)
|| lispStruct.eq(CompilerConstants.ALLOW_OTHER_KEYS)
|| lispStruct.eq(CompilerConstants.KEY)
|| lispStruct.eq(CompilerConstants.OPTIONAL)
|| lispStruct.eq(CompilerConstants.REST)
|| lispStruct.eq(CompilerConstants.WHOLE)
|| lispStruct.eq(CompilerConstants.ENVIRONMENT)
|| lispStruct.eq(CompilerConstants.BODY);
}
private static KeywordStruct getKeywordStruct(final String symbolName) {
final PackageSymbolStruct symbol = GlobalPackageStruct.KEYWORD.findSymbol(symbolName);
if (symbol.notFound()) {
return KeywordStruct.toLispKeyword(symbolName);
}
		// NOTE: This should be a safe cast because we're finding the symbol in the Keyword Package, which only
		// contains symbols of this type.
return (KeywordStruct) symbol.getSymbol();
}
}<|fim▁end|>
|
optionalBindings.add(optionalBinding);
} else {
|
<|file_name|>tables.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|>#
from horizon import tables
class LogManagementTable(tables.DataTable):
class Meta(object):
table_actions = ()
row_actions = ()<|fim▁end|>
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
<|file_name|>packageResource.spec.js<|end_file_name|><|fim▁begin|>require( "../setup" );
var packageResource = require( "../../resource/package/resource.js" );
describe( "Package Resource", function() {
var server = { checkForNew: _.noop };
describe( "when getting new package callback", function() {
describe( "with matching package", function() {
var config, serverMock, result;
before( function() {
config = {
package: {
project: "imatch"<|fim▁hole|> serverMock.expects( "checkForNew" ).once();
var envelope = { data: { project: "imatch" } };
var handler = packageResource( {}, config, server );
result = handler.actions.new.handle( envelope );
} );
it( "should call checkForNew", function() {
serverMock.verify();
} );
it( "should result in a status 200", function() {
result.should.eql( { status: 200 } );
} );
} );
describe( "with mis-matched package", function() {
var config, serverMock, result;
before( function() {
config = {
package: {
project: "lol-aint-no-such"
}
};
serverMock = sinon.mock( server );
serverMock.expects( "checkForNew" ).never();
var envelope = { data: { project: "imatch" } };
var handler = packageResource( {}, config, server );
result = handler.actions.new.handle( envelope );
} );
it( "should not call checkForNew", function() {
serverMock.verify();
} );
it( "should result in a status 200", function() {
result.should.eql( { status: 200 } );
} );
} );
} );
} );<|fim▁end|>
|
}
};
serverMock = sinon.mock( server );
|
<|file_name|>FieldMappingEntryTests.java<|end_file_name|><|fim▁begin|>package ar.wildstyle;
import java.lang.reflect.Field;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import ar.wildstyle.test.BaseTest;
import ar.wildstyle.test.ExamplePojo;
import ar.wildstyle.valuegenerator.IntegerValueGenerator;
import ar.wildstyle.valuegenerator.StringValueGenerator;
import ar.wildstyle.valuegenerator.ValueGenerator;
/**
* {@code FieldMappingEntryTests} contains tests for the {@link FieldMappingEntry} class.
*
* @author Adam Rosini
*/
public class FieldMappingEntryTests extends BaseTest {
/**
* Initializes shared test objects.
*/
@Before
public void initialize() throws Exception {
this.field = ExamplePojo.class.getDeclaredField(ExamplePojo.EXAMPLE_PRIVATE_STRING_FIELD_NAME);
this.valueGenerator = new StringValueGenerator();
this.value = "stringValue";
}
/**
* Test for creating a valid {@link FieldMappingEntry} using a {@link ValueGenerator}.
*/
@Test
public void fieldMappingEntryValueGenerator() {
final FieldMappingEntry<String> fieldMappingEntry = new FieldMappingEntry<String>(this.field, this.valueGenerator);
Assert.assertEquals(this.field, fieldMappingEntry.getField());
Assert.assertNotNull(fieldMappingEntry.getOrGenerateValue());
}
/**
* Test for creating a valid {@link FieldMappingEntry} using a value.
*/
@Test
public void fieldMappingEntryValue() {
final FieldMappingEntry<String> fieldMappingEntry = new FieldMappingEntry<String>(this.field, this.value);
Assert.assertEquals(this.field, fieldMappingEntry.getField());
Assert.assertEquals(fieldMappingEntry.getOrGenerateValue(), this.value);
}
/**
* Test for creating a valid {@link FieldMappingEntry} using a {@code null} value.
*/
@Test
public void fieldMappingEntryNullValue() {
this.value = null;
<|fim▁hole|> Assert.assertEquals(this.field, fieldMappingEntry.getField());
Assert.assertNull(fieldMappingEntry.getOrGenerateValue());
}
/**
* Test for attempting to create a {@link FieldMappingEntry} (with a value generator) using a null field parameter.
*/
@Test
public void fieldMappingEntryValueGeneratorNullField() {
this.expectedException.expect(AssertionError.class);
this.field = null;
new FieldMappingEntry<String>(this.field, this.valueGenerator);
}
/**
* Test for attempting to create a {@link FieldMappingEntry} (with a value) using a null field parameter.
*/
@Test
public void fieldMappingEntryValueNullField() {
this.expectedException.expect(AssertionError.class);
this.field = null;
new FieldMappingEntry<String>(this.field, this.value);
}
/**
* Test for attempting to create a {@link FieldMappingEntry} using a null value generator parameter.
*/
@Test
public void fieldMappingEntryNullValueGenerator() {
this.expectedException.expect(AssertionError.class);
this.valueGenerator = null;
new FieldMappingEntry<String>(this.field, this.valueGenerator);
}
/**
* Test for attempting to create a {@link FieldMappingEntry} where the value generator parameter is not compatible with the field
* parameter.
*/
@Test
public void fieldMappingEntryIncompatibleFieldAndValueGenerator() {
this.expectedException.expect(AssertionError.class);
new FieldMappingEntry<Integer>(this.field, new IntegerValueGenerator());
}
/**
* Test for attempting to create a {@link FieldMappingEntry} where the value parameter is not compatible with the field parameter.
*/
@Test
public void fieldMappingEntryIncompatibleFieldAndValue() {
this.expectedException.expect(AssertionError.class);
new FieldMappingEntry<Integer>(this.field, 1);
}
/**
* An example field to use when creating a {@link FieldMappingEntry}.
*/
private Field field;
/**
* An example value generator to use when creating a {@link FieldMappingEntry}.
*/
private ValueGenerator<String> valueGenerator;
/**
* An example value to use when creating a {@link FieldMappingEntry}.
*/
private String value;
}<|fim▁end|>
|
final FieldMappingEntry<String> fieldMappingEntry = new FieldMappingEntry<String>(this.field, this.value);
|
<|file_name|>instr_vpsubw.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn vpsubw_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM4)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 217, 249, 234], OperandSize::Dword)
}
#[test]
fn vpsubw_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledIndexedDisplaced(EBX, ESI, Two, 1265925700, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 201, 249, 164, 115, 68, 126, 116, 75], OperandSize::Dword)
}
#[test]
fn vpsubw_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM3)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 225, 249, 228], OperandSize::Qword)
}
#[test]
fn vpsubw_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM0)), operand3: Some(Indirect(RSI, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 249, 38], OperandSize::Qword)
}
#[test]
fn vpsubw_5() {<|fim▁hole|>#[test]
fn vpsubw_6() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM5)), operand3: Some(IndirectScaledIndexed(EBX, ESI, Four, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 213, 249, 36, 179], OperandSize::Dword)
}
#[test]
fn vpsubw_7() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM4)), operand3: Some(Direct(YMM7)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 221, 249, 223], OperandSize::Qword)
}
#[test]
fn vpsubw_8() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM7)), operand3: Some(IndirectDisplaced(RDX, 4071821, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 197, 249, 146, 141, 33, 62, 0], OperandSize::Qword)
}
#[test]
fn vpsubw_9() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM4)), operand3: Some(Direct(XMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 93, 138, 249, 200], OperandSize::Dword)
}
#[test]
fn vpsubw_10() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM2)), operand3: Some(Indirect(EBX, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 109, 138, 249, 19], OperandSize::Dword)
}
#[test]
fn vpsubw_11() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM12)), operand2: Some(Direct(XMM27)), operand3: Some(Direct(XMM20)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 49, 37, 130, 249, 228], OperandSize::Qword)
}
#[test]
fn vpsubw_12() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(XMM14)), operand2: Some(Direct(XMM4)), operand3: Some(IndirectScaledIndexed(RDI, RBX, Two, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 113, 93, 142, 249, 52, 95], OperandSize::Qword)
}
#[test]
fn vpsubw_13() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM5)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 241, 125, 169, 249, 205], OperandSize::Dword)
}
#[test]
fn vpsubw_14() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM5)), operand3: Some(IndirectScaledIndexedDisplaced(EDI, EDX, Eight, 1344465292, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 85, 170, 249, 140, 215, 140, 233, 34, 80], OperandSize::Dword)
}
#[test]
fn vpsubw_15() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM18)), operand2: Some(Direct(YMM18)), operand3: Some(Direct(YMM29)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 129, 109, 167, 249, 213], OperandSize::Qword)
}
#[test]
fn vpsubw_16() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM15)), operand2: Some(Direct(YMM23)), operand3: Some(IndirectScaledIndexed(RSI, RBX, Four, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 113, 69, 165, 249, 60, 158], OperandSize::Qword)
}
#[test]
fn vpsubw_17() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(ZMM2)), operand2: Some(Direct(ZMM3)), operand3: Some(Direct(ZMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 101, 206, 249, 214], OperandSize::Dword)
}
#[test]
fn vpsubw_18() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM6)), operand3: Some(IndirectScaledDisplaced(EDI, Eight, 696675686, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 77, 207, 249, 12, 253, 102, 109, 134, 41], OperandSize::Dword)
}
#[test]
fn vpsubw_19() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(ZMM9)), operand2: Some(Direct(ZMM26)), operand3: Some(Direct(ZMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 113, 45, 193, 249, 203], OperandSize::Qword)
}
#[test]
fn vpsubw_20() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(ZMM2)), operand2: Some(Direct(ZMM19)), operand3: Some(IndirectScaledIndexed(RCX, RSI, Two, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 101, 199, 249, 20, 113], OperandSize::Qword)
}<|fim▁end|>
|
run_test(&Instruction { mnemonic: Mnemonic::VPSUBW, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 249, 249], OperandSize::Dword)
}
|
<|file_name|>extract.go<|end_file_name|><|fim▁begin|>package source
import (
log "github.com/Sirupsen/logrus"
"github.com/howardplus/lirest/describe"
"github.com/howardplus/lirest/util"
"os"
"strconv"
"strings"
"time"
)
// Extractor returns generic data based on the converter.
// An object that implements the Extractor interface needs to know where to
// get the data, which is then fed to the converter.
type Extractor interface {
Extract() (*ExtractOutput, error)
}
// ExtractOutput is the output of the extracted data
// with json tags
type ExtractOutput struct {
Name string `json:"name"`
Time time.Time `json:"time"`
Data interface{} `json:"data"`
}
// NewExtractor creates a new extractor based on the description.
func NewExtractor(s describe.DescriptionSource, rd describe.DescriptionReadFormat, c Converter, vars map[string]string) (Extractor, error) {
var extractor Extractor
refresh := time.Duration(0)
switch s.Refresh {
case "never":
		// never say never, but 10 days is long enough
refresh = 240 * time.Hour
default:
// something s/m/h
v, err := strconv.Atoi(s.Refresh[:len(s.Refresh)-1])
if err == nil {
if strings.HasSuffix(s.Refresh, "s") {
refresh = time.Duration(v) * time.Second<|fim▁hole|> refresh = time.Duration(v) * time.Minute
} else if strings.HasSuffix(s.Refresh, "h") {
refresh = time.Duration(v) * time.Hour
}
}
case "":
// Did not specify, which implies always refresh
}
switch s.Type {
case "procfs", "sysfs", "sysctl":
extractor = NewGenericExtractor(rd.Path, refresh, c, vars)
case "command":
extractor = NewCommandExtractor(rd.Command, c, vars)
}
// found an extractor, use it
if extractor != nil {
return extractor, nil
}
// return error on default
return nil, util.NewError("Internal error: unknown input type")
}
// GenericExtractor extracts data by reading from a file.
// Use this until it's not enough.
type GenericExtractor struct {
path string
conv Converter
refresh time.Duration
vars map[string]string
}
// NewGenericExtractor creates a GenericExtractor
func NewGenericExtractor(path string, refresh time.Duration, conv Converter, vars map[string]string) *GenericExtractor {
return &GenericExtractor{path: path, refresh: refresh, conv: conv, vars: vars}
}
func (e *GenericExtractor) Extract() (*ExtractOutput, error) {
log.WithFields(log.Fields{
"path": e.path,
"vars": e.vars,
}).Debug("Extract from file system")
// create path from variables
path, err := util.FillVars(e.path, e.vars)
if err != nil {
return nil, util.NewError("Failed to generate path")
}
// ask data from cache
var hash string
if e.refresh != time.Duration(0) {
hash = CacheHash("command" + path)
if data, time, err := Cache(hash); err == nil {
log.WithFields(log.Fields{
"hash": hash,
"path": e.path,
}).Debug("Serve from cache")
return &ExtractOutput{
Name: e.conv.Name(),
Time: time,
Data: data,
}, nil
}
}
// open file from path
f, err := os.Open(path)
if err != nil {
return nil, util.NewError("Failed to open system path")
}
defer f.Close()
// TODO: verify the rw format on this path
// give it to the converter
data, err := e.conv.ConvertStream(f)
if err != nil {
return nil, err
}
// send to cache
if e.refresh != time.Duration(0) {
if err := SendCache(hash, data, e.refresh); err != nil {
// cache error, non-fatal
log.WithFields(log.Fields{
"path": e.path,
}).Debug("Failed to send cache")
}
}
log.WithFields(log.Fields{
"path": e.path,
}).Debug("Convert successful")
return &ExtractOutput{
Name: e.conv.Name(),
Time: time.Now(),
Data: data,
}, nil
}<|fim▁end|>
|
} else if strings.HasSuffix(s.Refresh, "m") {
|
<|file_name|>SSDAC1.C<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2015, Digi International Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/***************************************************************************
ssdac1.c
This sample program is used with Smart Star products, specifically
for 0-10V and +/-10V DAC boards.
!!!Caution this will overwrite the calibration constants set at the
factory.
This program demonstrates how to recalibrate a DAC channel using
two known voltages and defines the two coefficients, gain and offset,
which will be rewritten into the DAC's EEPROM.
Instructions:
Connect a voltage meter to an output channel.
Compile and run this program.
Follow the prompted directions of this program during execution.
***************************************************************************/
#class auto
#define LOCOUNT 400 //gives highest voltage
#define HICOUNT 3695 //gives lowest voltage
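/* Two raw DAC codes near the opposite ends of the output range; the voltages
   measured at these two points are used by anaOutCalib() to derive the gain
   and offset coefficients. */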
void main()
{
auto int slotnum, outputnum, msgcode;
static float voltout, volt1, volt2;
auto char tmpbuf[24];
brdInit();
printf("Please enter DAC board slot position, 0 thru 6....");
do
{
slotnum = getchar();
} while ((slotnum < '0') || (slotnum > '6'));
printf("Slot %d chosen.\n", slotnum-=0x30);
///// configure all outputs to zero volts and enable output
for (outputnum=0; outputnum<=7; outputnum++)
{
if (msgcode = anaOutEERd(ChanAddr(slotnum, outputnum)))
{
printf("Error %d: eeprom unreadable or empty slot; channel %d\n", msgcode,outputnum);
exit(0);
}
else
anaOutVolts(ChanAddr(slotnum, outputnum), 0.0);
}
anaOutEnable();
printf("Please enter an output channel, 0 thru 7....");
do
{
outputnum = getchar();
} while (!((outputnum >= '0') && (outputnum <= '7')));
printf("channel %d chosen.\n", outputnum-=0x30);
/////get voltages from two known raw data
anaOut(ChanAddr(slotnum, outputnum), HICOUNT);
printf("Type first voltage reading (in Volts) from meter and press Enter ");
volt1 = atof(gets(tmpbuf));
anaOut(ChanAddr(slotnum, outputnum), LOCOUNT);
printf("Type second voltage reading (in Volts) from meter and press Enter ");
volt2 = atof(gets(tmpbuf));
if (anaOutCalib(ChanAddr(slotnum, outputnum), HICOUNT, volt1, LOCOUNT, volt2))
printf("Cannot make coefficients\n");
else
{
/////store coefficients into eeprom
while (anaOutEEWr(ChanAddr(slotnum, outputnum)));
printf("Wrote coefficients to eeprom\n");
printf("Read coefficients from eeprom\n");
if (msgcode = anaOutEERd(ChanAddr(slotnum, outputnum)))
{
printf("Error %d: eeprom unreadable or empty slot; channel %d\n", msgcode,outputnum);
<|fim▁hole|>
while (1)
{
printf("\nType a desired voltage (in Volts) ");
voltout = atof(gets(tmpbuf));
printf("Observe voltage on meter.....\n");
anaOutVolts(ChanAddr(slotnum, outputnum), voltout);
}
}
}<|fim▁end|>
|
exit(0);
}
|
<|file_name|>gui_editor.py<|end_file_name|><|fim▁begin|>import math
from pathlib import Path
from tkinter import W, N, E, StringVar, PhotoImage
from tkinter.ttk import Button, Label, LabelFrame
from overrides import overrides
from pyminutiaeviewer.gui_common import NotebookTabBase
from pyminutiaeviewer.minutia import Minutia, MinutiaType
class MinutiaeEditorFrame(NotebookTabBase):
# TODO: I'd like to remove the <minutiae> parameter
def __init__(self, parent, load_fingerprint_func, load_minutiae_func, save_minutiae_file):
super(self.__class__, self).__init__(parent, load_fingerprint_func)
self.root = parent
self.minutiae_count = StringVar()
self._update_minutiae_count()
self.current_minutiae = None
self.load_minutiae_btn = Button(self, text="Load Minutiae", command=load_minutiae_func)
self.load_minutiae_btn.grid(row=1, column=0, sticky=N + W + E)
self.export_minutiae_btn = Button(self, text="Export Minutiae", command=save_minutiae_file)
self.export_minutiae_btn.grid(row=2, column=0, sticky=N + W + E)
self.info_frame = InfoFrame(self, "Info", self.minutiae_count)
self.info_frame.grid(row=3, column=0, padx=4, sticky=N + W + E)
@overrides
def load_fingerprint_image(self, image):
self._update_minutiae_count()
@overrides
def load_minutiae_file(self):
self._update_minutiae_count()
def _update_minutiae_count(self):
self.minutiae_count.set("Minutiae: {}".format(self.root.number_of_minutiae()))
@overrides
def on_canvas_mouse_left_click(self, event):
"""
Adds a new bifurcation at the mouse click.
"""
x, y = event.x, event.y
if not self.root.is_point_in_canvas_image(x, y):
return
self.current_minutiae = ((x, y), MinutiaType.RIDGE_ENDING)
@overrides
def on_canvas_ctrl_mouse_left_click(self, event):
"""
Adds a new ridge ending at the mouse click.
"""
x, y = event.x, event.y
if not self.root.is_point_in_canvas_image(x, y):
return
self.current_minutiae = ((x, y), MinutiaType.BIFURCATION)
@overrides
def on_canvas_mouse_right_click(self, event):
"""
Removes a minutiae close to the mouse click.
"""
x, y = event.x, event.y
if not self.root.is_point_in_canvas_image(x, y):
return
scale_factor = self.root.canvas_image_scale_factor()
x, y = x * scale_factor, y * scale_factor
possible_minutiae = []
for i in range(self.root.number_of_minutiae()):
m = self.root.minutiae[i]
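            # Manhattan distance between the scaled click position and this minutia.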
dist = abs(m.x - x) + abs(m.y - y)
if dist < 10:
possible_minutiae.append((dist, i))
# Sort ascending, in-place.
possible_minutiae.sort(key=lambda tup: tup[0])
if len(possible_minutiae) == 0:
return
else:
del self.root.minutiae[possible_minutiae[0][1]]
self.root.draw_minutiae()
self._update_minutiae_count()
@overrides
def on_canvas_mouse_left_drag(self, event):
"""
Sets the angle of the minutiae being placed.
"""
x, y = event.x, event.y
((sx, sy), minutiae_type) = self.current_minutiae
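        # atan2 measures from the positive x-axis; the +90 offset presumably
        # converts the result to the angle convention Minutia expects.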
angle = math.degrees(math.atan2(y - sy, x - sx)) + 90
minutia = Minutia(round(sx), round(sy), angle, minutiae_type, 1.0)
self.root.draw_single_minutia(minutia)
@overrides
def on_canvas_mouse_left_release(self, event):
"""
Places the minutiae currently being edited.
"""
x, y = event.x, event.y
scale_factor = self.root.canvas_image_scale_factor()
((px, py), minutiae_type) = self.current_minutiae
angle = math.degrees(math.atan2(y - py, x - px)) + 90
self.root.minutiae.append(Minutia(round(px * scale_factor), round(py * scale_factor), angle, minutiae_type, 1.0))
self.current_minutiae = None
self.root.draw_minutiae()
self._update_minutiae_count()
class InfoFrame(LabelFrame):
def __init__(self, parent, title, minutiae_count):
super(self.__class__, self).__init__(parent, text=title)
self.current_number_minutiae_label = Label(self, textvariable=minutiae_count)
self.current_number_minutiae_label.grid(row=0, column=0, sticky=N + W + E)
self.bifurcation_label = Label(self, text="Bifurcation (LMB):")
self.bifurcation_label.grid(row=1, column=0, sticky=W)
self.bifurcation_image = PhotoImage(file=Path(__file__).resolve().parent / 'images' / 'bifurcation.png')
self.bifurcation_image_label = Label(self, image=self.bifurcation_image)
self.bifurcation_image_label.grid(row=2, column=0, sticky=W)
self.ridge_ending_label = Label(self, text="Ridge Ending (CTRL + LMB):")
self.ridge_ending_label.grid(row=3, column=0, sticky=W)
self.ridge_ending_image = PhotoImage(file=Path(__file__).resolve().parent / 'images' / 'ridge_ending.png')<|fim▁hole|><|fim▁end|>
|
self.ridge_ending_image_label = Label(self, image=self.ridge_ending_image)
self.ridge_ending_image_label.grid(row=4, column=0, sticky=W)
|
<|file_name|>test_debug.py<|end_file_name|><|fim▁begin|>import importlib
import inspect
import os
import re
import sys
import tempfile
from io import StringIO
from pathlib import Path
from django.conf.urls import url
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db import DatabaseError, connection
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin, patch_logger
from django.urls import reverse
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.views.debug import (
CLEANSED_SUBSTITUTE, CallableSettingWrapper, ExceptionReporter,
cleanse_setting, technical_500_response,
)
from ..views import (
custom_exception_reporter_filter_view, index_page,
multivalue_dict_key_error, non_sensitive_view, paranoid_view,
sensitive_args_function_caller, sensitive_kwargs_function_caller,
sensitive_method_view, sensitive_view,
)
PY36 = sys.version_info >= (3, 6)
class User:
def __str__(self):
return 'jacob'
class WithoutEmptyPathUrls:
urlpatterns = [url(r'url/$', index_page, name='url')]
class CallableSettingWrapperTests(SimpleTestCase):
""" Unittests for CallableSettingWrapper
"""
def test_repr(self):
class WrappedCallable:
def __repr__(self):
return "repr from the wrapped callable"
def __call__(self):
pass
actual = repr(CallableSettingWrapper(WrappedCallable()))
self.assertEqual(actual, "repr from the wrapped callable")
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class DebugViewTests(LoggingCaptureMixin, SimpleTestCase):
def test_files(self):
response = self.client.get('/raises/')
self.assertEqual(response.status_code, 500)
data = {
'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'),
}
response = self.client.post('/raises/', data)
self.assertContains(response, 'file_data.txt', status_code=500)
self.assertNotContains(response, 'haha', status_code=500)
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
# Ensure no 403.html template exists to test the default case.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
}])
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
# Set up a test 403.html template.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
('django.template.loaders.locmem.Loader', {
'403.html': 'This is a test template for a 403 error ({{ exception }}).',
}),
],
},
}])
def test_403_template(self):
response = self.client.get('/raises403/')
self.assertContains(response, 'test template', status_code=403)
self.assertContains(response, '(Insufficient Permissions).', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_raised_404(self):
response = self.client.get('/views/raises404/')
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
def test_404_not_in_urls(self):
response = self.client.get('/not-in-urls')
self.assertNotContains(response, "Raised by:", status_code=404)
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
@override_settings(ROOT_URLCONF=WithoutEmptyPathUrls)
def test_404_empty_path_not_in_urls(self):
response = self.client.get('/')
self.assertContains(response, "The empty path didn't match any of these.", status_code=404)
def test_technical_404(self):
response = self.client.get('/views/technical404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.technical404", status_code=404)
def test_classbased_technical_404(self):
response = self.client.get('/views/classbased404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.Http404View", status_code=404)
def test_non_l10ned_numeric_ids(self):
"""
Numeric IDs and the line numbers in fancy traceback context blocks shouldn't be localized.
"""
with self.settings(DEBUG=True, USE_L10N=True):
response = self.client.get('/raises500/')
# We look for a HTML fragment of the form
# '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"'
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content)
self.assertIsNotNone(match)
id_repr = match.group('id')
self.assertFalse(
re.search(b'[^c0-9]', id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." % id_repr.decode()
)
def test_template_exceptions(self):
try:
self.client.get(reverse('template_exception'))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertNotEqual(
raising_loc.find("raise Exception('boom')"), -1,
"Failed to find 'raise Exception' in last frame of "
"traceback, instead found: %s" % raising_loc
)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile:
tempdir = os.path.dirname(tmpfile.name)
template_path = os.path.join(tempdir, template_name)
with override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [tempdir],
}]):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2)
# Assert as HTML.
self.assertContains(
response,
'<li><code>django.template.loaders.filesystem.Loader</code>: '
'%s (Source does not exist)</li>' % os.path.join(tempdir, 'notfound.html'),
status_code=500,
html=True,
)
def test_no_template_source_loaders(self):
"""
Make sure if you don't specify a template, the debug view doesn't blow up.
"""
with self.assertRaises(TemplateDoesNotExist):
self.client.get('/render_no_template/')
@override_settings(ROOT_URLCONF='view_tests.default_urls')
def test_default_urlconf_template(self):
"""
Make sure that the default URLconf template is shown instead
of the technical 404 page, if the user has not altered their
URLconf yet.
"""
response = self.client.get('/')
self.assertContains(
response,
"<h2>Congratulations on your first Django-powered page.</h2>"
)
@override_settings(ROOT_URLCONF='view_tests.regression_21530_urls')
def test_regression_21530(self):
"""
Regression test for bug #21530.
If the admin app include is replaced with exactly one url
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get('/')
self.assertContains(
response,
"Page not found <span>(404)</span>",
status_code=404
)
class DebugViewQueriesAllowedTests(SimpleTestCase):
# May need a query to initialize MySQL connection
allow_database_queries = True
def test_handle_db_exception(self):
"""
Ensure the debug view works when a database exception is raised by
performing an invalid query and passing the exception to the debug view.
"""
with connection.cursor() as cursor:
try:
cursor.execute('INVALID SQL')
except DatabaseError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get('/'), *exc_info)
self.assertContains(response, 'OperationalError at /', status_code=500)
@override_settings(
DEBUG=True,
ROOT_URLCONF='view_tests.urls',
# No template directories are configured, so no templates will be found.
TEMPLATES=[{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
}],
)
class NonDjangoTemplatesDebugViewTests(SimpleTestCase):
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
with patch_logger('django.security.SuspiciousOperation', 'error'):
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_template_not_found_error(self):
# Raises a TemplateDoesNotExist exception and shows the debug view.
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"})
response = self.client.get(url)
self.assertContains(response, '<div class="context" id="', status_code=500)
class ExceptionReporterTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<p>jacob</p>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
self.assertIn('<p>No POST data</p>', html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_eol_support(self):
"""The ExceptionReporter supports Unix, Windows and Macintosh EOL markers"""
LINES = ['print %d' % i for i in range(1, 6)]
reporter = ExceptionReporter(None, None, None, None)
for newline in ['\n', '\r\n', '\r']:
fd, filename = tempfile.mkstemp(text=False)
os.write(fd, force_bytes(newline.join(LINES) + newline))
os.close(fd)
try:
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:])
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_reporting_of_nested_exceptions(self):
request = self.rf.get('/test_view/')
try:
try:
raise AttributeError('Top level')
except AttributeError as explicit:
try:
raise ValueError('Second exception') from explicit
except ValueError:
raise IndexError('Final exception')
except Exception:
# Custom exception handler, just pass it into ExceptionReporter
exc_type, exc_value, tb = sys.exc_info()
explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:'
implicit_exc = 'During handling of the above exception ({0}), another exception occurred:'
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
# Both messages are twice on page -- one rendered as html,
# one as plain text (for pastebin)
self.assertEqual(2, html.count(explicit_exc.format("Top level")))
self.assertEqual(2, html.count(implicit_exc.format("Second exception")))
text = reporter.get_traceback_text()
self.assertIn(explicit_exc.format("Top level"), text)
self.assertIn(implicit_exc.format("Second exception"), text)
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_non_utf8_values_handling(self):
"Non-UTF-8 exceptions/values should not make the output generation choke."
try:
class NonUtf8Output(Exception):
def __repr__(self):
return b'EXC\xe9EXC'
somevar = b'VAL\xe9VAL' # NOQA
raise NonUtf8Output()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('VAL\\xe9VAL', html)
self.assertIn('EXC\\xe9EXC', html)
def test_unprintable_values_handling(self):
"Unprintable values should not make the output generation choke."
try:
class OomOutput:
def __repr__(self):
raise MemoryError('OOM')
oomvalue = OomOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<td class="code"><pre>Error in formatting', html)
def test_too_large_values_handling(self):
"Large values should not create a large HTML."
large = 256 * 1024
repr_of_str_adds = len(repr(''))
try:
class LargeOutput:
def __repr__(self):
return repr('A' * large)
largevalue = LargeOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertEqual(len(html) // 1024 // 128, 0) # still fit in 128Kb
self.assertIn('<trimmed %d bytes string>' % (large + repr_of_str_adds,), html)
def test_unfrozen_importlib(self):
"""
importlib is not a frozen app, but its loader thinks it's frozen which
results in an ImportError. Refs #21443.
"""
try:
request = self.rf.get('/test_view/')
importlib.import_module('abc.def.invalid.name')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>%sError at /test_view/</h1>' % ('ModuleNotFound' if PY36 else 'Import'), html)
def test_ignore_traceback_evaluation_exceptions(self):
"""
Don't trip over exceptions generated by crafted objects when
evaluating them while cleansing (#24455).
"""
class BrokenEvaluation(Exception):
pass
def broken_setup():
raise BrokenEvaluation
request = self.rf.get('/test_view/')
broken_lazy = SimpleLazyObject(broken_setup)
try:
bool(broken_lazy)
except BrokenEvaluation:
exc_type, exc_value, tb = sys.exc_info()
self.assertIn(
"BrokenEvaluation",
ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(),
"Evaluation exception reason not mentioned in traceback"
)
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn("http://evil.com/", html)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
value = '<td>items</td><td class="code"><pre>'Oops'</pre></td>'
# GET
request = self.rf.get('/test_view/?items=Oops')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# POST
request = self.rf.post('/test_view/', data={'items': 'Oops'})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# FILES
fp = StringIO('filecontent')
request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre><InMemoryUploadedFile: '
'items (application/octet-stream)></pre></td>',
html
)
# COOKIES
rf = RequestFactory()
rf.cookies['items'] = 'Oops'
request = rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML('<td>items</td><td class="code"><pre>'Oops'</pre></td>', html)
def test_exception_fetching_user(self):
"""
The error page can be rendered if the current user can't be retrieved
(such as when the database is unavailable).
"""
class ExceptionUser:
def __str__(self):
raise Exception()
request = self.rf.get('/test_view/')
request.user = ExceptionUser()
try:
raise ValueError('Oops')
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<p>[unable to retrieve the current user]</p>', html)
text = reporter.get_traceback_text()
self.assertIn('USER: [unable to retrieve the current user]', text)
class PlainTextReportTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError at /test_view/', text)
self.assertIn("Can't find my keys", text)
self.assertIn('Request Method:', text)
self.assertIn('Request URL:', text)
self.assertIn('USER: jacob', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request information:', text)
self.assertNotIn('Request data not supplied', text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError', text)
self.assertIn("Can't find my keys", text)
self.assertNotIn('Request Method:', text)
self.assertNotIn('Request URL:', text)
self.assertNotIn('USER:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request data not supplied', text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(DEBUG=True)
def test_template_exception(self):
request = self.rf.get('/test_view/')
try:
render(request, 'debug/template_error.html')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
templ_path = Path(Path(__file__).parent.parent, 'templates', 'debug', 'template_error.html')
self.assertIn(
'Template error:\n'
'In template %(path)s, error at line 2\n'
' \'cycle\' tag requires at least two arguments\n'
' 1 : Template with error:\n'
' 2 : {%% cycle %%} \n'
' 3 : ' % {'path': templ_path},
text
)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
# GET
request = self.rf.get('/test_view/?items=Oops')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# POST
request = self.rf.post('/test_view/', data={'items': 'Oops'})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# FILES
fp = StringIO('filecontent')
request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn('items = <InMemoryUploadedFile:', text)
# COOKIES
rf = RequestFactory()
rf.cookies['items'] = 'Oops'
request = rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("http://evil.com/", text)
class ExceptionReportTestMixin:
# Mixin used in the ExceptionReporterFilterTests and
# AjaxResponseExceptionReporterFilter tests below
breakfast_data = {'sausage-key': 'sausage-value',
'baked-beans-key': 'baked-beans-value',
'hash-brown-key': 'hash-brown-value',
'bacon-key': 'bacon-value'}
def verify_unsafe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that potentially sensitive info are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that certain sensitive info are not displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, 'baked-beans-value', status_code=500)
self.assertContains(response, 'hash-brown-value', status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, 'sausage-value', status_code=500)
self.assertNotContains(response, 'bacon-value', status_code=500)
def verify_paranoid_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that no variables or POST parameters are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertNotContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
Asserts that potentially sensitive info are displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
Asserts that certain sensitive info are not displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertNotIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn('baked-beans-value', body_plain)
self.assertIn('hash-brown-value', body_plain)
self.assertIn('baked-beans-value', body_html)
self.assertIn('hash-brown-value', body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', body_plain)
self.assertNotIn('bacon-value', body_plain)
self.assertNotIn('sausage-value', body_html)
self.assertNotIn('bacon-value', body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body = str(email.body)
self.assertNotIn('cooked_eggs', body)
self.assertNotIn('scrambled', body)<|fim▁hole|> self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
@override_settings(ROOT_URLCONF='view_tests.urls')
class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Sensitive information can be filtered out of error reports (#14614).
"""
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
Everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
No POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Sensitive POST parameters cannot be seen in the
error reports if request.POST['nonexistent_key'] throws an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
The sensitive_variables decorator works with object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_method_view, check_for_POST_params=False)
self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_method_view, check_for_POST_params=False)
self.verify_safe_email(sensitive_method_view, check_for_POST_params=False)
def test_sensitive_function_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as arguments to the decorated
function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False)
def test_sensitive_function_keyword_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as keyword arguments to the
decorated function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_callable_settings_forbidding_to_set_attributes(self):
"""
Callable settings which forbid to set attributes should not break
the debug page (#23070).
"""
class CallableSettingWithSlots:
__slots__ = []
def __call__(self):
return "This should not be displayed"
with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get('/raises500/')
self.assertContains(response, 'FOOBAR', status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
'recursive': {setting: "should not be displayed"},
}
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
class AjaxResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Sensitive information can be filtered out of error reports.
Here we specifically test the plain text 500 debug-only error page served
when it has been detected the request was sent by JS code. We don't check
for (non)existence of frames vars in the traceback information section of
the response content because we don't include them in these error pages.
Refs #14614.
"""
rf = RequestFactory(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
def test_non_sensitive_request(self):
"""
Request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
No POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False)
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
def test_ajax_response_encoding(self):
response = self.client.get('/raises500/', HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response['Content-Type'], 'text/plain; charset=utf-8')
class HelperFunctionTests(SimpleTestCase):
def test_cleanse_setting_basic(self):
self.assertEqual(cleanse_setting('TEST', 'TEST'), 'TEST')
self.assertEqual(cleanse_setting('PASSWORD', 'super_secret'), CLEANSED_SUBSTITUTE)
def test_cleanse_setting_ignore_case(self):
self.assertEqual(cleanse_setting('password', 'super_secret'), CLEANSED_SUBSTITUTE)
def test_cleanse_setting_recurses_in_dictionary(self):
initial = {'login': 'cooper', 'password': 'secret'}
expected = {'login': 'cooper', 'password': CLEANSED_SUBSTITUTE}
self.assertEqual(cleanse_setting('SETTING_NAME', initial), expected)<|fim▁end|>
|
self.assertNotIn('sauce', body)
self.assertNotIn('worcestershire', body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
|
<|file_name|>notification_log.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.desk.doctype.notification_settings.notification_settings import (is_notifications_enabled, is_email_notifications_enabled_for_type, set_seen_value)
class NotificationLog(Document):
def after_insert(self):
frappe.publish_realtime('notification', after_commit=True, user=self.for_user)
set_notifications_as_unseen(self.for_user)
if is_email_notifications_enabled_for_type(self.for_user, self.type):
send_notification_email(self)
def get_permission_query_conditions(for_user):
if not for_user:
for_user = frappe.session.user
if for_user == 'Administrator':
return
return '''(`tabNotification Log`.for_user = '{user}')'''.format(user=for_user)
def get_title(doctype, docname, title_field=None):
if not title_field:
title_field = frappe.get_meta(doctype).get_title_field()
title = docname if title_field == "name" else \
frappe.db.get_value(doctype, docname, title_field)
return title
def get_title_html(title):
return '<b class="subject-title">{0}</b>'.format(title)
def enqueue_create_notification(users, doc):
'''
During installation of a new site, enqueue_create_notification tries to connect to Redis.
This breaks new site creation if Redis server is not running.
We do not need any notifications in fresh installation<|fim▁hole|> '''
if frappe.flags.in_install:
return
doc = frappe._dict(doc)
if isinstance(users, str):
users = [user.strip() for user in users.split(',') if user.strip()]
users = list(set(users))
frappe.enqueue(
'frappe.desk.doctype.notification_log.notification_log.make_notification_logs',
doc=doc,
users=users,
now=frappe.flags.in_test
)
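# Illustrative call (sketch; the dict keys mirror the fields read elsewhere in
# this module, while the recipients and document values are made-up examples):
#     enqueue_create_notification(
#         '[email protected], [email protected]',
#         {
#             'type': 'Mention',
#             'document_type': 'ToDo',
#             'document_name': 'TODO-0001',
#             'subject': 'You were mentioned in a comment',
#             'from_user': frappe.session.user,
#         })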
def make_notification_logs(doc, users):
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
for user in users:
if frappe.db.exists('User', {"email": user, "enabled": 1}):
if is_notifications_enabled(user):
if doc.type == 'Energy Point' and not is_energy_point_enabled():
return
_doc = frappe.new_doc('Notification Log')
_doc.update(doc)
_doc.for_user = user
if _doc.for_user != _doc.from_user or doc.type == 'Energy Point' or doc.type == 'Alert':
_doc.insert(ignore_permissions=True)
def send_notification_email(doc):
if doc.type == 'Energy Point' and doc.email_content is None:
return
from frappe.utils import get_url_to_form, strip_html
doc_link = get_url_to_form(doc.document_type, doc.document_name)
header = get_email_header(doc)
email_subject = strip_html(doc.subject)
frappe.sendmail(
recipients = doc.for_user,
subject = email_subject,
template = "new_notification",
args = {
'body_content': doc.subject,
'description': doc.email_content,
'document_type': doc.document_type,
'document_name': doc.document_name,
'doc_link': doc_link
},
header = [header, 'orange'],
now=frappe.flags.in_test
)
def get_email_header(doc):
docname = doc.document_name
header_map = {
'Default': _('New Notification'),
'Mention': _('New Mention on {0}').format(docname),
'Assignment': _('Assignment Update on {0}').format(docname),
'Share': _('New Document Shared {0}').format(docname),
'Energy Point': _('Energy Point Update on {0}').format(docname),
}
return header_map[doc.type or 'Default']
@frappe.whitelist()
def mark_all_as_read():
unread_docs_list = frappe.db.get_all('Notification Log', filters = {'read': 0, 'for_user': frappe.session.user})
unread_docnames = [doc.name for doc in unread_docs_list]
if unread_docnames:
filters = {'name': ['in', unread_docnames]}
frappe.db.set_value('Notification Log', filters, 'read', 1, update_modified=False)
@frappe.whitelist()
def mark_as_read(docname):
if docname:
frappe.db.set_value('Notification Log', docname, 'read', 1, update_modified=False)
@frappe.whitelist()
def trigger_indicator_hide():
frappe.publish_realtime('indicator_hide', user=frappe.session.user)
def set_notifications_as_unseen(user):
try:
frappe.db.set_value('Notification Settings', user, 'seen', 0)
except frappe.DoesNotExistError:
return<|fim▁end|>
| |
<|file_name|>1395_count-number-of-teams.py<|end_file_name|><|fim▁begin|># 1395. Count Number of Teams - LeetCode
# https://leetcode.com/problems/count-number-of-teams/
from typing import List
# Even the brute-force search gets accepted (AC)
# There is actually an algorithm that counts in two passes instead
<|fim▁hole|>class Solution:
def numTeams(self, rating: List[int]) -> int:
if len(rating) <= 2:
return 0
count = 0
for i in range(len(rating)):
for j in range(i+1,len(rating)):
for k in range(j+1,len(rating)):
if rating[i] < rating[j] and rating[j] < rating[k]:
count += 1
if rating[i] > rating[j] and rating[j] > rating[k]:
count += 1
return count
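
    # Sketch of the two-pass counting approach mentioned above: treat each index j
    # as the middle of the triplet and multiply the counts on each side, giving
    # O(n^2) instead of O(n^3). The method name numTeamsFaster is illustrative and
    # not part of the original solution; ratings are assumed distinct, as the
    # problem statement guarantees.
    def numTeamsFaster(self, rating: List[int]) -> int:
        count = 0
        n = len(rating)
        for j in range(n):
            # Elements to the left that are smaller/larger than rating[j].
            left_smaller = sum(1 for i in range(j) if rating[i] < rating[j])
            left_larger = j - left_smaller
            # Elements to the right that are larger/smaller than rating[j].
            right_larger = sum(1 for k in range(j + 1, n) if rating[k] > rating[j])
            right_smaller = n - j - 1 - right_larger
            # Ascending triplets + descending triplets with j in the middle.
            count += left_smaller * right_larger + left_larger * right_smaller
        return count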
# rating = [2,5,3,4,1]
rating = [1,2,3,4]
s = Solution()
ret = s.numTeams(rating)
print(ret)<|fim▁end|>
| |
<|file_name|>Main.java<|end_file_name|><|fim▁begin|>package ch.dritz.remedy2redmine;
import java.io.File;
import java.io.IOException;
import ch.dritz.common.Config;
import ch.dritz.remedy2redmine.modules.SyncModule;
/**
* Main class for Remedy2Redmine
* @author D.Ritz
*/
public class Main
{
private static void usage(String msg)
{
if (msg != null)
System.out.println("ERROR: " + msg);
System.out.println("Remedy2Redmine " + Version.getVersion());
System.out.println("Usage: Remedy2Redmine <config.properties> <command> [<command specific args>]");
System.out.println(" <command> : one of (sync)");
System.out.println(" <mode specific args> for each mode:");
System.out.println(" - sync: none");
System.out.println("OR: Remedy2Redmine -version");
System.exit(1);
}
/**
* main() entry point
* @param args
* @throws Exception
*/
public static void main(String[] args)
throws Exception
{
if (args.length == 1 && "-version".equals(args[0])) {
System.out.println("Remedy2Redmine " + Version.getVersion());
return;
}
if (args.length < 2)
usage("Not enough arguments");
File configFile = new File(args[0]);
String command = args[1];
Config config = new Config();
config.loadFromFile(configFile);
if ("sync".equals(command)) {
File syncConfig = new File(configFile.getParentFile(),
config.getString("sync.config", "sync.properties"));
config.loadFromFile(syncConfig);
SyncModule sync = new SyncModule(config);
try {
sync.start();
} finally {
sync.shutdown();
}
} else {<|fim▁hole|><|fim▁end|>
|
usage("Unknown command");
}
}
}
|
<|file_name|>ProductionStep.py<|end_file_name|><|fim▁begin|>""" Class defining a production step """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
<|fim▁hole|>from DIRAC import S_OK, S_ERROR
class ProductionStep(object):
"""Define the Production Step object"""
def __init__(self, **kwargs):
"""Simple constructor"""
# Default values for transformation step parameters
self.Name = ""
self.Description = "description"
self.LongDescription = "longDescription"
self.Type = "MCSimulation"
self.Plugin = "Standard"
self.AgentType = "Manual"
self.FileMask = ""
#########################################
self.ParentStep = None
self.Inputquery = None
self.Outputquery = None
self.GroupSize = 1
self.Body = "body"
def getAsDict(self):
"""It returns the Step description as a dictionary"""
prodStepDict = {}
prodStepDict["name"] = self.Name
prodStepDict["parentStep"] = []
# check the ParentStep format
if self.ParentStep:
if isinstance(self.ParentStep, list):
prodStepDict["parentStep"] = []
for parentStep in self.ParentStep: # pylint: disable=not-an-iterable
if not parentStep.Name:
return S_ERROR("Parent Step does not exist")
prodStepDict["parentStep"].append(parentStep.Name)
elif isinstance(self.ParentStep, ProductionStep):
if not self.ParentStep.Name:
return S_ERROR("Parent Step does not exist")
prodStepDict["parentStep"] = [self.ParentStep.Name]
else:
return S_ERROR("Invalid Parent Step")
prodStepDict["description"] = self.Description
prodStepDict["longDescription"] = self.LongDescription
prodStepDict["stepType"] = self.Type
prodStepDict["plugin"] = self.Plugin
prodStepDict["agentType"] = self.AgentType
prodStepDict["fileMask"] = self.FileMask
# Optional fields
prodStepDict["inputquery"] = json.dumps(self.Inputquery)
prodStepDict["outputquery"] = json.dumps(self.Outputquery)
prodStepDict["groupsize"] = self.GroupSize
prodStepDict["body"] = json.dumps(self.Body)
return S_OK(prodStepDict)<|fim▁end|>
|
import json
|
<|file_name|>AddressTerm.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.mail.search;
import javax.mail.Address;
/**
* Term that compares two addresses.
*
* @version $Rev: 920714 $ $Date: 2010-03-09 07:55:49 +0100 (Di, 09. Mär 2010) $
*/
public abstract class AddressTerm extends SearchTerm {
private static final long serialVersionUID = 2005405551929769980L;
/**
* The address.
*/
protected Address address;
/**
* Constructor taking the address for this term.
* @param address the address
*/<|fim▁hole|>
/**
* Return the address of this term.
*
* @return the address
*/
public Address getAddress() {
return address;
}
/**
* Match to the supplied address.
*
* @param address the address to match with
* @return true if the addresses match
*/
protected boolean match(Address address) {
return this.address.equals(address);
}
public boolean equals(Object other) {
if (this == other) return true;
if (other instanceof AddressTerm == false) return false;
return address.equals(((AddressTerm) other).address);
}
public int hashCode() {
return address.hashCode();
}
}<|fim▁end|>
|
protected AddressTerm(Address address) {
this.address = address;
}
|
<|file_name|>run_lighthouse_tests.py<|end_file_name|><|fim▁begin|># Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script performs lighthouse checks and creates lighthouse reports.
Any callers must pass in the --mode flag (accessibility or performance) and the --shard flag (1 or 2).
"""
from __future__ import annotations
import argparse
import contextlib
import os
import subprocess
import sys
from core.constants import constants
from scripts import build
from scripts import common
from scripts import servers
LIGHTHOUSE_MODE_PERFORMANCE = 'performance'
LIGHTHOUSE_MODE_ACCESSIBILITY = 'accessibility'
SERVER_MODE_PROD = 'prod'
SERVER_MODE_DEV = 'dev'
GOOGLE_APP_ENGINE_PORT = 8181
LIGHTHOUSE_CONFIG_FILENAMES = {
LIGHTHOUSE_MODE_PERFORMANCE: {
'1': '.lighthouserc-1.js',
'2': '.lighthouserc-2.js'
},<|fim▁hole|>}
APP_YAML_FILENAMES = {
SERVER_MODE_PROD: 'app.yaml',
SERVER_MODE_DEV: 'app_dev.yaml'
}
_PARSER = argparse.ArgumentParser(
description="""
Run the script from the oppia root folder:
python -m scripts.run_lighthouse_tests
Note that the root folder MUST be named 'oppia'.
""")
_PARSER.add_argument(
'--mode', help='Sets the mode for the lighthouse tests',
required=True, choices=['accessibility', 'performance'])
_PARSER.add_argument(
'--shard', help='Sets the shard for the lighthouse tests',
required=True, choices=['1', '2'])
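# Illustrative invocation (assumed, mirroring the parser arguments above):
#   python -m scripts.run_lighthouse_tests --mode accessibility --shard 1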
def run_lighthouse_puppeteer_script():
"""Runs puppeteer script to collect dynamic urls."""
puppeteer_path = (
os.path.join('core', 'tests', 'puppeteer', 'lighthouse_setup.js'))
bash_command = [common.NODE_BIN_PATH, puppeteer_path]
process = subprocess.Popen(
bash_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode == 0:
print(stdout)
for line in stdout.split(b'\n'):
# Standard output is in bytes, we need to decode the line to
# print it.
export_url(line.decode('utf-8'))
print('Puppeteer script completed successfully.')
else:
print('Return code: %s' % process.returncode)
print('OUTPUT:')
# Standard output is in bytes, we need to decode the line to
# print it.
print(stdout.decode('utf-8'))
print('ERROR:')
# Error output is in bytes, we need to decode the line to
# print it.
print(stderr.decode('utf-8'))
print('Puppeteer script failed. More details can be found above.')
sys.exit(1)
def run_webpack_compilation():
"""Runs webpack compilation."""
max_tries = 5
webpack_bundles_dir_name = 'webpack_bundles'
for _ in range(max_tries):
try:
with servers.managed_webpack_compiler() as proc:
proc.wait()
except subprocess.CalledProcessError as error:
print(error.output)
sys.exit(error.returncode)
if os.path.isdir(webpack_bundles_dir_name):
break
if not os.path.isdir(webpack_bundles_dir_name):
print('Failed to complete webpack compilation, exiting...')
sys.exit(1)
def export_url(line):
"""Exports the entity ID in the given line to an environment variable, if
the line is a URL.
Args:
line: str. The line to parse and extract the entity ID from. If no
recognizable URL is present, nothing is exported to the
environment.
"""
url_parts = line.split('/')
print('Parsing and exporting entity ID in line: %s' % line)
if 'collection_editor' in line:
os.environ['collection_id'] = url_parts[5]
elif 'create' in line:
os.environ['exploration_id'] = url_parts[4]
elif 'topic_editor' in line:
os.environ['topic_id'] = url_parts[4]
elif 'story_editor' in line:
os.environ['story_id'] = url_parts[4]
elif 'skill_editor' in line:
os.environ['skill_id'] = url_parts[4]
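# Illustrative example (hypothetical URL): a puppeteer output line such as
#   http://localhost:8181/create/AbCdEfGhIjKl
# would cause export_url() to set os.environ['exploration_id'] = 'AbCdEfGhIjKl',
# since 'create' appears in the line and the ID is the 5th '/'-separated part.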
def run_lighthouse_checks(lighthouse_mode, shard):
"""Runs the Lighthouse checks through the Lighthouse config.
Args:
lighthouse_mode: str. Represents whether the lighthouse checks are in
accessibility mode or performance mode.
shard: str. Specifies which shard of the tests should be run.
"""
lhci_path = os.path.join('node_modules', '@lhci', 'cli', 'src', 'cli.js')
# The max-old-space-size is a quick fix for node running out of heap memory
# when executing the performance tests: https://stackoverflow.com/a/59572966
bash_command = [
common.NODE_BIN_PATH, lhci_path, 'autorun',
'--config=%s' % LIGHTHOUSE_CONFIG_FILENAMES[lighthouse_mode][shard],
'--max-old-space-size=4096'
]
process = subprocess.Popen(
bash_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode == 0:
print('Lighthouse checks completed successfully.')
else:
print('Return code: %s' % process.returncode)
print('OUTPUT:')
# Standard output is in bytes, we need to decode the line to
# print it.
print(stdout.decode('utf-8'))
print('ERROR:')
# Error output is in bytes, we need to decode the line to
# print it.
print(stderr.decode('utf-8'))
print('Lighthouse checks failed. More details can be found above.')
sys.exit(1)
def main(args=None):
"""Runs lighthouse checks and deletes reports."""
parsed_args = _PARSER.parse_args(args=args)
if parsed_args.mode == LIGHTHOUSE_MODE_ACCESSIBILITY:
lighthouse_mode = LIGHTHOUSE_MODE_ACCESSIBILITY
server_mode = SERVER_MODE_DEV
elif parsed_args.mode == LIGHTHOUSE_MODE_PERFORMANCE:
lighthouse_mode = LIGHTHOUSE_MODE_PERFORMANCE
server_mode = SERVER_MODE_PROD
else:
raise Exception(
'Invalid parameter passed in: \'%s\', please choose'
'from \'accessibility\' or \'performance\'' % parsed_args.mode)
if lighthouse_mode == LIGHTHOUSE_MODE_PERFORMANCE:
print('Building files in production mode.')
build.main(args=['--prod_env'])
elif lighthouse_mode == LIGHTHOUSE_MODE_ACCESSIBILITY:
build.main(args=[])
run_webpack_compilation()
with contextlib.ExitStack() as stack:
stack.enter_context(servers.managed_redis_server())
stack.enter_context(servers.managed_elasticsearch_dev_server())
if constants.EMULATOR_MODE:
stack.enter_context(servers.managed_firebase_auth_emulator())
stack.enter_context(servers.managed_cloud_datastore_emulator())
stack.enter_context(servers.managed_dev_appserver(
APP_YAML_FILENAMES[server_mode],
port=GOOGLE_APP_ENGINE_PORT,
log_level='critical',
skip_sdk_update_check=True))
run_lighthouse_puppeteer_script()
run_lighthouse_checks(lighthouse_mode, parsed_args.shard)
if __name__ == '__main__':
main()<|fim▁end|>
|
LIGHTHOUSE_MODE_ACCESSIBILITY: {
'1': '.lighthouserc-accessibility-1.js',
'2': '.lighthouserc-accessibility-2.js'
}
|
<|file_name|>memo.py<|end_file_name|><|fim▁begin|># -*- coding: ISO-8859-15 -*-
from core.Uusipuu import UusipuuModule
import random, time
class Module(UusipuuModule):
def startup(self):
if 'memo' not in self.config:
self.config['memo'] = {}
def privmsg(self, user, target, msg):
if target != self.channel:
return
pieces = msg.strip().split(' ', 1)
if len(pieces) != 2:
return
cmd = pieces[0].strip()
params = pieces[1].strip()
if cmd == '??':
self.meta_show(user, params)
elif cmd == '?!':
self.meta_searchkey(user, params.strip())
elif cmd == '?#':
self.meta_searchvalue(user, params.strip())
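# Illustrative channel usage (assumed IRC commands, based on the dispatch above):
#   "?? somekey"  -> show the memo stored under "somekey"
#   "?! frag"     -> show a random memo whose key contains "frag"
#   "?# frag"     -> show a random memo whose value contains "frag"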
def cmd_memo(self, user, target, params):
pieces = params.strip().split(' ', 1)
if len(pieces) != 2:
self.chanmsg('Insufficient parameters')
return
cmd = pieces[0].strip()
params = pieces[1].strip()
if cmd == 'add':
self.meta_addmemo(user, params)
elif cmd in ['del', 'delete', 'remove']:
self.meta_delmemo(user, params)
elif cmd == 'show':
self.meta_show(user, params)
elif cmd == 'info':
self.meta_info(user, params)
elif cmd in ['search', 'searchkey', 'sk']:
self.meta_searchkey(user, params.strip())
elif cmd in ['searchvalue', 'sv']:
self.meta_searchvalue(user, params.strip())
def meta_show(self, user, key):
self.do_show(user, key)
def meta_info(self, user, key):
self.do_show(user, key)
self.do_info(user, key)
<|fim▁hole|> nick = user.split('!', 1)[0]
keys = [x for x in self.config['memo'] if x.count(key)]
if not keys:
self.chanmsg('No keys found matching "%s"' % (key))
return
self.do_show(user, random.choice(keys))
def meta_searchvalue(self, user, value):
nick = user.split('!', 1)[0]
keys = [x for x in self.config['memo'] \
if self.config['memo'][x]['value'].count(value)]
if not keys:
self.chanmsg('No values found matching "%s"' % (value))
return
self.do_show(user, random.choice(keys))
def do_show(self, user, key):
nick = user.split('!', 1)[0]
if key not in self.config['memo']:
self.chanmsg('Entry not found (%s)' % key)
return
self.chanmsg('%s: %s' % (key, str(self.config['memo'][key]['value'])))
def do_info(self, user, key):
if key not in self.config['memo']:
return
self.chanmsg('%s created by %s [%s]' % (key,
self.config['memo'][key]['user'],
time.ctime(self.config['memo'][key]['added'])))
def meta_addmemo(self, user, params):
nick = user.split('!', 1)[0]
pieces = params.strip().split(' ', 1)
if len(pieces) < 2:
self.chanmsg('Insufficient parameters')
return
key, value = pieces[0].strip(), pieces[1].strip()
if key in self.config['memo']:
self.chanmsg('%s: An entry by that name already exists' % nick)
return
self.config['memo'][key] = {
'value': value,
'user': user,
'added': int(time.time()),
}
self.save()
self.chanmsg('Memo entry "%s" successfully added' % (str(key)))
def meta_delmemo(self, user, params):
nick = user.split('!', 1)[0]
pieces = params.strip().split(' ', 1)
key = pieces[0].strip()
if key not in self.config['memo']:
self.chanmsg('Entry not found (%s)' % key)
return
del self.config['memo'][key]
self.save()
self.chanmsg('Memo entry "%s" successfully removed' % (key))
# vim: set et sw=4:<|fim▁end|>
|
def meta_searchkey(self, user, key):
|
<|file_name|>common.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import base64
import datetime
import hashlib
import json
import netrc
import os
import re
import socket
import sys
import time
import math
from ..compat import (
compat_cookiejar,
compat_cookies,
compat_etree_fromstring,
compat_getpass,
compat_http_client,
compat_os_name,
compat_str,
compat_urllib_error,
compat_urllib_parse_urlencode,
compat_urllib_request,
compat_urlparse,
)
from ..downloader.f4m import remove_encrypted_media
from ..utils import (
NO_DEFAULT,
age_restricted,
bug_reports_message,
clean_html,
compiled_regex_type,
determine_ext,
error_to_compat_str,
ExtractorError,
fix_xml_ampersands,
float_or_none,
int_or_none,
parse_iso8601,
RegexNotFoundError,
sanitize_filename,
sanitized_Request,
unescapeHTML,
unified_strdate,
unified_timestamp,
url_basename,
xpath_element,
xpath_text,
xpath_with_ns,
determine_protocol,
parse_duration,
mimetype2ext,
update_Request,
update_url_query,
parse_m3u8_attributes,
extract_attributes,
parse_codecs,
)
class InfoExtractor(object):
"""Information Extractor class.
Information extractors are the classes that, given a URL, extract
information about the video (or videos) the URL refers to. This
information includes the real video URL, the video title, author and
others. The information is stored in a dictionary which is then
passed to the YoutubeDL. The YoutubeDL processes this
information possibly downloading the video to the file system, among
other possible outcomes.
The type field determines the type of the result.
By far the most common value (and the default if _type is missing) is
"video", which indicates a single video.
For a video, the dictionaries must include the following fields:
id: Video identifier.
title: Video title, unescaped.
Additionally, it must contain either a formats entry or a url one:
formats: A list of dictionaries for each format available, ordered
from worst to best quality.
Potential fields:
* url Mandatory. The URL of the video file
* ext Will be calculated from URL if missing
* format A human-readable description of the format
("mp4 container with h264/opus").
Calculated from the format_id, width, height.
and format_note fields if missing.
* format_id A short description of the format
("mp4_h264_opus" or "19").
Technically optional, but strongly recommended.
* format_note Additional info about the format
("3D" or "DASH video")
* width Width of the video, if known
* height Height of the video, if known
* resolution Textual description of width and height
* tbr Average bitrate of audio and video in KBit/s
* abr Average audio bitrate in KBit/s
* acodec Name of the audio codec in use
* asr Audio sampling rate in Hertz
* vbr Average video bitrate in KBit/s
* fps Frame rate
* vcodec Name of the video codec in use
* container Name of the container format
* filesize The number of bytes, if known in advance
* filesize_approx An estimate for the number of bytes
* player_url SWF Player URL (used for rtmpdump).
* protocol The protocol that will be used for the actual
download, lower-case.
"http", "https", "rtsp", "rtmp", "rtmpe",
"m3u8", "m3u8_native" or "http_dash_segments".
* preference Order number of this format. If this field is
present and not None, the formats get sorted
by this field, regardless of all other values.
-1 for default (order by other properties),
-2 or smaller for less than default.
< -1000 to hide the format (if there is
another one which is strictly better)
* language Language code, e.g. "de" or "en-US".
* language_preference Is this in the language mentioned in
the URL?
10 if it's what the URL is about,
-1 for default (don't know),
-10 otherwise, other values reserved for now.
* quality Order number of the video quality of this
format, irrespective of the file format.
-1 for default (order by other properties),
-2 or smaller for less than default.
* source_preference Order number for this video source
(quality takes higher priority)
-1 for default (order by other properties),
-2 or smaller for less than default.
* http_headers A dictionary of additional HTTP headers
to add to the request.
* stretched_ratio If given and not 1, indicates that the
video's pixels are not square.
width : height ratio as float.
* no_resume The server does not support resuming the
(HTTP or RTMP) download. Boolean.
url: Final video URL.
ext: Video filename extension.
format: The video format, defaults to ext (used for --get-format)
player_url: SWF Player URL (used for rtmpdump).
The following fields are optional:
alt_title: A secondary title of the video.
display_id An alternative identifier for the video, not necessarily
unique, but available before title. Typically, id is
something like "4234987", title "Dancing naked mole rats",
and display_id "dancing-naked-mole-rats"
thumbnails: A list of dictionaries, with the following entries:
* "id" (optional, string) - Thumbnail format ID
* "url"
* "preference" (optional, int) - quality of the image
* "width" (optional, int)
* "height" (optional, int)
* "resolution" (optional, string "{width}x{height"},
deprecated)
* "filesize" (optional, int)
thumbnail: Full URL to a video thumbnail image.
description: Full video description.
uploader: Full name of the video uploader.
license: License name the video is licensed under.
creator: The creator of the video.
release_date: The date (YYYYMMDD) when the video was released.
timestamp: UNIX timestamp of the moment the video became available.
upload_date: Video upload date (YYYYMMDD).
If not explicitly set, calculated from timestamp.
uploader_id: Nickname or id of the video uploader.
uploader_url: Full URL to a personal webpage of the video uploader.
location: Physical location where the video was filmed.
subtitles: The available subtitles as a dictionary in the format
{language: subformats}. "subformats" is a list sorted from
lower to higher preference, each element is a dictionary
with the "ext" entry and one of:
* "data": The subtitles file contents
* "url": A URL pointing to the subtitles file
"ext" will be calculated from URL if missing
automatic_captions: Like 'subtitles', used by the YoutubeIE for
automatically generated captions
duration: Length of the video in seconds, as an integer or float.
view_count: How many users have watched the video on the platform.
like_count: Number of positive ratings of the video
dislike_count: Number of negative ratings of the video
repost_count: Number of reposts of the video
average_rating: Average rating given by users, the scale used depends on the webpage
comment_count: Number of comments on the video
comments: A list of comments, each with one or more of the following
properties (all but one of text or html optional):
* "author" - human-readable name of the comment author
* "author_id" - user ID of the comment author
* "id" - Comment ID
* "html" - Comment as HTML
* "text" - Plain text of the comment
* "timestamp" - UNIX timestamp of comment
* "parent" - ID of the comment this one is replying to.
Set to "root" to indicate that this is a
comment to the original video.
age_limit: Age restriction for the video, as an integer (years)
webpage_url: The URL to the video webpage, if given to youtube-dl it
should allow to get the same result again. (It will be set
by YoutubeDL if it's missing)
categories: A list of categories that the video falls in, for example
["Sports", "Berlin"]
tags: A list of tags assigned to the video, e.g. ["sweden", "pop music"]
is_live: True, False, or None (=unknown). Whether this video is a
live stream that goes on instead of a fixed-length video.
start_time: Time in seconds where the reproduction should start, as
specified in the URL.
end_time: Time in seconds where the reproduction should end, as
specified in the URL.
The following fields should only be used when the video belongs to some logical
chapter or section:<|fim▁hole|> chapter: Name or title of the chapter the video belongs to.
chapter_number: Number of the chapter the video belongs to, as an integer.
chapter_id: Id of the chapter the video belongs to, as a unicode string.
The following fields should only be used when the video is an episode of some
series or programme:
series: Title of the series or programme the video episode belongs to.
season: Title of the season the video episode belongs to.
season_number: Number of the season the video episode belongs to, as an integer.
season_id: Id of the season the video episode belongs to, as a unicode string.
episode: Title of the video episode. Unlike mandatory video title field,
this field should denote the exact title of the video episode
without any kind of decoration.
episode_number: Number of the video episode within a season, as an integer.
episode_id: Id of the video episode, as a unicode string.
The following fields should only be used when the media is a track or a part of
a music album:
track: Title of the track.
track_number: Number of the track within an album or a disc, as an integer.
track_id: Id of the track (useful in case of custom indexing, e.g. 6.iii),
as a unicode string.
artist: Artist(s) of the track.
genre: Genre(s) of the track.
album: Title of the album the track belongs to.
album_type: Type of the album (e.g. "Demo", "Full-length", "Split", "Compilation", etc).
album_artist: List of all artists appeared on the album (e.g.
"Ash Borer / Fell Voices" or "Various Artists", useful for splits
and compilations).
disc_number: Number of the disc or other physical medium the track belongs to,
as an integer.
release_year: Year (YYYY) when the album was released.
Unless mentioned otherwise, the fields should be Unicode strings.
Unless mentioned otherwise, None is equivalent to absence of information.
_type "playlist" indicates multiple videos.
There must be a key "entries", which is a list, an iterable, or a PagedList
object, each element of which is a valid dictionary by this specification.
Additionally, playlists can have "title", "description" and "id" attributes
with the same semantics as videos (see above).
_type "multi_video" indicates that there are multiple videos that
form a single show, for examples multiple acts of an opera or TV episode.
It must have an entries key like a playlist and contain all the keys
required for a video at the same time.
_type "url" indicates that the video must be extracted from another
location, possibly by a different extractor. Its only required key is:
"url" - the next URL to extract.
The key "ie_key" can be set to the class name (minus the trailing "IE",
e.g. "Youtube") if the extractor class is known in advance.
Additionally, the dictionary may have any properties of the resolved entity
known in advance, for example "title" if the title of the referred video is
known ahead of time.
_type "url_transparent" entities have the same specification as "url", but
indicate that the given additional information is more precise than the one
associated with the resolved URL.
This is useful when a site employs a video service that hosts the video and
its technical metadata, but that video service does not embed a useful
title, description etc.
Subclasses of this one should re-define the _real_initialize() and
_real_extract() methods and define a _VALID_URL regexp.
Probably, they should also be added to the list of extractors.
Finally, the _WORKING attribute should be set to False for broken IEs
in order to warn the users and skip the tests.
"""
_ready = False
_downloader = None
_WORKING = True
def __init__(self, downloader=None):
"""Constructor. Receives an optional downloader."""
self._ready = False
self.set_downloader(downloader)
@classmethod
def suitable(cls, url):
"""Receives a URL and returns True if suitable for this IE."""
# This does not use has/getattr intentionally - we want to know whether
# we have cached the regexp for *this* class, whereas getattr would also
# match the superclass
if '_VALID_URL_RE' not in cls.__dict__:
cls._VALID_URL_RE = re.compile(cls._VALID_URL)
return cls._VALID_URL_RE.match(url) is not None
@classmethod
def _match_id(cls, url):
if '_VALID_URL_RE' not in cls.__dict__:
cls._VALID_URL_RE = re.compile(cls._VALID_URL)
m = cls._VALID_URL_RE.match(url)
assert m
return m.group('id')
@classmethod
def working(cls):
"""Getter method for _WORKING."""
return cls._WORKING
def initialize(self):
"""Initializes an instance (authentication, etc)."""
if not self._ready:
self._real_initialize()
self._ready = True
def extract(self, url):
"""Extracts URL information and returns it in list of dicts."""
try:
self.initialize()
return self._real_extract(url)
except ExtractorError:
raise
except compat_http_client.IncompleteRead as e:
raise ExtractorError('A network error has occurred.', cause=e, expected=True)
except (KeyError, StopIteration) as e:
raise ExtractorError('An extractor error has occurred.', cause=e)
def set_downloader(self, downloader):
"""Sets the downloader for this IE."""
self._downloader = downloader
def _real_initialize(self):
"""Real initialization process. Redefine in subclasses."""
pass
def _real_extract(self, url):
"""Real extraction process. Redefine in subclasses."""
pass
@classmethod
def ie_key(cls):
"""A string for getting the InfoExtractor with get_info_extractor"""
return compat_str(cls.__name__[:-2])
@property
def IE_NAME(self):
return compat_str(type(self).__name__[:-2])
def _request_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True, data=None, headers={}, query={}):
""" Returns the response handle """
if note is None:
self.report_download_webpage(video_id)
elif note is not False:
if video_id is None:
self.to_screen('%s' % (note,))
else:
self.to_screen('%s: %s' % (video_id, note))
if isinstance(url_or_request, compat_urllib_request.Request):
url_or_request = update_Request(
url_or_request, data=data, headers=headers, query=query)
else:
if query:
url_or_request = update_url_query(url_or_request, query)
if data is not None or headers:
url_or_request = sanitized_Request(url_or_request, data, headers)
try:
return self._downloader.urlopen(url_or_request)
except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
if errnote is False:
return False
if errnote is None:
errnote = 'Unable to download webpage'
errmsg = '%s: %s' % (errnote, error_to_compat_str(err))
if fatal:
raise ExtractorError(errmsg, sys.exc_info()[2], cause=err)
else:
self._downloader.report_warning(errmsg)
return False
def _download_webpage_handle(self, url_or_request, video_id, note=None, errnote=None, fatal=True, encoding=None, data=None, headers={}, query={}):
""" Returns a tuple (page content as string, URL handle) """
# Strip hashes from the URL (#1038)
if isinstance(url_or_request, (compat_str, str)):
url_or_request = url_or_request.partition('#')[0]
urlh = self._request_webpage(url_or_request, video_id, note, errnote, fatal, data=data, headers=headers, query=query)
if urlh is False:
assert not fatal
return False
content = self._webpage_read_content(urlh, url_or_request, video_id, note, errnote, fatal, encoding=encoding)
return (content, urlh)
@staticmethod
def _guess_encoding_from_content(content_type, webpage_bytes):
m = re.match(r'[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+\s*;\s*charset=(.+)', content_type)
if m:
encoding = m.group(1)
else:
m = re.search(br'<meta[^>]+charset=[\'"]?([^\'")]+)[ /\'">]',
webpage_bytes[:1024])
if m:
encoding = m.group(1).decode('ascii')
elif webpage_bytes.startswith(b'\xff\xfe'):
encoding = 'utf-16'
else:
encoding = 'utf-8'
return encoding
def _webpage_read_content(self, urlh, url_or_request, video_id, note=None, errnote=None, fatal=True, prefix=None, encoding=None):
content_type = urlh.headers.get('Content-Type', '')
webpage_bytes = urlh.read()
if prefix is not None:
webpage_bytes = prefix + webpage_bytes
if not encoding:
encoding = self._guess_encoding_from_content(content_type, webpage_bytes)
if self._downloader.params.get('dump_intermediate_pages', False):
try:
url = url_or_request.get_full_url()
except AttributeError:
url = url_or_request
self.to_screen('Dumping request to ' + url)
dump = base64.b64encode(webpage_bytes).decode('ascii')
self._downloader.to_screen(dump)
if self._downloader.params.get('write_pages', False):
try:
url = url_or_request.get_full_url()
except AttributeError:
url = url_or_request
basen = '%s_%s' % (video_id, url)
if len(basen) > 240:
h = '___' + hashlib.md5(basen.encode('utf-8')).hexdigest()
basen = basen[:240 - len(h)] + h
raw_filename = basen + '.dump'
filename = sanitize_filename(raw_filename, restricted=True)
self.to_screen('Saving request to ' + filename)
# Working around MAX_PATH limitation on Windows (see
# http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx)
if compat_os_name == 'nt':
absfilepath = os.path.abspath(filename)
if len(absfilepath) > 259:
filename = '\\\\?\\' + absfilepath
with open(filename, 'wb') as outf:
outf.write(webpage_bytes)
try:
content = webpage_bytes.decode(encoding, 'replace')
except LookupError:
content = webpage_bytes.decode('utf-8', 'replace')
if ('<title>Access to this site is blocked</title>' in content and
'Websense' in content[:512]):
msg = 'Access to this webpage has been blocked by Websense filtering software in your network.'
blocked_iframe = self._html_search_regex(
r'<iframe src="([^"]+)"', content,
'Websense information URL', default=None)
if blocked_iframe:
msg += ' Visit %s for more details' % blocked_iframe
raise ExtractorError(msg, expected=True)
if '<title>The URL you requested has been blocked</title>' in content[:512]:
msg = (
'Access to this webpage has been blocked by Indian censorship. '
'Use a VPN or proxy server (with --proxy) to route around it.')
block_msg = self._html_search_regex(
r'</h1><p>(.*?)</p>',
content, 'block message', default=None)
if block_msg:
msg += ' (Message: "%s")' % block_msg.replace('\n', ' ')
raise ExtractorError(msg, expected=True)
return content
def _download_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True, tries=1, timeout=5, encoding=None, data=None, headers={}, query={}):
""" Returns the data of the page as a string """
success = False
try_count = 0
while success is False:
try:
res = self._download_webpage_handle(url_or_request, video_id, note, errnote, fatal, encoding=encoding, data=data, headers=headers, query=query)
success = True
except compat_http_client.IncompleteRead as e:
try_count += 1
if try_count >= tries:
raise e
self._sleep(timeout, video_id)
if res is False:
return res
else:
content, _ = res
return content
def _download_xml(self, url_or_request, video_id,
note='Downloading XML', errnote='Unable to download XML',
transform_source=None, fatal=True, encoding=None, data=None, headers={}, query={}):
"""Return the xml as an xml.etree.ElementTree.Element"""
xml_string = self._download_webpage(
url_or_request, video_id, note, errnote, fatal=fatal, encoding=encoding, data=data, headers=headers, query=query)
if xml_string is False:
return xml_string
if transform_source:
xml_string = transform_source(xml_string)
return compat_etree_fromstring(xml_string.encode('utf-8'))
def _download_json(self, url_or_request, video_id,
note='Downloading JSON metadata',
errnote='Unable to download JSON metadata',
transform_source=None,
fatal=True, encoding=None, data=None, headers={}, query={}):
json_string = self._download_webpage(
url_or_request, video_id, note, errnote, fatal=fatal,
encoding=encoding, data=data, headers=headers, query=query)
if (not fatal) and json_string is False:
return None
return self._parse_json(
json_string, video_id, transform_source=transform_source, fatal=fatal)
def _parse_json(self, json_string, video_id, transform_source=None, fatal=True):
if transform_source:
json_string = transform_source(json_string)
try:
return json.loads(json_string)
except ValueError as ve:
errmsg = '%s: Failed to parse JSON ' % video_id
if fatal:
raise ExtractorError(errmsg, cause=ve)
else:
self.report_warning(errmsg + str(ve))
def report_warning(self, msg, video_id=None):
idstr = '' if video_id is None else '%s: ' % video_id
self._downloader.report_warning(
'[%s] %s%s' % (self.IE_NAME, idstr, msg))
def to_screen(self, msg):
"""Print msg to screen, prefixing it with '[ie_name]'"""
self._downloader.to_screen('[%s] %s' % (self.IE_NAME, msg))
def report_extraction(self, id_or_name):
"""Report information extraction."""
self.to_screen('%s: Extracting information' % id_or_name)
def report_download_webpage(self, video_id):
"""Report webpage download."""
self.to_screen('%s: Downloading webpage' % video_id)
def report_age_confirmation(self):
"""Report attempt to confirm age."""
self.to_screen('Confirming age')
def report_login(self):
"""Report attempt to log in."""
self.to_screen('Logging in')
@staticmethod
def raise_login_required(msg='This video is only available for registered users'):
raise ExtractorError(
'%s. Use --username and --password or --netrc to provide account credentials.' % msg,
expected=True)
@staticmethod
def raise_geo_restricted(msg='This video is not available from your location due to geo restriction'):
raise ExtractorError(
'%s. You might want to use --proxy to workaround.' % msg,
expected=True)
# Methods for following #608
@staticmethod
def url_result(url, ie=None, video_id=None, video_title=None):
"""Returns a URL that points to a page that should be processed"""
# TODO: ie should be the class used for getting the info
video_info = {'_type': 'url',
'url': url,
'ie_key': ie}
if video_id is not None:
video_info['id'] = video_id
if video_title is not None:
video_info['title'] = video_title
return video_info
@staticmethod
def playlist_result(entries, playlist_id=None, playlist_title=None, playlist_description=None):
"""Returns a playlist"""
video_info = {'_type': 'playlist',
'entries': entries}
if playlist_id:
video_info['id'] = playlist_id
if playlist_title:
video_info['title'] = playlist_title
if playlist_description:
video_info['description'] = playlist_description
return video_info
def _search_regex(self, pattern, string, name, default=NO_DEFAULT, fatal=True, flags=0, group=None):
"""
Perform a regex search on the given string, using a single or a list of
patterns returning the first matching group.
In case of failure return a default value or raise a WARNING or a
RegexNotFoundError, depending on fatal, specifying the field name.
"""
if isinstance(pattern, (str, compat_str, compiled_regex_type)):
mobj = re.search(pattern, string, flags)
else:
for p in pattern:
mobj = re.search(p, string, flags)
if mobj:
break
if not self._downloader.params.get('no_color') and compat_os_name != 'nt' and sys.stderr.isatty():
_name = '\033[0;34m%s\033[0m' % name
else:
_name = name
if mobj:
if group is None:
# return the first matching group
return next(g for g in mobj.groups() if g is not None)
else:
return mobj.group(group)
elif default is not NO_DEFAULT:
return default
elif fatal:
raise RegexNotFoundError('Unable to extract %s' % _name)
else:
self._downloader.report_warning('unable to extract %s' % _name + bug_reports_message())
return None
def _html_search_regex(self, pattern, string, name, default=NO_DEFAULT, fatal=True, flags=0, group=None):
"""
Like _search_regex, but strips HTML tags and unescapes entities.
"""
res = self._search_regex(pattern, string, name, default, fatal, flags, group)
if res:
return clean_html(res).strip()
else:
return res
def _get_netrc_login_info(self, netrc_machine=None):
username = None
password = None
netrc_machine = netrc_machine or self._NETRC_MACHINE
if self._downloader.params.get('usenetrc', False):
try:
info = netrc.netrc().authenticators(netrc_machine)
if info is not None:
username = info[0]
password = info[2]
else:
raise netrc.NetrcParseError('No authenticators for %s' % netrc_machine)
except (IOError, netrc.NetrcParseError) as err:
self._downloader.report_warning('parsing .netrc: %s' % error_to_compat_str(err))
return (username, password)
def _get_login_info(self):
"""
Get the login info as (username, password)
It will look in the netrc file using the _NETRC_MACHINE value
If there's no info available, return (None, None)
"""
if self._downloader is None:
return (None, None)
username = None
password = None
downloader_params = self._downloader.params
# Attempt to use provided username and password or .netrc data
if downloader_params.get('username') is not None:
username = downloader_params['username']
password = downloader_params['password']
else:
username, password = self._get_netrc_login_info()
return (username, password)
def _get_tfa_info(self, note='two-factor verification code'):
"""
Get the two-factor authentication info
TODO - asking the user will be required for sms/phone verify
currently just uses the command line option
If there's no info available, return None
"""
if self._downloader is None:
return None
downloader_params = self._downloader.params
if downloader_params.get('twofactor') is not None:
return downloader_params['twofactor']
return compat_getpass('Type %s and press [Return]: ' % note)
# Helper functions for extracting OpenGraph info
@staticmethod
def _og_regexes(prop):
content_re = r'content=(?:"([^"]+?)"|\'([^\']+?)\'|\s*([^\s"\'=<>`]+?))'
property_re = (r'(?:name|property)=(?:\'og:%(prop)s\'|"og:%(prop)s"|\s*og:%(prop)s\b)'
% {'prop': re.escape(prop)})
template = r'<meta[^>]+?%s[^>]+?%s'
return [
template % (property_re, content_re),
template % (content_re, property_re),
]
@staticmethod
def _meta_regex(prop):
return r'''(?isx)<meta
(?=[^>]+(?:itemprop|name|property|id|http-equiv)=(["\']?)%s\1)
[^>]+?content=(["\'])(?P<content>.*?)\2''' % re.escape(prop)
def _og_search_property(self, prop, html, name=None, **kargs):
if not isinstance(prop, (list, tuple)):
prop = [prop]
if name is None:
name = 'OpenGraph %s' % prop[0]
og_regexes = []
for p in prop:
og_regexes.extend(self._og_regexes(p))
escaped = self._search_regex(og_regexes, html, name, flags=re.DOTALL, **kargs)
if escaped is None:
return None
return unescapeHTML(escaped)
def _og_search_thumbnail(self, html, **kargs):
return self._og_search_property('image', html, 'thumbnail URL', fatal=False, **kargs)
def _og_search_description(self, html, **kargs):
return self._og_search_property('description', html, fatal=False, **kargs)
def _og_search_title(self, html, **kargs):
return self._og_search_property('title', html, **kargs)
def _og_search_video_url(self, html, name='video url', secure=True, **kargs):
regexes = self._og_regexes('video') + self._og_regexes('video:url')
if secure:
regexes = self._og_regexes('video:secure_url') + regexes
return self._html_search_regex(regexes, html, name, **kargs)
def _og_search_url(self, html, **kargs):
return self._og_search_property('url', html, **kargs)
def _html_search_meta(self, name, html, display_name=None, fatal=False, **kwargs):
if not isinstance(name, (list, tuple)):
name = [name]
if display_name is None:
display_name = name[0]
return self._html_search_regex(
[self._meta_regex(n) for n in name],
html, display_name, fatal=fatal, group='content', **kwargs)
def _dc_search_uploader(self, html):
return self._html_search_meta('dc.creator', html, 'uploader')
def _rta_search(self, html):
# See http://www.rtalabel.org/index.php?content=howtofaq#single
if re.search(r'(?ix)<meta\s+name="rating"\s+'
r' content="RTA-5042-1996-1400-1577-RTA"',
html):
return 18
return 0
def _media_rating_search(self, html):
# See http://www.tjg-designs.com/WP/metadata-code-examples-adding-metadata-to-your-web-pages/
rating = self._html_search_meta('rating', html)
if not rating:
return None
RATING_TABLE = {
'safe for kids': 0,
'general': 8,
'14 years': 14,
'mature': 17,
'restricted': 19,
}
return RATING_TABLE.get(rating.lower())
def _family_friendly_search(self, html):
# See http://schema.org/VideoObject
family_friendly = self._html_search_meta('isFamilyFriendly', html)
if not family_friendly:
return None
RATING_TABLE = {
'1': 0,
'true': 0,
'0': 18,
'false': 18,
}
return RATING_TABLE.get(family_friendly.lower())
def _twitter_search_player(self, html):
return self._html_search_meta('twitter:player', html,
'twitter card player')
def _search_json_ld(self, html, video_id, expected_type=None, **kwargs):
json_ld = self._search_regex(
r'(?s)<script[^>]+type=(["\'])application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>',
html, 'JSON-LD', group='json_ld', **kwargs)
default = kwargs.get('default', NO_DEFAULT)
if not json_ld:
return default if default is not NO_DEFAULT else {}
# JSON-LD may be malformed and thus `fatal` should be respected.
# At the same time `default` may be passed that assumes `fatal=False`
# for _search_regex. Let's simulate the same behavior here as well.
fatal = kwargs.get('fatal', True) if default == NO_DEFAULT else False
return self._json_ld(json_ld, video_id, fatal=fatal, expected_type=expected_type)
def _json_ld(self, json_ld, video_id, fatal=True, expected_type=None):
if isinstance(json_ld, compat_str):
json_ld = self._parse_json(json_ld, video_id, fatal=fatal)
if not json_ld:
return {}
info = {}
if not isinstance(json_ld, (list, tuple, dict)):
return info
if isinstance(json_ld, dict):
json_ld = [json_ld]
for e in json_ld:
if e.get('@context') == 'http://schema.org':
item_type = e.get('@type')
if expected_type is not None and expected_type != item_type:
return info
if item_type == 'TVEpisode':
info.update({
'episode': unescapeHTML(e.get('name')),
'episode_number': int_or_none(e.get('episodeNumber')),
'description': unescapeHTML(e.get('description')),
})
part_of_season = e.get('partOfSeason')
if isinstance(part_of_season, dict) and part_of_season.get('@type') == 'TVSeason':
info['season_number'] = int_or_none(part_of_season.get('seasonNumber'))
part_of_series = e.get('partOfSeries') or e.get('partOfTVSeries')
if isinstance(part_of_series, dict) and part_of_series.get('@type') == 'TVSeries':
info['series'] = unescapeHTML(part_of_series.get('name'))
elif item_type == 'Article':
info.update({
'timestamp': parse_iso8601(e.get('datePublished')),
'title': unescapeHTML(e.get('headline')),
'description': unescapeHTML(e.get('articleBody')),
})
elif item_type == 'VideoObject':
info.update({
'url': e.get('contentUrl'),
'title': unescapeHTML(e.get('name')),
'description': unescapeHTML(e.get('description')),
'thumbnail': e.get('thumbnailUrl'),
'duration': parse_duration(e.get('duration')),
'timestamp': unified_timestamp(e.get('uploadDate')),
'filesize': float_or_none(e.get('contentSize')),
'tbr': int_or_none(e.get('bitrate')),
'width': int_or_none(e.get('width')),
'height': int_or_none(e.get('height')),
})
break
return dict((k, v) for k, v in info.items() if v is not None)
@staticmethod
def _hidden_inputs(html):
html = re.sub(r'<!--(?:(?!<!--).)*-->', '', html)
hidden_inputs = {}
for input in re.findall(r'(?i)<input([^>]+)>', html):
if not re.search(r'type=(["\'])(?:hidden|submit)\1', input):
continue
name = re.search(r'(?:name|id)=(["\'])(?P<value>.+?)\1', input)
if not name:
continue
value = re.search(r'value=(["\'])(?P<value>.*?)\1', input)
if not value:
continue
hidden_inputs[name.group('value')] = value.group('value')
return hidden_inputs
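# Illustrative only: given '<input type="hidden" name="csrf" value="abc">',
# _hidden_inputs() would return {'csrf': 'abc'}.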
def _form_hidden_inputs(self, form_id, html):
form = self._search_regex(
r'(?is)<form[^>]+?id=(["\'])%s\1[^>]*>(?P<form>.+?)</form>' % form_id,
html, '%s form' % form_id, group='form')
return self._hidden_inputs(form)
def _sort_formats(self, formats, field_preference=None):
if not formats:
raise ExtractorError('No video formats found')
for f in formats:
# Automatically determine tbr when missing based on abr and vbr (improves
# formats sorting in some cases)
if 'tbr' not in f and f.get('abr') is not None and f.get('vbr') is not None:
f['tbr'] = f['abr'] + f['vbr']
def _formats_key(f):
# TODO remove the following workaround
from ..utils import determine_ext
if not f.get('ext') and 'url' in f:
f['ext'] = determine_ext(f['url'])
if isinstance(field_preference, (list, tuple)):
return tuple(
f.get(field)
if f.get(field) is not None
else ('' if field == 'format_id' else -1)
for field in field_preference)
preference = f.get('preference')
if preference is None:
preference = 0
if f.get('ext') in ['f4f', 'f4m']: # Not yet supported
preference -= 0.5
protocol = f.get('protocol') or determine_protocol(f)
proto_preference = 0 if protocol in ['http', 'https'] else (-0.5 if protocol == 'rtsp' else -0.1)
if f.get('vcodec') == 'none': # audio only
preference -= 50
if self._downloader.params.get('prefer_free_formats'):
ORDER = ['aac', 'mp3', 'm4a', 'webm', 'ogg', 'opus']
else:
ORDER = ['webm', 'opus', 'ogg', 'mp3', 'aac', 'm4a']
ext_preference = 0
try:
audio_ext_preference = ORDER.index(f['ext'])
except ValueError:
audio_ext_preference = -1
else:
if f.get('acodec') == 'none': # video only
preference -= 40
if self._downloader.params.get('prefer_free_formats'):
ORDER = ['flv', 'mp4', 'webm']
else:
ORDER = ['webm', 'flv', 'mp4']
try:
ext_preference = ORDER.index(f['ext'])
except ValueError:
ext_preference = -1
audio_ext_preference = 0
return (
preference,
f.get('language_preference') if f.get('language_preference') is not None else -1,
f.get('quality') if f.get('quality') is not None else -1,
f.get('tbr') if f.get('tbr') is not None else -1,
f.get('filesize') if f.get('filesize') is not None else -1,
f.get('vbr') if f.get('vbr') is not None else -1,
f.get('height') if f.get('height') is not None else -1,
f.get('width') if f.get('width') is not None else -1,
proto_preference,
ext_preference,
f.get('abr') if f.get('abr') is not None else -1,
audio_ext_preference,
f.get('fps') if f.get('fps') is not None else -1,
f.get('filesize_approx') if f.get('filesize_approx') is not None else -1,
f.get('source_preference') if f.get('source_preference') is not None else -1,
f.get('format_id') if f.get('format_id') is not None else '',
)
formats.sort(key=_formats_key)
def _check_formats(self, formats, video_id):
if formats:
formats[:] = filter(
lambda f: self._is_valid_url(
f['url'], video_id,
item='%s video format' % f.get('format_id') if f.get('format_id') else 'video'),
formats)
@staticmethod
def _remove_duplicate_formats(formats):
format_urls = set()
unique_formats = []
for f in formats:
if f['url'] not in format_urls:
format_urls.add(f['url'])
unique_formats.append(f)
formats[:] = unique_formats
def _is_valid_url(self, url, video_id, item='video'):
url = self._proto_relative_url(url, scheme='http:')
# For now assume non HTTP(S) URLs always valid
if not (url.startswith('http://') or url.startswith('https://')):
return True
try:
self._request_webpage(url, video_id, 'Checking %s URL' % item)
return True
except ExtractorError as e:
if isinstance(e.cause, compat_urllib_error.URLError):
self.to_screen(
'%s: %s URL is invalid, skipping' % (video_id, item))
return False
raise
def http_scheme(self):
""" Either "http:" or "https:", depending on the user's preferences """
return (
'http:'
if self._downloader.params.get('prefer_insecure', False)
else 'https:')
def _proto_relative_url(self, url, scheme=None):
if url is None:
return url
if url.startswith('//'):
if scheme is None:
scheme = self.http_scheme()
return scheme + url
else:
return url
def _sleep(self, timeout, video_id, msg_template=None):
if msg_template is None:
msg_template = '%(video_id)s: Waiting for %(timeout)s seconds'
msg = msg_template % {'video_id': video_id, 'timeout': timeout}
self.to_screen(msg)
time.sleep(timeout)
def _extract_f4m_formats(self, manifest_url, video_id, preference=None, f4m_id=None,
transform_source=lambda s: fix_xml_ampersands(s).strip(),
fatal=True, m3u8_id=None):
manifest = self._download_xml(
manifest_url, video_id, 'Downloading f4m manifest',
'Unable to download f4m manifest',
# Some manifests may be malformed, e.g. prosiebensat1 generated manifests
# (see https://github.com/rg3/youtube-dl/issues/6215#issuecomment-121704244)
transform_source=transform_source,
fatal=fatal)
if manifest is False:
return []
return self._parse_f4m_formats(
manifest, manifest_url, video_id, preference=preference, f4m_id=f4m_id,
transform_source=transform_source, fatal=fatal, m3u8_id=m3u8_id)
def _parse_f4m_formats(self, manifest, manifest_url, video_id, preference=None, f4m_id=None,
transform_source=lambda s: fix_xml_ampersands(s).strip(),
fatal=True, m3u8_id=None):
# currently youtube-dl cannot decode the playerVerificationChallenge as Akamai uses Adobe Alchemy
akamai_pv = manifest.find('{http://ns.adobe.com/f4m/1.0}pv-2.0')
if akamai_pv is not None and ';' in akamai_pv.text:
playerVerificationChallenge = akamai_pv.text.split(';')[0]
if playerVerificationChallenge.strip() != '':
return []
formats = []
manifest_version = '1.0'
media_nodes = manifest.findall('{http://ns.adobe.com/f4m/1.0}media')
if not media_nodes:
manifest_version = '2.0'
media_nodes = manifest.findall('{http://ns.adobe.com/f4m/2.0}media')
# Remove unsupported DRM protected media from final formats
# rendition (see https://github.com/rg3/youtube-dl/issues/8573).
media_nodes = remove_encrypted_media(media_nodes)
if not media_nodes:
return formats
base_url = xpath_text(
manifest, ['{http://ns.adobe.com/f4m/1.0}baseURL', '{http://ns.adobe.com/f4m/2.0}baseURL'],
'base URL', default=None)
if base_url:
base_url = base_url.strip()
bootstrap_info = xpath_element(
manifest, ['{http://ns.adobe.com/f4m/1.0}bootstrapInfo', '{http://ns.adobe.com/f4m/2.0}bootstrapInfo'],
'bootstrap info', default=None)
for i, media_el in enumerate(media_nodes):
tbr = int_or_none(media_el.attrib.get('bitrate'))
width = int_or_none(media_el.attrib.get('width'))
height = int_or_none(media_el.attrib.get('height'))
format_id = '-'.join(filter(None, [f4m_id, compat_str(i if tbr is None else tbr)]))
# If <bootstrapInfo> is present, the specified f4m is a
# stream-level manifest, and only set-level manifests may refer to
# external resources. See section 11.4 and section 4 of F4M spec
if bootstrap_info is None:
media_url = None
# @href is introduced in 2.0, see section 11.6 of F4M spec
if manifest_version == '2.0':
media_url = media_el.attrib.get('href')
if media_url is None:
media_url = media_el.attrib.get('url')
if not media_url:
continue
manifest_url = (
media_url if media_url.startswith('http://') or media_url.startswith('https://')
else ((base_url or '/'.join(manifest_url.split('/')[:-1])) + '/' + media_url))
# If media_url is itself a f4m manifest do the recursive extraction
# since bitrates in parent manifest (this one) and media_url manifest
# may differ leading to inability to resolve the format by requested
# bitrate in f4m downloader
ext = determine_ext(manifest_url)
if ext == 'f4m':
f4m_formats = self._extract_f4m_formats(
manifest_url, video_id, preference=preference, f4m_id=f4m_id,
transform_source=transform_source, fatal=fatal)
# Sometimes stream-level manifest contains single media entry that
# does not contain any quality metadata (e.g. http://matchtv.ru/#live-player).
# At the same time parent's media entry in set-level manifest may
# contain it. We will copy it from parent in such cases.
if len(f4m_formats) == 1:
f = f4m_formats[0]
f.update({
'tbr': f.get('tbr') or tbr,
'width': f.get('width') or width,
'height': f.get('height') or height,
'format_id': f.get('format_id') if not tbr else format_id,
})
formats.extend(f4m_formats)
continue
elif ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
manifest_url, video_id, 'mp4', preference=preference,
m3u8_id=m3u8_id, fatal=fatal))
continue
formats.append({
'format_id': format_id,
'url': manifest_url,
'ext': 'flv' if bootstrap_info is not None else None,
'tbr': tbr,
'width': width,
'height': height,
'preference': preference,
})
return formats
def _m3u8_meta_format(self, m3u8_url, ext=None, preference=None, m3u8_id=None):
return {
'format_id': '-'.join(filter(None, [m3u8_id, 'meta'])),
'url': m3u8_url,
'ext': ext,
'protocol': 'm3u8',
'preference': preference - 100 if preference else -100,
'resolution': 'multiple',
'format_note': 'Quality selection URL',
}
def _extract_m3u8_formats(self, m3u8_url, video_id, ext=None,
entry_protocol='m3u8', preference=None,
m3u8_id=None, note=None, errnote=None,
fatal=True, live=False):
formats = [self._m3u8_meta_format(m3u8_url, ext, preference, m3u8_id)]
format_url = lambda u: (
u
if re.match(r'^https?://', u)
else compat_urlparse.urljoin(m3u8_url, u))
res = self._download_webpage_handle(
m3u8_url, video_id,
note=note or 'Downloading m3u8 information',
errnote=errnote or 'Failed to download m3u8 information',
fatal=fatal)
if res is False:
return []
m3u8_doc, urlh = res
m3u8_url = urlh.geturl()
# We should try extracting formats only from master playlists [1], i.e.
# playlists that describe available qualities. On the other hand media
# playlists [2] should be returned as is since they contain just the media
# without qualities renditions.
# Fortunately, master playlist can be easily distinguished from media
# playlist based on particular tags availability. As of [1, 2] master
# playlist tags MUST NOT appear in a media playlist and vice versa.
# As of [3] #EXT-X-TARGETDURATION tag is REQUIRED for every media playlist
# and MUST NOT appear in master playlist thus we can clearly detect media
# playlist with this criterion.
# 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.4
# 2. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3
# 3. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.1
if '#EXT-X-TARGETDURATION' in m3u8_doc: # media playlist, return as is
return [{
'url': m3u8_url,
'format_id': m3u8_id,
'ext': ext,
'protocol': entry_protocol,
'preference': preference,
}]
last_info = None
for line in m3u8_doc.splitlines():
if line.startswith('#EXT-X-STREAM-INF:'):
last_info = parse_m3u8_attributes(line)
elif line.startswith('#EXT-X-MEDIA:'):
media = parse_m3u8_attributes(line)
media_type = media.get('TYPE')
if media_type in ('VIDEO', 'AUDIO'):
media_url = media.get('URI')
if media_url:
format_id = []
for v in (media.get('GROUP-ID'), media.get('NAME')):
if v:
format_id.append(v)
formats.append({
'format_id': '-'.join(format_id),
'url': format_url(media_url),
'language': media.get('LANGUAGE'),
'vcodec': 'none' if media_type == 'AUDIO' else None,
'ext': ext,
'protocol': entry_protocol,
'preference': preference,
})
elif line.startswith('#') or not line.strip():
continue
else:
if last_info is None:
formats.append({'url': format_url(line)})
continue
tbr = int_or_none(last_info.get('AVERAGE-BANDWIDTH') or last_info.get('BANDWIDTH'), scale=1000)
format_id = []
if m3u8_id:
format_id.append(m3u8_id)
# Bandwidth of live streams may differ over time thus making
# format_id unpredictable. So it's better to keep provided
# format_id intact.
if not live:
# Despite specification does not mention NAME attribute for
# EXT-X-STREAM-INF it still sometimes may be present
stream_name = last_info.get('NAME')
format_id.append(stream_name if stream_name else '%d' % (tbr if tbr else len(formats)))
f = {
'format_id': '-'.join(format_id),
'url': format_url(line.strip()),
'tbr': tbr,
'ext': ext,
'fps': float_or_none(last_info.get('FRAME-RATE')),
'protocol': entry_protocol,
'preference': preference,
}
resolution = last_info.get('RESOLUTION')
if resolution:
width_str, height_str = resolution.split('x')
f['width'] = int(width_str)
f['height'] = int(height_str)
# Unified Streaming Platform
mobj = re.search(
r'audio.*?(?:%3D|=)(\d+)(?:-video.*?(?:%3D|=)(\d+))?', f['url'])
if mobj:
abr, vbr = mobj.groups()
abr, vbr = float_or_none(abr, 1000), float_or_none(vbr, 1000)
f.update({
'vbr': vbr,
'abr': abr,
})
f.update(parse_codecs(last_info.get('CODECS')))
formats.append(f)
last_info = {}
return formats
@staticmethod
def _xpath_ns(path, namespace=None):
if not namespace:
return path
out = []
for c in path.split('/'):
if not c or c == '.':
out.append(c)
else:
out.append('{%s}%s' % (namespace, c))
return '/'.join(out)
def _extract_smil_formats(self, smil_url, video_id, fatal=True, f4m_params=None, transform_source=None):
smil = self._download_smil(smil_url, video_id, fatal=fatal, transform_source=transform_source)
if smil is False:
assert not fatal
return []
namespace = self._parse_smil_namespace(smil)
return self._parse_smil_formats(
smil, smil_url, video_id, namespace=namespace, f4m_params=f4m_params)
def _extract_smil_info(self, smil_url, video_id, fatal=True, f4m_params=None):
smil = self._download_smil(smil_url, video_id, fatal=fatal)
if smil is False:
return {}
return self._parse_smil(smil, smil_url, video_id, f4m_params=f4m_params)
def _download_smil(self, smil_url, video_id, fatal=True, transform_source=None):
return self._download_xml(
smil_url, video_id, 'Downloading SMIL file',
'Unable to download SMIL file', fatal=fatal, transform_source=transform_source)
def _parse_smil(self, smil, smil_url, video_id, f4m_params=None):
namespace = self._parse_smil_namespace(smil)
formats = self._parse_smil_formats(
smil, smil_url, video_id, namespace=namespace, f4m_params=f4m_params)
subtitles = self._parse_smil_subtitles(smil, namespace=namespace)
video_id = os.path.splitext(url_basename(smil_url))[0]
title = None
description = None
upload_date = None
for meta in smil.findall(self._xpath_ns('./head/meta', namespace)):
name = meta.attrib.get('name')
content = meta.attrib.get('content')
if not name or not content:
continue
if not title and name == 'title':
title = content
elif not description and name in ('description', 'abstract'):
description = content
elif not upload_date and name == 'date':
upload_date = unified_strdate(content)
thumbnails = [{
'id': image.get('type'),
'url': image.get('src'),
'width': int_or_none(image.get('width')),
'height': int_or_none(image.get('height')),
} for image in smil.findall(self._xpath_ns('.//image', namespace)) if image.get('src')]
return {
'id': video_id,
'title': title or video_id,
'description': description,
'upload_date': upload_date,
'thumbnails': thumbnails,
'formats': formats,
'subtitles': subtitles,
}
def _parse_smil_namespace(self, smil):
return self._search_regex(
r'(?i)^{([^}]+)?}smil$', smil.tag, 'namespace', default=None)
def _parse_smil_formats(self, smil, smil_url, video_id, namespace=None, f4m_params=None, transform_rtmp_url=None):
base = smil_url
for meta in smil.findall(self._xpath_ns('./head/meta', namespace)):
b = meta.get('base') or meta.get('httpBase')
if b:
base = b
break
formats = []
rtmp_count = 0
http_count = 0
m3u8_count = 0
srcs = []
media = smil.findall(self._xpath_ns('.//video', namespace)) + smil.findall(self._xpath_ns('.//audio', namespace))
for medium in media:
src = medium.get('src')
if not src or src in srcs:
continue
srcs.append(src)
bitrate = float_or_none(medium.get('system-bitrate') or medium.get('systemBitrate'), 1000)
filesize = int_or_none(medium.get('size') or medium.get('fileSize'))
width = int_or_none(medium.get('width'))
height = int_or_none(medium.get('height'))
proto = medium.get('proto')
ext = medium.get('ext')
src_ext = determine_ext(src)
streamer = medium.get('streamer') or base
if proto == 'rtmp' or streamer.startswith('rtmp'):
rtmp_count += 1
formats.append({
'url': streamer,
'play_path': src,
'ext': 'flv',
'format_id': 'rtmp-%d' % (rtmp_count if bitrate is None else bitrate),
'tbr': bitrate,
'filesize': filesize,
'width': width,
'height': height,
})
if transform_rtmp_url:
streamer, src = transform_rtmp_url(streamer, src)
formats[-1].update({
'url': streamer,
'play_path': src,
})
continue
src_url = src if src.startswith('http') else compat_urlparse.urljoin(base, src)
src_url = src_url.strip()
if proto == 'm3u8' or src_ext == 'm3u8':
m3u8_formats = self._extract_m3u8_formats(
src_url, video_id, ext or 'mp4', m3u8_id='hls', fatal=False)
if len(m3u8_formats) == 1:
m3u8_count += 1
m3u8_formats[0].update({
'format_id': 'hls-%d' % (m3u8_count if bitrate is None else bitrate),
'tbr': bitrate,
'width': width,
'height': height,
})
formats.extend(m3u8_formats)
continue
if src_ext == 'f4m':
f4m_url = src_url
if not f4m_params:
f4m_params = {
'hdcore': '3.2.0',
'plugin': 'flowplayer-3.2.0.1',
}
f4m_url += '&' if '?' in f4m_url else '?'
f4m_url += compat_urllib_parse_urlencode(f4m_params)
formats.extend(self._extract_f4m_formats(f4m_url, video_id, f4m_id='hds', fatal=False))
continue
if src_url.startswith('http') and self._is_valid_url(src, video_id):
http_count += 1
formats.append({
'url': src_url,
'ext': ext or src_ext or 'flv',
'format_id': 'http-%d' % (bitrate or http_count),
'tbr': bitrate,
'filesize': filesize,
'width': width,
'height': height,
})
continue
return formats
def _parse_smil_subtitles(self, smil, namespace=None, subtitles_lang='en'):
urls = []
subtitles = {}
for num, textstream in enumerate(smil.findall(self._xpath_ns('.//textstream', namespace))):
src = textstream.get('src')
if not src or src in urls:
continue
urls.append(src)
ext = textstream.get('ext') or mimetype2ext(textstream.get('type')) or determine_ext(src)
lang = textstream.get('systemLanguage') or textstream.get('systemLanguageName') or textstream.get('lang') or subtitles_lang
subtitles.setdefault(lang, []).append({
'url': src,
'ext': ext,
})
return subtitles
def _extract_xspf_playlist(self, playlist_url, playlist_id, fatal=True):
xspf = self._download_xml(
            playlist_url, playlist_id, 'Downloading xspf playlist',
'Unable to download xspf manifest', fatal=fatal)
if xspf is False:
return []
return self._parse_xspf(xspf, playlist_id)
def _parse_xspf(self, playlist, playlist_id):
NS_MAP = {
'xspf': 'http://xspf.org/ns/0/',
's1': 'http://static.streamone.nl/player/ns/0',
}
entries = []
for track in playlist.findall(xpath_with_ns('./xspf:trackList/xspf:track', NS_MAP)):
title = xpath_text(
track, xpath_with_ns('./xspf:title', NS_MAP), 'title', default=playlist_id)
description = xpath_text(
track, xpath_with_ns('./xspf:annotation', NS_MAP), 'description')
thumbnail = xpath_text(
track, xpath_with_ns('./xspf:image', NS_MAP), 'thumbnail')
duration = float_or_none(
xpath_text(track, xpath_with_ns('./xspf:duration', NS_MAP), 'duration'), 1000)
formats = [{
'url': location.text,
'format_id': location.get(xpath_with_ns('s1:label', NS_MAP)),
'width': int_or_none(location.get(xpath_with_ns('s1:width', NS_MAP))),
'height': int_or_none(location.get(xpath_with_ns('s1:height', NS_MAP))),
} for location in track.findall(xpath_with_ns('./xspf:location', NS_MAP))]
self._sort_formats(formats)
entries.append({
'id': playlist_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'formats': formats,
})
return entries
def _extract_mpd_formats(self, mpd_url, video_id, mpd_id=None, note=None, errnote=None, fatal=True, formats_dict={}):
res = self._download_webpage_handle(
mpd_url, video_id,
note=note or 'Downloading MPD manifest',
errnote=errnote or 'Failed to download MPD manifest',
fatal=fatal)
if res is False:
return []
mpd, urlh = res
mpd_base_url = re.match(r'https?://.+/', urlh.geturl()).group()
return self._parse_mpd_formats(
compat_etree_fromstring(mpd.encode('utf-8')), mpd_id, mpd_base_url, formats_dict=formats_dict)
def _parse_mpd_formats(self, mpd_doc, mpd_id=None, mpd_base_url='', formats_dict={}):
"""
Parse formats from MPD manifest.
References:
1. MPEG-DASH Standard, ISO/IEC 23009-1:2014(E),
http://standards.iso.org/ittf/PubliclyAvailableStandards/c065274_ISO_IEC_23009-1_2014.zip
2. https://en.wikipedia.org/wiki/Dynamic_Adaptive_Streaming_over_HTTP
"""
if mpd_doc.get('type') == 'dynamic':
return []
namespace = self._search_regex(r'(?i)^{([^}]+)?}MPD$', mpd_doc.tag, 'namespace', default=None)
def _add_ns(path):
return self._xpath_ns(path, namespace)
def is_drm_protected(element):
return element.find(_add_ns('ContentProtection')) is not None
def extract_multisegment_info(element, ms_parent_info):
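            # Collect segment information for this element, starting from a copy of the
            # parent's info so Period/AdaptationSet-level defaults (timescale, start
            # number, ...) cascade down and can be overridden per Representation via
            # SegmentList or SegmentTemplate below.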
ms_info = ms_parent_info.copy()
segment_list = element.find(_add_ns('SegmentList'))
if segment_list is not None:
segment_urls_e = segment_list.findall(_add_ns('SegmentURL'))
if segment_urls_e:
ms_info['segment_urls'] = [segment.attrib['media'] for segment in segment_urls_e]
initialization = segment_list.find(_add_ns('Initialization'))
if initialization is not None:
ms_info['initialization_url'] = initialization.attrib['sourceURL']
else:
segment_template = element.find(_add_ns('SegmentTemplate'))
if segment_template is not None:
start_number = segment_template.get('startNumber')
if start_number:
ms_info['start_number'] = int(start_number)
segment_timeline = segment_template.find(_add_ns('SegmentTimeline'))
if segment_timeline is not None:
s_e = segment_timeline.findall(_add_ns('S'))
if s_e:
ms_info['total_number'] = 0
ms_info['s'] = []
for s in s_e:
r = int(s.get('r', 0))
ms_info['total_number'] += 1 + r
ms_info['s'].append({
't': int(s.get('t', 0)),
# @d is mandatory (see [1, 5.3.9.6.2, Table 17, page 60])
'd': int(s.attrib['d']),
'r': r,
})
else:
timescale = segment_template.get('timescale')
if timescale:
ms_info['timescale'] = int(timescale)
segment_duration = segment_template.get('duration')
if segment_duration:
ms_info['segment_duration'] = int(segment_duration)
media_template = segment_template.get('media')
if media_template:
ms_info['media_template'] = media_template
initialization = segment_template.get('initialization')
if initialization:
ms_info['initialization_url'] = initialization
else:
initialization = segment_template.find(_add_ns('Initialization'))
if initialization is not None:
ms_info['initialization_url'] = initialization.attrib['sourceURL']
return ms_info
mpd_duration = parse_duration(mpd_doc.get('mediaPresentationDuration'))
formats = []
for period in mpd_doc.findall(_add_ns('Period')):
period_duration = parse_duration(period.get('duration')) or mpd_duration
period_ms_info = extract_multisegment_info(period, {
'start_number': 1,
'timescale': 1,
})
for adaptation_set in period.findall(_add_ns('AdaptationSet')):
if is_drm_protected(adaptation_set):
continue
adaption_set_ms_info = extract_multisegment_info(adaptation_set, period_ms_info)
for representation in adaptation_set.findall(_add_ns('Representation')):
if is_drm_protected(representation):
continue
representation_attrib = adaptation_set.attrib.copy()
representation_attrib.update(representation.attrib)
# According to [1, 5.3.7.2, Table 9, page 41], @mimeType is mandatory
mime_type = representation_attrib['mimeType']
content_type = mime_type.split('/')[0]
if content_type == 'text':
# TODO implement WebVTT downloading
pass
elif content_type == 'video' or content_type == 'audio':
base_url = ''
for element in (representation, adaptation_set, period, mpd_doc):
base_url_e = element.find(_add_ns('BaseURL'))
if base_url_e is not None:
base_url = base_url_e.text + base_url
if re.match(r'^https?://', base_url):
break
if mpd_base_url and not re.match(r'^https?://', base_url):
if not mpd_base_url.endswith('/') and not base_url.startswith('/'):
mpd_base_url += '/'
base_url = mpd_base_url + base_url
representation_id = representation_attrib.get('id')
lang = representation_attrib.get('lang')
url_el = representation.find(_add_ns('BaseURL'))
filesize = int_or_none(url_el.attrib.get('{http://youtube.com/yt/2012/10/10}contentLength') if url_el is not None else None)
f = {
'format_id': '%s-%s' % (mpd_id, representation_id) if mpd_id else representation_id,
'url': base_url,
'ext': mimetype2ext(mime_type),
'width': int_or_none(representation_attrib.get('width')),
'height': int_or_none(representation_attrib.get('height')),
'tbr': int_or_none(representation_attrib.get('bandwidth'), 1000),
'asr': int_or_none(representation_attrib.get('audioSamplingRate')),
'fps': int_or_none(representation_attrib.get('frameRate')),
'vcodec': 'none' if content_type == 'audio' else representation_attrib.get('codecs'),
'acodec': 'none' if content_type == 'video' else representation_attrib.get('codecs'),
'language': lang if lang not in ('mul', 'und', 'zxx', 'mis') else None,
'format_note': 'DASH %s' % content_type,
'filesize': filesize,
}
representation_ms_info = extract_multisegment_info(representation, adaption_set_ms_info)
if 'segment_urls' not in representation_ms_info and 'media_template' in representation_ms_info:
                            if 'total_number' not in representation_ms_info and 'segment_duration' in representation_ms_info:
segment_duration = float(representation_ms_info['segment_duration']) / float(representation_ms_info['timescale'])
representation_ms_info['total_number'] = int(math.ceil(float(period_duration) / segment_duration))
media_template = representation_ms_info['media_template']
media_template = media_template.replace('$RepresentationID$', representation_id)
media_template = re.sub(r'\$(Number|Bandwidth|Time)\$', r'%(\1)d', media_template)
media_template = re.sub(r'\$(Number|Bandwidth|Time)%([^$]+)\$', r'%(\1)\2', media_template)
                            media_template = media_template.replace('$$', '$')
# As per [1, 5.3.9.4.4, Table 16, page 55] $Number$ and $Time$
# can't be used at the same time
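                            # Illustration (hypothetical template): 'seg-$RepresentationID$-$Number%05d$.m4s'
                            # has become 'seg-video1-%(Number)05d.m4s' at this point and is expanded
                            # once per segment number (or per SegmentTimeline entry) below.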
if '%(Number' in media_template:
representation_ms_info['segment_urls'] = [
media_template % {
'Number': segment_number,
'Bandwidth': representation_attrib.get('bandwidth'),
}
for segment_number in range(
representation_ms_info['start_number'],
representation_ms_info['total_number'] + representation_ms_info['start_number'])]
else:
representation_ms_info['segment_urls'] = []
segment_time = 0
def add_segment_url():
representation_ms_info['segment_urls'].append(
media_template % {
'Time': segment_time,
'Bandwidth': representation_attrib.get('bandwidth'),
}
)
for num, s in enumerate(representation_ms_info['s']):
segment_time = s.get('t') or segment_time
add_segment_url()
for r in range(s.get('r', 0)):
segment_time += s['d']
add_segment_url()
segment_time += s['d']
if 'segment_urls' in representation_ms_info:
f.update({
'segment_urls': representation_ms_info['segment_urls'],
'protocol': 'http_dash_segments',
})
if 'initialization_url' in representation_ms_info:
initialization_url = representation_ms_info['initialization_url'].replace('$RepresentationID$', representation_id)
f.update({
'initialization_url': initialization_url,
})
if not f.get('url'):
f['url'] = initialization_url
try:
existing_format = next(
fo for fo in formats
if fo['format_id'] == representation_id)
except StopIteration:
full_info = formats_dict.get(representation_id, {}).copy()
full_info.update(f)
formats.append(full_info)
else:
existing_format.update(f)
else:
self.report_warning('Unknown MIME type %s in DASH manifest' % mime_type)
return formats
def _parse_html5_media_entries(self, base_url, webpage, video_id, m3u8_id=None, m3u8_entry_protocol='m3u8'):
def absolute_url(video_url):
return compat_urlparse.urljoin(base_url, video_url)
def parse_content_type(content_type):
if not content_type:
return {}
ctr = re.search(r'(?P<mimetype>[^/]+/[^;]+)(?:;\s*codecs="?(?P<codecs>[^"]+))?', content_type)
if ctr:
mimetype, codecs = ctr.groups()
f = parse_codecs(codecs)
f['ext'] = mimetype2ext(mimetype)
return f
return {}
def _media_formats(src, cur_media_type):
full_url = absolute_url(src)
if determine_ext(full_url) == 'm3u8':
is_plain_url = False
formats = self._extract_m3u8_formats(
full_url, video_id, ext='mp4',
entry_protocol=m3u8_entry_protocol, m3u8_id=m3u8_id)
else:
is_plain_url = True
formats = [{
'url': full_url,
'vcodec': 'none' if cur_media_type == 'audio' else None,
}]
return is_plain_url, formats
entries = []
for media_tag, media_type, media_content in re.findall(r'(?s)(<(?P<tag>video|audio)[^>]*>)(.*?)</(?P=tag)>', webpage):
media_info = {
'formats': [],
'subtitles': {},
}
media_attributes = extract_attributes(media_tag)
src = media_attributes.get('src')
if src:
                _, formats = _media_formats(src, media_type)
media_info['formats'].extend(formats)
media_info['thumbnail'] = media_attributes.get('poster')
if media_content:
for source_tag in re.findall(r'<source[^>]+>', media_content):
source_attributes = extract_attributes(source_tag)
src = source_attributes.get('src')
if not src:
continue
is_plain_url, formats = _media_formats(src, media_type)
if is_plain_url:
f = parse_content_type(source_attributes.get('type'))
f.update(formats[0])
media_info['formats'].append(f)
else:
media_info['formats'].extend(formats)
for track_tag in re.findall(r'<track[^>]+>', media_content):
track_attributes = extract_attributes(track_tag)
kind = track_attributes.get('kind')
if not kind or kind == 'subtitles':
src = track_attributes.get('src')
if not src:
continue
lang = track_attributes.get('srclang') or track_attributes.get('lang') or track_attributes.get('label')
media_info['subtitles'].setdefault(lang, []).append({
'url': absolute_url(src),
})
if media_info['formats']:
entries.append(media_info)
return entries
def _extract_akamai_formats(self, manifest_url, video_id):
formats = []
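        # Akamai HD streams usually expose mirrored manifests: .../z/.../manifest.f4m
        # (HDS) and .../i/.../master.m3u8 (HLS), so each URL is derived here by
        # rewriting the other, and both are probed non-fatally.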
f4m_url = re.sub(r'(https?://.+?)/i/', r'\1/z/', manifest_url).replace('/master.m3u8', '/manifest.f4m')
formats.extend(self._extract_f4m_formats(
update_url_query(f4m_url, {'hdcore': '3.7.0'}),
video_id, f4m_id='hds', fatal=False))
m3u8_url = re.sub(r'(https?://.+?)/z/', r'\1/i/', manifest_url).replace('/manifest.f4m', '/master.m3u8')
formats.extend(self._extract_m3u8_formats(
m3u8_url, video_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False))
return formats
def _live_title(self, name):
""" Generate the title for a live video """
now = datetime.datetime.now()
now_str = now.strftime('%Y-%m-%d %H:%M')
return name + ' ' + now_str
def _int(self, v, name, fatal=False, **kwargs):
res = int_or_none(v, **kwargs)
if 'get_attr' in kwargs:
print(getattr(v, kwargs['get_attr']))
if res is None:
msg = 'Failed to extract %s: Could not parse value %r' % (name, v)
if fatal:
raise ExtractorError(msg)
else:
self._downloader.report_warning(msg)
return res
def _float(self, v, name, fatal=False, **kwargs):
res = float_or_none(v, **kwargs)
if res is None:
msg = 'Failed to extract %s: Could not parse value %r' % (name, v)
if fatal:
raise ExtractorError(msg)
else:
self._downloader.report_warning(msg)
return res
def _set_cookie(self, domain, name, value, expire_time=None):
cookie = compat_cookiejar.Cookie(
0, name, value, None, None, domain, None,
None, '/', True, False, expire_time, '', None, None, None)
self._downloader.cookiejar.set_cookie(cookie)
def _get_cookies(self, url):
""" Return a compat_cookies.SimpleCookie with the cookies for the url """
req = sanitized_Request(url)
self._downloader.cookiejar.add_cookie_header(req)
return compat_cookies.SimpleCookie(req.get_header('Cookie'))
def get_testcases(self, include_onlymatching=False):
t = getattr(self, '_TEST', None)
if t:
assert not hasattr(self, '_TESTS'), \
'%s has _TEST and _TESTS' % type(self).__name__
tests = [t]
else:
tests = getattr(self, '_TESTS', [])
for t in tests:
if not include_onlymatching and t.get('only_matching', False):
continue
t['name'] = type(self).__name__[:-len('IE')]
yield t
def is_suitable(self, age_limit):
""" Test whether the extractor is generally suitable for the given
age limit (i.e. pornographic sites are not, all others usually are) """
any_restricted = False
for tc in self.get_testcases(include_onlymatching=False):
if tc.get('playlist', []):
tc = tc['playlist'][0]
is_restricted = age_restricted(
tc.get('info_dict', {}).get('age_limit'), age_limit)
if not is_restricted:
return True
any_restricted = any_restricted or is_restricted
return not any_restricted
def extract_subtitles(self, *args, **kwargs):
if (self._downloader.params.get('writesubtitles', False) or
self._downloader.params.get('listsubtitles')):
return self._get_subtitles(*args, **kwargs)
return {}
def _get_subtitles(self, *args, **kwargs):
raise NotImplementedError('This method must be implemented by subclasses')
@staticmethod
def _merge_subtitle_items(subtitle_list1, subtitle_list2):
""" Merge subtitle items for one language. Items with duplicated URLs
will be dropped. """
list1_urls = set([item['url'] for item in subtitle_list1])
ret = list(subtitle_list1)
ret.extend([item for item in subtitle_list2 if item['url'] not in list1_urls])
return ret
@classmethod
def _merge_subtitles(cls, subtitle_dict1, subtitle_dict2):
""" Merge two subtitle dictionaries, language by language. """
ret = dict(subtitle_dict1)
for lang in subtitle_dict2:
ret[lang] = cls._merge_subtitle_items(subtitle_dict1.get(lang, []), subtitle_dict2[lang])
return ret
def extract_automatic_captions(self, *args, **kwargs):
if (self._downloader.params.get('writeautomaticsub', False) or
self._downloader.params.get('listsubtitles')):
return self._get_automatic_captions(*args, **kwargs)
return {}
def _get_automatic_captions(self, *args, **kwargs):
raise NotImplementedError('This method must be implemented by subclasses')
def mark_watched(self, *args, **kwargs):
if (self._downloader.params.get('mark_watched', False) and
(self._get_login_info()[0] is not None or
self._downloader.params.get('cookiefile') is not None)):
self._mark_watched(*args, **kwargs)
def _mark_watched(self, *args, **kwargs):
raise NotImplementedError('This method must be implemented by subclasses')
def geo_verification_headers(self):
headers = {}
geo_verification_proxy = self._downloader.params.get('geo_verification_proxy')
if geo_verification_proxy:
headers['Ytdl-request-proxy'] = geo_verification_proxy
return headers
class SearchInfoExtractor(InfoExtractor):
"""
    Base class for paged search query extractors.
They accept URLs in the format _SEARCH_KEY(|all|[0-9]):{query}
Instances should define _SEARCH_KEY and _MAX_RESULTS.
"""
@classmethod
def _make_valid_url(cls):
return r'%s(?P<prefix>|[1-9][0-9]*|all):(?P<query>[\s\S]+)' % cls._SEARCH_KEY
@classmethod
def suitable(cls, url):
return re.match(cls._make_valid_url(), url) is not None
def _real_extract(self, query):
mobj = re.match(self._make_valid_url(), query)
if mobj is None:
raise ExtractorError('Invalid search query "%s"' % query)
prefix = mobj.group('prefix')
query = mobj.group('query')
if prefix == '':
return self._get_n_results(query, 1)
elif prefix == 'all':
return self._get_n_results(query, self._MAX_RESULTS)
else:
n = int(prefix)
if n <= 0:
raise ExtractorError('invalid download number %s for query "%s"' % (n, query))
elif n > self._MAX_RESULTS:
self._downloader.report_warning('%s returns max %i results (you requested %i)' % (self._SEARCH_KEY, self._MAX_RESULTS, n))
n = self._MAX_RESULTS
return self._get_n_results(query, n)
def _get_n_results(self, query, n):
"""Get a specified number of results for a query"""
raise NotImplementedError('This method must be implemented by subclasses')
@property
def SEARCH_KEY(self):
return self._SEARCH_KEY<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains APIs for the `profile` crate used generically in the
//! rest of Servo. These APIs are here instead of in `profile` so that these
//! modules won't have to depend on `profile`.
#![deny(unsafe_code)]
#![feature(box_syntax)]
extern crate ipc_channel;
#[macro_use]
extern crate log;
#[macro_use] extern crate serde;
extern crate servo_config;
extern crate signpost;
#[allow(unsafe_code)]
pub mod energy;<|fim▁hole|>pub mod mem;
pub mod time;<|fim▁end|>
| |
<|file_name|>ErrorProcessorTestCase.java<|end_file_name|><|fim▁begin|>/*
* Solo - A small and beautiful blogging system written in Java.
* Copyright (c) 2010-present, b3log.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.b3log.solo.processor;
import org.apache.commons.lang.StringUtils;
import org.b3log.solo.AbstractTestCase;
import org.b3log.solo.MockHttpServletRequest;
import org.b3log.solo.MockHttpServletResponse;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* {@link ErrorProcessor} test case.
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @version 1.0.1.3, Feb 22, 2019
* @since 1.7.0
*/
@Test(suiteName = "processor")
public class ErrorProcessorTestCase extends AbstractTestCase {
/**
* Init.
*
* @throws Exception exception
*/
@Test
public void init() throws Exception {
super.init();
}
/**
* showErrorPage.
*/
@Test(dependsOnMethods = "init")
public void showErrorPage() {
final MockHttpServletRequest request = mockRequest();
request.setRequestURI("/error/403");
final MockHttpServletResponse response = mockResponse();<|fim▁hole|> Assert.assertTrue(StringUtils.contains(content, "<title>403 Forbidden! - Solo 的个人博客</title>"));
}
}<|fim▁end|>
|
mockDispatcherServletService(request, response);
final String content = response.body();
|
<|file_name|>run.py<|end_file_name|><|fim▁begin|>import sys
import multiprocessing
import os.path as osp
import gym
from collections import defaultdict
import tensorflow as tf
import numpy as np
from baselines.common.vec_env.vec_video_recorder import VecVideoRecorder
from baselines.common.vec_env.vec_frame_stack import VecFrameStack
from baselines.common.cmd_util import common_arg_parser, parse_unknown_args, make_vec_env, make_env
from baselines.common.tf_util import get_session
from baselines import logger
from importlib import import_module
from baselines.common.vec_env.vec_normalize import VecNormalize, VecNormalizeRewards
try:
from mpi4py import MPI
except ImportError:
MPI = None
try:
import pybullet_envs
except ImportError:
pybullet_envs = None
try:
import roboschool
except ImportError:
roboschool = None
_game_envs = defaultdict(set)
for env in gym.envs.registry.all():
# TODO: solve this with regexes
env_type = env._entry_point.split(':')[0].split('.')[-1]
_game_envs[env_type].add(env.id)
# reading benchmark names directly from retro requires
# importing retro here, and for some reason that crashes tensorflow
# in ubuntu
_game_envs['retro'] = {
'BubbleBobble-Nes',
'SuperMarioBros-Nes',
'TwinBee3PokoPokoDaimaou-Nes',
'SpaceHarrier-Nes',
'SonicTheHedgehog-Genesis',
'Vectorman-Genesis',
'FinalFight-Snes',
'SpaceInvaders-Snes',
}
def train(args, extra_args):
env_type, env_id = get_env_type(args.env)
print('env_type: {}'.format(env_type))
total_timesteps = int(args.num_timesteps)
seed = args.seed
learn = get_learn_function(args.alg)
alg_kwargs = get_learn_function_defaults(args.alg, env_type)
alg_kwargs.update(extra_args)
env = build_env(args)
if args.save_video_interval != 0:
env = VecVideoRecorder(env, osp.join(logger.Logger.CURRENT.dir, "videos"), record_video_trigger=lambda x: x % args.save_video_interval == 0, video_length=args.save_video_length)
if args.network:
alg_kwargs['network'] = args.network
else:
if alg_kwargs.get('network') is None:
alg_kwargs['network'] = get_default_network(env_type)
print('Training {} on {}:{} with arguments \n{}'.format(args.alg, env_type, env_id, alg_kwargs))
model = learn(
env=env,
seed=seed,
total_timesteps=total_timesteps,
**alg_kwargs
)
return model, env
def build_env(args):
ncpu = multiprocessing.cpu_count()
if sys.platform == 'darwin': ncpu //= 2
nenv = args.num_env or ncpu
alg = args.alg
seed = args.seed
env_type, env_id = get_env_type(args.env)
print(env_id)
#extract the agc_env_name
noskip_idx = env_id.find("NoFrameskip")
env_name = env_id[:noskip_idx].lower()
print("Env Name for Masking:", env_name)
if env_type in {'atari', 'retro'}:
if alg == 'deepq':
env = make_env(env_id, env_type, seed=seed, wrapper_kwargs={'frame_stack': True})
elif alg == 'trpo_mpi':
env = make_env(env_id, env_type, seed=seed)
else:
frame_stack_size = 4
env = make_vec_env(env_id, env_type, nenv, seed, gamestate=args.gamestate, reward_scale=args.reward_scale)
env = VecFrameStack(env, frame_stack_size)
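            # Stacking the last few observations lets the policy infer motion from
            # otherwise static frames; 4 is the conventional depth for Atari-style input.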
else:
config = tf.ConfigProto(allow_soft_placement=True,
intra_op_parallelism_threads=1,
inter_op_parallelism_threads=1)
config.gpu_options.allow_growth = True
get_session(config=config)
env = make_vec_env(env_id, env_type, args.num_env or 1, seed, reward_scale=args.reward_scale)
if args.custom_reward != '':
from baselines.common.vec_env import VecEnv, VecEnvWrapper
import baselines.common.custom_reward_wrapper as W
assert isinstance(env,VecEnv) or isinstance(env,VecEnvWrapper)
custom_reward_kwargs = eval(args.custom_reward_kwargs)
if args.custom_reward == 'live_long':
env = W.VecLiveLongReward(env,**custom_reward_kwargs)
elif args.custom_reward == 'random_tf':
env = W.VecTFRandomReward(env,**custom_reward_kwargs)
elif args.custom_reward == 'preference':
env = W.VecTFPreferenceReward(env,**custom_reward_kwargs)
elif args.custom_reward == 'rl_irl':
if args.custom_reward_path == '':
assert False, 'no path for reward model'
else:
if args.custom_reward_lambda == '':
assert False, 'no combination parameter lambda'
else:
env = W.VecRLplusIRLAtariReward(env, args.custom_reward_path, args.custom_reward_lambda)
elif args.custom_reward == 'pytorch':
if args.custom_reward_path == '':
assert False, 'no path for reward model'
else:
env = W.VecPyTorchAtariReward(env, args.custom_reward_path, env_name)
else:
assert False, 'no such wrapper exist'
if env_type == 'mujoco':
env = VecNormalize(env)
# if env_type == 'atari':
# input("Normalizing for ATari game: okay? [Enter]")
# #normalize rewards but not observations for atari
# env = VecNormalizeRewards(env)
return env
def get_env_type(env_id):
if env_id in _game_envs.keys():
env_type = env_id
env_id = [g for g in _game_envs[env_type]][0]
else:
env_type = None
for g, e in _game_envs.items():
if env_id in e:
env_type = g
break
assert env_type is not None, 'env_id {} is not recognized in env types'.format(env_id, _game_envs.keys())
return env_type, env_id
def get_default_network(env_type):
if env_type in {'atari', 'retro'}:
return 'cnn'
else:<|fim▁hole|>def get_alg_module(alg, submodule=None):
submodule = submodule or alg
try:
# first try to import the alg module from baselines
alg_module = import_module('.'.join(['baselines', alg, submodule]))
except ImportError:
# then from rl_algs
alg_module = import_module('.'.join(['rl_' + 'algs', alg, submodule]))
return alg_module
def get_learn_function(alg):
return get_alg_module(alg).learn
def get_learn_function_defaults(alg, env_type):
try:
alg_defaults = get_alg_module(alg, 'defaults')
kwargs = getattr(alg_defaults, env_type)()
except (ImportError, AttributeError):
kwargs = {}
return kwargs
def parse_cmdline_kwargs(args):
'''
convert a list of '='-spaced command-line arguments to a dictionary, evaluating python objects when possible
'''
def parse(v):
assert isinstance(v, str)
try:
return eval(v)
except (NameError, SyntaxError):
return v
return {k: parse(v) for k,v in parse_unknown_args(args).items()}
def main():
# configure logger, disable logging in child MPI processes (with rank > 0)
arg_parser = common_arg_parser()
args, unknown_args = arg_parser.parse_known_args()
extra_args = parse_cmdline_kwargs(unknown_args)
if MPI is None or MPI.COMM_WORLD.Get_rank() == 0:
rank = 0
logger.configure()
else:
logger.configure(format_strs=[])
rank = MPI.COMM_WORLD.Get_rank()
model, env = train(args, extra_args)
env.close()
if args.save_path is not None and rank == 0:
save_path = osp.expanduser(args.save_path)
model.save(save_path)
if args.play:
logger.log("Running trained model")
env = build_env(args)
obs = env.reset()
def initialize_placeholders(nlstm=128,**kwargs):
return np.zeros((args.num_env or 1, 2*nlstm)), np.zeros((1))
state, dones = initialize_placeholders(**extra_args)
while True:
actions, _, state, _ = model.step(obs,S=state, M=dones)
obs, _, done, _ = env.step(actions)
env.render()
done = done.any() if isinstance(done, np.ndarray) else done
if done:
obs = env.reset()
env.close()
if __name__ == '__main__':
main()<|fim▁end|>
|
return 'mlp'
|
<|file_name|>messageevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::MessageEventBinding;
use dom::bindings::codegen::Bindings::MessageEventBinding::MessageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use js::jsapi::{RootedValue, HandleValue, Heap, JSContext};
use js::jsval::JSVal;
use std::default::Default;
use string_cache::Atom;
#[dom_struct]
pub struct MessageEvent {
event: Event,
data: Heap<JSVal>,
origin: DOMString,
lastEventId: DOMString,
}
impl MessageEvent {
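    // The constructors below build on each other: `new_uninitialized` delegates to
    // `new_initialized` (which fills the fields and reflects the DOM object), and
    // `new` additionally initialises the inner Event with its type/bubbles/cancelable flags.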
pub fn new_uninitialized(global: GlobalRef) -> Root<MessageEvent> {
MessageEvent::new_initialized(global,<|fim▁hole|>
pub fn new_initialized(global: GlobalRef,
data: HandleValue,
origin: DOMString,
lastEventId: DOMString) -> Root<MessageEvent> {
let mut ev = box MessageEvent {
event: Event::new_inherited(),
data: Heap::default(),
origin: origin,
lastEventId: lastEventId,
};
ev.data.set(data.get());
reflect_dom_object(ev, global, MessageEventBinding::Wrap)
}
pub fn new(global: GlobalRef, type_: Atom,
bubbles: bool, cancelable: bool,
data: HandleValue, origin: DOMString, lastEventId: DOMString)
-> Root<MessageEvent> {
let ev = MessageEvent::new_initialized(global, data, origin, lastEventId);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &MessageEventBinding::MessageEventInit)
-> Fallible<Root<MessageEvent>> {
// Dictionaries need to be rooted
// https://github.com/servo/servo/issues/6381
let data = RootedValue::new(global.get_cx(), init.data);
let ev = MessageEvent::new(global, Atom::from(type_), init.parent.bubbles, init.parent.cancelable,
data.handle(),
init.origin.clone(), init.lastEventId.clone());
Ok(ev)
}
}
impl MessageEvent {
pub fn dispatch_jsval(target: &EventTarget,
scope: GlobalRef,
message: HandleValue) {
let messageevent = MessageEvent::new(
scope, atom!("message"), false, false, message,
DOMString::new(), DOMString::new());
messageevent.upcast::<Event>().fire(target);
}
}
impl MessageEventMethods for MessageEvent {
// https://html.spec.whatwg.org/multipage/#dom-messageevent-data
fn Data(&self, _cx: *mut JSContext) -> JSVal {
self.data.get()
}
// https://html.spec.whatwg.org/multipage/#dom-messageevent-origin
fn Origin(&self) -> DOMString {
self.origin.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-messageevent-lasteventid
fn LastEventId(&self) -> DOMString {
self.lastEventId.clone()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}<|fim▁end|>
|
HandleValue::undefined(),
DOMString::new(),
DOMString::new())
}
|
<|file_name|>feature_notifications.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Starwels developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -alertnotify, -blocknotify and -walletnotify options."""
import os
from test_framework.test_framework import StarwelsTestFramework
from test_framework.util import assert_equal, wait_until, connect_nodes_bi
class NotificationsTest(StarwelsTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def setup_network(self):
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
self.block_filename = os.path.join(self.options.tmpdir, "blocks.txt")
self.tx_filename = os.path.join(self.options.tmpdir, "transactions.txt")
# -alertnotify and -blocknotify on node0, walletnotify on node1
self.extra_args = [["-blockversion=2",
"-alertnotify=echo %%s >> %s" % self.alert_filename,
"-blocknotify=echo %%s >> %s" % self.block_filename],
["-blockversion=211",
"-rescan",
"-walletnotify=echo %%s >> %s" % self.tx_filename]]
super().setup_network()
def run_test(self):
self.log.info("test -blocknotify")
block_count = 10
blocks = self.nodes[1].generate(block_count)
# wait at most 10 seconds for expected file size before reading the content
wait_until(lambda: os.path.isfile(self.block_filename) and os.stat(self.block_filename).st_size >= (block_count * 65), timeout=10)
# file content should equal the generated blocks hashes
with open(self.block_filename, 'r') as f:
assert_equal(sorted(blocks), sorted(f.read().splitlines()))
self.log.info("test -walletnotify")
# wait at most 10 seconds for expected file size before reading the content
wait_until(lambda: os.path.isfile(self.tx_filename) and os.stat(self.tx_filename).st_size >= (block_count * 65), timeout=10)
# file content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
with open(self.tx_filename, 'r') as f:
assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
os.remove(self.tx_filename)
self.log.info("test -walletnotify after rescan")
# restart node to rescan to force wallet notifications
self.restart_node(1)
connect_nodes_bi(self.nodes, 0, 1)
wait_until(lambda: os.path.isfile(self.tx_filename) and os.stat(self.tx_filename).st_size >= (block_count * 65), timeout=10)
# file content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))<|fim▁hole|> # Mine another 41 up-version blocks. -alertnotify should trigger on the 51st.
self.log.info("test -alertnotify")
self.nodes[1].generate(41)
self.sync_all()
# Give starwelsd 10 seconds to write the alert notification
wait_until(lambda: os.path.isfile(self.alert_filename) and os.path.getsize(self.alert_filename), timeout=10)
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text = f.read()
# Mine more up-version blocks, should not get more alerts:
self.nodes[1].generate(2)
self.sync_all()
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text2 = f.read()
self.log.info("-alertnotify should not continue notifying for more unknown version blocks")
assert_equal(alert_text, alert_text2)
if __name__ == '__main__':
NotificationsTest().main()<|fim▁end|>
|
with open(self.tx_filename, 'r') as f:
assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
|
<|file_name|>lz4.rs<|end_file_name|><|fim▁begin|>extern crate lz4;
use std::env;
use std::fs::File;
use std::io::Read;
use std::io::Result;
use std::io::Write;
use std::iter::FromIterator;
use std::path::Path;
fn main() {
println!("LZ4 version: {}", lz4::version());
let suffix = ".lz4";
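    // Each command-line argument is treated as a path: names ending in ".lz4" are
    // decompressed next to the original (suffix stripped); everything else is
    // compressed to "<name>.lz4".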
for arg in Vec::from_iter(env::args())[1..].iter() {
if arg.ends_with(suffix) {
decompress(
&Path::new(arg),
&Path::new(&arg[0..arg.len() - suffix.len()]),
).unwrap();
} else {
compress(&Path::new(arg), &Path::new(&(arg.to_string() + suffix))).unwrap();
}
}
}
fn compress(src: &Path, dst: &Path) -> Result<()> {
println!("Compressing: {:?} -> {:?}", src, dst);
let mut fi = try!(File::open(src));
let mut fo = try!(lz4::EncoderBuilder::new().build(try!(File::create(dst))));
try!(copy(&mut fi, &mut fo));
match fo.finish() {<|fim▁hole|> }
}
fn decompress(src: &Path, dst: &Path) -> Result<()> {
println!("Decompressing: {:?} -> {:?}", src, dst);
let mut fi = try!(lz4::Decoder::new(try!(File::open(src))));
let mut fo = try!(File::create(dst));
copy(&mut fi, &mut fo)
}
fn copy(src: &mut Read, dst: &mut Write) -> Result<()> {
let mut buffer: [u8; 1024] = [0; 1024];
loop {
let len = try!(src.read(&mut buffer));
if len == 0 {
break;
}
try!(dst.write_all(&buffer[0..len]));
}
Ok(())
}<|fim▁end|>
|
(_, result) => result,
|
<|file_name|>LibSprite.ts<|end_file_name|><|fim▁begin|>/*****************************************************************************
* Represents a 2D sprite - An image that consists of different frames
* and is used for displaying an animation.
*
* @author Christopher Stock
* @version 0.0.2
*****************************************************************************/
class LibSprite
{
/** The image being used by this sprite, containing all frames. */
private img :HTMLImageElement = null;
/** The number of horizontal frames in this sprite's image. */
private framesX :number = 0;
/** The number of vertical frames in this sprite's image. */
private framesY :number = 0;
/** The width of one frame in px. */
public frameWidth :number = 0;
/** The height of one frame in px. */
public frameHeight :number = 0;
/** The total number of frames in this sprite. */
private frameCount :number = 0;
/** The index of the current active frame in this sprite. */
private currentFrame :number = 0;
/** The current tick between frame changes in this sprite. */
private currentTick :number = 0;
/** The delay time between frame change in ticks. */
private ticksBetweenFrameChange :number = 0;
/*****************************************************************************
* Creates a new sprite with the specified properties.
*
* @param img The image that contains all the frames for this sprite.
* @param framesX The number of horizontal frames in this image.
* @param framesY The number of vertical frames in this image.
* @param frameCount The total number of frames in this image.
* @param ticksBetweenFrameChange The number of game ticks to delay till this sprite
* changes to the next frame.
*****************************************************************************/
public constructor( img:HTMLImageElement, framesX:number, framesY:number, frameCount:number, ticksBetweenFrameChange:number )
{
this.img = img;
this.framesX = framesX;
this.framesY = framesY;
this.frameCount = frameCount;
this.frameWidth = img.width / framesX;
this.frameHeight = img.height / framesY;
this.ticksBetweenFrameChange = ticksBetweenFrameChange;
}
/*****************************************************************************
* Draws the current frame of this sprite onto the specified screen position.
*
* @param ctx The 2d rendering context.
* @param x The x location to draw this sprite's current frame onto.
* @param y The y location to draw this sprite's current frame onto.
* @param alpha The alpha value to draw this image with.
*****************************************************************************/
public draw( ctx:CanvasRenderingContext2D, x:number, y:number, alpha:number )
{
/*
LibDrawing.drawImageScaledClipped
(
ctx,
this.iImg,<|fim▁hole|> x,
y,
this.frameWidth * Math.floor( this.iCurrentFrame % this.iFramesX ),
this.frameHeight * Math.floor( this.iCurrentFrame / this.iFramesX ),
this.frameWidth,
this.frameHeight,
this.frameWidth,
this.frameHeight,
alpha
);
*/
}
/*****************************************************************************
* Performs the next tick for this sprite.
*****************************************************************************/
public nextTick()
{
if ( ++this.currentTick > this.ticksBetweenFrameChange )
{
this.currentTick = 0;
this.nextFrame();
}
}
/*****************************************************************************
* Changes the current frame of this sprite to the next one
* or resets it's current frame back to the first frame
* if the last one has been reached.
*****************************************************************************/
private nextFrame()
{
++this.currentFrame;
if ( this.currentFrame >= this.frameCount )
{
this.currentFrame = 0;
}
}
}<|fim▁end|>
| |
<|file_name|>runhub.py<|end_file_name|><|fim▁begin|>"""
Launcher for homity-hub
"""
from Hub import app
from Hub.api import hub_config
import cherrypy
from paste.translogger import TransLogger
if hub_config.get('ssl_enable'):
from OpenSSL import SSL
def run_cherrypy():
"""Start CherryPy server."""
#Enable WSGI access logging via Paste
app_logged = TransLogger(app, setup_console_handler=False)
<|fim▁hole|> cherrypy_config = {
'engine.autoreload_on': True,
'log.screen': True,
'server.socket_port': 5000,
'server.socket_host': '0.0.0.0'
}
if hub_config.get('ssl_enable'):
cherrypy_config['server.ssl_module'] = 'builtin'
cherrypy_config['server.ssl_private_key'] = hub_config.get(
'ssl_private_key')
cherrypy_config['server.ssl_certificate'] = hub_config.get(
'ssl_cert')
cherrypy.config.update(cherrypy_config)
# Start the CherryPy WSGI web server
cherrypy.engine.start()
cherrypy.engine.block()
def run_werkzeug():
""" Werkzeug deprecated in favor of CherryPy. """
if hub_config.get('ssl_enable'):
context = SSL.Context(SSL.SSLv23_METHOD)
context.use_privatekey_file(hub_config.get('ssl_private_key'))
context.use_certificate_file(hub_config.get('ssl_cert'))
app.run(host='0.0.0.0',
ssl_context=context,
debug=False)
else:
app.run(host='0.0.0.0', debug=False)
if __name__ == "__main__":
run_cherrypy()<|fim▁end|>
|
# Mount the WSGI callable object (app) on the root directory
cherrypy.tree.graft(app_logged, '/')
# Set the configuration of the web server
|
<|file_name|>ajax.ts<|end_file_name|><|fim▁begin|>import { Log } from './log';
//import Url = require('./url');
import { Url } from './url';
import { HashString } from './lib';
/**
 * Make an HTTP (Ajax) request.
*
* @param settings A set of key/value pairs that configure the Ajax request. All settings are optional. A default can be set for any option with $.ajaxSetup().
* @see {@link https://api.jquery.com/jQuery.ajax/#jQuery-ajax-settings}
*/
export const Ajax = (opts: JQueryAjaxSettings) => {
    // be sure to add the expected response data type to the request
if (opts.dataType == 'json') {
if (opts.data == null) {
opts.data = { datatype: 'json' }
    } else if (typeof opts.data === "string") { // opts.data is a string
let params: HashString = Url.SplitUrlParams(opts.data);
params['datatype'] = 'json';
opts.data = Url.JoinUrlParams(params);
    } else { // opts.data is an object
opts.data.datatype = 'json';
}
}
if (opts.xhrFields == null || opts.xhrFields == undefined) {
opts.xhrFields = {
withCredentials: true
};
}
if (opts.error == null || typeof opts.error !== 'function') {
opts.error = function (jqXHR, textStatus, errorThrown) {
Log('error:', textStatus, errorThrown);
};
} else {
let original = opts.error;
opts.error = function (jqXHR, textStatus, errorThrown) {
            // no call or apply here: the original call context must be preserved, which sometimes matters<|fim▁hole|>
}
return $.ajax(opts);
};<|fim▁end|>
|
original(jqXHR, textStatus, errorThrown);
Log('Ajax.error()', textStatus, errorThrown);
|
<|file_name|>plot_timebar.py<|end_file_name|><|fim▁begin|>""" Implementation of WaterFrame.plot_bar(key, ax=None, average_time=None)"""
import datetime
def plot_timebar(self, keys, ax=None, time_interval_mean=None):
"""
Make a bar plot of the input keys.
    The bars are positioned along the x axis by date/time; their heights are given by the key values.
Parameters
----------
keys: list of str
keys of self.data to plot.
ax: matplotlib.axes object, optional (ax = None)
It is used to add the plot to an input axes object.
time_interval_mean: str, optional (time_interval_mean = None)
It calculates an average value of a time interval. You can find
all of the resample options here:
https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html
Returns
-------
ax: matplotlib.AxesSubplot
Axes of the plot.
"""
def format_year(x):
return datetime.datetime.\
strptime(x, '%Y-%m-%d %H:%M:%S').strftime('%Y')
def format_day(x):
return datetime.datetime.\
strptime(x, '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d')
# Extract data
df = self.data[keys].dropna().reset_index().set_index('TIME')
df.index.rename("Date", inplace=True)
# Resample data
if time_interval_mean is None:
pass
else:
df = df.resample(time_interval_mean).mean()
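        # e.g. time_interval_mean='A' averages the series into yearly bars and 'D'
        # into daily bars, matching the x-tick label formatting applied further below.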
<|fim▁hole|> ax = df[keys].plot.bar(ax=ax)
# Write axes
try:
ax.set_ylabel(self.vocabulary[keys]['units'])
except KeyError:
print("Warning: We don't know the units of", keys,
"Please, add info into self.meaning[", keys, "['units']")
if time_interval_mean == 'A':
ax.set_xticklabels([format_year(x.get_text())
for x in ax.get_xticklabels()], rotation=60)
elif time_interval_mean == 'D':
ax.set_xticklabels([format_day(x.get_text())
for x in ax.get_xticklabels()], rotation=60)
return ax<|fim▁end|>
|
if isinstance(keys, list):
ax = df[keys].plot.bar(ax=ax, legend=True)
else:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.