prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k)
---|---|
<|file_name|>deviceTX_wC2.py<|end_file_name|><|fim▁begin|>try:
import RPi.GPIO as GPIO
from lib_nrf24 import NRF24
from math import *
import time
import spidev
import sys
import os.path
import numpy
import pickle
import sqlite3
import mat4py as m4p
import os
def compress(uncompressed):
"""Compress a string to a list of output symbols."""
# Build the dictionary.
dict_size = 256
dictionary = {chr(i): i for i in range(dict_size)}
#dictionary = dict((chr(i), i) for i in xrange(dict_size))
# in Python 3: dictionary = {chr(i): i for i in range(dict_size)}
w = ""
result = []
for c in uncompressed:
wc = w + c
if wc in dictionary:
w = wc
else:
result.append(dictionary[w])
# Add wc to the dictionary.
dictionary[wc] = dict_size
dict_size += 1
w = c
# Output the code for w.
if w:
result.append(dictionary[w])
return result
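# Worked example of the LZW routine above (hand-traced against the code):
# compress("TOBEORNOTTOBEORTOBEORNOT")
# -> [84, 79, 66, 69, 79, 82, 78, 79, 84, 256, 258, 260, 265, 259, 261, 263]
# Codes below 256 are single bytes; codes >= 256 index phrases learned on the fly.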
def printSummary(file1, file2):
"""
printSummary() prints out the number of bytes in the original file and in
the result file.
@params: two files that are to be checked.
@return: n/a.
"""
# Checks if the files exist in the current directory.
if (not os.path.isfile(file1)) or (not os.path.isfile(file2)):
sys.exit("Error: input file not found")
# Finds out how many bytes in each file.
f1_bytes = os.path.getsize(file1)
f2_bytes = os.path.getsize(file2)
sys.stderr.write(str(file1) + ': ' + str(f1_bytes) + ' bytes\n')
sys.stderr.write(str(file2) + ': ' + str(f2_bytes) + ' bytes\n')
def main():
GPIO.setmode(GPIO.BCM)
GPIO.setup(23, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(22, GPIO.OUT, initial=GPIO.LOW)
print("Transmitter")
pipes = [[0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2]]
payloadSize = 32
channel_TX = 0x40
channel_RX = 0x45
#Initialize the radio transceivers with the CE pin connected to GPIO22 and GPIO23
radio_Tx = NRF24(GPIO, spidev.SpiDev())
radio_Rx = NRF24(GPIO, spidev.SpiDev())
radio_Tx.begin(0, 22)
radio_Rx.begin(1, 24)
#We set the Payload Size to the limit which is 32 bytes
radio_Tx.setPayloadSize(payloadSize)
radio_Rx.setPayloadSize(payloadSize)
#We choose the channels to be used for one and the other transceiver
radio_Tx.setChannel(channel_TX)
radio_Rx.setChannel(channel_RX)
#We set the Transmission Rate
radio_Tx.setDataRate(NRF24.BR_250KBPS)
radio_Rx.setDataRate(NRF24.BR_250KBPS)
#Configuration of the power level to be used by the transceiver
radio_Tx.setPALevel(NRF24.PA_MIN)
radio_Rx.setPALevel(NRF24.PA_MIN)
#We disable the Auto Acknowledgement
radio_Tx.setAutoAck(False)
radio_Rx.setAutoAck(False)
radio_Tx.enableDynamicPayloads()
radio_Rx.enableDynamicPayloads()
#Open the writing and reading pipe
radio_Tx.openWritingPipe(pipes[1])
radio_Rx.openReadingPipe(0, pipes[0])
#We print the configuration details of both transceivers
print("Transmitter Details #################################################################################")
radio_Tx.printDetails()
print("*---------------------------------------------------------------------------------------------------*")
print("Receiver Details ####################################################################################")
radio_Rx.printDetails()
print("*---------------------------------------------------------------------------------------------------*")
###############################################################################################################################
###############################################################################################################################
###############################################################################################################################
#Read file to transmit
#inFile = open("SampleTextFile1Mb.txt", "rb")<|fim▁hole|> #flag variables
original_flag_data = 'A'
flag = ""
flag_n = 0
#packet related variables
overhead = 1
dataSize = payloadSize - overhead
dataControlSize = payloadSize - overhead
#Data Packets
packets = []
numberofPackets = 0
#ACK related variables
ack = []
handshake = []
ack_received = 0
handshakeAck_received = 0
#Time variables
time_ack = 1
start_c = time.time()
#Compression of the data to transmit into data2Tx_compressed
data2Tx_compressed = compress(data2Tx)
n = len(bin(max(data2Tx_compressed))) - 2
#We create the string with the packets needed to decompress the file transmitted
controlList_extended = []
controlList = []
for val in data2Tx_compressed:
division = int(val/256)
controlList.append(division)
if(n > 16):
for val in controlList:
division = int(val/256)
controlList_extended.append(division)
data2Send = []
for iterator in range(0, len(controlList)):
data2Send.append(data2Tx_compressed[iterator])
data2Send.append(controlList[iterator])
if(n > 16):
data2Send.append(controlList_extended[iterator])
final_c = time.time()
print("Compression time: " + str(final_c-start_c))
#Now we conform all the data packets in a list
for i in range (0, len(data2Send), dataSize):
if((i+dataSize) < len(data2Send)):
packets.append(data2Send[i:i+dataSize])
else:
packets.append(data2Send[i:])
numberofPackets += 1
#Start time
start = time.time()
radio_Rx.startListening()
radio_Tx.write(str(numberofPackets) + "," + str(n))
timeout = time.time() + time_ack
str_Handshake = ""
#While we don't receive the handshake ack we keep trying
while not (handshakeAck_received):
if radio_Rx.available(0):
radio_Rx.read(handshake, radio_Rx.getDynamicPayloadSize())
print("Something received")
for c in range(0, len(handshake)):
str_Handshake = str_Handshake + chr(handshake[c])
#If the received ACK does not match the expected one we retransmit, else we set the received handshake ack to 1
if(list(str_Handshake) != list("ACK")):
radio_Tx.write(str(numberofPackets) + "," + str(n))
timeout = time.time() + time_ack
print("Handshake Message Lost")
str_Handshake = ""
else:
print("Handshake done")
handshakeAck_received = 1
#If an established time passes and we have not received anything we retransmit the handshake packet
if((time.time() + 0.2) > timeout):
print("No Handshake ACK received resending message")
radio_Tx.write(str(numberofPackets) + "," + str(n))
timeout = time.time() + time_ack
#We iterate over every packet to be sent
dec_ready = 0
for message in packets:
flag = chr(ord(original_flag_data) + flag_n)
message2Send = list(flag) + message
radio_Tx.write(message2Send)
time.sleep(1)
if(dec_ready == 200):
time.sleep(0.3)
dec_ready = 0
timeout = time.time() + time_ack
radio_Rx.startListening()
str_ack = ""
#While we don't receive a correct ack for the transmitted packet we keep trying for the same packet
while not (ack_received):
if radio_Rx.available(0):
radio_Rx.read(ack, radio_Rx.getDynamicPayloadSize())
for c in range(0, len(ack)):
str_ack = str_ack + chr(ack[c])
print(str_ack)
#If the received ACK does not match the expected one we retransmit, else we set the received data ack to 1
if(list(str_ack) != (list("ACK") + list(flag))):
radio_Tx.write(list(flag) + list(message))
timeout = time.time() + time_ack
#print("Data ACK received but not the expected one --> resending message")
str_ack = ""
else:
ack_received = 1
#If an established time passes and we have not received anything we retransmit the data packet
if((time.time() + 0.01) > timeout):
print("No Data ACK received resending message")
radio_Tx.write(message2Send)
timeout = time.time() + time_ack
dec_ready = 0
ack_received = 0
flag_n = (flag_n + 1) % 10
final = time.time()
totalTime = final - start
print(totalTime)
GPIO.output(22, 0)
GPIO.output(23, 0)
if __name__ == '__main__':
main()
except KeyboardInterrupt:
GPIO.output(22,0)
GPIO.output(23,0)
GPIO.output(24,0)
GPIO.cleanup()<|fim▁end|> | inFile = open("ElQuijote.txt", "rb")
data2Tx = inFile.read()
inFile.close()
|
<|file_name|>issue-36499.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: aborting due to previous error
<|fim▁hole|>fn main() {
2 + +2;
}<|fim▁end|> | |
<|file_name|>population.py<|end_file_name|><|fim▁begin|>""" Implementation of a population for maintaining a GA population and
proposing structures to pair. """
from random import randrange, random
from math import tanh, sqrt, exp
from operator import itemgetter
import numpy as np
from ase.db.core import now
def count_looks_like(a, all_cand, comp):
"""Utility method for counting occurences."""
n = 0
for b in all_cand:
if a.info['confid'] == b.info['confid']:
continue
if comp.looks_like(a, b):
n += 1
return n
class Population(object):
"""Population class which maintains the current population
and proposes which candidates to pair together.
Parameters:
data_connection: DataConnection object
Carries out communication with the candidate database.
population_size: int
The number of candidates in the population.
comparator: Comparator object
this will tell if two configurations are equal.
Default compare atoms objects directly.
logfile: str
Text file that contains information about the population
The format is::
timestamp: generation(if available): id1,id2,id3...
Using this file greatly speeds up convergence checks.
Default None meaning that no file is written.
use_extinct: boolean
Set this to True if mass extinction and the extinct key
are going to be used. Default is False.
"""
def __init__(self, data_connection, population_size,
comparator=None, logfile=None, use_extinct=False):
self.dc = data_connection
self.pop_size = population_size
if comparator is None:
from ase.ga.standard_comparators import AtomsComparator
comparator = AtomsComparator()
self.comparator = comparator
self.logfile = logfile
self.use_extinct = use_extinct
self.pop = []
self.pairs = None
self.all_cand = None
self.__initialize_pop__()
def __initialize_pop__(self):
""" Private method that initalizes the population when
the population is created. """
# Get all relaxed candidates from the database
ue = self.use_extinct
all_cand = self.dc.get_all_relaxed_candidates(use_extinct=ue)
all_cand.sort(key=lambda x: x.get_raw_score(), reverse=True)
# all_cand.sort(key=lambda x: x.get_potential_energy())
# Fill up the population with the self.pop_size most stable
# unique candidates.
i = 0
while i < len(all_cand) and len(self.pop) < self.pop_size:
c = all_cand[i]
i += 1
eq = False
for a in self.pop:
if self.comparator.looks_like(a, c):
eq = True
break
if not eq:
self.pop.append(c)
for a in self.pop:
a.info['looks_like'] = count_looks_like(a, all_cand,
self.comparator)
self.all_cand = all_cand
self.__calc_participation__()
def __calc_participation__(self):
""" Determines, from the database, how many times each
candidate has been used to generate new candidates. """
(participation, pairs) = self.dc.get_participation_in_pairing()
for a in self.pop:
if a.info['confid'] in participation.keys():
a.info['n_paired'] = participation[a.info['confid']]
else:
a.info['n_paired'] = 0
self.pairs = pairs
def update(self, new_cand=None):
""" New candidates can be added to the database
after the population object has been created.
This method extracts these new candidates from the
database and includes them in the population. """
if len(self.pop) == 0:
self.__initialize_pop__()
if new_cand is None:
ue = self.use_extinct
new_cand = self.dc.get_all_relaxed_candidates(only_new=True,
use_extinct=ue)
for a in new_cand:
self.__add_candidate__(a)
self.all_cand.append(a)
self.__calc_participation__()
self._write_log()
def get_current_population(self):
""" Returns a copy of the current population. """
self.update()
return [a.copy() for a in self.pop]
def get_population_after_generation(self, gen):
""" Returns a copy of the population as it where
after generation gen"""
if self.logfile is not None:
f = open(self.logfile, 'r')
gens = {}
for l in f:
_, no, popul = l.split(':')
gens[int(no)] = [int(i) for i in popul.split(',')]
f.close()
return [c.copy() for c in self.all_cand[::-1]
if c.info['relax_id'] in gens[gen]]
all_candidates = [c for c in self.all_cand
if c.info['key_value_pairs']['generation'] <= gen]
cands = [all_candidates[0]]
for b in all_candidates:
if b not in cands:
for a in cands:
if self.comparator.looks_like(a, b):
break
else:
cands.append(b)
pop = cands[:self.pop_size]
return [a.copy() for a in pop]
def __add_candidate__(self, a):
""" Adds a single candidate to the population. """
# check if the structure is too low in raw score
if a.get_raw_score() < self.pop[-1].get_raw_score() \
and len(self.pop) == self.pop_size:
return
# check if the new candidate should
# replace a similar structure in the population
for (i, b) in enumerate(self.pop):
if self.comparator.looks_like(a, b):
if b.get_raw_score() < a.get_raw_score():
del self.pop[i]
a.info['looks_like'] = count_looks_like(a,
self.all_cand,
self.comparator)
self.pop.append(a)
self.pop.sort(key=lambda x: x.get_raw_score(),
reverse=True)
return
# the new candidate needs to be added, so remove the highest
# energy one
if len(self.pop) == self.pop_size:
del self.pop[-1]
# add the new candidate
a.info['looks_like'] = count_looks_like(a,
self.all_cand,
self.comparator)
self.pop.append(a)
self.pop.sort(key=lambda x: x.get_raw_score(), reverse=True)
def __get_fitness__(self, indices, with_history=True):
"""Calculates the fitness using the formula from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
Sign change on the fitness compared to the formulation in the
abovementioned paper due to maximizing raw_score instead of
minimizing energy. (Set raw_score=-energy to optimize the energy)
"""
scores = [x.get_raw_score() for x in self.pop]
min_s = min(scores)
max_s = max(scores)
T = min_s - max_s
if isinstance(indices, int):
indices = [indices]
f = [0.5 * (1. - tanh(2. * (scores[i] - max_s) / T - 1.))
for i in indices]
if with_history:
M = [float(self.pop[i].info['n_paired']) for i in indices]
L = [float(self.pop[i].info['looks_like']) for i in indices]
f = [f[i] * 1. / sqrt(1. + M[i]) * 1. / sqrt(1. + L[i])
for i in range(len(f))]
return f
def get_two_candidates(self, with_history=True):
""" Returns two candidates for pairing employing the
fitness criteria from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
and the roulette wheel selection scheme described in
R.L. Johnston Dalton Transactions,
Vol. 22, No. 22. (2003), pp. 4193-4207
"""
if len(self.pop) < 2:
self.update()
if len(self.pop) < 2:
return None
fit = self.__get_fitness__(range(len(self.pop)), with_history)
fmax = max(fit)
c1 = self.pop[0]
c2 = self.pop[0]
used_before = False
while c1.info['confid'] == c2.info['confid'] and not used_before:
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c1 = self.pop[t]
nnf = False
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c2 = self.pop[t]
nnf = False
c1id = c1.info['confid']
c2id = c2.info['confid']
used_before = (min([c1id, c2id]), max([c1id, c2id])) in self.pairs
return (c1.copy(), c2.copy())<|fim▁hole|> """Returns one candidate for mutation employing the
fitness criteria from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
and the roulette wheel selection scheme described in
R.L. Johnston Dalton Transactions,
Vol. 22, No. 22. (2003), pp. 4193-4207
"""
if len(self.pop) < 1:
self.update()
if len(self.pop) < 1:
return None
fit = self.__get_fitness__(range(len(self.pop)), with_history)
fmax = max(fit)
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c1 = self.pop[t]
nnf = False
return c1.copy()
def _write_log(self):
"""Writes the population to a logfile.
The format is::
timestamp: generation(if available): id1,id2,id3..."""
if self.logfile is not None:
ids = [str(a.info['relax_id']) for a in self.pop]
if ids != []:
try:
gen_nums = [c.info['key_value_pairs']['generation']
for c in self.all_cand]
max_gen = max(gen_nums)
except KeyError:
max_gen = ' '
f = open(self.logfile, 'a')
f.write('{time}: {gen}: {pop}\n'.format(time=now(),
pop=','.join(ids),
gen=max_gen))
f.close()
def is_uniform(self, func, min_std, pop=None):
"""Tests whether the current population is uniform or diverse.
Returns True if uniform, False otherwise.
Parameters:
func: function
that takes one argument an atoms object and returns a value that
will be used for testing against the rest of the population.
min_std: int or float
The minimum standard deviation, if the population has a lower
std dev it is uniform.
pop: list, optional
use this list of Atoms objects instead of the current population.
"""
if pop is None:
pop = self.pop
vals = [func(a) for a in pop]
stddev = np.std(vals)
if stddev < min_std:
return True
return False
def mass_extinction(self, ids):
"""Kills every candidate in the database with gaid in the
supplied list of ids. Typically used on the main part of the current
population if the diversity is too small.
Parameters:
ids: list
list of ids of candidates to be killed.
"""
for confid in ids:
self.dc.kill_candidate(confid)
self.pop = []
class RandomPopulation(Population):
def __init__(self, data_connection, population_size,
comparator=None, logfile=None, exclude_used_pairs=False,
bad_candidates=0, use_extinct=False):
self.exclude_used_pairs = exclude_used_pairs
self.bad_candidates = bad_candidates
Population.__init__(self, data_connection, population_size,
comparator, logfile, use_extinct)
def __initialize_pop__(self):
""" Private method that initalizes the population when
the population is created. """
# Get all relaxed candidates from the database
ue = self.use_extinct
all_cand = self.dc.get_all_relaxed_candidates(use_extinct=ue)
all_cand.sort(key=lambda x: x.get_raw_score(), reverse=True)
# all_cand.sort(key=lambda x: x.get_potential_energy())
if len(all_cand) > 0:
# Fill up the population with the self.pop_size most stable
# unique candidates.
ratings = []
best_raw = all_cand[0].get_raw_score()
i = 0
while i < len(all_cand):
c = all_cand[i]
i += 1
eq = False
for a in self.pop:
if self.comparator.looks_like(a, c):
eq = True
break
if not eq:
if len(self.pop) < self.pop_size - self.bad_candidates:
self.pop.append(c)
else:
exp_fact = exp(c.get_raw_score() / best_raw)
ratings.append([c, (exp_fact - 1) * random()])
ratings.sort(key=itemgetter(1), reverse=True)
for i in range(self.bad_candidates):
self.pop.append(ratings[i][0])
for a in self.pop:
a.info['looks_like'] = count_looks_like(a, all_cand,
self.comparator)
self.all_cand = all_cand
self.__calc_participation__()
def update(self):
""" The update method in Population will add to the end of
the population, that can't be used here since we might have
bad candidates that need to stay in the population, therefore
just recalc the population every time. """
self.pop = []
self.__initialize_pop__()
self._write_log()
def get_one_candidate(self):
"""Returns one candidates at random."""
if len(self.pop) < 1:
self.update()
if len(self.pop) < 1:
return None
t = randrange(0, len(self.pop), 1)
c = self.pop[t]
return c.copy()
def get_two_candidates(self):
"""Returns two candidates at random."""
if len(self.pop) < 2:
self.update()
if len(self.pop) < 2:
return None
c1 = self.pop[0]
c2 = self.pop[0]
used_before = False
while c1.info['confid'] == c2.info['confid'] and not used_before:
t = randrange(0, len(self.pop), 1)
c1 = self.pop[t]
t = randrange(0, len(self.pop), 1)
c2 = self.pop[t]
c1id = c1.info['confid']
c2id = c2.info['confid']
used_before = (tuple(sorted([c1id, c2id])) in self.pairs
and self.exclude_used_pairs)
return (c1.copy(), c2.copy())<|fim▁end|> |
def get_one_candidate(self, with_history=True): |
<|file_name|>datatype.rs<|end_file_name|><|fim▁begin|>use error::Result;
use handle::{Handle, ID, FromID, get_id_type};
use object::Object;
use ffi::h5i::{H5I_DATATYPE, hid_t};
use ffi::h5t::{H5T_INTEGER, H5T_FLOAT, H5T_NO_CLASS, H5T_NCLASSES, H5T_ORDER_BE, H5T_ORDER_LE,
H5T_SGN_2, H5Tcopy, H5Tget_class, H5Tget_order, H5Tget_offset, H5Tget_sign,
H5Tget_precision};
#[cfg(target_endian = "big")]
use globals::{
H5T_STD_I8BE, H5T_STD_I16BE,
H5T_STD_I32BE, H5T_STD_I64BE,
H5T_STD_U8BE, H5T_STD_U16BE,
H5T_STD_U32BE, H5T_STD_U64BE,
H5T_IEEE_F32BE, H5T_IEEE_F64BE,
};
#[cfg(target_endian = "little")]
use globals::{
H5T_STD_I8LE, H5T_STD_I16LE,
H5T_STD_I32LE, H5T_STD_I64LE,
H5T_STD_U8LE, H5T_STD_U16LE,
H5T_STD_U32LE, H5T_STD_U64LE,
H5T_IEEE_F32LE, H5T_IEEE_F64LE,
};
pub enum Datatype {
Integer(IntegerDatatype),
Float(FloatDatatype),
}
macro_rules! def_atomic {
($name:ident, $h5t:ident) => (
pub struct $name {
handle: Handle,
}
impl ID for $name {
fn id(&self) -> hid_t {
self.handle.id()
}
}
impl FromID for $name {
fn from_id(id: hid_t) -> Result<$name> {
h5lock!({
if get_id_type(id) != H5I_DATATYPE {
return Err(From::from(format!("Invalid datatype id: {}", id)));
}
let cls = H5Tget_class(id);
if cls != $h5t {
return Err(From::from(format!("Invalid datatype class: {:?}", cls)));
}
Ok($name { handle: try!(Handle::new(id)) })
})
}
}
impl Object for $name {}
impl AtomicDatatype for $name{}
)
}
def_atomic!(IntegerDatatype, H5T_INTEGER);
impl IntegerDatatype {
/// Returns true if the datatype is signed.
pub fn is_signed(&self) -> bool {
h5lock!(H5Tget_sign(self.id()) == H5T_SGN_2)
}
}
def_atomic!(FloatDatatype, H5T_FLOAT);
pub trait AtomicDatatype: ID {
/// Returns true if the datatype byte order is big endian.
fn is_be(&self) -> bool {
h5lock!(H5Tget_order(self.id()) == H5T_ORDER_BE)
}
/// Returns true if the datatype byte order is little endian.
fn is_le(&self) -> bool {
h5lock!(H5Tget_order(self.id()) == H5T_ORDER_LE)
}
/// Get the offset of the first significant bit.
fn offset(&self) -> usize {
h5call!(H5Tget_offset(self.id())).unwrap_or(0) as usize
}
/// Get the number of significant bits, excluding padding.
fn precision(&self) -> usize {
h5call!(H5Tget_precision(self.id())).unwrap_or(0) as usize
}
}
pub trait ToDatatype {
fn to_datatype() -> Result<Datatype>;
}
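// Usage sketch: map a native type to its HDF5 datatype handle, e.g.
// let dt = try!(<f64 as ToDatatype>::to_datatype()); // yields H5T_IEEE_F64LE on little-endian targets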
macro_rules! impl_atomic {
($tp:ty, $be:ident, $le:ident) => (
impl ToDatatype for $tp {
#[cfg(target_endian = "big")]
fn to_datatype() -> Result<Datatype> {
Datatype::from_id(h5try!(H5Tcopy(*$be)))
}
#[cfg(target_endian = "little")]
fn to_datatype() -> Result<Datatype> {
Datatype::from_id(h5try!(H5Tcopy(*$le)))
}
}
)
}
impl_atomic!(bool, H5T_STD_U8BE, H5T_STD_U8LE);
impl_atomic!(i8, H5T_STD_I8BE, H5T_STD_I8LE);
impl_atomic!(i16, H5T_STD_I16BE, H5T_STD_I16LE);
impl_atomic!(i32, H5T_STD_I32BE, H5T_STD_I32LE);
impl_atomic!(i64, H5T_STD_I64BE, H5T_STD_I64LE);
impl_atomic!(u8, H5T_STD_U8BE, H5T_STD_U8LE);
impl_atomic!(u16, H5T_STD_U16BE, H5T_STD_U16LE);
impl_atomic!(u32, H5T_STD_U32BE, H5T_STD_U32LE);
impl_atomic!(u64, H5T_STD_U64BE, H5T_STD_U64LE);
impl_atomic!(f32, H5T_IEEE_F32BE, H5T_IEEE_F32LE);
impl_atomic!(f64, H5T_IEEE_F64BE, H5T_IEEE_F64LE);
#[cfg(target_pointer_width = "32")] impl_atomic!(usize, H5T_STD_U32BE, H5T_STD_U32LE);
#[cfg(target_pointer_width = "32")] impl_atomic!(isize, H5T_STD_I32BE, H5T_STD_I32LE);
#[cfg(target_pointer_width = "64")] impl_atomic!(usize, H5T_STD_U64BE, H5T_STD_U64LE);
#[cfg(target_pointer_width = "64")] impl_atomic!(isize, H5T_STD_I64BE, H5T_STD_I64LE);
impl ID for Datatype {
fn id(&self) -> hid_t {
match *self {
Datatype::Integer(ref dt) => dt.id(),
Datatype::Float(ref dt) => dt.id(),
}
}
}
impl FromID for Datatype {
fn from_id(id: hid_t) -> Result<Datatype> {
h5lock!({
match get_id_type(id) {
H5I_DATATYPE => {
match H5Tget_class(id) {
H5T_INTEGER => Ok(Datatype::Integer(try!(IntegerDatatype::from_id(id)))),
H5T_FLOAT => Ok(Datatype::Float(try!(FloatDatatype::from_id(id)))),
H5T_NO_CLASS |
H5T_NCLASSES => Err(From::from("Invalid datatype class")),
cls => Err(From::from(format!("Unsupported datatype: {:?}", cls)))
}
},
_ => Err(From::from(format!("Invalid datatype id: {}", id))),
}
})
}
}
impl Object for Datatype {}
#[cfg(test)]
mod tests {
use super::{Datatype, AtomicDatatype, ToDatatype};
use super::Datatype::*;
use handle::FromID;
use ffi::h5i::H5I_INVALID_HID;
use ffi::h5t::H5Tcopy;
use globals::H5T_STD_REF_OBJ;
#[cfg(target_endian = "big")] const IS_BE: bool = true;
#[cfg(target_endian = "big")] const IS_LE: bool = false;
#[cfg(target_endian = "little")] const IS_BE: bool = false;
#[cfg(target_endian = "little")] const IS_LE: bool = true;
#[cfg(target_pointer_width = "32")] const POINTER_WIDTH: usize = 32;
#[cfg(target_pointer_width = "64")] const POINTER_WIDTH: usize = 64;
#[test]
pub fn test_invalid_datatype() {
assert_err!(Datatype::from_id(H5I_INVALID_HID), "Invalid datatype id");
assert_err!(Datatype::from_id(h5lock!(H5Tcopy(*H5T_STD_REF_OBJ))), "Unsupported datatype");
}
#[test]
pub fn test_atomic_datatype() {
macro_rules! test_integer {
($tp:ty, $signed:expr, $precision:expr) => (
match <$tp as ToDatatype>::to_datatype().unwrap() {
Datatype::Integer(dt) => {
assert_eq!(dt.is_be(), IS_BE);
assert_eq!(dt.is_le(), IS_LE);
assert_eq!(dt.offset(), 0);
assert_eq!(dt.precision(), $precision);
assert_eq!(dt.is_signed(), $signed);
},
_ => panic!("Integer datatype expected")
}
)
}
macro_rules! test_float {
($tp:ty, $precision:expr) => (
match <$tp as ToDatatype>::to_datatype().unwrap() {
Datatype::Float(dt) => {
assert_eq!(dt.is_be(), IS_BE);
assert_eq!(dt.is_le(), IS_LE);
assert_eq!(dt.offset(), 0);
assert_eq!(dt.precision(), $precision)
},
_ => panic!("Float datatype expected")
}
)
}
test_integer!(bool, false, 8);
test_integer!(i8, true, 8);
test_integer!(i16, true, 16);
test_integer!(i32, true, 32);
test_integer!(i64, true, 64);
test_integer!(u8, false, 8);
test_integer!(u16, false, 16);
test_integer!(u32, false, 32);
test_integer!(u64, false, 64);<|fim▁hole|>
test_float!(f32, 32);
test_float!(f64, 64);
test_integer!(isize, true, POINTER_WIDTH);
test_integer!(usize, false, POINTER_WIDTH);
}
}<|fim▁end|> | |
<|file_name|>generate.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
//
var fs = require('fs'),
path = require('path'),
http = require('http'),
BufferStream = require('bufferstream'),
// http://www.ksu.ru/eng/departments/ktk/test/perl/lib/unicode/UCDFF301.html
keys = ['value', 'name', 'category', 'class',
'bidirectional_category', 'mapping', 'decimal_digit_value', 'digit_value',
'numeric_value', 'mirrored', 'unicode_name', 'comment', 'uppercase_mapping',
'lowercase_mapping', 'titlecase_mapping'],
systemfiles = [
"UnicodeData.txt"
],
refs = 0;
// based on https://github.com/mathiasbynens/jsesc
function escape(charValue) {
var hexadecimal = charValue.replace(/^0*/, ''); // is already in hexadecimal
var longhand = hexadecimal.length > 2;
return '\\' + (longhand ? 'u' : 'x') +
('0000' + hexadecimal).slice(longhand ? -4 : -2);
}
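// Examples: escape('0041') -> '\x41', escape('2603') -> '\u2603'.
// Note: values above FFFF are truncated by the 4-digit slice, e.g.
// escape('1F600') -> '\uF600', a known limitation for astral-plane codepoints.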
function stringify(key, value) {
return key + ":" + JSON.stringify(value).replace(/\\\\(u|x)/, "\\$1");
}
function newFile(name, callback) {
var filename = path.join(__dirname, "category", name + ".js"),
file = fs.createWriteStream(filename, {encoding:'utf8'});
file.once('close', function () {
if (!--refs) {
console.log("done.");
callback();
}
});
refs++;
return file;
}
function parser(callback) {
var data = {},
buffer = new BufferStream({encoding:'utf8', size:'flexible'}),
resume = buffer.resume.bind(buffer);
buffer.split('\n', function (line) {
var v, c, char = {},
values = line.toString().split(';');
for(var i = 0 ; i < 15 ; i++)
char[keys[i]] = values[i];
v = parseInt(char.value, 16);
char.symbol = escape(char.value);
c = char.category;
if (!data[c]) {
data[c] = newFile(c, callback)
.on('drain', resume)
.once('open', function () {
console.log("saving data as %s.js …", c);
if (this.write('module.exports={' + stringify(v, char)))
buffer.resume();
});
buffer.pause();
} else if (!data[c].write("," + stringify(v, char))) {
buffer.pause();
}
});
buffer.on('end', function () {
var cat, categories = Object.keys(data),
len = categories.length;
for(var i = 0 ; i < len ; i++) {
cat = categories[i];
data[cat].end("};");
}
});
buffer.on('error', function (err) {
if (typeof err === 'string')
err = new Error(err);
throw err;
});
return buffer;
}
function read_file(success_cb, error_cb) {
var systemfile, sysfiles = systemfiles.slice(),
try_reading = function (success, error) {
systemfile = sysfiles.shift();
if (!systemfile) return error_cb();
console.log("try to read file %s …", systemfile);
fs.exists(systemfile, function (exists) {
if (!exists) {
console.error("%s not found.", systemfile);
return try_reading(success_cb, error_cb);
}
console.log("parsing …");
fs.createReadStream(systemfile, {encoding:'utf8'}).pipe(parser(success_cb));
});
};
try_reading(success_cb, error_cb);
}
// run
if (!module.parent) { // not required
read_file(process.exit, process.exit);
} else {
module.exports = {
escape:escape,
stringify:stringify,<|fim▁hole|> newFile:newFile,
parser:parser,
read_file:read_file
};
}<|fim▁end|> | |
<|file_name|>free.rs<|end_file_name|><|fim▁begin|>// Copyleft (ↄ) meh. <[email protected]> | http://meh.schizofreni.co
//
// This file is part of cancer.
//
// cancer is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// cancer is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with cancer. If not, see <http://www.gnu.org/licenses/>.
use std::rc::Rc;
use std::collections::{VecDeque, LinkedList};
use terminal::{Cell, Row};
use style::Style;
/// Wrapper for `Row` reuse.
#[derive(Debug)]
pub struct Free {
empty: Rc<Style>,
inner: LinkedList<Row>,
}
impl Free {
/// Create a new free list.
pub fn new() -> Self {
Free {
empty: Rc::new(Style::default()),
inner: LinkedList::new(),
}
}
/// Get the empty `Style`.
pub fn style(&self) -> Rc<Style> {<|fim▁hole|> /// Create an empty `Cell`.
pub fn cell(&self) -> Cell {
Cell::empty(self.empty.clone())
}
/// Reuse or create a new `Row`.
pub fn pop(&mut self, cols: usize) -> Row {
match self.inner.pop_front() {
Some(mut row) => {
row.wrapped = false;
row.resize(cols, Cell::empty(self.empty.clone()));
for cell in row.iter_mut().filter(|c| !c.is_default()) {
cell.make_empty(self.empty.clone());
}
row
}
None => {
Row {
inner: vec_deque![Cell::empty(self.empty.clone()); cols],
wrapped: false,
}
}
}
}
/// Push a `Row` for reuse.
pub fn push(&mut self, row: Row) {
self.inner.push_front(row);
}
}<|fim▁end|> | self.empty.clone()
}
|
<|file_name|>enable_management_service_ssh_acl_v4.py<|end_file_name|><|fim▁begin|>from a10sdk.common.A10BaseClass import A10BaseClass
class VeCfg(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ve_end: {"type": "number", "description": "VE port", "format": "number"}
:param ve_start: {"type": "number", "description": "VE port (VE Interface number)", "format": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "ve-cfg"
self.DeviceProxy = ""
self.ve_end = ""
self.ve_start = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class EthCfg(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ethernet_start: {"type": "number", "description": "Ethernet port (Ethernet Interface number)", "format": "interface"}
:param ethernet_end: {"type": "number", "description": "Ethernet port", "format": "interface"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "eth-cfg"
self.DeviceProxy = ""
self.ethernet_start = ""
self.ethernet_end = ""
for keys, value in kwargs.items():
setattr(self,keys, value)<|fim▁hole|>
class AclV4(A10BaseClass):
"""Class Description::
IPv4 ACL for SSH service.
Class acl-v4 supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param ve_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ve-end": {"type": "number", "description": "VE port", "format": "number"}, "ve-start": {"type": "number", "description": "VE port (VE Interface number)", "format": "number"}, "optional": true}}]}
:param acl_id: {"description": "ACL id", "format": "number", "type": "number", "maximum": 199, "minimum": 1, "optional": false}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param eth_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ethernet-start": {"type": "number", "description": "Ethernet port (Ethernet Interface number)", "format": "interface"}, "ethernet-end": {"type": "number", "description": "Ethernet port", "format": "interface"}, "optional": true}}]}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/enable-management/service/ssh/acl-v4/{acl_id}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "acl_id"]
self.b_key = "acl-v4"
self.a10_url="/axapi/v3/enable-management/service/ssh/acl-v4/{acl_id}"
self.DeviceProxy = ""
self.ve_cfg = []
self.acl_id = ""
self.uuid = ""
self.eth_cfg = []
for keys, value in kwargs.items():
setattr(self,keys, value)<|fim▁end|> | |
<|file_name|>test_transforms.py<|end_file_name|><|fim▁begin|>import pytest
from spinner import transforms
from spinner import coordinates
from spinner import cabinet
from example_cabinet_params import exact
def test_hex_to_cartesian():
h = coordinates.Hexagonal
c = coordinates.Cartesian2D
# Test single element cases
o0 = "o0"
o1 = "o1"
o2 = "o2"
assert transforms.hex_to_cartesian(
[(o0, h(0,0,0)), (o1, h(0,1,0)), (o2, h(1,1,0))]) == \
[(o0, c(0,0)), (o1, c(0,2)), (o2, c(1,1))]
def test_hex_to_skew_cartesian():
h = coordinates.Hexagonal
c = coordinates.Cartesian2D
# Test single element cases
o0 = "o0"
o1 = "o1"
o2 = "o2"
assert transforms.hex_to_skewed_cartesian(
[(o0, h(0,0,0)), (o1, h(0,1,0)), (o2, h(1,1,0))]) == \
[(o0, c(0,0)), (o1, c(1,2)), (o2, c(2,1))]
def test_rhombus_to_rect():
c2 = coordinates.Cartesian2D
c3 = coordinates.Cartesian3D
o0 = "o0"
o1 = "o1"
o2 = "o2"
assert transforms.rhombus_to_rect([]) == []
assert transforms.rhombus_to_rect(
[(o0, c2(-1,0)), (o1, c2(0,0)), (o2, c2(1,1))]) == \
[(o0, c2(1,0)), (o1, c2(0,0)), (o2, c2(1,1))]
assert transforms.rhombus_to_rect(
[(o0, c3(-1,-1,-1)), (o1, c3(0,1,1)), (o2, c3(1,1,0))]) == \
[(o0, c3(1,1,1)), (o1, c3(0,1,1)), (o2, c3(1,1,0))]
def test_compress():
c = coordinates.Cartesian2D
o0 = "o0"
o1 = "o1"
o2 = "o2"
o3 = "o3"
o4 = "o4"
o5 = "o5"
assert transforms.compress([(o0, c(0,0)), (o1, c(1,1)), (o2, c(2,0)),
(o3, c(0,2)), (o4, c(1,3)), (o5, c(2,2))]) == \
[(o0, c(0,0)), (o1, c(1,0)), (o2, c(2,0)),
(o3, c(0,1)), (o4, c(1,1)), (o5, c(2,1))]
def test_flip_axes():
c = coordinates.Cartesian2D
o0 = "o0"
o1 = "o1"
assert transforms.flip_axes([(o0, c(1,2)), (o1, c(3,4))]) == \
[(o0, c(2,1)), (o1, c(4,3))]
def test_folds():
c = coordinates.Cartesian2D
o0 = "o0"
o1 = "o1"
o2 = "o2"
o3 = "o3"
assert transforms.fold([], (1,1)) == []
# No folding
assert transforms.fold(
[(o0, c(0,0)), (o1, c(1,0)), (o2, c(2,0)), (o3, c(3,0))], (1,1)) == \
[(o0, c(0,0)), (o1, c(1,0)), (o2, c(2,0)), (o3, c(3,0))]
# Fold on X
assert transforms.fold(
[(o0, c(0,0)), (o1, c(1,0)), (o2, c(2,0)), (o3, c(3,0))], (2,1)) == \
[(o0, c(0,0)), (o1, c(2,0)), (o2, c(3,0)), (o3, c(1,0))]
# Fold on Y
assert transforms.fold(
[(o0, c(0,0)), (o1, c(0,1)), (o2, c(0,2)), (o3, c(0,3))], (1,2)) == \
[(o0, c(0,0)), (o1, c(0,2)), (o2, c(0,3)), (o3, c(0,1))]
def test_cabinetise():
c = coordinates.Cartesian2D
s = coordinates.Cabinet
o0 = "o0"
o1 = "o1"
o2 = "o2"
o3 = "o3"
assert transforms.cabinetise([], num_cabinets=0, frames_per_cabinet=0) == []
assert transforms.cabinetise(
[(o0, c(0,0)), (o1, c(1,0)), (o2, c(0,1)), (o3, c(1,1))],
num_cabinets=2, frames_per_cabinet=2, boards_per_frame=1) == \
[(o0, s(0,0,0)), (o1, s(1,0,0)), (o2, s(0,1,0)), (o3, s(1,1,0))]
<|fim▁hole|>def test_remove_gaps():
c = coordinates.Cabinet
o0 = "o0"
o1 = "o1"
o2 = "o2"
# Empty case
assert transforms.remove_gaps([]) == []
# Singletons (with and without need to move)
assert transforms.remove_gaps([(o0, c(0,0,0))]) == [(o0, c(0,0,0))]
assert transforms.remove_gaps([(o0, c(1,2,0))]) == [(o0, c(1,2,0))]
assert transforms.remove_gaps([(o0, c(1,2,3))]) == [(o0, c(1,2,0))]
# With and without gaps
assert set(transforms.remove_gaps(
[(o0, c(0,0,0)), (o1, c(0,0,1))])) ==\
set([(o0, c(0,0,0)), (o1, c(0,0,1))])
assert set(transforms.remove_gaps(
[(o0, c(0,0,0)), (o1, c(0,0,2))])) ==\
set([(o0, c(0,0,0)), (o1, c(0,0,1))])
assert set(transforms.remove_gaps(
[(o0, c(0,0,5)), (o1, c(0,0,2))])) ==\
set([(o0, c(0,0,1)), (o1, c(0,0,0))])
# Independent frames with restructuring needs
assert set(transforms.remove_gaps(
[(o0, c(1,0,5)), (o1, c(0,1,2))])) ==\
set([(o0, c(1,0,0)), (o1, c(0,1,0))])
assert set(transforms.remove_gaps(
[(o0, c(0,0,0)), (o1, c(0,0,3)), (o2, c(1,0,3))])) ==\
set([(o0, c(0,0,0)), (o1, c(0,0,1)), (o2, c(1,0,0))])
def test_cabinet_to_physical():
c = cabinet.Cabinet(**exact)
o0 = "o0"
o1 = "o1"
o2 = "o2"
o3 = "o3"
boards = transforms.cabinet_to_physical([(o0, coordinates.Cabinet(0, 0, 0)),
(o1, coordinates.Cabinet(0, 0, 1)),
(o2, coordinates.Cabinet(0, 1, 1)),
(o3, coordinates.Cabinet(1, 1, 1)),
], c)
b2c = dict(boards)
# Make sure all boards make it through
assert len(boards) == len(b2c)
assert set([o0, o1, o2, o3]) == set(b2c)
# Check all board positions
assert b2c[o0] == (42.0, 2.0, 2.0)
assert b2c[o1] == (40.5, 2.0, 2.0)
assert b2c[o2] == (40.5, 5.0, 2.0)
assert b2c[o3] == (14.0, 5.0, 2.0)<|fim▁end|> | |
<|file_name|>bodyParser.js<|end_file_name|><|fim▁begin|>var connect = require('..');
var assert = require('assert');
var app = connect();
app.use(connect.bodyParser());
app.use(function(req, res){
res.end(JSON.stringify(req.body));
});
describe('connect.bodyParser()', function(){
it('should default to {}', function(done){
app.request()
.post('/')
.end(function(res){
res.body.should.equal('{}');
done();
})
})
it('should parse JSON', function(done){
app.request()
.post('/')
.set('Content-Type', 'application/json')
.write('{"user":"tobi"}')
.end(function(res){
res.body.should.equal('{"user":"tobi"}');
done();
});
})
it('should parse x-www-form-urlencoded', function(done){
app.request()
.post('/')
.set('Content-Type', 'application/x-www-form-urlencoded')<|fim▁hole|> res.body.should.equal('{"user":"tobi"}');
done();
});
})
describe('with multipart/form-data', function(){
it('should populate req.body', function(done){
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="user"\r\n')
.write('\r\n')
.write('Tobi')
.write('\r\n--foo--')
.end(function(res){
res.body.should.equal('{"user":"Tobi"}');
done();
});
})
it('should support files', function(done){
var app = connect();
app.use(connect.bodyParser());
app.use(function(req, res){
assert('Tobi' == req.body.user.name);
req.files.text.originalFilename.should.equal('foo.txt');
req.files.text.path.should.include('.txt');
res.end(req.files.text.originalFilename);
});
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="user[name]"\r\n')
.write('\r\n')
.write('Tobi')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="text"; filename="foo.txt"\r\n')
.write('\r\n')
.write('some text here')
.write('\r\n--foo--')
.end(function(res){
res.body.should.equal('foo.txt');
done();
});
})
it('should expose options to multiparty', function(done){
var app = connect();
app.use(connect.bodyParser({
keepExtensions: true
}));
app.use(function(req, res){
assert('Tobi' == req.body.user.name);
assert(~req.files.text.path.indexOf('.txt'));
res.end(req.files.text.originalFilename);
});
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="user[name]"\r\n')
.write('\r\n')
.write('Tobi')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="text"; filename="foo.txt"\r\n')
.write('\r\n')
.write('some text here')
.write('\r\n--foo--')
.end(function(res){
res.body.should.equal('foo.txt');
done();
});
})
it('should work with multiple fields', function(done){
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="user"\r\n')
.write('\r\n')
.write('Tobi')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="age"\r\n')
.write('\r\n')
.write('1')
.write('\r\n--foo--')
.end(function(res){
res.body.should.equal('{"user":"Tobi","age":"1"}');
done();
});
})
it('should support nesting', function(done){
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="user[name][first]"\r\n')
.write('\r\n')
.write('tobi')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="user[name][last]"\r\n')
.write('\r\n')
.write('holowaychuk')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="user[age]"\r\n')
.write('\r\n')
.write('1')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="species"\r\n')
.write('\r\n')
.write('ferret')
.write('\r\n--foo--')
.end(function(res){
var obj = JSON.parse(res.body);
obj.user.age.should.equal('1');
obj.user.name.should.eql({ first: 'tobi', last: 'holowaychuk' });
obj.species.should.equal('ferret');
done();
});
})
it('should support multiple files of the same name', function(done){
var app = connect();
app.use(connect.bodyParser());
app.use(function(req, res){
req.files.text.should.have.length(2);
assert(req.files.text[0]);
assert(req.files.text[1]);
res.end();
});
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="text"; filename="foo.txt"\r\n')
.write('\r\n')
.write('some text here')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="text"; filename="bar.txt"\r\n')
.write('\r\n')
.write('some more text stuff')
.write('\r\n--foo--')
.end(function(res){
res.statusCode.should.equal(200);
done();
});
})
it('should support nested files', function(done){
var app = connect();
app.use(connect.bodyParser());
app.use(function(req, res){
Object.keys(req.files.docs).should.have.length(2);
req.files.docs.foo.originalFilename.should.equal('foo.txt');
req.files.docs.bar.originalFilename.should.equal('bar.txt');
res.end();
});
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-Disposition: form-data; name="docs[foo]"; filename="foo.txt"\r\n')
.write('\r\n')
.write('some text here')
.write('\r\n--foo\r\n')
.write('Content-Disposition: form-data; name="docs[bar]"; filename="bar.txt"\r\n')
.write('\r\n')
.write('some more text stuff')
.write('\r\n--foo--')
.end(function(res){
res.statusCode.should.equal(200);
done();
});
})
it('should next(err) on multipart failure', function(done){
var app = connect();
app.use(connect.bodyParser());
app.use(function(req, res){
res.end('whoop');
});
app.use(function(err, req, res, next){
err.message.should.equal('parser error, 16 of 28 bytes parsed');
res.statusCode = 500;
res.end();
});
app.request()
.post('/')
.set('Content-Type', 'multipart/form-data; boundary=foo')
.write('--foo\r\n')
.write('Content-filename="foo.txt"\r\n')
.write('\r\n')
.write('some text here')
.write('Content-Disposition: form-data; name="text"; filename="bar.txt"\r\n')
.write('\r\n')
.write('some more text stuff')
.write('\r\n--foo--')
.end(function(res){
res.statusCode.should.equal(500);
done();
});
})
})
// I'm too lazy to test this in both `.json()` and `.urlencoded()`
describe('verify', function () {
it('should throw 403 if verification fails', function (done) {
var app = connect();
app.use(connect.bodyParser({
verify: function () {
throw new Error();
}
}))
app.request()
.post('/')
.set('Content-Type', 'application/json')
.write('{"user":"tobi"}')
.expect(403, done);
})
it('should not throw if verification does not throw', function (done) {
var app = connect();
app.use(connect.bodyParser({
verify: function () {}
}))
app.use(function (req, res, next) {
res.statusCode = 204;
res.end();
})
app.request()
.post('/')
.set('Content-Type', 'application/json')
.write('{"user":"tobi"}')
.expect(204, done);
})
})
})<|fim▁end|> | .write('user=tobi')
.end(function(res){ |
<|file_name|>hue-states.ts<|end_file_name|><|fim▁begin|>var hue = require('node-hue-api');
var lightState = hue.lightState;
// WHITE STATES
var MID = 350;
var WARM = 400;
var X_WARM = 500;
var BRIGHT = 250;
// HUES
const HUE = {
BLOOD_ORANGE: 350,
RED: 0,
GREEN: 145,
BLUE: 250,
PURPLE: 280
}
const BRIGHTNESS = {
MAX: 100,
HIGH: 75,
MED: 50,
LOW: 25,
MIN: 1
};
const SATURATION = {
MAX: 100,
HIGH: 75,
MED: 50,
LOW: 25,
MIN: 1
};
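// Note on the builder API used below (assumed from node-hue-api): each STATES
// entry is a chainable lightState, where white(temp, bri) takes a colour
// temperature in mireds plus a brightness percentage, and hsl(h, s, l) takes
// hue in degrees with saturation and luminosity percentages.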
export const STATES = {
bright: lightState.create().on().white(BRIGHT, BRIGHTNESS.MAX),
warm: lightState.create().on().white(X_WARM, BRIGHTNESS.MAX),
full: lightState.create().on().white(MID, BRIGHTNESS.MAX),
high: lightState.create().on().white(WARM, BRIGHTNESS.HIGH),<|fim▁hole|>
medium: lightState.create().on().white(WARM, BRIGHTNESS.MED),
med: lightState.create().on().white(WARM, BRIGHTNESS.MED),
low: lightState.create().on().white(WARM, BRIGHTNESS.LOW),
fadelow: lightState.create().on().white(WARM, BRIGHTNESS.LOW).transition(5000),
minimum: lightState.create().on().white(WARM, BRIGHTNESS.MIN),
min: lightState.create().on().white(WARM, BRIGHTNESS.MIN),
off: lightState.create().off(),
fadeoff: lightState.create().off().transition(10000),
movie: lightState.create().on().hsl(HUE.BLOOD_ORANGE, SATURATION.MAX, BRIGHTNESS.LOW).transition(3000),
red: lightState.create().hsl(HUE.RED, SATURATION.MAX, BRIGHTNESS.MAX).on(),
redalert: lightState.create().hsl(HUE.RED, SATURATION.MAX, BRIGHTNESS.MAX).on().alert('lselect'),
green: lightState.create().hsl(HUE.GREEN, SATURATION.MAX, BRIGHTNESS.MAX).on(),
greenalert: lightState.create().hsl(HUE.GREEN, SATURATION.MAX, BRIGHTNESS.MAX).on().alert('lselect'),
blue: lightState.create().hsl(HUE.BLUE, SATURATION.MAX, BRIGHTNESS.MAX).on(),
bluealert: lightState.create().hsl(HUE.BLUE, SATURATION.MAX, BRIGHTNESS.MAX).on().alert('lselect'),
purple: lightState.create().hsl(HUE.PURPLE, SATURATION.MAX, BRIGHTNESS.MAX).on(),
purplealert: lightState.create().hsl(HUE.PURPLE, SATURATION.MAX, BRIGHTNESS.MAX).on().alert('lselect'),
party: lightState.create().on().hsl(HUE.RED, SATURATION.MAX, BRIGHTNESS.MAX).effect('colorloop'),
test: lightState.create().on().hsl(300, SATURATION.MAX, 60).effect('none').transition(5000),
test2: lightState.create().alert().white(500, 10)
};<|fim▁end|> | |
<|file_name|>BookingDao.java<|end_file_name|><|fim▁begin|>/*
* This file is part of "U Turismu" project.
*
* U Turismu is an enterprise application in support of calabrian tour operators.
* This system aims to promote tourist services provided by the operators
* and to develop and improve tourism in Calabria.
*
* Copyright (C) 2012 "LagrecaSpaccarotella" team.<|fim▁hole|> * it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uturismu.dao;
import uturismu.dto.Booking;
/**
* @author "LagrecaSpaccarotella" team.
*
*/
public interface BookingDao extends GenericDao<Booking> {
}<|fim▁end|> | *
* This program is free software: you can redistribute it and/or modify |
<|file_name|>pem.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This module uses code from TLSLlite
# TLSLite Author: Trevor Perrin)
import binascii
from x509 import ASN1_Node
def a2b_base64(s):
try:
b = bytearray(binascii.a2b_base64(s))
except Exception as e:
raise SyntaxError("base64 error: %s" % e)
return b
def b2a_base64(b):
return binascii.b2a_base64(b)
def dePem(s, name):
"""Decode a PEM string into a bytearray of its payload.
The input must contain an appropriate PEM prefix and postfix
based on the input name string, e.g. for name="CERTIFICATE":
-----BEGIN CERTIFICATE-----
MIIBXDCCAUSgAwIBAgIBADANBgkqhkiG9w0BAQUFADAPMQ0wCwYDVQQDEwRUQUNL
...
KoZIhvcNAQEFBQADAwA5kw==
-----END CERTIFICATE-----
The first such PEM block in the input will be found, and its
payload will be base64 decoded and returned.
"""
prefix = "-----BEGIN %s-----" % name
postfix = "-----END %s-----" % name
start = s.find(prefix)
if start == -1:
raise SyntaxError("Missing PEM prefix")
end = s.find(postfix, start+len(prefix))
if end == -1:
raise SyntaxError("Missing PEM postfix")
s = s[start+len("-----BEGIN %s-----" % name) : end]
retBytes = a2b_base64(s) # May raise SyntaxError
return retBytes
def dePemList(s, name):
"""Decode a sequence of PEM blocks into a list of bytearrays.
The input must contain any number of PEM blocks, each with the appropriate
PEM prefix and postfix based on the input name string, e.g. for
name="TACK BREAK SIG". Arbitrary text can appear between and before and
after the PEM blocks. For example:
" Created by TACK.py 0.9.3 Created at 2012-02-01T00:30:10Z -----BEGIN TACK
BREAK SIG-----
ATKhrz5C6JHJW8BF5fLVrnQss6JnWVyEaC0p89LNhKPswvcC9/s6+vWLd9snYTUv
YMEBdw69PUP8JB4AdqA3K6Ap0Fgd9SSTOECeAKOUAym8zcYaXUwpk0+WuPYa7Zmm
SkbOlK4ywqt+amhWbg9txSGUwFO5tWUHT3QrnRlE/e3PeNFXLx5Bckg= -----END TACK
BREAK SIG----- Created by TACK.py 0.9.3 Created at 2012-02-01T00:30:11Z
-----BEGIN TACK BREAK SIG-----
ATKhrz5C6JHJW8BF5fLVrnQss6JnWVyEaC0p89LNhKPswvcC9/s6+vWLd9snYTUv
YMEBdw69PUP8JB4AdqA3K6BVCWfcjN36lx6JwxmZQncS6sww7DecFO/qjSePCxwM
+kdDqX/9/183nmjx6bf0ewhPXkA0nVXsDYZaydN8rJU1GaMlnjcIYxY= -----END TACK
BREAK SIG----- "
All such PEM blocks will be found, decoded, and returned in an ordered list<|fim▁hole|> prefix = "-----BEGIN %s-----" % name
postfix = "-----END %s-----" % name
while 1:
start = s.find(prefix)
if start == -1:
return bList
end = s.find(postfix, start+len(prefix))
if end == -1:
raise SyntaxError("Missing PEM postfix")
s2 = s[start+len(prefix) : end]
retBytes = a2b_base64(s2) # May raise SyntaxError
bList.append(retBytes)
s = s[end+len(postfix) : ]
def pem(b, name):
"""Encode a payload bytearray into a PEM string.
The input will be base64 encoded, then wrapped in a PEM prefix/postfix
based on the name string, e.g. for name="CERTIFICATE":
-----BEGIN CERTIFICATE-----
MIIBXDCCAUSgAwIBAgIBADANBgkqhkiG9w0BAQUFADAPMQ0wCwYDVQQDEwRUQUNL
...
KoZIhvcNAQEFBQADAwA5kw==
-----END CERTIFICATE-----
"""
s1 = b2a_base64(b)[:-1] # remove terminating \n
s2 = ""
while s1:
s2 += s1[:64] + "\n"
s1 = s1[64:]
s = ("-----BEGIN %s-----\n" % name) + s2 + \
("-----END %s-----\n" % name)
return s
def pemSniff(inStr, name):
searchStr = "-----BEGIN %s-----" % name
return searchStr in inStr
def parse_private_key(s):
"""Parse a string containing a PEM-encoded <privateKey>."""
if pemSniff(s, "PRIVATE KEY"):
bytes = dePem(s, "PRIVATE KEY")
return _parsePKCS8(bytes)
elif pemSniff(s, "RSA PRIVATE KEY"):
bytes = dePem(s, "RSA PRIVATE KEY")
return _parseSSLeay(bytes)
else:
raise SyntaxError("Not a PEM private key file")
def _parsePKCS8(bytes):
s = ASN1_Node(str(bytes))
root = s.root()
version_node = s.first_child(root)
version = bytestr_to_int(s.get_value_of_type(version_node, 'INTEGER'))
if version != 0:
raise SyntaxError("Unrecognized PKCS8 version")
rsaOID_node = s.next_node(version_node)
ii = s.first_child(rsaOID_node)
rsaOID = decode_OID(s.get_value_of_type(ii, 'OBJECT IDENTIFIER'))
if rsaOID != '1.2.840.113549.1.1.1':
raise SyntaxError("Unrecognized AlgorithmIdentifier")
privkey_node = s.next_node(rsaOID_node)
value = s.get_value_of_type(privkey_node, 'OCTET STRING')
return _parseASN1PrivateKey(value)
def _parseSSLeay(bytes):
return _parseASN1PrivateKey(ASN1_Node(str(bytes)))
def bytesToNumber(s):
return int(binascii.hexlify(s), 16)
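# Interprets s as a big-endian byte string, e.g.
# bytesToNumber(bytearray(b'\x01\x00')) == 256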
def _parseASN1PrivateKey(s):
root = s.root()
version_node = s.first_child(root)
version = bytestr_to_int(s.get_value_of_type(version_node, 'INTEGER'))
if version != 0:
raise SyntaxError("Unrecognized RSAPrivateKey version")
n = s.next_node(version_node)
e = s.next_node(n)
d = s.next_node(e)
p = s.next_node(d)
q = s.next_node(p)
dP = s.next_node(q)
dQ = s.next_node(dP)
qInv = s.next_node(dQ)
return map(lambda x: bytesToNumber(s.get_value_of_type(x, 'INTEGER')), [n, e, d, p, q, dP, dQ, qInv])<|fim▁end|> | of bytearrays, which may have zero elements if no PEM blocks are found.
"""
bList = [] |
<|file_name|>test_triangularbarkbands.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS<|fim▁hole|># version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
import numpy as np
class TestTriangularBarkBands(TestCase):
def InitTriangularBarkBands(self, nbands):
return TriangularBarkBands(inputSize=1024,
numberBands=nbands,
lowFrequencyBound=0,
highFrequencyBound=44100*.5)
def testRegression(self):
spectrum = [1]*1024
mbands = self.InitTriangularBarkBands(24)(spectrum)
self.assertEqual(len(mbands), 24 )
self.assert_(not any(numpy.isnan(mbands)))
self.assert_(not any(numpy.isinf(mbands)))
self.assertAlmostEqualVector(mbands, [1]*24, 1e-5)
mbands = self.InitTriangularBarkBands(128)(spectrum)
self.assertEqual(len(mbands), 128 )
self.assert_(not any(numpy.isnan(mbands)))
self.assert_(not any(numpy.isinf(mbands)))
self.assertAlmostEqualVector(mbands, [1]*128, 1e-5)
def testRegressionRastaMode(self):
# Test the BFCC extractor compared to Rastamat specifications
audio = MonoLoader(filename = join(testdata.audio_dir, 'recorded/vignesh.wav'),
sampleRate = 44100)()*2**15
#Expected values generated in Rastamat/MATLAB
expected = [ 20.28919141, 23.80362425, 26.69797305, 27.10461133, 26.64508125,
26.7758322, 27.1787682, 27.10699792, 26.29040982, 25.04243486,
24.24791966, 24.17377063, 24.61976518, 25.29554584, 24.87617598,
23.79018513, 23.04026225, 23.20707811, 23.09716777, 23.33050168,
22.8201923, 21.49477903, 21.63639095, 22.12937291, 22.01981441,
21.70728156]
frameSize = 1102
hopSize = 441
fftsize = 2048
paddingSize = fftsize - frameSize
spectrumSize = int(fftsize/2) + 1
w = Windowing(type = 'hann',
size = frameSize,
zeroPadding = paddingSize,
normalized = False,
zeroPhase = False)
spectrum = Spectrum(size = fftsize)
mbands = TriangularBarkBands(inputSize= spectrumSize,
type = 'power',
highFrequencyBound = 8000,
lowFrequencyBound = 0,
numberBands = 26,
weighting = 'linear',
normalize = 'unit_max')
pool = Pool()
for frame in FrameGenerator(audio, frameSize = frameSize, hopSize = hopSize, startFromZero = True, validFrameThresholdRatio = 1):
pool.add('TriangularBarkBands', mbands(spectrum(w(frame))))
np.savetxt("out.csv", np.mean(np.log(pool['TriangularBarkBands']),0), delimiter=',')
self.assertAlmostEqualVector( np.mean(np.log(pool['TriangularBarkBands']),0), expected,1e-2)
def testZero(self):
# Inputting zeros should return zero. Try with different sizes
size = 1024
        while size >= 256:
            self.assertEqualVector(TriangularBarkBands()(zeros(size)), zeros(24))
            size //= 2
def testInvalidInput(self):
# mel bands should fail for a spectrum with less than 2 bins
self.assertComputeFails(TriangularBarkBands(), [])
self.assertComputeFails(TriangularBarkBands(), [0.5])
def testInvalidParam(self):
self.assertConfigureFails(TriangularBarkBands(), { 'numberBands': 0 })
self.assertConfigureFails(TriangularBarkBands(), { 'numberBands': 1 })
self.assertConfigureFails(TriangularBarkBands(), { 'lowFrequencyBound': -100 })
self.assertConfigureFails(TriangularBarkBands(), { 'lowFrequencyBound': 100,
'highFrequencyBound': 50 })
self.assertConfigureFails(TriangularBarkBands(), { 'highFrequencyBound': 30000,
'sampleRate': 22050})
def testWrongInputSize(self):
# This test makes sure that even though the inputSize given at
# configure time does not match the input spectrum, the algorithm does
# not crash and correctly resizes internal structures to avoid errors.
spec = [.1,.4,.5,.2,.1,.01,.04]*100
np.savetxt("out.csv", TriangularBarkBands(inputSize=1024, sampleRate=10, highFrequencyBound=4)(spec), delimiter=',')
self.assertAlmostEqualVector(
TriangularBarkBands(inputSize=1024, sampleRate=10, highFrequencyBound=4)(spec),
[0.0460643246769905]*24,
1e-6)
"""
def testNotEnoughSpectrumBins(self):
self.assertConfigureFails(TriangularBarkBands(), {'numberBands': 256,
'inputSize': 1025})
"""
suite = allTests(TestTriangularBarkBands)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License |
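The regression tests above lean on one property: with unit-max triangles and per-band normalization, an all-ones spectrum comes back as (roughly) one per band. To make that concrete, here is a rough NumPy sketch of a triangular filterbank on the Bark scale. It is not Essentia's implementation, and the Traunmueller Bark formula used for the frequency warping is an assumption:

import numpy as np

def hz_to_bark(f):
    # Traunmueller's approximation of the Bark scale.
    return 26.81 * f / (1960.0 + f) - 0.53

def bark_to_hz(z):
    return 1960.0 * (z + 0.53) / (26.28 - z)

def triangular_bark_bands(spectrum, sample_rate, n_bands, f_lo, f_hi):
    bin_freqs = np.linspace(0.0, sample_rate / 2.0, len(spectrum))
    # n_bands triangles need n_bands + 2 edge points, equally spaced in Bark.
    edges = bark_to_hz(np.linspace(hz_to_bark(f_lo), hz_to_bark(f_hi), n_bands + 2))
    out = np.zeros(n_bands)
    for b in range(n_bands):
        lo, center, hi = edges[b], edges[b + 1], edges[b + 2]
        rising = (bin_freqs - lo) / (center - lo)
        falling = (hi - bin_freqs) / (hi - center)
        weights = np.clip(np.minimum(rising, falling), 0.0, 1.0)  # unit-max triangle
        out[b] = np.dot(weights, spectrum) / max(weights.sum(), 1e-12)
    return out

Fed a flat spectrum of ones, every band integrates to its own weight sum and the normalization divides it back out, which is the behavior testRegression checks for.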
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::error;
use std::fmt;
#[derive(Debug)]
pub enum RouterError {
RouteNotFound,<|fim▁hole|>}
impl fmt::Display for RouterError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
RouterError::RouteNotFound => write!(f, "Route not found in container"),
}
}
}
impl error::Error for RouterError {
fn description(&self) -> &str {
return match *self {
RouterError::RouteNotFound => "Route not found in container",
};
}
fn cause(&self) -> Option<&error::Error> {
return match *self {
RouterError::RouteNotFound => None,
};
}
}<|fim▁end|> | |
<|file_name|>expr-match-panic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>fn test_simple() {
let r = match true { true => { true } false => { panic!() } };
assert_eq!(r, true);
}
fn test_box() {
let r = match true { true => { vec!(10) } false => { panic!() } };
assert_eq!(r[0], 10);
}
pub fn main() { test_simple(); test_box(); }<|fim▁end|> | |
<|file_name|>efs.py<|end_file_name|><|fim▁begin|>from troposphere import Tags, FindInMap, Ref, Template, Parameter, ImportValue, Output
from troposphere.efs import FileSystem, MountTarget
from troposphere.ec2 import SecurityGroup, SecurityGroupRule, Instance, Subnet
from create import export_ref, import_ref
from create.network import AclFactory, assoc_nacl_subnet
def efs_setup(template, ops, app_cfn_options, stack_name, stack_setup):
# Variable Declarations
vpc_id=ops.get('vpc_id')
efs_sg = app_cfn_options.network_names['tcpstacks'][stack_name]['sg_name']
efs_acl = app_cfn_options.network_names['tcpstacks'][stack_name]['nacl_name']<|fim▁hole|> FileSystemTags=Tags(Name='{}-{}'.format(ops.app_name, stack_name))
)
template.add_resource(efs_fs)
export_ref(template, '{}{}{}'.format(ops.app_name,stack_name,"Endpoint"), value=Ref(efs_fs), desc="Endpoint for EFS FileSystem")
# EFS FS Security Groups
efs_security_group=SecurityGroup(
title=efs_sg,
GroupDescription='Allow Access',
VpcId=vpc_id,
Tags=Tags(Name=efs_sg)
)
template.add_resource(efs_security_group)
export_ref(template, efs_sg, value=Ref(efs_sg), desc="Export for EFS Security Group")
# Create Network ACL for EFS Stack
efs_nacl = AclFactory(
template,
name=efs_acl,
vpc_id=ops.vpc_id,
in_networks=[val for key, val in sorted(ops.app_networks.items())],
in_ports=stack_setup['ports'],
out_ports=ops.out_ports,
out_networks=[val for key, val in sorted(ops.app_networks.items())],
ssh_hosts=ops.get("deploy_hosts"),
)
export_ref(
template,
export_name=efs_acl,
value=Ref(efs_acl),
desc="{}{} stack".format("NetACL for", stack_name)
)
# Create Subnets for Mount Targets
for k, v in ops['tcpstacks']['EFS']['networks'].items():
efs_subnet=Subnet(
title='{}{}{}{}'.format(ops.app_name, stack_name, "MountTargetSubnet", k.split("-")[-1]),
AvailabilityZone=k,
CidrBlock=v,
VpcId=vpc_id,
Tags=Tags(Name='{}-{}-{}-{}'.format(ops.app_name, stack_name, "MountTargetSubnet", k.split("-")[-1]))
)
template.add_resource(efs_subnet)
assoc_name = '{}{}{}'.format(stack_name,"AclAssoc",k.split("-")[-1])
assoc_nacl_subnet(template, assoc_name, Ref(efs_acl), Ref(efs_subnet))
efs_mount_target=MountTarget(
title='{}{}{}'.format(ops.app_name, "EFSMountTarget", k.split("-")[-1]),
FileSystemId=Ref(efs_fs),
SecurityGroups=[Ref(efs_security_group)],
SubnetId=Ref(efs_subnet)
)
template.add_resource(efs_mount_target)<|fim▁end|> |
    # Create EFS FileSystem
efs_fs=FileSystem(
title='{}{}'.format(ops.app_name, stack_name), |
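Stripped of the project-local plumbing (AclFactory, export_ref and the ops/app_cfn_options objects), the core troposphere pattern in this row is one FileSystem plus one MountTarget per subnet. A self-contained sketch, with placeholder subnet and security-group IDs:

from troposphere import Ref, Tags, Template
from troposphere.efs import FileSystem, MountTarget

t = Template()
fs = t.add_resource(FileSystem("AppEFS", FileSystemTags=Tags(Name="app-EFS")))
t.add_resource(MountTarget(
    "AppEFSMountTargetA",
    FileSystemId=Ref(fs),
    SecurityGroups=["sg-00000000"],  # placeholder security group ID
    SubnetId="subnet-00000000",      # placeholder subnet ID
))
print(t.to_json())  # emits the CloudFormation document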
<|file_name|>example.rs<|end_file_name|><|fim▁begin|>#pragma version(1)<|fim▁hole|>#pragma rs java_package_name(com.renderscript.courses.tlp2k14.renderscriptfilter)<|fim▁end|> | |
<|file_name|>0008_cart_tax_percentage.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-21 14:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
<|fim▁hole|> ]
operations = [
migrations.AddField(
model_name='cart',
name='tax_percentage',
field=models.DecimalField(decimal_places=3, default=0.085, max_digits=20),
),
]<|fim▁end|> | dependencies = [
('carts', '0007_auto_20161221_1337'), |
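The new column stores the tax rate as a fraction (0.085, i.e. 8.5%) with up to 20 digits and 3 decimal places. Django returns DecimalField values as decimal.Decimal, so downstream money math should stay in Decimal rather than float. A hypothetical usage sketch (the subtotal argument is an assumption, not a field from this migration):

from decimal import Decimal, ROUND_HALF_UP

def cart_total(subtotal, tax_percentage):
    """Apply a fractional tax rate to a Decimal subtotal and round to cents."""
    tax = (subtotal * tax_percentage).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
    return subtotal + tax

cart_total(Decimal("100.00"), Decimal("0.085"))  # Decimal('108.50')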
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import datetime
import uuid
import pytz
import os
import time
from django.contrib.sites.shortcuts import get_current_site
from django.core.servers.basehttp import FileWrapper
from django.views.decorators.csrf import csrf_exempt
from django.core.urlresolvers import reverse, resolve
from django.db.models import FieldDoesNotExist
from django.http import HttpResponse
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
from django.views.generic.base import TemplateView
from django.views.generic import DetailView
from django.views.generic.list import ListView
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.contrib.humanize.templatetags.humanize import naturaltime
from . import cid
from .models import BuildInfo, Project
from settings import MAX_CONCURRENT_BUILDS
def cid_context(request):
"""
Main context processor, adds main menu.
"""
main_menu = []
if request.user.is_authenticated():
if request.user.is_staff:
main_menu = []
else:
main_menu = []
# TODO: add github url here
return {'request': request,
'main_menu': main_menu,
'admin_access': request.user.is_staff,
'messages': []}
class PageWithAjax(TemplateView):
template_name = "ajax_page.jinja"
ajax_url = None
def dispatch(self, request, *args, **kwargs):
self.ajax_url = self.kwargs.pop('ajax_url')
return super(PageWithAjax, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(PageWithAjax, self).get_context_data(**kwargs)
ajax_url = reverse(self.ajax_url, args=self.args, kwargs=self.kwargs)
context['ajax_url'] = ajax_url
response = resolve(ajax_url).func(self.request, *self.args, **self.kwargs)
content = getattr(response, 'rendered_content')
context['initial_content'] = content
return context
page_with_ajax = login_required(PageWithAjax.as_view())
class BuildMixin(object):
status = 200
model = None
link_column = None
columns = []
    live_times = ['time_taken']
def render_to_response(self, context, **response_kwargs):
return super(BuildMixin, self).render_to_response(context, status=self.status, **response_kwargs)
def get_context_data(self, **kwargs):
context = super(BuildMixin, self).get_context_data(**kwargs)
context['columns'] = self.columns
context['link_column'] = self.link_column
context['headings'] = self._headings()
context['get_value'] = self._get_value
context['get_verbose_name'] = self._get_verbose_name
context['live_times'] = self.live_times
return context
def _headings(self):
for attr_name in self.columns:
yield self._get_verbose_name(attr_name)
def _get_verbose_name(self, attr_name):
meta = self.model._meta
try:
field = meta.get_field_by_name(attr_name)[0]
return field.verbose_name
except FieldDoesNotExist:
if hasattr(self.model, attr_name) and hasattr(getattr(self.model, attr_name), 'short_description'):
return getattr(self.model, attr_name).short_description
else:
return attr_name
def _get_value(self, obj, attr_name):
value = getattr(obj, attr_name)
if hasattr(value, '__call__'):
value = value()
if attr_name in self.live_times and isinstance(value, datetime.datetime):
return '<span class="live-time" data-start="%s"></span>' % value.isoformat(), True
if isinstance(value, datetime.datetime):
value = naturaltime(value)
return value, False
class BuildList(BuildMixin, ListView):
"""
List of previous builds
"""
model = BuildInfo
template_name = 'build_list.jinja'
link_column = 'created'
columns = ('created', 'time_taken', 'trigger', 'label', 'author', 'show_coverage', 'successful')
paginate_by = 50
def dispatch(self, request, *args, **kwargs):
if not any_active_builds(self.request):
self.status = 201
return super(BuildList, self).dispatch(request, *args, **kwargs)
build_list_ajax = login_required(BuildList.as_view())
class BuildDetails(BuildMixin, DetailView):
"""
details of a build.
"""
model = BuildInfo
template_name = 'build.jinja'
columns = ('created',
'modified',
'time_taken',
'trigger',
'action',
'label',
'on_master',
'fetch_branch',
'commit_url',
'author',
'complete',
'queued',
'test_success',
'test_passed',
'container',)
def get_context_data(self, **kwargs):
self.object = check(self.request, self.object)
if self.object.complete:
self.status = 202
if self.object.process_log:
self.object.process_log = self.object.process_log.replace(self.object.project.github_token,
'<github token>')
return super(BuildDetails, self).get_context_data(**kwargs)
build_details_ajax = login_required(BuildDetails.as_view())
@csrf_exempt
@require_POST
def webhook(request, pk):
project = get_project(pk)
if not project:
return HttpResponse('no project created', status=403)
    # this makes it even harder to guess the key via brute force
time.sleep(0.2)
build_info = BuildInfo.objects.create(project=project)
response_code, build_info2 = cid.process_github_webhook(request, build_info)
if response_code == 202:
set_site(build_info.project, request)
if _start_queue_build(build_info2):
msg = 'build started, id = %d' % build_info2.id
else:
msg = 'build queued, id = %d' % build_info2.id
response_code = 201
else:
build_info.delete()
msg = str(build_info2)
return HttpResponse(msg, status=response_code)
def status_svg(request, pk):
project = get_project(pk)
svg = project.status_svg if project else 'null.svg'
svg_path = os.path.join(os.path.dirname(__file__), 'static', svg)
response = HttpResponse(FileWrapper(open(svg_path)), content_type='image/svg+xml')
response['Etag'] = '"%s"' % uuid.uuid4()
response['Cache-Control'] = 'no-cache'
response['Expires'] = datetime.datetime.now().replace(tzinfo=pytz.UTC).strftime('%a, %d %b %Y %H:%M:%S %Z')
return response
@login_required
@require_POST
def go_build(request):
project = get_project()
if project:
set_site(project, request)
build_info = BuildInfo.objects.create(trigger='manual',
author=request.user.username,
project=project,
on_master=True)
if not _start_queue_build(build_info):
messages.info(request, 'build queued')
else:
messages.warning(request, 'No project created')
return redirect(reverse('build-list'))
def _start_queue_build(build_info):
"""
Check whether the build can begin immediately or needs to be queued.
    If it can start, start it; otherwise set queued to True and save build_info.
:param build_info: BuildInfo instance to queue or start
:returns: True if build started, else False
"""
if BuildInfo.objects.filter(complete=False, queued=False).count() >= MAX_CONCURRENT_BUILDS:
build_info.queued = True
build_info.save()
else:
cid.build(build_info)
return not build_info.queued
def check(request, build_info):
bi = build_info
try:
set_site(bi.project, request)
bi = cid.check(build_info)
except cid.KnownError, e:
messages.error(request, str(e))
bi = build_info
finally:
return bi
def check_build(request, build_info):
extract = ['sha', 'complete', 'test_success', 'test_passed', 'start', 'finished', 'process_log', 'ci_log']
bi = check(request, build_info)
return {at: getattr(bi, at) for at in extract}
def any_active_builds(r):
return any([not check_build(r, bi)['complete'] for bi in BuildInfo.objects.filter(complete=False)])<|fim▁hole|>
def set_site(project, request):
current_site = get_current_site(request)
project.update_url = 'http://' + current_site.domain + '/'
project.save()
def get_project(pk=None):
"""
gets the first project, stop gap until we support more than one project
"""
projects = Project.objects.all()
if pk is not None:
projects = projects.filter(pk=pk)
return projects.first()<|fim▁end|> | |
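_start_queue_build caps concurrency by counting builds that are neither complete nor queued. The symmetric step, promoting the oldest queued build once a running one finishes, is not visible in this excerpt; a sketch of what it could look like under the same model (hook point and function name are assumptions):

def start_next_queued_build():
    """Promote the oldest queued build if a concurrency slot is free."""
    running = BuildInfo.objects.filter(complete=False, queued=False).count()
    if running >= MAX_CONCURRENT_BUILDS:
        return None
    next_build = BuildInfo.objects.filter(queued=True).order_by('created').first()
    if next_build is not None:
        next_build.queued = False
        next_build.save()
        cid.build(next_build)
    return next_build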
<|file_name|>OServerCommandGetStaticContent.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.server.network.protocol.http.command.get;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.server.config.OServerCommandConfiguration;
import com.orientechnologies.orient.server.config.OServerEntryConfiguration;
import com.orientechnologies.orient.server.network.protocol.http.OHttpRequest;
import com.orientechnologies.orient.server.network.protocol.http.OHttpResponse;
import com.orientechnologies.orient.server.network.protocol.http.OHttpUtils;
public class OServerCommandGetStaticContent extends OServerCommandConfigurableAbstract {
private static final String[] DEF_PATTERN = { "GET|www", "GET|studio/", "GET|", "GET|*.htm", "GET|*.html",
"GET|*.xml", "GET|*.jpeg", "GET|*.jpg", "GET|*.png", "GET|*.gif", "GET|*.js", "GET|*.otf", "GET|*.css", "GET|*.swf",
"GET|favicon.ico", "GET|robots.txt" };
private static final String CONFIG_HTTP_CACHE = "http.cache:";
private static final String CONFIG_ROOT_PATH = "root.path";
private static final String CONFIG_FILE_PATH = "file.path";
private Map<String, OStaticContentCachedEntry> cacheContents;
private Map<String, String> cacheHttp = new HashMap<String, String>();
private String cacheHttpDefault = "Cache-Control: max-age=3000";
private String rootPath;
private String filePath;
public OServerCommandGetStaticContent() {
super(DEF_PATTERN);
}
public OServerCommandGetStaticContent(final OServerCommandConfiguration iConfiguration) {
super(iConfiguration.pattern);
// LOAD HTTP CACHE CONFIGURATION
for (OServerEntryConfiguration par : iConfiguration.parameters) {
if (par.name.startsWith(CONFIG_HTTP_CACHE)) {
final String filter = par.name.substring(CONFIG_HTTP_CACHE.length());
if (filter.equalsIgnoreCase("default"))
cacheHttpDefault = par.value;
else if (filter.length() > 0) {
final String[] filters = filter.split(" ");
for (String f : filters) {
cacheHttp.put(f, par.value);
}
}
} else if (par.name.startsWith(CONFIG_ROOT_PATH))
rootPath = par.value;
else if (par.name.startsWith(CONFIG_FILE_PATH))
filePath = par.value;
}
}
@Override
public boolean beforeExecute(OHttpRequest iRequest, OHttpResponse iResponse) throws IOException {
String header = cacheHttpDefault;
if (cacheHttp.size() > 0) {
final String resource = getResource(iRequest);
// SEARCH IN CACHE IF ANY
for (Entry<String, String> entry : cacheHttp.entrySet()) {
final int wildcardPos = entry.getKey().indexOf('*');
final String partLeft = entry.getKey().substring(0, wildcardPos);
final String partRight = entry.getKey().substring(wildcardPos + 1);
if (resource.startsWith(partLeft) && resource.endsWith(partRight)) {
// FOUND
header = entry.getValue();
break;
}
}
}
iResponse.setHeader(header);
return true;
}
@Override
public boolean execute(final OHttpRequest iRequest, final OHttpResponse iResponse) throws Exception {
iRequest.data.commandInfo = "Get static content";
iRequest.data.commandDetail = iRequest.url;
if (filePath == null && rootPath == null) {
// GET GLOBAL CONFIG
rootPath = iRequest.configuration.getValueAsString("orientdb.www.path", "src/site");
if (rootPath == null) {
OLogManager.instance().warn(this,
"No path configured. Specify the 'root.path', 'file.path' or the global 'orientdb.www.path' variable", rootPath);
return false;
}
}
if (filePath == null) {
// CHECK DIRECTORY
final File wwwPathDirectory = new File(rootPath);
if (!wwwPathDirectory.exists())
OLogManager.instance().warn(this, "path variable points to '%s' but it doesn't exists", rootPath);
if (!wwwPathDirectory.isDirectory())
OLogManager.instance().warn(this, "path variable points to '%s' but it isn't a directory", rootPath);
}
if (cacheContents == null && OGlobalConfiguration.SERVER_CACHE_FILE_STATIC.getValueAsBoolean())
// CREATE THE CACHE IF ENABLED
cacheContents = new HashMap<String, OStaticContentCachedEntry>();
InputStream is = null;
long contentSize = 0;
String type = null;
try {
String path;
if (filePath != null)
// SINGLE FILE
path = filePath;
else {
// GET FROM A DIRECTORY
final String url = getResource(iRequest);
if (url.startsWith("/www"))
path = rootPath + url.substring("/www".length(), url.length());
else
path = rootPath + url;
}
if (cacheContents != null) {
synchronized (cacheContents) {
final OStaticContentCachedEntry cachedEntry = cacheContents.get(path);
if (cachedEntry != null) {
is = new ByteArrayInputStream(cachedEntry.content);
contentSize = cachedEntry.size;
type = cachedEntry.type;
}
}
}
if (is == null) {
File inputFile = new File(path);
if (!inputFile.exists()) {
OLogManager.instance().debug(this, "Static resource not found: %s", path);
iResponse.sendStream(404, "File not found", null, null, 0);
return false;
}
if (filePath == null && inputFile.isDirectory()) {
inputFile = new File(path + "/index.htm");
if (inputFile.exists())
path = path + "/index.htm";
else {
inputFile = new File(path + "/index.html");
if (inputFile.exists())
path = path + "/index.html";
}
}
if (path.endsWith(".htm") || path.endsWith(".html"))
type = "text/html";
else if (path.endsWith(".png"))
type = "image/png";
else if (path.endsWith(".jpeg"))
type = "image/jpeg";
else if (path.endsWith(".js"))
type = "application/x-javascript";
else if (path.endsWith(".css"))
type = "text/css";
<|fim▁hole|> else
type = "text/plain";
is = new BufferedInputStream(new FileInputStream(inputFile));
contentSize = inputFile.length();
if (cacheContents != null) {
// READ THE ENTIRE STREAM AND CACHE IT IN MEMORY
final byte[] buffer = new byte[(int) contentSize];
for (int i = 0; i < contentSize; ++i)
buffer[i] = (byte) is.read();
OStaticContentCachedEntry cachedEntry = new OStaticContentCachedEntry();
cachedEntry.content = buffer;
cachedEntry.size = contentSize;
cachedEntry.type = type;
cacheContents.put(path, cachedEntry);
is = new ByteArrayInputStream(cachedEntry.content);
}
}
iResponse.sendStream(OHttpUtils.STATUS_OK_CODE, OHttpUtils.STATUS_OK_DESCRIPTION, type, is, contentSize);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (is != null)
try {
is.close();
} catch (IOException e) {
}
}
return false;
}
protected String getResource(final OHttpRequest iRequest) {
final String url;
if (OHttpUtils.URL_SEPARATOR.equals(iRequest.url))
url = "/www/index.htm";
else {
int pos = iRequest.url.indexOf('?');
if (pos > -1)
url = iRequest.url.substring(0, pos);
else
url = iRequest.url;
}
return url;
}
}<|fim▁end|> | else if (path.endsWith(".ico"))
type = "image/x-icon";
else if (path.endsWith(".otf"))
type = "font/opentype";
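The if/else chain in execute() (including the .ico and .otf branches this row's completion restores) is really a suffix-to-MIME-type table. Written as data instead of control flow, shown here as a Python sketch since the mapping rather than the language is the point:

MIME_BY_SUFFIX = {
    ".htm": "text/html", ".html": "text/html",
    ".png": "image/png", ".jpeg": "image/jpeg",
    ".js": "application/x-javascript", ".css": "text/css",
    ".ico": "image/x-icon", ".otf": "font/opentype",
}

def content_type(path):
    for suffix, mime in MIME_BY_SUFFIX.items():
        if path.endswith(suffix):
            return mime
    return "text/plain"  # mirrors the Java code's trailing else branch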
|
<|file_name|>002.rs<|end_file_name|><|fim▁begin|>// Each new term in the Fibonacci sequence is generated by adding the previous two terms.
// By starting with 1 and 2, the first 10 terms will be:
// 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
// By considering the terms in the Fibonacci sequence whose values do not exceed four million,
// find the sum of the even-valued terms.
struct LimitedFibonacci {
limit: uint,
last: uint,
next: uint
}
impl LimitedFibonacci {
fn new(limit: uint) -> LimitedFibonacci {
LimitedFibonacci {
limit: limit,
last: 1,
next: 1,
}
}
}
impl Iterator<uint> for LimitedFibonacci {
fn next(&mut self) -> Option<uint> {
let ret = self.last;
self.last = self.next;
self.next = self.last + ret;
if ret < self.limit {
Some(ret)
} else {
None
}
}
}
<|fim▁hole|> let mut fibs = LimitedFibonacci::new(limit);
fibs.filter(|n| n % 2 == 0)
.fold(0, |a, b| a + b)
}
fn main() {
println!("{}", compute(4000000));
}<|fim▁end|> | fn compute(limit: uint) -> uint { |
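For reference, the same computation as a Python sketch. It uses a property the iterator version does not need: every third Fibonacci number is even, and the even terms satisfy E(k) = 4*E(k-1) + E(k-2), so one can step through them directly. The expected result for the 4,000,000 limit is 4613732.

def even_fib_sum(limit):
    """Sum the even Fibonacci terms below limit."""
    a, b = 2, 8  # the first two even Fibonacci numbers
    total = 0
    while a < limit:
        total += a
        a, b = b, 4 * b + a
    return total

assert even_fib_sum(4000000) == 4613732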
<|file_name|>coros.py<|end_file_name|><|fim▁begin|># This module definitely remains in 1.0.x, probably in versions after that too.
import warnings
warnings.warn('gevent.coros has been renamed to gevent.lock', DeprecationWarning, stacklevel=2)<|fim▁hole|>from gevent.lock import __all__<|fim▁end|> |
from gevent.lock import * |
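This whole module is gevent's rename shim: warn once at import time, then re-export the new module's public surface. Note stacklevel=2, which attributes the DeprecationWarning to the module doing the import rather than to the shim itself. The same pattern for a hypothetical rename of oldpkg.foo to oldpkg.bar:

# oldpkg/foo.py, kept only for backward compatibility (hypothetical names)
import warnings
warnings.warn('oldpkg.foo has been renamed to oldpkg.bar',
              DeprecationWarning, stacklevel=2)
from oldpkg.bar import *          # re-export the public API
from oldpkg.bar import __all__    # keep introspection consistent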
<|file_name|>sujet.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la classe SujetAide, détaillée plus bas."""
from abstraits.obase import BaseObj
from primaires.format.description import Description
from primaires.format.fonctions import supprimer_accents, couper_phrase
class SujetAide(BaseObj):
"""Classe représentant un sujet d'aide.
Un sujet d'aide est une aide disponible in-game sur un sujet précis.
Il peut être consultable par un certain groupe de personnes (seulement
les administrateurs du jeu, par exemple) et peut être lié à d'autres
sujets.
Ses attributs sont :
cle -- la clé identifiant le sujet pour les immortels
titre -- le titre du sujet
contenu -- le contenu du sujet d'aide
mots_cles -- des mots-clés pointant vers ce sujet
str_groupe -- une chaîne décrivant le groupe autorisé
sujets_lies -- les sujets liés (des objets SujetAide contenus
dans une liste)
"""
enregistrer = True
_nom = "sujet_aide"
_version = 1
def __init__(self, cle):
"""Constructeur du sujet d'aide."""
BaseObj.__init__(self)
self.cle = cle
self.titre = "un sujet d'aide"
self.pere = None
self.contenu = Description(parent=self, scriptable=False)
self.mots_cles = []
self._str_groupe = "joueur"
self.__sujets_lies = []
self.__sujets_fils = []
self._construire()
def __getnewargs__(self):
return ("", )
def __str__(self):
return "aide:" + self.titre
@property
def str_mots_cles(self):
return ", ".join(self.mots_cles) or "aucun mot-clé"
def _get_str_groupe(self):
return self._str_groupe or "aucun"
def _set_str_groupe(self, nom_groupe):
self._str_groupe = nom_groupe
str_groupe = property(_get_str_groupe, _set_str_groupe)
@property
def grp(self):
groupe = type(self).importeur.interpreteur.groupes[self._str_groue]
return groupe
@property
def sujets_lies(self):
"""Retourne une liste déréférencée des sujets liés."""
return [s for s in self.__sujets_lies if s is not None]
@property
def str_sujets_lies(self):
"""Retourne une chaîne contenant les sujets liés."""
return ", ".join([s.titre for s in self.sujets_lies]) or \
"aucun sujet lié"
@property
def sujets_fils(self):
"""Retourne une liste déréférencée des sujets fils."""
return [s for s in self.__sujets_fils if s is not None]
@property
def tab_sujets_fils(self):
"""Retourne un tableau des sujets fils."""
lignes = []
taille = max([len(s.titre) for s in self.sujets_fils] or (10, ))
if taille > 30:
taille = 30
sep = "+" + 17 * "-" + "+" + (taille + 2) * "-" + "+"
en_tete = sep + "\n" + "| |tit|" + "Sujet".ljust(15) + "|ff| |"
en_tete += " |tit|" + "Titre".ljust(taille) + "|ff| |\n" + sep
for s in self.sujets_fils:
ligne = "| |ent|" + s.cle.ljust(15) + "|ff| | "
ligne += couper_phrase(s.titre, taille).ljust(taille) + " |"
lignes.append(ligne)
if lignes:
return en_tete + "\n" + "\n".join(lignes) + "\n" + sep
else:
return "|att|Aucun sujet affilié.|ff|"
def sommaire(self, personnage, indent=""):
"""Renvoie le sommaire du sujet, si sommaire il y a."""
ret = ""
i = 1
for sujet in self.sujets_fils:
if importeur.interpreteur.groupes.explorer_groupes_inclus(
personnage.grp, sujet.str_groupe):
ret += "\n" + indent + str(i) + ". |cmd|"
ret += sujet.titre.capitalize() + "|ff|"
if self.sujets_fils:
ret += sujet.sommaire(personnage, \
indent=indent+"{}.".format(i))
i += 1
return ret
def est_lie(self, sujet):
"""Retourne True si le sujet est lié, False sinon."""
return sujet in self.__sujets_lies and self in sujet.__sujets_lies
def ajouter_lie(self, sujet):
"""Lie un sujet au courant."""
self.__sujets_lies.append(sujet)
sujet.__sujets_lies.append(self)
def supprimer_lie(self, sujet):
"""Supprime un sujet de la liste des sujets liés."""
self.__sujets_lies.remove(sujet)
sujet.__sujets_lies.remove(self)
def est_fils(self, sujet):
"""Retourne True si le sujet est fils de celui-ci, False sinon."""
return sujet in self.__sujets_fils and sujet.pere is self
def ajouter_fils(self, sujet):
"""Ajoute le sujet aux fils."""
self.__sujets_fils.append(sujet)
sujet.pere = self<|fim▁hole|> sujet.pere = None
def echanger_fils(self, sujet, bas=False):
"""Change un fils de place vers le haut ou le bas de la liste."""
i = self.sujets_fils.index(sujet)
if i == 0 and not bas:
raise ValueError("le sujet est déjà en haut de la liste")
elif i == len(self.__sujets_fils) - 1 and bas:
raise ValueError("le sujet est déjà en bas de la liste")
del self.__sujets_fils[i]
if not bas:
self.__sujets_fils.insert(i - 1, sujet)
else:
self.__sujets_fils.insert(i + 1, sujet)
def vider(self):
"""Prépare la destruction du sujet."""
for s in self.sujets_fils:
s.pere = self.pere
if self.pere:
self.pere.ajouter_fils(s)
if self.pere is not None:
self.pere.supprimer_fils(self)
for s in self.sujets_lies:
s.supprimer_lie(self)
def afficher_pour(self, personnage):
"""Affiche le sujet d'aide pour personnage."""
nb_ti = int((31 - len(self.titre)) / 2)
ret = "|tit|" + "-" * nb_ti + "= " + self.titre.capitalize()
ret += " =" + "-" * nb_ti
ret += "|ff|\n"
if self.sujets_fils:
ret += "\nSommaire :"
ret += self.sommaire(personnage) + "\n"
ret += "\n" + self.afficher_contenu(personnage)
if self.sujets_lies:
sujets_lies = []
for sujet in self.sujets_lies:
if self.importeur.interpreteur.groupes. \
explorer_groupes_inclus(personnage.grp,
sujet.str_groupe):
sujets_lies.append(sujet)
if sujets_lies:
s = len(sujets_lies) > 1 and "s" or ""
ret += "\n\nSujet{s} lié{s} : |ent|".format(s=s)
ret += "|ff|, |ent|".join([s.titre for s in sujets_lies])
ret += "|ff|."
return ret
def afficher_contenu(self, personnage, ident="", sp="|sp|"):
"""Affiche le contenu de self et ses sujets fils."""
ret = str(self.contenu)
for i, s in enumerate(self.sujets_fils):
if importeur.interpreteur.groupes.explorer_groupes_inclus(
personnage.grp, s.str_groupe):
ret += "\n" + sp + "\n|tit|" + ident + str(i + 1) + ". " + \
s.titre.capitalize() + "|ff|"
ret += "\n\n" + s.afficher_contenu(personnage,
ident=ident + "{}.".format(i + 1), sp="\n\n")
return ret<|fim▁end|> |
def supprimer_fils(self, sujet):
"""Supprime le sujet des fils."""
self.__sujets_fils.remove(sujet) |
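ajouter_fils/supprimer_fils (and the ajouter_lie/supprimer_lie pair for linked topics) always update both ends of the relation in a single call, which is what keeps est_fils and est_lie consistent. The invariant, reduced to a minimal Python sketch:

class Node(object):
    """Parent/child links maintained pairwise, mirroring SujetAide's pattern."""
    def __init__(self):
        self.parent = None
        self.children = []
    def add_child(self, node):
        self.children.append(node)  # forward link...
        node.parent = self          # ...and back link, in the same call
    def remove_child(self, node):
        self.children.remove(node)
        node.parent = None

root, leaf = Node(), Node()
root.add_child(leaf)
assert leaf.parent is root and leaf in root.children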
<|file_name|>document_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//!
//! <https://html.spec.whatwg.org/multipage/#the-end>
use dom::bindings::root::Dom;
use dom::document::Document;
use ipc_channel::ipc::IpcSender;
use net_traits::{CoreResourceMsg, FetchChannels, FetchResponseMsg};
use net_traits::{ResourceThreads, IpcSend};
use net_traits::request::RequestInit;
use servo_url::ServoUrl;
use std::thread;
#[derive(Clone, Debug, JSTraceable, MallocSizeOf, PartialEq)]
pub enum LoadType {
Image(ServoUrl),
Script(ServoUrl),
Subframe(ServoUrl),
Stylesheet(ServoUrl),
PageSource(ServoUrl),
Media,
}
impl LoadType {
fn url(&self) -> Option<&ServoUrl> {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::PageSource(ref url) => Some(url),
LoadType::Media => None,
}
}
}
/// Canary value ensuring that manually added blocking loads (i.e. ones that weren't
/// created via DocumentLoader::fetch_async) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: Dom<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.loader_mut().add_blocking_load(load.clone());
LoadBlocker {
doc: Dom::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&ServoUrl> {
self.load.as_ref().and_then(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if !thread::panicking() {
debug_assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, MallocSizeOf)]
pub struct DocumentLoader {
resource_threads: ResourceThreads,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn new(existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_threads(existing.resource_threads.clone(), None)
}
pub fn new_with_threads(resource_threads: ResourceThreads,
initial_load: Option<ServoUrl>) -> DocumentLoader {
debug!("Initial blocking load {:?}.", initial_load);
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_threads: resource_threads,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
fn add_blocking_load(&mut self, load: LoadType) {
debug!("Adding blocking load {:?} ({}).", load, self.blocking_loads.len());
self.blocking_loads.push(load);
}
/// Initiate a new fetch.
pub fn fetch_async(&mut self,
load: LoadType,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.add_blocking_load(load);
self.fetch_async_background(request, fetch_target);
}
/// Initiate a new fetch that does not block the document load event.
pub fn fetch_async_background(&self,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.resource_threads.sender().send(
CoreResourceMsg::Fetch(request, FetchChannels::ResponseMsg(fetch_target, None))).unwrap();
}
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
debug!("Removing blocking load {:?} ({}).", load, self.blocking_loads.len());
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.unwrap_or_else(|| panic!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn is_only_blocked_by_iframes(&self) -> bool {
self.blocking_loads.iter().all(|load| match *load {
LoadType::Subframe(_) => true,
_ => false
})
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
<|fim▁hole|> pub fn resource_threads(&self) -> &ResourceThreads {
&self.resource_threads
}
}<|fim▁end|> | |
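At bottom, DocumentLoader is a multiset of outstanding LoadType values: add_blocking_load pushes one, finish_load removes exactly one matching entry (and panics on an unknown load), and is_blocked asks whether any remain, with LoadBlocker providing RAII cleanup. A Python reduction of that bookkeeping, types and RAII dropped but semantics kept:

class DocumentLoaderSketch(object):
    """Track outstanding loads; the document 'load' event waits on them."""
    def __init__(self, initial_loads=()):
        self.blocking_loads = list(initial_loads)
    def add_blocking_load(self, load):
        self.blocking_loads.append(load)
    def finish_load(self, load):
        # Mirrors the Rust version: finishing an unknown load is a hard error.
        try:
            self.blocking_loads.remove(load)
        except ValueError:
            raise RuntimeError("unknown completed load %r" % (load,))
    def is_blocked(self):
        return bool(self.blocking_loads)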
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: ascii -*-
u"""
:Copyright:
Copyright 2015 - 2021
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================
GenSASchema - Static SQLAlchemy Schema Generator
==================================================
GenSASchema - Static SQLAlchemy Schema Generator.
"""
__author__ = u"Andr\xe9 Malo"
import os as _os
import posixpath as _posixpath
# pylint: disable = no-name-in-module, import-error, raise-missing-from
import setuptools as _setuptools
# pylint: disable = invalid-name
def _doc(filename):
""" Read docs file """
# pylint: disable = unspecified-encoding
args = {} if str is bytes else dict(encoding='utf-8')
try:
with open(_os.path.join('docs', filename), **args) as fp:
return fp.read()
except IOError:
return None
def _lines(multiline):
""" Split multiline string into single line % empty and comments """
return [line for line in (
line.strip() for line in multiline.splitlines(False)
) if line and not line.startswith('#')]
<|fim▁hole|>package = dict(
name='gensaschema',
top='gensaschema',
pathname='gensaschema',
provides=_doc('PROVIDES'),
desc=_doc('SUMMARY').strip(),
longdesc=_doc('DESCRIPTION'),
author=__author__,
email='[email protected]',
license="Apache License, Version 2.0",
# keywords=_lines(_doc('KEYWORDS')),
url='http://opensource.perlig.de/gensaschema/',
classifiers=_lines(_doc('CLASSIFIERS') or ''),
packages=True,
# py_modules=[],
# version_file='__init__.py',
install_requires=[],
)
def setup():
""" Main """
# pylint: disable = too-many-branches
# pylint: disable = unspecified-encoding
args = {} if str is bytes else dict(encoding='utf-8')
version_file = '%s/%s' % (package['pathname'],
package.get('version_file', '__init__.py'))
with open(version_file, **args) as fp:
for line in fp: # pylint: disable = redefined-outer-name
if line.startswith('__version__'):
version = line.split('=', 1)[1].strip()
if version.startswith(("'", '"')):
version = version[1:-1].strip()
break
else:
raise RuntimeError("Version not found")
kwargs = {}
if package.get('packages', True):
kwargs['packages'] = [package['top']] + [
'%s.%s' % (package['top'], item)
for item in
_setuptools.find_packages(package['pathname'])
]
if package.get('py_modules'):
kwargs['py_modules'] = package['py_modules']
_setuptools.setup(
name=package['name'],
author=package['author'],
author_email=package['email'],
license=package['license'],
classifiers=package['classifiers'],
description=package['desc'],
long_description=package['longdesc'],
url=package['url'],
install_requires=package['install_requires'],
version=version,
zip_safe=False,
**kwargs
)
if __name__ == '__main__':
setup()<|fim▁end|> | |
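setup() deliberately scans __init__.py for a literal __version__ line instead of importing the package, so version detection works before the package's dependencies are installed. In miniature, the two file conventions it relies on (contents illustrative):

# gensaschema/__init__.py is expected to contain a line like:
#     __version__ = '1.0.0'

# docs/CLASSIFIERS is plain text; _lines() drops blanks and comment lines:
text = "# Trove classifiers\nProgramming Language :: Python :: 3\n\nLicense :: OSI Approved :: Apache Software License\n"
lines = [l for l in (l.strip() for l in text.splitlines()) if l and not l.startswith('#')]
assert lines == ['Programming Language :: Python :: 3',
                 'License :: OSI Approved :: Apache Software License']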
<|file_name|>FBStringTest.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2016 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Author: [email protected]
#include <folly/FBString.h>
#include <atomic>
#include <cstdlib>
#include <iomanip>
#include <list>
#include <sstream>
#include <boost/algorithm/string.hpp>
#include <boost/random.hpp>
#include <gtest/gtest.h>
#include <folly/Foreach.h>
#include <folly/Portability.h>
#include <folly/Random.h>
#include <folly/Conv.h>
using namespace std;
using namespace folly;
namespace {
static const int seed = folly::randomNumberSeed();
typedef boost::mt19937 RandomT;
static RandomT rng(seed);
static const size_t maxString = 100;
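// When avoidAliasing is true, the clauses below that would feed `test` back
// into one of its own members (self-assign, self-append, self-replace) first
// materialize a copy, so flipping this flag exercises both the aliased and
// the non-aliased code paths of the string implementation.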
static const bool avoidAliasing = true;
template <class Integral1, class Integral2>
Integral2 random(Integral1 low, Integral2 up) {
boost::uniform_int<> range(low, up);
return range(rng);
}
template <class String>
void randomString(String* toFill, unsigned int maxSize = 1000) {
assert(toFill);
toFill->resize(random(0, maxSize));
FOR_EACH (i, *toFill) {
*i = random('a', 'z');
}
}
template <class String, class Integral>
void Num2String(String& str, Integral n) {
std::string tmp = folly::to<std::string>(n);
str = String(tmp.begin(), tmp.end());
}
std::list<char> RandomList(unsigned int maxSize) {
std::list<char> lst(random(0u, maxSize));
std::list<char>::iterator i = lst.begin();
for (; i != lst.end(); ++i) {
*i = random('a', 'z');
}
return lst;
}
}
////////////////////////////////////////////////////////////////////////////////
// Tests begin here
////////////////////////////////////////////////////////////////////////////////
template <class String> void clause11_21_4_2_a(String & test) {
test.String::~String();
new(&test) String();
}
template <class String> void clause11_21_4_2_b(String & test) {
String test2(test);
assert(test2 == test);
}
template <class String> void clause11_21_4_2_c(String & test) {
// Test move constructor. There is a more specialized test, see
// TEST(FBString, testMoveCtor)
String donor(test);
String test2(std::move(donor));
EXPECT_EQ(test2, test);
// Technically not required, but all implementations that actually
// support move will move large strings. Make a guess for 128 as the
// maximum small string optimization that's reasonable.
EXPECT_LE(donor.size(), 128);
}
template <class String> void clause11_21_4_2_d(String & test) {
// Copy constructor with position and length
const size_t pos = random(0, test.size());
String s(test, pos, random(0, 9)
? random(0, (size_t)(test.size() - pos))
: String::npos); // test for npos, too, in 10% of the cases
test = s;
}
template <class String> void clause11_21_4_2_e(String & test) {
// Constructor from char*, size_t
const size_t
pos = random(0, test.size()),
n = random(0, test.size() - pos);
String before(test.data(), test.size());
String s(test.c_str() + pos, n);
String after(test.data(), test.size());
EXPECT_EQ(before, after);
test.swap(s);
}
template <class String> void clause11_21_4_2_f(String & test) {
// Constructor from char*
const size_t pos = random(0, test.size());
String before(test.data(), test.size());
String s(test.c_str() + pos);
String after(test.data(), test.size());
EXPECT_EQ(before, after);
test.swap(s);
}
template <class String> void clause11_21_4_2_g(String & test) {
// Constructor from size_t, char
const size_t n = random(0, test.size());
const auto c = test.front();
test = String(n, c);
}
template <class String> void clause11_21_4_2_h(String & test) {
// Constructors from various iterator pairs
// Constructor from char*, char*
String s1(test.begin(), test.end());
EXPECT_EQ(test, s1);
String s2(test.data(), test.data() + test.size());
EXPECT_EQ(test, s2);
// Constructor from other iterators
std::list<char> lst;
for (auto c : test) lst.push_back(c);
String s3(lst.begin(), lst.end());
EXPECT_EQ(test, s3);
// Constructor from wchar_t iterators
std::list<wchar_t> lst1;
for (auto c : test) lst1.push_back(c);
String s4(lst1.begin(), lst1.end());
EXPECT_EQ(test, s4);
// Constructor from wchar_t pointers
wchar_t t[20];
t[0] = 'a';
t[1] = 'b';
  fbstring s5(t, t + 2);
EXPECT_EQ("ab", s5);
}
template <class String> void clause11_21_4_2_i(String & test) {
// From initializer_list<char>
std::initializer_list<typename String::value_type>
il = { 'h', 'e', 'l', 'l', 'o' };
String s(il);
test.swap(s);
}
template <class String> void clause11_21_4_2_j(String & test) {
// Assignment from const String&
auto size = random(0, 2000);
String s(size, '\0');
EXPECT_EQ(s.size(), size);
FOR_EACH_RANGE (i, 0, s.size()) {
s[i] = random('a', 'z');
}
test = s;
}
template <class String> void clause11_21_4_2_k(String & test) {
// Assignment from String&&
auto size = random(0, 2000);
String s(size, '\0');
EXPECT_EQ(s.size(), size);
FOR_EACH_RANGE (i, 0, s.size()) {
s[i] = random('a', 'z');
}
test = std::move(s);
if (typeid(String) == typeid(fbstring)) {
EXPECT_LE(s.size(), 128);
}
}
template <class String> void clause11_21_4_2_l(String & test) {
// Assignment from char*
String s(random(0, 1000), '\0');
size_t i = 0;
for (; i != s.size(); ++i) {
s[i] = random('a', 'z');
}
test = s.c_str();
}
template <class String> void clause11_21_4_2_lprime(String & test) {
// Aliased assign
const size_t pos = random(0, test.size());
if (avoidAliasing) {
test = String(test.c_str() + pos);
} else {
test = test.c_str() + pos;
}
}
template <class String> void clause11_21_4_2_m(String & test) {
// Assignment from char
test = random('a', 'z');
}
template <class String> void clause11_21_4_2_n(String & test) {
// Assignment from initializer_list<char>
initializer_list<typename String::value_type>
il = { 'h', 'e', 'l', 'l', 'o' };
test = il;
}
template <class String> void clause11_21_4_3(String & test) {
// Iterators. The code below should leave test unchanged
EXPECT_EQ(test.size(), test.end() - test.begin());
EXPECT_EQ(test.size(), test.rend() - test.rbegin());
EXPECT_EQ(test.size(), test.cend() - test.cbegin());
EXPECT_EQ(test.size(), test.crend() - test.crbegin());
auto s = test.size();
test.resize(test.end() - test.begin());
EXPECT_EQ(s, test.size());
test.resize(test.rend() - test.rbegin());
EXPECT_EQ(s, test.size());
}
template <class String> void clause11_21_4_4(String & test) {
// exercise capacity, size, max_size
EXPECT_EQ(test.size(), test.length());
EXPECT_LE(test.size(), test.max_size());
EXPECT_LE(test.capacity(), test.max_size());
EXPECT_LE(test.size(), test.capacity());
// exercise shrink_to_fit. Nonbinding request so we can't really do
// much beyond calling it.
auto copy = test;
copy.reserve(copy.capacity() * 3);
copy.shrink_to_fit();
EXPECT_EQ(copy, test);
// exercise empty
string empty("empty");
string notempty("not empty");
if (test.empty()) test = String(empty.begin(), empty.end());
else test = String(notempty.begin(), notempty.end());
}
template <class String> void clause11_21_4_5(String & test) {
// exercise element access
if (!test.empty()) {
EXPECT_EQ(test[0], test.front());
EXPECT_EQ(test[test.size() - 1], test.back());
auto const i = random(0, test.size() - 1);
EXPECT_EQ(test[i], test.at(i));
test = test[i];
}
}
template <class String> void clause11_21_4_6_1(String & test) {
// 21.3.5 modifiers (+=)
String test1;
randomString(&test1);
assert(test1.size() == char_traits
<typename String::value_type>::length(test1.c_str()));
auto len = test.size();
test += test1;
EXPECT_EQ(test.size(), test1.size() + len);
FOR_EACH_RANGE (i, 0, test1.size()) {
EXPECT_EQ(test[len + i], test1[i]);
}
// aliasing modifiers
String test2 = test;
auto dt = test2.data();
auto sz = test.c_str();
len = test.size();
EXPECT_EQ(memcmp(sz, dt, len), 0);
String copy(test.data(), test.size());
EXPECT_EQ(char_traits
<typename String::value_type>::length(test.c_str()), len);
test += test;
//test.append(test);
EXPECT_EQ(test.size(), 2 * len);
EXPECT_EQ(char_traits
<typename String::value_type>::length(test.c_str()), 2 * len);
FOR_EACH_RANGE (i, 0, len) {
EXPECT_EQ(test[i], copy[i]);
EXPECT_EQ(test[i], test[len + i]);
}
len = test.size();
EXPECT_EQ(char_traits
<typename String::value_type>::length(test.c_str()), len);
// more aliasing
auto const pos = random(0, test.size());
EXPECT_EQ(char_traits
<typename String::value_type>::length(test.c_str() + pos), len - pos);
if (avoidAliasing) {
String addMe(test.c_str() + pos);
EXPECT_EQ(addMe.size(), len - pos);
test += addMe;
} else {
test += test.c_str() + pos;
}
EXPECT_EQ(test.size(), 2 * len - pos);
// single char
len = test.size();
test += random('a', 'z');
EXPECT_EQ(test.size(), len + 1);
// initializer_list
initializer_list<typename String::value_type> il { 'a', 'b', 'c' };
test += il;
}
template <class String> void clause11_21_4_6_2(String & test) {
// 21.3.5 modifiers (append, push_back)
String s;
// Test with a small string first
char c = random('a', 'z');
s.push_back(c);
EXPECT_EQ(s[s.size() - 1], c);
EXPECT_EQ(s.size(), 1);
s.resize(s.size() - 1);
randomString(&s, maxString);
test.append(s);
randomString(&s, maxString);
test.append(s, random(0, s.size()), random(0, maxString));
randomString(&s, maxString);
test.append(s.c_str(), random(0, s.size()));
randomString(&s, maxString);
test.append(s.c_str());
test.append(random(0, maxString), random('a', 'z'));
std::list<char> lst(RandomList(maxString));
test.append(lst.begin(), lst.end());
c = random('a', 'z');
test.push_back(c);
EXPECT_EQ(test[test.size() - 1], c);
// initializer_list
initializer_list<typename String::value_type> il { 'a', 'b', 'c' };
test.append(il);
}
template <class String> void clause11_21_4_6_3_a(String & test) {
// assign
String s;
randomString(&s);
test.assign(s);
EXPECT_EQ(test, s);
// move assign
test.assign(std::move(s));
if (typeid(String) == typeid(fbstring)) {
EXPECT_LE(s.size(), 128);
}
}
template <class String> void clause11_21_4_6_3_b(String & test) {
// assign
String s;
randomString(&s, maxString);
test.assign(s, random(0, s.size()), random(0, maxString));
}
template <class String> void clause11_21_4_6_3_c(String & test) {
// assign
String s;
randomString(&s, maxString);
test.assign(s.c_str(), random(0, s.size()));
}
template <class String> void clause11_21_4_6_3_d(String & test) {
// assign
String s;
randomString(&s, maxString);
test.assign(s.c_str());
}
template <class String> void clause11_21_4_6_3_e(String & test) {
// assign
String s;
randomString(&s, maxString);
test.assign(random(0, maxString), random('a', 'z'));
}
template <class String> void clause11_21_4_6_3_f(String & test) {
// assign from bidirectional iterator
std::list<char> lst(RandomList(maxString));
test.assign(lst.begin(), lst.end());
}
template <class String> void clause11_21_4_6_3_g(String & test) {
// assign from aliased source
test.assign(test);
}
template <class String> void clause11_21_4_6_3_h(String & test) {
// assign from aliased source
test.assign(test, random(0, test.size()), random(0, maxString));
}
template <class String> void clause11_21_4_6_3_i(String & test) {
// assign from aliased source
test.assign(test.c_str(), random(0, test.size()));
}
template <class String> void clause11_21_4_6_3_j(String & test) {
// assign from aliased source
test.assign(test.c_str());
}
template <class String> void clause11_21_4_6_3_k(String & test) {
// assign from initializer_list
initializer_list<typename String::value_type> il { 'a', 'b', 'c' };
test.assign(il);
}
template <class String> void clause11_21_4_6_4(String & test) {
// insert
String s;
randomString(&s, maxString);
test.insert(random(0, test.size()), s);
randomString(&s, maxString);
test.insert(random(0, test.size()),
s, random(0, s.size()),
random(0, maxString));
randomString(&s, maxString);
test.insert(random(0, test.size()),
s.c_str(), random(0, s.size()));
randomString(&s, maxString);
test.insert(random(0, test.size()), s.c_str());
test.insert(random(0, test.size()),
random(0, maxString), random('a', 'z'));
typename String::size_type pos = random(0, test.size());
typename String::iterator res =
test.insert(test.begin() + pos, random('a', 'z'));
EXPECT_EQ(res - test.begin(), pos);
std::list<char> lst(RandomList(maxString));
pos = random(0, test.size());
// Uncomment below to see a bug in gcc
/*res = */test.insert(test.begin() + pos, lst.begin(), lst.end());
// insert from initializer_list
initializer_list<typename String::value_type> il { 'a', 'b', 'c' };
pos = random(0, test.size());
// Uncomment below to see a bug in gcc
/*res = */test.insert(test.begin() + pos, il);
// Test with actual input iterators
stringstream ss;
ss << "hello cruel world";
auto i = istream_iterator<char>(ss);
test.insert(test.begin(), i, istream_iterator<char>());
}
template <class String> void clause11_21_4_6_5(String & test) {
// erase and pop_back
if (!test.empty()) {
test.erase(random(0, test.size()), random(0, maxString));
}
if (!test.empty()) {
// TODO: is erase(end()) allowed?
test.erase(test.begin() + random(0, test.size() - 1));
}
if (!test.empty()) {
auto const i = test.begin() + random(0, test.size());
if (i != test.end()) {
test.erase(i, i + random(0, size_t(test.end() - i)));
}
}
if (!test.empty()) {
// Can't test pop_back with std::string, doesn't support it yet.
//test.pop_back();
}
}
template <class String> void clause11_21_4_6_6(String & test) {
auto pos = random(0, test.size());
if (avoidAliasing) {
test.replace(pos, random(0, test.size() - pos),
String(test));
} else {
test.replace(pos, random(0, test.size() - pos), test);
}
pos = random(0, test.size());
String s;
randomString(&s, maxString);
test.replace(pos, pos + random(0, test.size() - pos), s);
auto pos1 = random(0, test.size());
auto pos2 = random(0, test.size());
if (avoidAliasing) {
test.replace(pos1, pos1 + random(0, test.size() - pos1),
String(test),
pos2, pos2 + random(0, test.size() - pos2));
} else {
test.replace(pos1, pos1 + random(0, test.size() - pos1),
test, pos2, pos2 + random(0, test.size() - pos2));
}
pos1 = random(0, test.size());
String str;
randomString(&str, maxString);
pos2 = random(0, str.size());
test.replace(pos1, pos1 + random(0, test.size() - pos1),
str, pos2, pos2 + random(0, str.size() - pos2));
pos = random(0, test.size());
if (avoidAliasing) {
test.replace(pos, random(0, test.size() - pos),
String(test).c_str(), test.size());
} else {
test.replace(pos, random(0, test.size() - pos),
test.c_str(), test.size());
}
pos = random(0, test.size());
randomString(&str, maxString);
test.replace(pos, pos + random(0, test.size() - pos),
str.c_str(), str.size());
pos = random(0, test.size());
randomString(&str, maxString);
test.replace(pos, pos + random(0, test.size() - pos),
str.c_str());
pos = random(0, test.size());
test.replace(pos, random(0, test.size() - pos),
random(0, maxString), random('a', 'z'));
pos = random(0, test.size());
if (avoidAliasing) {
auto newString = String(test);
test.replace(
test.begin() + pos,
test.begin() + pos + random(0, test.size() - pos),
newString);
} else {
test.replace(
test.begin() + pos,
test.begin() + pos + random(0, test.size() - pos),
test);
}
pos = random(0, test.size());
if (avoidAliasing) {
auto newString = String(test);
test.replace(
test.begin() + pos,
test.begin() + pos + random(0, test.size() - pos),
newString.c_str(),
test.size() - random(0, test.size()));
} else {
test.replace(
test.begin() + pos,
test.begin() + pos + random(0, test.size() - pos),
test.c_str(),
test.size() - random(0, test.size()));
}
pos = random(0, test.size());
auto const n = random(0, test.size() - pos);
typename String::iterator b = test.begin();
String str1;
randomString(&str1, maxString);
const String & str3 = str1;
const typename String::value_type* ss = str3.c_str();
test.replace(
b + pos,
b + pos + n,
ss);
pos = random(0, test.size());
test.replace(
test.begin() + pos,
test.begin() + pos + random(0, test.size() - pos),
random(0, maxString), random('a', 'z'));
}
template <class String> void clause11_21_4_6_7(String & test) {
std::vector<typename String::value_type>
vec(random(0, maxString));
test.copy(
&vec[0],
vec.size(),
random(0, test.size()));
}
template <class String> void clause11_21_4_6_8(String & test) {
String s;
randomString(&s, maxString);
s.swap(test);
}
template <class String> void clause11_21_4_7_1(String & test) {
// 21.3.6 string operations
// exercise c_str() and data()
assert(test.c_str() == test.data());
// exercise get_allocator()
String s;
randomString(&s, maxString);
DCHECK(test.get_allocator() == s.get_allocator());
}
template <class String> void clause11_21_4_7_2_a(String & test) {
String str = test.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.find(str, random(0, test.size())));
}
template <class String> void clause11_21_4_7_2_a1(String & test) {
String str = String(test).substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.find(str, random(0, test.size())));
}
template <class String> void clause11_21_4_7_2_a2(String & test) {
auto const& cTest = test;
String str = cTest.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.find(str, random(0, test.size())));
}
template <class String> void clause11_21_4_7_2_b(String & test) {
auto from = random(0, test.size());
auto length = random(0, test.size() - from);
String str = test.substr(from, length);
Num2String(test, test.find(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_2_b1(String & test) {
auto from = random(0, test.size());
auto length = random(0, test.size() - from);
String str = String(test).substr(from, length);
Num2String(test, test.find(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_2_b2(String & test) {
auto from = random(0, test.size());
auto length = random(0, test.size() - from);
const auto& cTest = test;
String str = cTest.substr(from, length);
Num2String(test, test.find(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_2_c(String & test) {
String str = test.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.find(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_2_c1(String & test) {
String str = String(test).substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.find(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_2_c2(String & test) {
const auto& cTest = test;
String str = cTest.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.find(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_2_d(String & test) {
Num2String(test, test.find(
random('a', 'z'),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_3_a(String & test) {
String str = test.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.rfind(str, random(0, test.size())));
}
template <class String> void clause11_21_4_7_3_b(String & test) {
String str = test.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.rfind(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_3_c(String & test) {
String str = test.substr(
random(0, test.size()),
random(0, test.size()));
Num2String(test, test.rfind(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_3_d(String & test) {
Num2String(test, test.rfind(
random('a', 'z'),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_4_a(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_first_of(str,
random(0, test.size())));
}
template <class String> void clause11_21_4_7_4_b(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_first_of(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_4_c(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_first_of(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_4_d(String & test) {
Num2String(test, test.find_first_of(
random('a', 'z'),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_5_a(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_last_of(str,
random(0, test.size())));
}
template <class String> void clause11_21_4_7_5_b(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_last_of(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_5_c(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_last_of(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_5_d(String & test) {
Num2String(test, test.find_last_of(
random('a', 'z'),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_6_a(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_first_not_of(str,
random(0, test.size())));
}
template <class String> void clause11_21_4_7_6_b(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_first_not_of(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_6_c(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_first_not_of(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_6_d(String & test) {
Num2String(test, test.find_first_not_of(
random('a', 'z'),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_7_a(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_last_not_of(str,
random(0, test.size())));
}
template <class String> void clause11_21_4_7_7_b(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_last_not_of(str.c_str(),
random(0, test.size()),
random(0, str.size())));
}
template <class String> void clause11_21_4_7_7_c(String & test) {
String str;
randomString(&str, maxString);
Num2String(test, test.find_last_not_of(str.c_str(),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_7_d(String & test) {
Num2String(test, test.find_last_not_of(
random('a', 'z'),
random(0, test.size())));
}
template <class String> void clause11_21_4_7_8(String & test) {
test = test.substr(random(0, test.size()), random(0, test.size()));
}
template <class String> void clause11_21_4_7_9_a(String & test) {
String s;
randomString(&s, maxString);
int tristate = test.compare(s);
if (tristate > 0) tristate = 1;
else if (tristate < 0) tristate = 2;
Num2String(test, tristate);
}
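// Note for this and the remaining 21_4_7_9 clauses: compare() is only
// specified up to sign, so the result is folded into {0, 1, 2} before being
// written back through Num2String, keeping the reference and tested
// implementations bit-for-bit comparable.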
template <class String> void clause11_21_4_7_9_b(String & test) {
String s;
randomString(&s, maxString);
int tristate = test.compare(
random(0, test.size()),
random(0, test.size()),
s);
if (tristate > 0) tristate = 1;
else if (tristate < 0) tristate = 2;
Num2String(test, tristate);
}
template <class String> void clause11_21_4_7_9_c(String & test) {
String str;
randomString(&str, maxString);
int tristate = test.compare(
random(0, test.size()),
random(0, test.size()),
str,
random(0, str.size()),
random(0, str.size()));
if (tristate > 0) tristate = 1;
else if (tristate < 0) tristate = 2;
Num2String(test, tristate);
}
template <class String> void clause11_21_4_7_9_d(String & test) {
String s;
randomString(&s, maxString);
int tristate = test.compare(s.c_str());
if (tristate > 0) tristate = 1;
else if (tristate < 0) tristate = 2;
Num2String(test, tristate);
}
template <class String> void clause11_21_4_7_9_e(String & test) {
String str;
randomString(&str, maxString);
int tristate = test.compare(
random(0, test.size()),
random(0, test.size()),
str.c_str(),
random(0, str.size()));
if (tristate > 0) tristate = 1;
else if (tristate < 0) tristate = 2;
Num2String(test, tristate);
}
template <class String> void clause11_21_4_8_1_a(String & test) {
String s1;
randomString(&s1, maxString);
String s2;
randomString(&s2, maxString);
test = s1 + s2;
}
template <class String> void clause11_21_4_8_1_b(String & test) {
String s1;
randomString(&s1, maxString);
String s2;
randomString(&s2, maxString);
test = move(s1) + s2;
}
template <class String> void clause11_21_4_8_1_c(String & test) {
String s1;
randomString(&s1, maxString);
String s2;
randomString(&s2, maxString);
test = s1 + move(s2);
}
template <class String> void clause11_21_4_8_1_d(String & test) {
String s1;
randomString(&s1, maxString);
String s2;
randomString(&s2, maxString);
test = move(s1) + move(s2);
}
template <class String> void clause11_21_4_8_1_e(String & test) {
String s;<|fim▁hole|> test = s.c_str() + s1;
}
template <class String> void clause11_21_4_8_1_f(String & test) {
String s;
randomString(&s, maxString);
String s1;
randomString(&s1, maxString);
test = s.c_str() + move(s1);
}
template <class String> void clause11_21_4_8_1_g(String & test) {
String s;
randomString(&s, maxString);
test = typename String::value_type(random('a', 'z')) + s;
}
template <class String> void clause11_21_4_8_1_h(String & test) {
String s;
randomString(&s, maxString);
test = typename String::value_type(random('a', 'z')) + move(s);
}
template <class String> void clause11_21_4_8_1_i(String & test) {
String s;
randomString(&s, maxString);
String s1;
randomString(&s1, maxString);
test = s + s1.c_str();
}
template <class String> void clause11_21_4_8_1_j(String & test) {
String s;
randomString(&s, maxString);
String s1;
randomString(&s1, maxString);
test = move(s) + s1.c_str();
}
template <class String> void clause11_21_4_8_1_k(String & test) {
String s;
randomString(&s, maxString);
test = s + typename String::value_type(random('a', 'z'));
}
template <class String> void clause11_21_4_8_1_l(String & test) {
  String s;
  randomString(&s, maxString);
  // rvalue string + single character: the move counterpart of clause k
  test = move(s) + typename String::value_type(random('a', 'z'));
}
// Numbering here is from C++11
template <class String> void clause11_21_4_8_9_a(String & test) {
basic_stringstream<typename String::value_type> stst(test.c_str());
String str;
while (stst) {
stst >> str;
test += str + test;
}
}
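// The loop above re-reads whitespace-delimited tokens out of the original
// contents and appends "token + current string" each time, so a single
// clause exercises operator>>, operator+ and aliasing self-append at once.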
TEST(FBString, testAllClauses) {
EXPECT_TRUE(1) << "Starting with seed: " << seed;
std::string r;
folly::fbstring c;
#ifndef __ANDROID__
// Disabled on Android: wchar support is not recommended and does not
// always behave as expected
std::wstring wr;
folly::basic_fbstring<wchar_t> wc;
#endif
int count = 0;
auto l = [&](const char * const clause,
void(*f_string)(std::string&),
void(*f_fbstring)(folly::fbstring&),
void(*f_wfbstring)(folly::basic_fbstring<wchar_t>&)) {
do {
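      // Per-clause logging is compiled in but disabled; flip the `if (1)`
      // on the next line to trace which clause is running.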
if (1) {} else EXPECT_TRUE(1) << "Testing clause " << clause;
randomString(&r);
c = r;
EXPECT_EQ(c, r);
#ifndef __ANDROID__
wr = std::wstring(r.begin(), r.end());
wc = folly::basic_fbstring<wchar_t>(wr.c_str());
#endif
auto localSeed = seed + count;
rng = RandomT(localSeed);
f_string(r);
rng = RandomT(localSeed);
f_fbstring(c);
EXPECT_EQ(r, c)
<< "Lengths: " << r.size() << " vs. " << c.size()
<< "\nReference: '" << r << "'"
<< "\nActual: '" << c.data()[0] << "'";
#ifndef __ANDROID__
rng = RandomT(localSeed);
f_wfbstring(wc);
int wret = wcslen(wc.c_str());
char mb[wret+1];
int ret = wcstombs(mb, wc.c_str(), sizeof(mb));
if (ret == wret) mb[wret] = '\0';
const char *mc = c.c_str();
std::string one(mb);
std::string two(mc);
EXPECT_EQ(one, two);
#endif
} while (++count % 100 != 0);
};
#define TEST_CLAUSE(x) \
l(#x, \
clause11_##x<std::string>, \
clause11_##x<folly::fbstring>, \
clause11_##x<folly::basic_fbstring<wchar_t>>);
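  // e.g. TEST_CLAUSE(21_4_2_a) runs clause11_21_4_2_a against std::string,
  // fbstring and the wide fbstring from the same seed, then compares results.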
TEST_CLAUSE(21_4_2_a);
TEST_CLAUSE(21_4_2_b);
TEST_CLAUSE(21_4_2_c);
TEST_CLAUSE(21_4_2_d);
TEST_CLAUSE(21_4_2_e);
TEST_CLAUSE(21_4_2_f);
TEST_CLAUSE(21_4_2_g);
TEST_CLAUSE(21_4_2_h);
TEST_CLAUSE(21_4_2_i);
TEST_CLAUSE(21_4_2_j);
TEST_CLAUSE(21_4_2_k);
TEST_CLAUSE(21_4_2_l);
TEST_CLAUSE(21_4_2_lprime);
TEST_CLAUSE(21_4_2_m);
TEST_CLAUSE(21_4_2_n);
TEST_CLAUSE(21_4_3);
TEST_CLAUSE(21_4_4);
TEST_CLAUSE(21_4_5);
TEST_CLAUSE(21_4_6_1);
TEST_CLAUSE(21_4_6_2);
TEST_CLAUSE(21_4_6_3_a);
TEST_CLAUSE(21_4_6_3_b);
TEST_CLAUSE(21_4_6_3_c);
TEST_CLAUSE(21_4_6_3_d);
TEST_CLAUSE(21_4_6_3_e);
TEST_CLAUSE(21_4_6_3_f);
TEST_CLAUSE(21_4_6_3_g);
TEST_CLAUSE(21_4_6_3_h);
TEST_CLAUSE(21_4_6_3_i);
TEST_CLAUSE(21_4_6_3_j);
TEST_CLAUSE(21_4_6_3_k);
TEST_CLAUSE(21_4_6_4);
TEST_CLAUSE(21_4_6_5);
TEST_CLAUSE(21_4_6_6);
TEST_CLAUSE(21_4_6_7);
TEST_CLAUSE(21_4_6_8);
TEST_CLAUSE(21_4_7_1);
TEST_CLAUSE(21_4_7_2_a);
TEST_CLAUSE(21_4_7_2_a1);
TEST_CLAUSE(21_4_7_2_a2);
TEST_CLAUSE(21_4_7_2_b);
TEST_CLAUSE(21_4_7_2_b1);
TEST_CLAUSE(21_4_7_2_b2);
TEST_CLAUSE(21_4_7_2_c);
TEST_CLAUSE(21_4_7_2_c1);
TEST_CLAUSE(21_4_7_2_c2);
TEST_CLAUSE(21_4_7_2_d);
TEST_CLAUSE(21_4_7_3_a);
TEST_CLAUSE(21_4_7_3_b);
TEST_CLAUSE(21_4_7_3_c);
TEST_CLAUSE(21_4_7_3_d);
TEST_CLAUSE(21_4_7_4_a);
TEST_CLAUSE(21_4_7_4_b);
TEST_CLAUSE(21_4_7_4_c);
TEST_CLAUSE(21_4_7_4_d);
TEST_CLAUSE(21_4_7_5_a);
TEST_CLAUSE(21_4_7_5_b);
TEST_CLAUSE(21_4_7_5_c);
TEST_CLAUSE(21_4_7_5_d);
TEST_CLAUSE(21_4_7_6_a);
TEST_CLAUSE(21_4_7_6_b);
TEST_CLAUSE(21_4_7_6_c);
TEST_CLAUSE(21_4_7_6_d);
TEST_CLAUSE(21_4_7_7_a);
TEST_CLAUSE(21_4_7_7_b);
TEST_CLAUSE(21_4_7_7_c);
TEST_CLAUSE(21_4_7_7_d);
TEST_CLAUSE(21_4_7_8);
TEST_CLAUSE(21_4_7_9_a);
TEST_CLAUSE(21_4_7_9_b);
TEST_CLAUSE(21_4_7_9_c);
TEST_CLAUSE(21_4_7_9_d);
TEST_CLAUSE(21_4_7_9_e);
TEST_CLAUSE(21_4_8_1_a);
TEST_CLAUSE(21_4_8_1_b);
TEST_CLAUSE(21_4_8_1_c);
TEST_CLAUSE(21_4_8_1_d);
TEST_CLAUSE(21_4_8_1_e);
TEST_CLAUSE(21_4_8_1_f);
TEST_CLAUSE(21_4_8_1_g);
TEST_CLAUSE(21_4_8_1_h);
TEST_CLAUSE(21_4_8_1_i);
TEST_CLAUSE(21_4_8_1_j);
TEST_CLAUSE(21_4_8_1_k);
TEST_CLAUSE(21_4_8_1_l);
TEST_CLAUSE(21_4_8_9_a);
}
TEST(FBString, testGetline) {
string s1 = "\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras accumsan \n\
elit ut urna consectetur in sagittis mi auctor. Nulla facilisi. In nec \n\
dolor leo, vitae imperdiet neque. Donec ut erat mauris, a faucibus \n\
elit. Integer consectetur gravida augue, sit amet mattis mauris auctor \n\
sed. Morbi congue libero eu nunc sodales adipiscing. In lectus nunc, \n\
vulputate a fringilla at, venenatis quis justo. Proin eu velit \n\
nibh. Maecenas vitae tellus eros. Pellentesque habitant morbi \n\
tristique senectus et netus et malesuada fames ac turpis \n\
egestas. Vivamus faucibus feugiat consequat. Donec fermentum neque sit \n\
amet ligula suscipit porta. Phasellus facilisis felis in purus luctus \n\
quis posuere leo tempor. Nam nunc purus, luctus a pharetra ut, \n\
placerat at dui. Donec imperdiet, diam quis convallis pulvinar, dui \n\
est commodo lorem, ut tincidunt diam nibh et nibh. Maecenas nec velit \n\
massa, ut accumsan magna. Donec imperdiet tempor nisi et \n\
laoreet. Phasellus lectus quam, ultricies ut tincidunt in, dignissim \n\
id eros. Mauris vulputate tortor nec neque pellentesque sagittis quis \n\
sed nisl. In diam lacus, lobortis ut posuere nec, ornare id quam.";
vector<fbstring> v;
boost::split(v, s1, boost::is_any_of("\n"));
{
istringstream input(s1);
fbstring line;
FOR_EACH (i, v) {
EXPECT_TRUE(!getline(input, line).fail());
EXPECT_EQ(line, *i);
}
}
}
TEST(FBString, testMoveCtor) {
// Move constructor. Make sure we allocate a large string, so the
// small string optimization doesn't kick in.
auto size = random(100, 2000);
fbstring s(size, 'a');
fbstring test = std::move(s);
EXPECT_TRUE(s.empty());
EXPECT_EQ(size, test.size());
}
TEST(FBString, testMoveAssign) {
// Move constructor. Make sure we allocate a large string, so the
// small string optimization doesn't kick in.
auto size = random(100, 2000);
fbstring s(size, 'a');
fbstring test;
test = std::move(s);
EXPECT_TRUE(s.empty());
EXPECT_EQ(size, test.size());
}
TEST(FBString, testMoveOperatorPlusLhs) {
// Make sure we allocate a large string, so the
// small string optimization doesn't kick in.
auto size1 = random(100, 2000);
auto size2 = random(100, 2000);
fbstring s1(size1, 'a');
fbstring s2(size2, 'b');
fbstring test;
test = std::move(s1) + s2;
EXPECT_TRUE(s1.empty());
EXPECT_EQ(size1 + size2, test.size());
}
TEST(FBString, testMoveOperatorPlusRhs) {
// Make sure we allocate a large string, so the
// small string optimization doesn't kick in.
auto size1 = random(100, 2000);
auto size2 = random(100, 2000);
fbstring s1(size1, 'a');
fbstring s2(size2, 'b');
fbstring test;
test = s1 + std::move(s2);
EXPECT_EQ(size1 + size2, test.size());
}
// The GNU C++ standard library throws an std::logic_error when an std::string
// is constructed with a null pointer. Verify that we mirror this behavior.
//
// N.B. We behave this way even if the C++ library being used is something
// other than libstdc++. Someday if we deem it important to present
// identical undefined behavior for other platforms, we can re-visit this.
TEST(FBString, testConstructionFromLiteralZero) {
EXPECT_THROW(fbstring s(0), std::logic_error);
}
TEST(FBString, testFixedBugs) {
{ // D479397
fbstring str(1337, 'f');
fbstring cp = str;
cp.clear();
cp.c_str();
EXPECT_EQ(str.front(), 'f');
}
{ // D481173
fbstring str(1337, 'f');
for (int i = 0; i < 2; ++i) {
fbstring cp = str;
cp[1] = 'b';
EXPECT_EQ(cp.c_str()[cp.size()], '\0');
cp.push_back('?');
}
}
{ // D580267
{
fbstring str(1337, 'f');
fbstring cp = str;
cp.push_back('f');
}
{
fbstring str(1337, 'f');
fbstring cp = str;
cp += "bb";
}
}
{ // D661622
folly::basic_fbstring<wchar_t> s;
EXPECT_EQ(0, s.size());
}
{ // D785057
fbstring str(1337, 'f');
std::swap(str, str);
EXPECT_EQ(1337, str.size());
}
{ // D1012196, --allocator=malloc
fbstring str(128, 'f');
str.clear(); // Empty medium string.
fbstring copy(str); // Medium string of 0 capacity.
copy.push_back('b');
EXPECT_GE(copy.capacity(), 1);
}
{ // D2813713
fbstring s1("a");
s1.reserve(8); // Trigger the optimized code path.
auto test1 = '\0' + std::move(s1);
EXPECT_EQ(2, test1.size());
fbstring s2(1, '\0');
s2.reserve(8);
auto test2 = "a" + std::move(s2);
EXPECT_EQ(2, test2.size());
}
}
TEST(FBString, findWithNpos) {
fbstring fbstr("localhost:80");
EXPECT_EQ(fbstring::npos, fbstr.find(":", fbstring::npos));
}
TEST(FBString, testHash) {
fbstring a;
fbstring b;
a.push_back(0);
a.push_back(1);
b.push_back(0);
b.push_back(2);
std::hash<fbstring> hashfunc;
EXPECT_NE(hashfunc(a), hashfunc(b));
}
TEST(FBString, testFrontBack) {
fbstring str("hello");
EXPECT_EQ(str.front(), 'h');
EXPECT_EQ(str.back(), 'o');
str.front() = 'H';
EXPECT_EQ(str.front(), 'H');
str.back() = 'O';
EXPECT_EQ(str.back(), 'O');
EXPECT_EQ(str, "HellO");
}
TEST(FBString, noexcept) {
EXPECT_TRUE(noexcept(fbstring()));
fbstring x;
EXPECT_FALSE(noexcept(fbstring(x)));
EXPECT_TRUE(noexcept(fbstring(std::move(x))));
fbstring y;
EXPECT_FALSE(noexcept(y = x));
EXPECT_TRUE(noexcept(y = std::move(x)));
}
TEST(FBString, iomanip) {
stringstream ss;
fbstring fbstr("Hello");
ss << setw(6) << fbstr;
EXPECT_EQ(ss.str(), " Hello");
ss.str("");
ss << left << setw(6) << fbstr;
EXPECT_EQ(ss.str(), "Hello ");
ss.str("");
ss << right << setw(6) << fbstr;
EXPECT_EQ(ss.str(), " Hello");
ss.str("");
ss << setw(4) << fbstr;
EXPECT_EQ(ss.str(), "Hello");
ss.str("");
ss << setfill('^') << setw(6) << fbstr;
EXPECT_EQ(ss.str(), "^Hello");
ss.str("");
}
TEST(FBString, rvalueIterators) {
// you cannot take &* of a move-iterator, so use that for testing
fbstring s = "base";
fbstring r = "hello";
r.replace(r.begin(), r.end(),
make_move_iterator(s.begin()), make_move_iterator(s.end()));
EXPECT_EQ("base", r);
// The following test is probably not required by the standard.
// i.e. this could be in the realm of undefined behavior.
fbstring b = "123abcXYZ";
auto ait = b.begin() + 3;
auto Xit = b.begin() + 6;
b.replace(ait, b.end(), b.begin(), Xit);
EXPECT_EQ("123123abc", b); // if things go wrong, you'd get "123123123"
}
TEST(FBString, moveTerminator) {
// The source of a move must remain in a valid state
fbstring s(100, 'x'); // too big to be in-situ
fbstring k;
k = std::move(s);
EXPECT_EQ(0, s.size());
EXPECT_EQ('\0', *s.c_str());
}
namespace {
/*
* t8968589: Clang 3.7 refused to compile w/ certain constructors (specifically
* those that were "explicit" and had a defaulted parameter, if they were used
* in structs which were default-initialized). Exercise these just to ensure
* they compile.
*
* In diff D2632953 the old constructor:
* explicit basic_fbstring(const A& a = A()) noexcept;
*
* was split into these two, as a workaround:
* basic_fbstring() noexcept;
* explicit basic_fbstring(const A& a) noexcept;
*/
struct TestStructDefaultAllocator {
folly::basic_fbstring<char> stringMember;
};
template <class A>
struct TestStructWithAllocator {
folly::basic_fbstring<char, std::char_traits<char>, A> stringMember;
};
std::atomic<size_t> allocatorConstructedCount(0);
struct TestStructStringAllocator : std::allocator<char> {
TestStructStringAllocator() {
++ allocatorConstructedCount;
}
};
} // anon namespace
TEST(FBStringCtorTest, DefaultInitStructDefaultAlloc) {
TestStructDefaultAllocator t1 { };
EXPECT_TRUE(t1.stringMember.empty());
}
TEST(FBStringCtorTest, DefaultInitStructAlloc) {
EXPECT_EQ(allocatorConstructedCount.load(), 0);
TestStructWithAllocator<TestStructStringAllocator> t2;
EXPECT_TRUE(t2.stringMember.empty());
EXPECT_EQ(allocatorConstructedCount.load(), 1);
}<|fim▁end|> | randomString(&s, maxString);
String s1;
randomString(&s1, maxString); |
<|file_name|>savedsearch.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
from sentry.models.search_common import SearchType
class SavedSearch(Model):
"""
A saved search query.
"""
__core__ = True
# TODO: Remove this column and rows where it's not null once we've
# completely removed Sentry 9
project = FlexibleForeignKey("sentry.Project", null=True)
organization = FlexibleForeignKey("sentry.Organization", null=True)
type = models.PositiveSmallIntegerField(default=SearchType.ISSUE.value, null=True)
name = models.CharField(max_length=128)
query = models.TextField()
date_added = models.DateTimeField(default=timezone.now)
# TODO: Remove this column once we've completely removed Sentry 9
is_default = models.BooleanField(default=False)
is_global = models.NullBooleanField(null=True, default=False, db_index=True)
owner = FlexibleForeignKey("sentry.User", null=True)
class Meta:
app_label = "sentry"
db_table = "sentry_savedsearch"
# Note that we also have a partial unique constraint on:
# (organization_id, name, type) WHERE owner_id IS NULL
# (is_global, name) WHERE is_global
unique_together = (
("project", "name"),
# Each user can have one default search per org
("organization", "owner", "type"),
)
@property
def is_pinned(self):
if hasattr(self, "_is_pinned"):<|fim▁hole|> return self.owner is not None and self.organization is not None
@is_pinned.setter
def is_pinned(self, value):
self._is_pinned = value
@property
def is_org_custom_search(self):
return self.owner is None and self.organization is not None
__repr__ = sane_repr("project_id", "name")
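    # Illustrative sketch (field values are hypothetical): a search saved with
    # both an owner and an organization is treated as pinned, while an
    # owner-less, organization-scoped row is an org-wide custom search.
    #
    #   pinned = SavedSearch(organization=org, owner=user, query="is:unresolved")
    #   pinned.is_pinned             # -> True
    #   shared = SavedSearch(organization=org, owner=None, query="is:assigned")
    #   shared.is_org_custom_search  # -> True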
# TODO: Remove once we've completely removed sentry 9
class SavedSearchUserDefault(Model):
"""
Indicates the default saved search for a given user
"""
__core__ = True
savedsearch = FlexibleForeignKey("sentry.SavedSearch")
project = FlexibleForeignKey("sentry.Project")
user = FlexibleForeignKey("sentry.User")
class Meta:
unique_together = (("project", "user"),)
app_label = "sentry"
db_table = "sentry_savedsearch_userdefault"<|fim▁end|> | return self._is_pinned |
<|file_name|>editProxyDialog.js<|end_file_name|><|fim▁begin|>const Clutter = imports.gi.Clutter;
const Lang = imports.lang;
const ModalDialog = imports.ui.modalDialog;
const Signals = imports.signals;
const St = imports.gi.St;
const Gettext = imports.gettext;
const _ = Gettext.domain('gnome-shell-extensions-scriptproxies').gettext;
const EditProxyDialog = new Lang.Class({
Name: 'EditProxyDialog',
Extends: ModalDialog.ModalDialog,
_init: function(callback, action) {
this.callback = callback;
this.action = action;
this.parent({
styleClass: 'prompt-dialog'
});
let label, buttons;
if (this.action == 'add') {
buttons = [{
label: _('Cancel'),
action: Lang.bind(this, this._onCancelButton),
key: Clutter.Escape
}, {
label: _('Add proxy'),
action: Lang.bind(this, this._addButton)
}];
label = new St.Label({
style_class: 'edit-proxy-dialog-label',
text: _('Enter the name for the new proxy')
});
} else if (this.action == 'edit') {
buttons = [{
label: _('Cancel'),
action: Lang.bind(this, this._onCancelButton),
key: Clutter.Escape
}, {
label: _('Modify script'),
action: Lang.bind(this, this._modifyButton)
}, {
label: _('Rename'),
action: Lang.bind(this, this._renameButton)
}];
label = new St.Label({
style_class: 'edit-proxy-dialog-label',
text: _('Modify the proxy script.') + '\n' + _('Or rename it (leave blank to remove the proxy).')
});
} else {
// this should be this.action == 'editor'
buttons = [{
label: _('Cancel'),
action: Lang.bind(this, this._onCancelButton),
key: Clutter.Escape
}, {
label: _('OK'),
action: Lang.bind(this, this._onSetEditorButton)
}];
label = new St.Label({
style_class: 'edit-proxy-dialog-label',
text: _('To edit your proxy script,') + '\n' + _('please provide the binary name of your text editor.')
});
}
this.contentLayout.add(label, {
y_align: St.Align.START
});
let entry = new St.Entry({
style_class: 'edit-proxy-dialog-entry'
});
entry.label_actor = label;
this._entryText = entry.clutter_text;
this.contentLayout.add(entry, {
y_align: St.Align.START
});
this.setInitialKeyFocus(this._entryText);
this.setButtons(buttons);
this._entryText.connect('key-press-event', Lang.bind(this, function(o, e) {<|fim▁hole|> this._addButton();
else if (this.action == 'edit')
this._renameButton();
else
this._onSetEditorButton();
}
}));
},
close: function() {
this.parent();
},
_onCancelButton: function() {
this.close();
},
_addButton: function() {
this.callback(this._entryText.get_text(), 'new');
this.close();
},
_modifyButton: function() {
this.callback(this._entryText.get_text(), 'modify');
this.close();
},
_renameButton: function() {
this.callback(this._entryText.get_text(), 'rename');
this.close();
},
_onSetEditorButton: function() {
this.callback(this._entryText.get_text());
this.close();
},
open: function(initialText) {
if (initialText === null) {
this._entryText.set_text('');
} else {
this._entryText.set_text(initialText);
}
this.parent();
}
});
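// Hypothetical usage sketch (callback body invented for illustration):
// prompt for a new proxy name and log whatever the user entered.
//
//   let dialog = new EditProxyDialog(function(text, mode) {
//       log(mode + ': ' + text);
//   }, 'add');
//   dialog.open(null);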
Signals.addSignalMethods(EditProxyDialog.prototype);<|fim▁end|> | let symbol = e.get_key_symbol();
if (symbol == Clutter.Return || symbol == Clutter.KP_Enter) {
if (this.action == 'add') |
<|file_name|>_textfont.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class TextfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="textfont", parent_name="scattersmith", **kwargs):
super(TextfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Textfont"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser<|fim▁hole|> available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
""",
),
**kwargs
)<|fim▁end|> | will only be able to apply a font if it is |
<|file_name|>run_report_with_property_quota.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Analytics Data API sample application demonstrating the usage of
property quota metadata.
See https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/runReport#body.request_body.FIELDS.return_property_quota
for more information.
"""
# [START analyticsdata_run_report_with_property_quota]
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import DateRange
from google.analytics.data_v1beta.types import Dimension
from google.analytics.data_v1beta.types import Metric
from google.analytics.data_v1beta.types import RunReportRequest
def run_sample():
"""Runs the sample."""
# TODO(developer): Replace this variable with your Google Analytics 4<|fim▁hole|> # property ID before running the sample.
property_id = "YOUR-GA4-PROPERTY-ID"
run_report_with_property_quota(property_id)
def run_report_with_property_quota(property_id="YOUR-GA4-PROPERTY-ID"):
"""Runs a report and prints property quota information."""
client = BetaAnalyticsDataClient()
request = RunReportRequest(
property=f"properties/{property_id}",
return_property_quota=True,
dimensions=[Dimension(name="country")],
metrics=[Metric(name="activeUsers")],
date_ranges=[DateRange(start_date="7daysAgo", end_date="today")],
)
response = client.run_report(request)
# [START analyticsdata_run_report_with_property_quota_print_response]
if response.property_quota:
print(
f"Tokens per day quota consumed: {response.property_quota.tokens_per_day.consumed}, "
f"remaining: {response.property_quota.tokens_per_day.remaining}."
)
print(
f"Tokens per hour quota consumed: {response.property_quota.tokens_per_hour.consumed}, "
f"remaining: {response.property_quota.tokens_per_hour.remaining}."
)
print(
f"Concurrent requests quota consumed: {response.property_quota.concurrent_requests.consumed}, "
f"remaining: {response.property_quota.concurrent_requests.remaining}."
)
print(
f"Server errors per project per hour quota consumed: {response.property_quota.server_errors_per_project_per_hour.consumed}, "
f"remaining: {response.property_quota.server_errors_per_project_per_hour.remaining}."
)
print(
f"Potentially thresholded requests per hour quota consumed: {response.property_quota.potentially_thresholded_requests_per_hour.consumed}, "
f"remaining: {response.property_quota.potentially_thresholded_requests_per_hour.remaining}."
)
# [END analyticsdata_run_report_with_property_quota_print_response]
# [END analyticsdata_run_report_with_property_quota]
if __name__ == "__main__":
run_sample()<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import sys
import os
from recommonmark.parser import CommonMarkParser
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
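# With CommonMarkParser registered above, Markdown sources build alongside
# reST -- e.g. a docs/intro.md (hypothetical file) is rendered just as
# intro.rst would be.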
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Frank'
copyright = u'2017, Kyle Fuller'
author = u'Kyle Fuller'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
html_sidebars = {
'index': ['sidebar_intro.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html', 'searchbox.html'],
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False<|fim▁hole|>html_show_sourcelink = True
html_show_sphinx = False
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Frankdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Frank.tex', u'Frank Documentation',
u'Kyle Fuller', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'frank', u'Frank Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Frank', u'Frank Documentation',
author, 'Frank', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | |
<|file_name|>archive_unix.go<|end_file_name|><|fim▁begin|>// +build !windows
package daemon
import (
"os"
"path/filepath"
"github.com/docker/docker/container"
)
// checkIfPathIsInAVolume checks if the path is in a volume. If it is, it
// cannot be in a read-only volume. If it is not in a volume, the container
// cannot be configured with a read-only rootfs.
func checkIfPathIsInAVolume(container *container.Container, absPath string) (bool, error) {
var toVolume bool
for _, mnt := range container.MountPoints {
if toVolume = mnt.HasResource(absPath); toVolume {
if mnt.RW {
break
}
return false, ErrVolumeReadonly
}
}
return toVolume, nil
}
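// Hedged sketch of a typical caller (simplified; the ErrRootFSReadOnly error
// name is assumed here): a write is allowed only into a writable volume or
// onto a writable rootfs.
//
//	toVolume, err := checkIfPathIsInAVolume(container, absPath)
//	if err != nil {
//		return err // e.g. ErrVolumeReadonly
//	}
//	if !toVolume && container.HostConfig.ReadonlyRootfs {
//		return ErrRootFSReadOnly
//	}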
func fixPermissions(source, destination string, uid, gid int, destExisted bool) error {
// If the destination didn't already exist, or the destination isn't a
// directory, then we should Lchown the destination. Otherwise, we shouldn't
// Lchown the destination.
destStat, err := os.Stat(destination)
if err != nil {
// This should *never* be reached, because the destination must've already
// been created while untar-ing the context.<|fim▁hole|>
// We Walk on the source rather than on the destination because we don't
// want to change permissions on things we haven't created or modified.
return filepath.Walk(source, func(fullpath string, info os.FileInfo, err error) error {
// Do not alter the walk root iff. it existed before, as it doesn't fall under
// the domain of "things we should chown".
if !doChownDestination && (source == fullpath) {
return nil
}
// Path is prefixed by source: substitute with destination instead.
cleaned, err := filepath.Rel(source, fullpath)
if err != nil {
return err
}
fullpath = filepath.Join(destination, cleaned)
return os.Lchown(fullpath, uid, gid)
})
}<|fim▁end|> | return err
}
doChownDestination := !destExisted || !destStat.IsDir() |
<|file_name|>settings_production.py<|end_file_name|><|fim▁begin|>#inherits from standard local settings
from bukkakegram.settings import *
import dj_database_url
DATABASES['default'] = dj_database_url.config()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
ALLOWED_HOSTS = ['.herokuapp.com', '*',]
DEBUG = config('DEBUG', cast=bool)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
<|fim▁hole|>AWS_SECRET_ACCESS_KEY = config('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = config('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
AWS_S3_OBJECT_PARAMETERS = {
'CacheControl': 'max-age=86400',
}
# AWS_LOCATION = 'static'
# STATIC_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN, AWS_LOCATION)
# STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# media upload files on S3 AWS
DEFAULT_FILE_STORAGE = 'bukkakegram.storage_backends.MediaStorage'
# force overwrite speed up AWS
THUMBNAIL_FORCE_OVERWRITE = True
# RUN MAILER
EMAIL_USE_TLS = True
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = config('EMAIL_HOST')
EMAIL_HOST_USER = config('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = config('EMAIL_HOST_PASSWORD')
EMAIL_PORT = config('EMAIL_PORTS')
DEFAULT_FROM_EMAIL = config('DEFAULT_FROM_EMAIL')<|fim▁end|> | # Static files deploy in S3 AWS
AWS_ACCESS_KEY_ID = config('AWS_ACCESS_KEY_ID') |
<|file_name|>icmp.py<|end_file_name|><|fim▁begin|># $Id: icmp.py,v 1.1.1.1 2005/10/29 18:20:48 provos Exp $
from dpkt import Packet, in_cksum as _icmp_cksum
import ip
# Types (icmp_type) and codes (icmp_code) -
# http://www.iana.org/assignments/icmp-parameters
ICMP_CODE_NONE = 0 # for types without codes
ICMP_ECHOREPLY = 0 # echo reply
ICMP_UNREACH = 3 # dest unreachable, codes:
ICMP_UNREACH_NET = 0 # bad net
ICMP_UNREACH_HOST = 1 # bad host<|fim▁hole|>ICMP_UNREACH_SRCFAIL = 5 # src route failed
ICMP_UNREACH_NET_UNKNOWN = 6 # unknown net
ICMP_UNREACH_HOST_UNKNOWN = 7 # unknown host
ICMP_UNREACH_ISOLATED = 8 # src host isolated
ICMP_UNREACH_NET_PROHIB = 9 # for crypto devs
ICMP_UNREACH_HOST_PROHIB = 10 # ditto
ICMP_UNREACH_TOSNET = 11 # bad tos for net
ICMP_UNREACH_TOSHOST = 12 # bad tos for host
ICMP_UNREACH_FILTER_PROHIB = 13 # prohibited access
ICMP_UNREACH_HOST_PRECEDENCE = 14 # precedence error
ICMP_UNREACH_PRECEDENCE_CUTOFF = 15 # precedence cutoff
ICMP_SRCQUENCH = 4 # packet lost, slow down
ICMP_REDIRECT = 5 # shorter route, codes:
ICMP_REDIRECT_NET = 0 # for network
ICMP_REDIRECT_HOST = 1 # for host
ICMP_REDIRECT_TOSNET = 2 # for tos and net
ICMP_REDIRECT_TOSHOST = 3 # for tos and host
ICMP_ALTHOSTADDR = 6 # alternate host address
ICMP_ECHO = 8 # echo service
ICMP_RTRADVERT = 9 # router advertise, codes:
ICMP_RTRADVERT_NORMAL = 0 # normal
ICMP_RTRADVERT_NOROUTE_COMMON = 16 # selective routing
ICMP_RTRSOLICIT = 10 # router solicitation
ICMP_TIMEXCEED = 11 # time exceeded, code:
ICMP_TIMEXCEED_INTRANS = 0 # ttl==0 in transit
ICMP_TIMEXCEED_REASS = 1 # ttl==0 in reass
ICMP_PARAMPROB = 12 # ip header bad
ICMP_PARAMPROB_ERRATPTR = 0 # req. opt. absent
ICMP_PARAMPROB_OPTABSENT = 1 # req. opt. absent
ICMP_PARAMPROB_LENGTH = 2 # bad length
ICMP_TSTAMP = 13 # timestamp request
ICMP_TSTAMPREPLY = 14 # timestamp reply
ICMP_INFO = 15 # information request
ICMP_INFOREPLY = 16 # information reply
ICMP_MASK = 17 # address mask request
ICMP_MASKREPLY = 18 # address mask reply
ICMP_TRACEROUTE = 30 # traceroute
ICMP_DATACONVERR = 31 # data conversion error
ICMP_MOBILE_REDIRECT = 32 # mobile host redirect
ICMP_IP6_WHEREAREYOU = 33 # IPv6 where-are-you
ICMP_IP6_IAMHERE = 34 # IPv6 i-am-here
ICMP_MOBILE_REG = 35 # mobile registration req
ICMP_MOBILE_REGREPLY = 36 # mobile registration reply
ICMP_DNS = 37 # domain name request
ICMP_DNSREPLY = 38 # domain name reply
ICMP_SKIP = 39 # SKIP
ICMP_PHOTURIS = 40 # Photuris
ICMP_PHOTURIS_UNKNOWN_INDEX = 0 # unknown sec index
ICMP_PHOTURIS_AUTH_FAILED = 1 # auth failed
ICMP_PHOTURIS_DECOMPRESS_FAILED = 2 # decompress failed
ICMP_PHOTURIS_DECRYPT_FAILED = 3 # decrypt failed
ICMP_PHOTURIS_NEED_AUTHN = 4 # no authentication
ICMP_PHOTURIS_NEED_AUTHZ = 5 # no authorization
ICMP_TYPE_MAX = 40
class ICMP(Packet):
"""Internet Control Message Protocol."""
__hdr__ = (
('type', 'B', 8),
('code', 'B', 0),
('sum', 'H', 0)
)
class Echo(Packet):
__hdr__ = (('id', 'H', 0), ('seq', 'H', 0))
class Quote(Packet):
__hdr__ = (('pad', 'I', 0),)
def unpack(self, buf):
Packet.unpack(self, buf)
self.data = self.ip = ip.IP(self.data)
class Unreach(Quote):
__hdr__ = (('pad', 'H', 0), ('mtu', 'H', 0))
class Quench(Quote):
pass
class Redirect(Quote):
__hdr__ = (('gw', 'I', 0),)
class ParamProbe(Quote):
__hdr__ = (('ptr', 'B', 0), ('pad1', 'B', 0), ('pad2', 'H', 0))
class TimeExceed(Quote):
pass
_typesw = { 0:Echo, 3:Unreach, 4:Quench, 5:Redirect, 8:Echo,
11:TimeExceed }
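    # Illustrative round-trip (hypothetical values): type 8 dispatches to
    # Echo via _typesw above, so the parsed packet grows an `echo` attribute.
    #
    #   pkt = ICMP(type=ICMP_ECHO, data=ICMP.Echo(id=1, seq=7, data='ping'))
    #   ICMP(str(pkt)).echo.seq  # -> 7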
def unpack(self, buf):
Packet.unpack(self, buf)
try:
self.data = self._typesw[self.type](self.data)
setattr(self, self.data.__class__.__name__.lower(), self.data)
except:
self.data = buf
def __str__(self):
if not self.sum:
self.sum = _icmp_cksum(Packet.__str__(self))
return Packet.__str__(self)<|fim▁end|> | ICMP_UNREACH_PROTO = 2 # bad protocol
ICMP_UNREACH_PORT = 3 # bad port
ICMP_UNREACH_NEEDFRAG = 4 # IP_DF caused drop |
<|file_name|>const.py<|end_file_name|><|fim▁begin|>"""Proides the constants needed for component."""
ATTR_APP_ID = "app_id"
ATTR_APP_NAME = "app_name"
ATTR_INPUT_SOURCE = "source"
ATTR_INPUT_SOURCE_LIST = "source_list"
ATTR_MEDIA_ALBUM_ARTIST = "media_album_artist"
ATTR_MEDIA_ALBUM_NAME = "media_album_name"
ATTR_MEDIA_ARTIST = "media_artist"
ATTR_MEDIA_CHANNEL = "media_channel"
ATTR_MEDIA_CONTENT_ID = "media_content_id"
ATTR_MEDIA_CONTENT_TYPE = "media_content_type"
ATTR_MEDIA_DURATION = "media_duration"
ATTR_MEDIA_ENQUEUE = "enqueue"
ATTR_MEDIA_EPISODE = "media_episode"
ATTR_MEDIA_PLAYLIST = "media_playlist"
ATTR_MEDIA_POSITION = "media_position"
ATTR_MEDIA_POSITION_UPDATED_AT = "media_position_updated_at"
ATTR_MEDIA_SEASON = "media_season"
ATTR_MEDIA_SEEK_POSITION = "seek_position"
ATTR_MEDIA_SERIES_TITLE = "media_series_title"
ATTR_MEDIA_SHUFFLE = "shuffle"
ATTR_MEDIA_TITLE = "media_title"
ATTR_MEDIA_TRACK = "media_track"
ATTR_MEDIA_VOLUME_LEVEL = "volume_level"
ATTR_MEDIA_VOLUME_MUTED = "is_volume_muted"
ATTR_SOUND_MODE = "sound_mode"
ATTR_SOUND_MODE_LIST = "sound_mode_list"
DOMAIN = "media_player"
MEDIA_TYPE_MUSIC = "music"
MEDIA_TYPE_TVSHOW = "tvshow"<|fim▁hole|>MEDIA_TYPE_PLAYLIST = "playlist"
MEDIA_TYPE_IMAGE = "image"
MEDIA_TYPE_URL = "url"
MEDIA_TYPE_GAME = "game"
MEDIA_TYPE_APP = "app"
SERVICE_CLEAR_PLAYLIST = "clear_playlist"
SERVICE_PLAY_MEDIA = "play_media"
SERVICE_SELECT_SOUND_MODE = "select_sound_mode"
SERVICE_SELECT_SOURCE = "select_source"
SUPPORT_PAUSE = 1
SUPPORT_SEEK = 2
SUPPORT_VOLUME_SET = 4
SUPPORT_VOLUME_MUTE = 8
SUPPORT_PREVIOUS_TRACK = 16
SUPPORT_NEXT_TRACK = 32
SUPPORT_TURN_ON = 128
SUPPORT_TURN_OFF = 256
SUPPORT_PLAY_MEDIA = 512
SUPPORT_VOLUME_STEP = 1024
SUPPORT_SELECT_SOURCE = 2048
SUPPORT_STOP = 4096
SUPPORT_CLEAR_PLAYLIST = 8192
SUPPORT_PLAY = 16384
SUPPORT_SHUFFLE_SET = 32768
SUPPORT_SELECT_SOUND_MODE = 65536<|fim▁end|> | MEDIA_TYPE_MOVIE = "movie"
MEDIA_TYPE_VIDEO = "video"
MEDIA_TYPE_EPISODE = "episode"
MEDIA_TYPE_CHANNEL = "channel" |
<|file_name|>expn_asy.py<|end_file_name|><|fim▁begin|>"""Precompute the polynomials for the asymptotic expansion of the
generalized exponential integral.
Sources
-------
[1] NIST, Digital Library of Mathematical Functions,
http://dlmf.nist.gov/8.20#ii
"""
from __future__ import division, print_function, absolute_import
import os
import warnings
try:
# Can remove when sympy #11255 is resolved; see
# https://github.com/sympy/sympy/issues/11255
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
import sympy
from sympy import Poly
x = sympy.symbols('x')
except ImportError:
pass
def generate_A(K):
A = [Poly(1, x)]<|fim▁hole|> for k in range(K):
A.append(Poly(1 - 2*k*x, x)*A[k] + Poly(x*(x + 1))*A[k].diff())
return A
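# The loop in generate_A implements the DLMF 8.20(ii) recurrence
#     A_0(x) = 1,
#     A_{k+1}(x) = (1 - 2*k*x) * A_k(x) + x*(x + 1) * A_k'(x),
# so, for example, A_1(x) = 1 and A_2(x) = 1 - 2*x.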
WARNING = """\
/* This file was automatically generated by _precompute/expn_asy.py.
* Do not edit it manually!
*/
"""
def main():
print(__doc__)
fn = os.path.join('..', 'cephes', 'expn.h')
K = 12
A = generate_A(K)
with open(fn + '.new', 'w') as f:
f.write(WARNING)
f.write("#define nA {}\n".format(len(A)))
for k, Ak in enumerate(A):
tmp = ', '.join([str(x.evalf(18)) for x in Ak.coeffs()])
f.write("double A{}[] = {{{}}};\n".format(k, tmp))
tmp = ", ".join(["A{}".format(k) for k in range(K + 1)])
f.write("double *A[] = {{{}}};\n".format(tmp))
tmp = ", ".join([str(Ak.degree()) for Ak in A])
f.write("int Adegs[] = {{{}}};\n".format(tmp))
os.rename(fn + '.new', fn)
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>ModelInstanceCollection.js<|end_file_name|><|fim▁begin|>/*global define*/
define([
'../Core/BoundingSphere',
'../Core/Cartesian3',
'../Core/clone',
'../Core/Color',
'../Core/ComponentDatatype',
'../Core/defaultValue',
'../Core/defined',
'../Core/defineProperties',
'../Core/destroyObject',
'../Core/DeveloperError',
'../Core/Matrix4',
'../Core/PrimitiveType',
'../Core/RuntimeError',
'../Core/Transforms',
'../Renderer/Buffer',
'../Renderer/BufferUsage',
'../Renderer/DrawCommand',
'../Renderer/ShaderSource',
'../ThirdParty/when',
'./getAttributeOrUniformBySemantic',
'./Model',
'./ModelInstance',
'./SceneMode',
'./ShadowMode'
], function(
BoundingSphere,
Cartesian3,
clone,
Color,
ComponentDatatype,
defaultValue,
defined,
defineProperties,
destroyObject,
DeveloperError,
Matrix4,
PrimitiveType,
RuntimeError,
Transforms,
Buffer,
BufferUsage,
DrawCommand,
ShaderSource,
when,
getAttributeOrUniformBySemantic,
Model,
ModelInstance,
SceneMode,
ShadowMode) {
'use strict';
var LoadState = {
NEEDS_LOAD : 0,
LOADING : 1,
LOADED : 2,
FAILED : 3
};
/**
* A 3D model instance collection. All instances reference the same underlying model, but have unique
* per-instance properties like model matrix, pick id, etc.
*
* Instances are rendered relative-to-center and for best results instances should be positioned close to one another.
* Otherwise there may be precision issues if, for example, instances are placed on opposite sides of the globe.
*
* @alias ModelInstanceCollection
* @constructor
*
* @param {Object} options Object with the following properties:
* @param {Object[]} [options.instances] An array of instances, where each instance contains a modelMatrix and optional batchId when options.batchTable is defined.
* @param {Cesium3DTileBatchTable} [options.batchTable] The batch table of the instanced 3D Tile.
* @param {String} [options.url] The url to the .gltf file.
* @param {Object} [options.headers] HTTP headers to send with the request.
     * @param {Object} [options.requestType] The request type, used for request prioritization.
* @param {Object|ArrayBuffer|Uint8Array} [options.gltf] The object for the glTF JSON or an arraybuffer of Binary glTF defined by the CESIUM_binary_glTF extension.
* @param {String} [options.basePath=''] The base path that paths in the glTF JSON are relative to.
* @param {Boolean} [options.dynamic=false] Hint if instance model matrices will be updated frequently.
* @param {Boolean} [options.show=true] Determines if the collection will be shown.
* @param {Boolean} [options.allowPicking=true] When <code>true</code>, each instance is pickable with {@link Scene#pick}.
* @param {Boolean} [options.asynchronous=true] Determines if model WebGL resource creation will be spread out over several frames or block until completion once all glTF files are loaded.
* @param {Boolean} [options.incrementallyLoadTextures=true] Determine if textures may continue to stream in after the model is loaded.
* @param {ShadowMode} [options.shadows=ShadowMode.ENABLED] Determines whether the collection casts or receives shadows from each light source.
* @param {Boolean} [options.debugShowBoundingVolume=false] For debugging only. Draws the bounding sphere for the collection.
* @param {Boolean} [options.debugWireframe=false] For debugging only. Draws the instances in wireframe.
*
* @exception {DeveloperError} Must specify either <options.gltf> or <options.url>, but not both.
* @exception {DeveloperError} Shader program cannot be optimized for instancing. Parameters cannot have any of the following semantics: MODEL, MODELINVERSE, MODELVIEWINVERSE, MODELVIEWPROJECTIONINVERSE, MODELINVERSETRANSPOSE.
*
* @private
*/
function ModelInstanceCollection(options) {
options = defaultValue(options, defaultValue.EMPTY_OBJECT);
//>>includeStart('debug', pragmas.debug);
if (!defined(options.gltf) && !defined(options.url)) {
throw new DeveloperError('Either options.gltf or options.url is required.');
}
if (defined(options.gltf) && defined(options.url)) {
throw new DeveloperError('Cannot pass in both options.gltf and options.url.');
}
//>>includeEnd('debug');
this.show = defaultValue(options.show, true);
this._instancingSupported = false;
this._dynamic = defaultValue(options.dynamic, false);
this._allowPicking = defaultValue(options.allowPicking, true);
this._cull = defaultValue(options.cull, true); // Undocumented option
this._ready = false;
this._readyPromise = when.defer();
this._state = LoadState.NEEDS_LOAD;
this._dirty = false;
this._instances = createInstances(this, options.instances);
// When the model instance collection is backed by an i3dm tile,
// use its batch table resources to modify the shaders, attributes, and uniform maps.
this._batchTable = options.batchTable;
this._model = undefined;
this._vertexBufferTypedArray = undefined; // Hold onto the vertex buffer contents when dynamic is true
this._vertexBuffer = undefined;
this._batchIdBuffer = undefined;
this._instancedUniformsByProgram = undefined;
this._drawCommands = [];
this._pickCommands = [];
this._modelCommands = undefined;
this._boundingSphere = createBoundingSphere(this);
this._center = Cartesian3.clone(this._boundingSphere.center);
this._rtcTransform = new Matrix4();
this._rtcModelView = new Matrix4(); // Holds onto uniform
this._mode = undefined;
this.modelMatrix = Matrix4.clone(Matrix4.IDENTITY);
this._modelMatrix = Matrix4.clone(this.modelMatrix);
// Passed on to Model
this._url = options.url;
this._headers = options.headers;
this._requestType = options.requestType;
this._gltf = options.gltf;
this._basePath = options.basePath;
this._asynchronous = options.asynchronous;
this._incrementallyLoadTextures = options.incrementallyLoadTextures;
this._upAxis = options.upAxis; // Undocumented option
this.shadows = defaultValue(options.shadows, ShadowMode.ENABLED);
this._shadows = this.shadows;
this.debugShowBoundingVolume = defaultValue(options.debugShowBoundingVolume, false);
this._debugShowBoundingVolume = false;
this.debugWireframe = defaultValue(options.debugWireframe, false);
this._debugWireframe = false;
}
defineProperties(ModelInstanceCollection.prototype, {
allowPicking : {
get : function() {
return this._allowPicking;
}
},
length : {
get : function() {
return this._instances.length;
}
},
activeAnimations : {
get : function() {
return this._model.activeAnimations;
}
},
ready : {
get : function() {
return this._ready;
}
},
readyPromise : {
get : function() {
return this._readyPromise.promise;
}
}
});
function createInstances(collection, instancesOptions) {
instancesOptions = defaultValue(instancesOptions, []);
var length = instancesOptions.length;
var instances = new Array(length);
for (var i = 0; i < length; ++i) {
var instanceOptions = instancesOptions[i];
var modelMatrix = instanceOptions.modelMatrix;
var instanceId = defaultValue(instanceOptions.batchId, i);
instances[i] = new ModelInstance(collection, modelMatrix, instanceId);
}
return instances;
}
function createBoundingSphere(collection) {
var instancesLength = collection.length;
var points = new Array(instancesLength);
for (var i = 0; i < instancesLength; ++i) {
points[i] = Matrix4.getTranslation(collection._instances[i]._modelMatrix, new Cartesian3());
}
return BoundingSphere.fromPoints(points);
}
var scratchCartesian = new Cartesian3();
var scratchMatrix = new Matrix4();
ModelInstanceCollection.prototype.expandBoundingSphere = function(instanceModelMatrix) {
var translation = Matrix4.getTranslation(instanceModelMatrix, scratchCartesian);
BoundingSphere.expand(this._boundingSphere, translation, this._boundingSphere);
};
function getInstancedUniforms(collection, programName) {
if (defined(collection._instancedUniformsByProgram)) {
return collection._instancedUniformsByProgram[programName];
}
var instancedUniformsByProgram = {};
collection._instancedUniformsByProgram = instancedUniformsByProgram;
// When using CESIUM_RTC_MODELVIEW the CESIUM_RTC center is ignored. Instances are always rendered relative-to-center.
var modelSemantics = ['MODEL', 'MODELVIEW', 'CESIUM_RTC_MODELVIEW', 'MODELVIEWPROJECTION', 'MODELINVERSE', 'MODELVIEWINVERSE', 'MODELVIEWPROJECTIONINVERSE', 'MODELINVERSETRANSPOSE', 'MODELVIEWINVERSETRANSPOSE'];
var supportedSemantics = ['MODELVIEW', 'CESIUM_RTC_MODELVIEW', 'MODELVIEWPROJECTION', 'MODELVIEWINVERSETRANSPOSE'];
var gltf = collection._model.gltf;
var techniques = gltf.techniques;
for (var techniqueName in techniques) {
if (techniques.hasOwnProperty(techniqueName)) {
var technique = techniques[techniqueName];
var parameters = technique.parameters;
var uniforms = technique.uniforms;
var program = technique.program;
// Different techniques may share the same program, skip if already processed.
// This assumes techniques that share a program do not declare different semantics for the same uniforms.
if (!defined(instancedUniformsByProgram[program])) {
var uniformMap = {};
instancedUniformsByProgram[program] = uniformMap;
for (var uniformName in uniforms) {
if (uniforms.hasOwnProperty(uniformName)) {
var parameterName = uniforms[uniformName];
var parameter = parameters[parameterName];
var semantic = parameter.semantic;
if (defined(semantic) && (modelSemantics.indexOf(semantic) > -1)) {
if (supportedSemantics.indexOf(semantic) > -1) {
uniformMap[uniformName] = semantic;
} else {
throw new RuntimeError('Shader program cannot be optimized for instancing. ' +
'Parameter "' + parameter + '" in program "' + programName +
'" uses unsupported semantic "' + semantic + '"'
);
}
}
}
}
}
}
}
return instancedUniformsByProgram[programName];
}
var vertexShaderCached;
function getVertexShaderCallback(collection) {
return function(vs, programName) {
var instancedUniforms = getInstancedUniforms(collection, programName);
var usesBatchTable = defined(collection._batchTable);
var renamedSource = ShaderSource.replaceMain(vs, 'czm_instancing_main');
var globalVarsHeader = '';
var globalVarsMain = '';
for (var uniform in instancedUniforms) {
if (instancedUniforms.hasOwnProperty(uniform)) {
var semantic = instancedUniforms[uniform];
var varName;
if (semantic === 'MODELVIEW' || semantic === 'CESIUM_RTC_MODELVIEW') {
varName = 'czm_instanced_modelView';
} else if (semantic === 'MODELVIEWPROJECTION') {
varName = 'czm_instanced_modelViewProjection';
globalVarsHeader += 'mat4 czm_instanced_modelViewProjection;\n';
globalVarsMain += 'czm_instanced_modelViewProjection = czm_projection * czm_instanced_modelView;\n';
} else if (semantic === 'MODELVIEWINVERSETRANSPOSE') {
varName = 'czm_instanced_modelViewInverseTranspose';
globalVarsHeader += 'mat3 czm_instanced_modelViewInverseTranspose;\n';
globalVarsMain += 'czm_instanced_modelViewInverseTranspose = mat3(czm_instanced_modelView);\n';
}
// Remove the uniform declaration
var regex = new RegExp('uniform.*' + uniform + '.*');
renamedSource = renamedSource.replace(regex, '');
// Replace all occurrences of the uniform with the global variable
regex = new RegExp(uniform + '\\b', 'g');
renamedSource = renamedSource.replace(regex, varName);
}
}
// czm_instanced_model is the model matrix of the instance relative to center
// czm_instanced_modifiedModelView is the transform from the center to view
// czm_instanced_nodeTransform is the local offset of the node within the model
var uniforms =
'uniform mat4 czm_instanced_modifiedModelView;\n' +
'uniform mat4 czm_instanced_nodeTransform;\n';
var batchIdAttribute = usesBatchTable ? 'attribute float a_batchId;\n' : '';
var instancedSource =
uniforms +
globalVarsHeader +
'mat4 czm_instanced_modelView;\n' +
'attribute vec4 czm_modelMatrixRow0;\n' +
'attribute vec4 czm_modelMatrixRow1;\n' +
'attribute vec4 czm_modelMatrixRow2;\n' +
batchIdAttribute +
renamedSource +
'void main()\n' +
'{\n' +
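            // czm_modelMatrixRow0-2 hold the first three rows of the instance's model
            // matrix (the fourth row is always (0, 0, 0, 1)). mat4() consumes its
            // arguments column-major, so the interleaved ordering below transposes the
            // rows back into a standard matrix.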
' mat4 czm_instanced_model = mat4(czm_modelMatrixRow0.x, czm_modelMatrixRow1.x, czm_modelMatrixRow2.x, 0.0, czm_modelMatrixRow0.y, czm_modelMatrixRow1.y, czm_modelMatrixRow2.y, 0.0, czm_modelMatrixRow0.z, czm_modelMatrixRow1.z, czm_modelMatrixRow2.z, 0.0, czm_modelMatrixRow0.w, czm_modelMatrixRow1.w, czm_modelMatrixRow2.w, 1.0);\n' +
' czm_instanced_modelView = czm_instanced_modifiedModelView * czm_instanced_model * czm_instanced_nodeTransform;\n' +
globalVarsMain +
' czm_instancing_main();\n' +
'}';
vertexShaderCached = instancedSource;
if (usesBatchTable) {
instancedSource = collection._batchTable.getVertexShaderCallback(true, 'a_batchId')(instancedSource);
}
return instancedSource;
};
}
function getFragmentShaderCallback(collection) {
return function(fs) {
var batchTable = collection._batchTable;
if (defined(batchTable)) {
var gltf = collection._model.gltf;
var diffuseUniformName = getAttributeOrUniformBySemantic(gltf, '_3DTILESDIFFUSE');
fs = batchTable.getFragmentShaderCallback(true, diffuseUniformName)(fs);
}
return fs;
};
}
function getPickVertexShaderCallback(collection) {
return function (vs) {
// Use the vertex shader that was generated earlier
vs = vertexShaderCached;
var usesBatchTable = defined(collection._batchTable);
var allowPicking = collection._allowPicking;
if (usesBatchTable) {
vs = collection._batchTable.getPickVertexShaderCallback('a_batchId')(vs);
} else if (allowPicking) {
vs = ShaderSource.createPickVertexShaderSource(vs);
}
return vs;
};
}
function getPickFragmentShaderCallback(collection) {
return function(fs) {
var usesBatchTable = defined(collection._batchTable);
var allowPicking = collection._allowPicking;
if (usesBatchTable) {
fs = collection._batchTable.getPickFragmentShaderCallback()(fs);
} else if (allowPicking) {
fs = ShaderSource.createPickFragmentShaderSource(fs, 'varying');
}
return fs;
};
}
function createModifiedModelView(collection, context) {
return function() {
return Matrix4.multiply(context.uniformState.view, collection._rtcTransform, collection._rtcModelView);
};
}
function createNodeTransformFunction(node) {
return function() {
return node.computedMatrix;
};
}
function getUniformMapCallback(collection, context) {
return function(uniformMap, programName, node) {
uniformMap = clone(uniformMap);
uniformMap.czm_instanced_modifiedModelView = createModifiedModelView(collection, context);
uniformMap.czm_instanced_nodeTransform = createNodeTransformFunction(node);
// Remove instanced uniforms from the uniform map
var instancedUniforms = getInstancedUniforms(collection, programName);
for (var uniform in instancedUniforms) {
if (instancedUniforms.hasOwnProperty(uniform)) {
delete uniformMap[uniform];
}
}
if (defined(collection._batchTable)) {
uniformMap = collection._batchTable.getUniformMapCallback()(uniformMap);
}
return uniformMap;
};
}
function getPickUniformMapCallback(collection) {
return function(uniformMap) {
// Uses the uniform map generated from getUniformMapCallback
if (defined(collection._batchTable)) {
uniformMap = collection._batchTable.getPickUniformMapCallback()(uniformMap);
}
return uniformMap;
};
}
function getVertexShaderNonInstancedCallback(collection) {
return function(vs) {
if (defined(collection._batchTable)) {
vs = collection._batchTable.getVertexShaderCallback(true, 'a_batchId')(vs);
// Treat a_batchId as a uniform rather than a vertex attribute
vs = 'uniform float a_batchId;\n' + vs;
}
return vs;
};
}
function getPickVertexShaderNonInstancedCallback(collection) {
return function(vs) {
if (defined(collection._batchTable)) {
vs = collection._batchTable.getPickVertexShaderCallback('a_batchId')(vs);
// Treat a_batchId as a uniform rather than a vertex attribute
vs = 'uniform float a_batchId;\n' + vs;
}
return vs;
};
}
function getPickFragmentShaderNonInstancedCallback(collection) {
return function(fs) {
var usesBatchTable = defined(collection._batchTable);
var allowPicking = collection._allowPicking;
if (usesBatchTable) {
fs = collection._batchTable.getPickFragmentShaderCallback()(fs);
} else if (allowPicking) {
fs = ShaderSource.createPickFragmentShaderSource(fs, 'uniform');
}
return fs;
};
}
function getUniformMapNonInstancedCallback(collection) {
return function(uniformMap) {
if (defined(collection._batchTable)) {
uniformMap = collection._batchTable.getUniformMapCallback()(uniformMap);
}
return uniformMap;
};
}
function getVertexBufferTypedArray(collection) {
var instances = collection._instances;
var instancesLength = collection.length;
var collectionCenter = collection._center;
var vertexSizeInFloats = 12;
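    // 12 floats per instance: the top three rows of the 4x4 model matrix (4 floats
    // each); the bottom row is always (0, 0, 0, 1), so it is not stored.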
var bufferData = collection._vertexBufferTypedArray;
if (!defined(bufferData)) {
bufferData = new Float32Array(instancesLength * vertexSizeInFloats);
}
if (collection._dynamic) {
// Hold onto the buffer data so we don't have to allocate new memory every frame.
collection._vertexBufferTypedArray = bufferData;
}
for (var i = 0; i < instancesLength; ++i) {
var modelMatrix = instances[i]._modelMatrix;
// Instance matrix is relative to center
var instanceMatrix = Matrix4.clone(modelMatrix, scratchMatrix);
instanceMatrix[12] -= collectionCenter.x;
instanceMatrix[13] -= collectionCenter.y;
instanceMatrix[14] -= collectionCenter.z;
var offset = i * vertexSizeInFloats;
// First three rows of the model matrix
bufferData[offset + 0] = instanceMatrix[0];
bufferData[offset + 1] = instanceMatrix[4];
bufferData[offset + 2] = instanceMatrix[8];
bufferData[offset + 3] = instanceMatrix[12];
bufferData[offset + 4] = instanceMatrix[1];
bufferData[offset + 5] = instanceMatrix[5];
bufferData[offset + 6] = instanceMatrix[9];
bufferData[offset + 7] = instanceMatrix[13];
bufferData[offset + 8] = instanceMatrix[2];
bufferData[offset + 9] = instanceMatrix[6];
bufferData[offset + 10] = instanceMatrix[10];
bufferData[offset + 11] = instanceMatrix[14];
}
return bufferData;
}
function createVertexBuffer(collection, context) {
var i;
var instances = collection._instances;
var instancesLength = collection.length;
var dynamic = collection._dynamic;
var usesBatchTable = defined(collection._batchTable);
var allowPicking = collection._allowPicking;
if (usesBatchTable) {
var batchIdBufferData = new Uint16Array(instancesLength);
for (i = 0; i < instancesLength; ++i) {
batchIdBufferData[i] = instances[i]._instanceId;
}
collection._batchIdBuffer = Buffer.createVertexBuffer({
context : context,
typedArray : batchIdBufferData,
usage : BufferUsage.STATIC_DRAW
});
}
if (allowPicking && !usesBatchTable) {
var pickIdBuffer = new Uint8Array(instancesLength * 4);
for (i = 0; i < instancesLength; ++i) {
var pickId = collection._pickIds[i];
var pickColor = pickId.color;
var offset = i * 4;
pickIdBuffer[offset] = Color.floatToByte(pickColor.red);
pickIdBuffer[offset + 1] = Color.floatToByte(pickColor.green);
pickIdBuffer[offset + 2] = Color.floatToByte(pickColor.blue);
pickIdBuffer[offset + 3] = Color.floatToByte(pickColor.alpha);
}
collection._pickIdBuffer = Buffer.createVertexBuffer({
context : context,
typedArray : pickIdBuffer,
usage : BufferUsage.STATIC_DRAW
});
}
var vertexBufferTypedArray = getVertexBufferTypedArray(collection);
collection._vertexBuffer = Buffer.createVertexBuffer({
context : context,
typedArray : vertexBufferTypedArray,
usage : dynamic ? BufferUsage.STREAM_DRAW : BufferUsage.STATIC_DRAW
});<|fim▁hole|>
function updateVertexBuffer(collection) {
var vertexBufferTypedArray = getVertexBufferTypedArray(collection);
collection._vertexBuffer.copyFromArrayView(vertexBufferTypedArray);
}
function createPickIds(collection, context) {
    // PERFORMANCE_IDEA: we could skip the pick buffer completely by allocating
    // a contiguous range of pickIds and then converting the base pickId + batchId
    // to RGBA in the shader. The only concern is precision, which might not be
    // an issue in WebGL 2.
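    // A rough sketch of that idea (hypothetical GLSL, not part of this implementation):
    //   float id = u_basePickId + a_batchId;  // u_basePickId would be a new uniform
    //   gl_FragColor = floor(mod(vec4(id) / vec4(1.0, 256.0, 65536.0, 16777216.0), 256.0)) / 255.0;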
var instances = collection._instances;
var instancesLength = instances.length;
var pickIds = new Array(instancesLength);
for (var i = 0; i < instancesLength; ++i) {
pickIds[i] = context.createPickId(instances[i]);
}
return pickIds;
}
function createModel(collection, context) {
var instancingSupported = collection._instancingSupported;
var usesBatchTable = defined(collection._batchTable);
var allowPicking = collection._allowPicking;
var modelOptions = {
url : collection._url,
headers : collection._headers,
requestType : collection._requestType,
gltf : collection._gltf,
basePath : collection._basePath,
shadows : collection._shadows,
cacheKey : undefined,
asynchronous : collection._asynchronous,
allowPicking : allowPicking,
incrementallyLoadTextures : collection._incrementallyLoadTextures,
upAxis : collection._upAxis,
precreatedAttributes : undefined,
vertexShaderLoaded : undefined,
fragmentShaderLoaded : undefined,
uniformMapLoaded : undefined,
pickVertexShaderLoaded : undefined,
pickFragmentShaderLoaded : undefined,
pickUniformMapLoaded : undefined,
ignoreCommands : true
};
if (allowPicking && !usesBatchTable) {
collection._pickIds = createPickIds(collection, context);
}
if (instancingSupported) {
createVertexBuffer(collection, context);
var vertexSizeInFloats = 12;
var componentSizeInBytes = ComponentDatatype.getSizeInBytes(ComponentDatatype.FLOAT);
var instancedAttributes = {
czm_modelMatrixRow0 : {
index : 0, // updated in Model
vertexBuffer : collection._vertexBuffer,
componentsPerAttribute : 4,
componentDatatype : ComponentDatatype.FLOAT,
normalize : false,
offsetInBytes : 0,
strideInBytes : componentSizeInBytes * vertexSizeInFloats,
instanceDivisor : 1
},
czm_modelMatrixRow1 : {
index : 0, // updated in Model
vertexBuffer : collection._vertexBuffer,
componentsPerAttribute : 4,
componentDatatype : ComponentDatatype.FLOAT,
normalize : false,
offsetInBytes : componentSizeInBytes * 4,
strideInBytes : componentSizeInBytes * vertexSizeInFloats,
instanceDivisor : 1
},
czm_modelMatrixRow2 : {
index : 0, // updated in Model
vertexBuffer : collection._vertexBuffer,
componentsPerAttribute : 4,
componentDatatype : ComponentDatatype.FLOAT,
normalize : false,
offsetInBytes : componentSizeInBytes * 8,
strideInBytes : componentSizeInBytes * vertexSizeInFloats,
instanceDivisor : 1
}
};
// When using a batch table, add a batch id attribute
if (usesBatchTable) {
instancedAttributes.a_batchId = {
index : 0, // updated in Model
vertexBuffer : collection._batchIdBuffer,
componentsPerAttribute : 1,
componentDatatype : ComponentDatatype.UNSIGNED_SHORT,
normalize : false,
offsetInBytes : 0,
strideInBytes : 0,
instanceDivisor : 1
};
}
if (allowPicking && !usesBatchTable) {
instancedAttributes.pickColor = {
index : 0, // updated in Model
vertexBuffer : collection._pickIdBuffer,
componentsPerAttribute : 4,
componentDatatype : ComponentDatatype.UNSIGNED_BYTE,
normalize : true,
offsetInBytes : 0,
strideInBytes : 0,
instanceDivisor : 1
};
}
modelOptions.precreatedAttributes = instancedAttributes;
modelOptions.vertexShaderLoaded = getVertexShaderCallback(collection);
modelOptions.fragmentShaderLoaded = getFragmentShaderCallback(collection);
modelOptions.uniformMapLoaded = getUniformMapCallback(collection, context);
modelOptions.pickVertexShaderLoaded = getPickVertexShaderCallback(collection);
modelOptions.pickFragmentShaderLoaded = getPickFragmentShaderCallback(collection);
modelOptions.pickUniformMapLoaded = getPickUniformMapCallback(collection);
if (defined(collection._url)) {
modelOptions.cacheKey = collection._url + '#instanced';
}
} else {
modelOptions.vertexShaderLoaded = getVertexShaderNonInstancedCallback(collection);
modelOptions.fragmentShaderLoaded = getFragmentShaderCallback(collection);
modelOptions.uniformMapLoaded = getUniformMapNonInstancedCallback(collection);
modelOptions.pickVertexShaderLoaded = getPickVertexShaderNonInstancedCallback(collection);
modelOptions.pickFragmentShaderLoaded = getPickFragmentShaderNonInstancedCallback(collection);
modelOptions.pickUniformMapLoaded = getPickUniformMapCallback(collection);
}
if (defined(collection._url)) {
collection._model = Model.fromGltf(modelOptions);
} else {
collection._model = new Model(modelOptions);
}
}
function updateWireframe(collection) {
if (collection._debugWireframe !== collection.debugWireframe) {
collection._debugWireframe = collection.debugWireframe;
// This assumes the original primitive was TRIANGLES and that the triangles
// are connected for the wireframe to look perfect.
var primitiveType = collection.debugWireframe ? PrimitiveType.LINES : PrimitiveType.TRIANGLES;
var commands = collection._drawCommands;
var length = commands.length;
for (var i = 0; i < length; ++i) {
commands[i].primitiveType = primitiveType;
}
}
}
function updateShowBoundingVolume(collection) {
if (collection.debugShowBoundingVolume !== collection._debugShowBoundingVolume) {
collection._debugShowBoundingVolume = collection.debugShowBoundingVolume;
var commands = collection._drawCommands;
var length = commands.length;
for (var i = 0; i < length; ++i) {
commands[i].debugShowBoundingVolume = collection.debugShowBoundingVolume;
}
}
}
function createCommands(collection, drawCommands, pickCommands) {
var commandsLength = drawCommands.length;
var instancesLength = collection.length;
var allowPicking = collection.allowPicking;
var boundingSphere = collection._boundingSphere;
var cull = collection._cull;
for (var i = 0; i < commandsLength; ++i) {
var drawCommand = DrawCommand.shallowClone(drawCommands[i]);
drawCommand.instanceCount = instancesLength;
drawCommand.boundingVolume = boundingSphere;
drawCommand.cull = cull;
collection._drawCommands.push(drawCommand);
if (allowPicking) {
var pickCommand = DrawCommand.shallowClone(pickCommands[i]);
pickCommand.instanceCount = instancesLength;
pickCommand.boundingVolume = boundingSphere;
pickCommand.cull = cull;
collection._pickCommands.push(pickCommand);
}
}
}
function createBatchIdFunction(batchId) {
return function() {
return batchId;
};
}
function createPickColorFunction(color) {
return function() {
return color;
};
}
function createCommandsNonInstanced(collection, drawCommands, pickCommands) {
// When instancing is disabled, create commands for every instance.
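    // This creates drawCommands.length * instancesLength commands;
    // updateCommandsNonInstanced relies on this layout when indexing with
    // (i * instancesLength + j).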
var instances = collection._instances;
var commandsLength = drawCommands.length;
var instancesLength = collection.length;
var allowPicking = collection.allowPicking;
var usesBatchTable = defined(collection._batchTable);
var cull = collection._cull;
for (var i = 0; i < commandsLength; ++i) {
for (var j = 0; j < instancesLength; ++j) {
var drawCommand = DrawCommand.shallowClone(drawCommands[i]);
drawCommand.modelMatrix = new Matrix4(); // Updated in updateCommandsNonInstanced
drawCommand.boundingVolume = new BoundingSphere(); // Updated in updateCommandsNonInstanced
drawCommand.cull = cull;
drawCommand.uniformMap = clone(drawCommand.uniformMap);
if (usesBatchTable) {
drawCommand.uniformMap.a_batchId = createBatchIdFunction(instances[j]._instanceId);
}
collection._drawCommands.push(drawCommand);
if (allowPicking) {
var pickCommand = DrawCommand.shallowClone(pickCommands[i]);
pickCommand.modelMatrix = new Matrix4(); // Updated in updateCommandsNonInstanced
pickCommand.boundingVolume = new BoundingSphere(); // Updated in updateCommandsNonInstanced
pickCommand.cull = cull;
pickCommand.uniformMap = clone(pickCommand.uniformMap);
if (usesBatchTable) {
pickCommand.uniformMap.a_batchId = createBatchIdFunction(instances[j]._instanceId);
} else if (allowPicking) {
var pickId = collection._pickIds[j];
pickCommand.uniformMap.czm_pickColor = createPickColorFunction(pickId.color);
}
collection._pickCommands.push(pickCommand);
}
}
}
}
function updateCommandsNonInstanced(collection) {
var modelCommands = collection._modelCommands;
var commandsLength = modelCommands.length;
var instancesLength = collection.length;
var allowPicking = collection.allowPicking;
var collectionTransform = collection._rtcTransform;
var collectionCenter = collection._center;
for (var i = 0; i < commandsLength; ++i) {
var modelCommand = modelCommands[i];
for (var j = 0; j < instancesLength; ++j) {
var commandIndex = i * instancesLength + j;
var drawCommand = collection._drawCommands[commandIndex];
var instanceMatrix = Matrix4.clone(collection._instances[j]._modelMatrix, scratchMatrix);
instanceMatrix[12] -= collectionCenter.x;
instanceMatrix[13] -= collectionCenter.y;
instanceMatrix[14] -= collectionCenter.z;
instanceMatrix = Matrix4.multiply(collectionTransform, instanceMatrix, scratchMatrix);
var nodeMatrix = modelCommand.modelMatrix;
var modelMatrix = drawCommand.modelMatrix;
Matrix4.multiply(instanceMatrix, nodeMatrix, modelMatrix);
var nodeBoundingSphere = modelCommand.boundingVolume;
var boundingSphere = drawCommand.boundingVolume;
BoundingSphere.transform(nodeBoundingSphere, instanceMatrix, boundingSphere);
if (allowPicking) {
var pickCommand = collection._pickCommands[commandIndex];
Matrix4.clone(modelMatrix, pickCommand.modelMatrix);
BoundingSphere.clone(boundingSphere, pickCommand.boundingVolume);
}
}
}
}
function getModelCommands(model) {
var nodeCommands = model._nodeCommands;
var length = nodeCommands.length;
var drawCommands = [];
var pickCommands = [];
for (var i = 0; i < length; ++i) {
var nc = nodeCommands[i];
if (nc.show) {
drawCommands.push(nc.command);
pickCommands.push(nc.pickCommand);
}
}
return {
draw: drawCommands,
pick: pickCommands
};
}
function updateShadows(collection) {
if (collection.shadows !== collection._shadows) {
collection._shadows = collection.shadows;
var castShadows = ShadowMode.castShadows(collection.shadows);
var receiveShadows = ShadowMode.receiveShadows(collection.shadows);
var drawCommands = collection._drawCommands;
var length = drawCommands.length;
for (var i = 0; i < length; ++i) {
var drawCommand = drawCommands[i];
drawCommand.castShadows = castShadows;
drawCommand.receiveShadows = receiveShadows;
}
}
}
ModelInstanceCollection.prototype.update = function(frameState) {
if (frameState.mode === SceneMode.MORPHING) {
return;
}
if (!this.show) {
return;
}
if (this.length === 0) {
return;
}
var context = frameState.context;
if (this._state === LoadState.NEEDS_LOAD) {
this._state = LoadState.LOADING;
this._instancingSupported = context.instancedArrays;
createModel(this, context);
var that = this;
this._model.readyPromise.otherwise(function(error) {
that._state = LoadState.FAILED;
that._readyPromise.reject(error);
});
}
var instancingSupported = this._instancingSupported;
var model = this._model;
model.update(frameState);
if (model.ready && (this._state === LoadState.LOADING)) {
this._state = LoadState.LOADED;
this._ready = true;
// Expand bounding volume to fit the radius of the loaded model including the model's offset from the center
var modelRadius = model.boundingSphere.radius + Cartesian3.magnitude(model.boundingSphere.center);
this._boundingSphere.radius += modelRadius;
var modelCommands = getModelCommands(model);
this._modelCommands = modelCommands.draw;
if (instancingSupported) {
createCommands(this, modelCommands.draw, modelCommands.pick);
} else {
createCommandsNonInstanced(this, modelCommands.draw, modelCommands.pick);
updateCommandsNonInstanced(this);
}
this._readyPromise.resolve(this);
return;
}
if (this._state !== LoadState.LOADED) {
return;
}
var modeChanged = (frameState.mode !== this._mode);
var modelMatrix = this.modelMatrix;
var modelMatrixChanged = !Matrix4.equals(this._modelMatrix, modelMatrix);
if (modeChanged || modelMatrixChanged) {
this._mode = frameState.mode;
Matrix4.clone(modelMatrix, this._modelMatrix);
var rtcTransform = Matrix4.multiplyByTranslation(this._modelMatrix, this._center, this._rtcTransform);
if (this._mode !== SceneMode.SCENE3D) {
rtcTransform = Transforms.basisTo2D(frameState.mapProjection, rtcTransform, rtcTransform);
}
Matrix4.getTranslation(rtcTransform, this._boundingSphere.center);
}
if (instancingSupported && this._dirty) {
// If at least one instance has moved, assume the collection is now dynamic
this._dynamic = true;
this._dirty = false;
// PERFORMANCE_IDEA: only update dirty sub-sections instead of the whole collection
updateVertexBuffer(this);
}
// If any node changes due to an animation, update the commands. This could be inefficient if the model is
// composed of many nodes and only one changes; however, it is probably fine in the general use case.
// Only applies when instancing is disabled. The instanced shader automatically handles node transformations.
if (!instancingSupported && (model.dirty || this._dirty || modeChanged || modelMatrixChanged)) {
updateCommandsNonInstanced(this);
}
updateShadows(this);
updateWireframe(this);
updateShowBoundingVolume(this);
var passes = frameState.passes;
var commandList = frameState.commandList;
var commands = passes.render ? this._drawCommands : this._pickCommands;
var commandsLength = commands.length;
for (var i = 0; i < commandsLength; ++i) {
commandList.push(commands[i]);
}
};
ModelInstanceCollection.prototype.isDestroyed = function() {
return false;
};
ModelInstanceCollection.prototype.destroy = function() {
this._model = this._model && this._model.destroy();
var pickIds = this._pickIds;
if (defined(pickIds)) {
var length = pickIds.length;
for (var i = 0; i < length; ++i) {
pickIds[i].destroy();
}
}
return destroyObject(this);
};
return ModelInstanceCollection;
});<|fim▁end|> | } |
<|file_name|>trait-item-with-defaultness-fail-semantic.rs<|end_file_name|><|fim▁begin|>#![feature(specialization)] //~ WARN the feature `specialization` is incomplete<|fim▁hole|>fn main() {}
trait X {
default const A: u8; //~ ERROR `default` is only allowed on items in trait impls
default const B: u8 = 0; //~ ERROR `default` is only allowed on items in trait impls
default type D; //~ ERROR `default` is only allowed on items in trait impls
default type C: Ord; //~ ERROR `default` is only allowed on items in trait impls
default fn f1(); //~ ERROR `default` is only allowed on items in trait impls
default fn f2() {} //~ ERROR `default` is only allowed on items in trait impls
}<|fim▁end|> | |
<|file_name|>annotation.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from openerp import fields, models
class Event(models.Model):
_inherit = 'myo.event'
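    # Both models declare the same relation table (myo_event_annotation_rel) with
    # the column roles swapped, so the event/annotation link is stored once and
    # is visible from either side.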
annotation_ids = fields.Many2many(
'myo.annotation',
'myo_event_annotation_rel',
'event_id',
'annotation_id',
'Annotations'
)
<|fim▁hole|>class Annotation(models.Model):
_inherit = 'myo.annotation'
event_ids = fields.Many2many(
'myo.event',
'myo_event_annotation_rel',
'annotation_id',
'event_id',
'Events'
)<|fim▁end|> | |
<|file_name|>SubstanceSpinnerUI.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of Substance Kirill Grouchnikov nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.pushingpixels.substance.internal.ui;
import java.awt.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.EnumSet;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.basic.BasicSpinnerUI;
import javax.swing.text.JTextComponent;
import org.pushingpixels.substance.api.*;
import org.pushingpixels.substance.api.SubstanceConstants.Side;
import org.pushingpixels.substance.internal.utils.*;
import org.pushingpixels.substance.internal.utils.SubstanceCoreUtilities.TextComponentAware;
import org.pushingpixels.substance.internal.utils.border.SubstanceTextComponentBorder;
import org.pushingpixels.substance.internal.utils.icon.TransitionAwareIcon;
/**
* UI for spinners in <b>Substance</b> look and feel.
*
* @author Kirill Grouchnikov
*/
public class SubstanceSpinnerUI extends BasicSpinnerUI {
/**
* Tracks changes to editor, removing the border as necessary.
*/
protected PropertyChangeListener substancePropertyChangeListener;
/**
* The next (increment) button.
*/
protected SubstanceSpinnerButton nextButton;
/**
* The previous (decrement) button.
*/
protected SubstanceSpinnerButton prevButton;
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.ComponentUI#createUI(javax.swing.JComponent)
*/
public static ComponentUI createUI(JComponent comp) {
SubstanceCoreUtilities.testComponentCreationThreadingViolation(comp);
return new SubstanceSpinnerUI();
}
@Override
public void installUI(JComponent c) {
super.installUI(c);
c.putClientProperty(SubstanceCoreUtilities.TEXT_COMPONENT_AWARE,
new TextComponentAware<JSpinner>() {
@Override
public JTextComponent getTextComponent(JSpinner t) {
JComponent editor = t.getEditor();
if ((editor != null)
&& (editor instanceof JSpinner.DefaultEditor)) {
return ((JSpinner.DefaultEditor) editor)
.getTextField();
}
return null;
}
});
}
@Override
public void uninstallUI(JComponent c) {
c.putClientProperty(SubstanceCoreUtilities.TEXT_COMPONENT_AWARE, null);
super.uninstallUI(c);
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.basic.BasicSpinnerUI#createNextButton()
*/
@Override
protected Component createNextButton() {
this.nextButton = new SubstanceSpinnerButton(this.spinner,
SwingConstants.NORTH);
this.nextButton.setFont(this.spinner.getFont());
this.nextButton.setName("Spinner.nextButton");
Icon icon = new TransitionAwareIcon(this.nextButton,
new TransitionAwareIcon.Delegate() {
public Icon getColorSchemeIcon(SubstanceColorScheme scheme) {
int fontSize = SubstanceSizeUtils
.getComponentFontSize(nextButton);
return SubstanceImageCreator.getArrowIcon(
SubstanceSizeUtils
.getSpinnerArrowIconWidth(fontSize),
SubstanceSizeUtils
.getSpinnerArrowIconHeight(fontSize),
SubstanceSizeUtils
.getArrowStrokeWidth(fontSize),
SwingConstants.NORTH, scheme);
}
}, "substance.spinner.nextButton");
this.nextButton.setIcon(icon);
int spinnerButtonSize = SubstanceSizeUtils
.getScrollBarWidth(SubstanceSizeUtils
.getComponentFontSize(spinner));
this.nextButton.setPreferredSize(new Dimension(spinnerButtonSize,
spinnerButtonSize));
this.nextButton.setMinimumSize(new Dimension(5, 5));
this.nextButton.putClientProperty(
SubstanceLookAndFeel.BUTTON_OPEN_SIDE_PROPERTY, EnumSet
.of(Side.BOTTOM));
this.nextButton.putClientProperty(
SubstanceLookAndFeel.BUTTON_SIDE_PROPERTY, EnumSet
.of(Side.BOTTOM));
this.installNextButtonListeners(this.nextButton);
Color spinnerBg = this.spinner.getBackground();
if (!(spinnerBg instanceof UIResource)) {
this.nextButton.setBackground(spinnerBg);
}
return this.nextButton;
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.basic.BasicSpinnerUI#createPreviousButton()
*/
@Override
protected Component createPreviousButton() {
this.prevButton = new SubstanceSpinnerButton(this.spinner,
SwingConstants.SOUTH);
this.prevButton.setFont(this.spinner.getFont());
this.prevButton.setName("Spinner.previousButton");
Icon icon = new TransitionAwareIcon(this.prevButton,
new TransitionAwareIcon.Delegate() {
public Icon getColorSchemeIcon(SubstanceColorScheme scheme) {
int fontSize = SubstanceSizeUtils
.getComponentFontSize(prevButton);
float spinnerArrowIconHeight = SubstanceSizeUtils
.getSpinnerArrowIconHeight(fontSize);
return SubstanceImageCreator.getArrowIcon(
SubstanceSizeUtils
.getSpinnerArrowIconWidth(fontSize),
spinnerArrowIconHeight, SubstanceSizeUtils
.getArrowStrokeWidth(fontSize),
SwingConstants.SOUTH, scheme);
}
}, "substance.spinner.prevButton");
this.prevButton.setIcon(icon);
int spinnerButtonSize = SubstanceSizeUtils
.getScrollBarWidth(SubstanceSizeUtils
.getComponentFontSize(this.prevButton));
this.prevButton.setPreferredSize(new Dimension(spinnerButtonSize,
spinnerButtonSize));
this.prevButton.setMinimumSize(new Dimension(5, 5));
this.prevButton.putClientProperty(
SubstanceLookAndFeel.BUTTON_OPEN_SIDE_PROPERTY, EnumSet
.of(Side.TOP));
this.prevButton
.putClientProperty(SubstanceLookAndFeel.BUTTON_SIDE_PROPERTY,
EnumSet.of(Side.TOP));
this.installPreviousButtonListeners(this.prevButton);
Color spinnerBg = this.spinner.getBackground();
if (!(spinnerBg instanceof UIResource)) {
this.prevButton.setBackground(spinnerBg);
}
return this.prevButton;
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.basic.BasicSpinnerUI#installDefaults()
*/
@Override
protected void installDefaults() {
super.installDefaults();
JComponent editor = this.spinner.getEditor();
if ((editor != null) && (editor instanceof JSpinner.DefaultEditor)) {
JTextField tf = ((JSpinner.DefaultEditor) editor).getTextField();
if (tf != null) {
int fontSize = SubstanceSizeUtils
.getComponentFontSize(this.spinner);
Insets ins = SubstanceSizeUtils
.getSpinnerTextBorderInsets(fontSize);
tf.setBorder(new EmptyBorder(ins.top, ins.left, ins.bottom,
ins.right));
tf.setFont(spinner.getFont());
tf.setOpaque(false);
}
}
if (editor != null) {
editor.setOpaque(false);
}
Border b = this.spinner.getBorder();
if (b == null || b instanceof UIResource) {
this.spinner.setBorder(new SubstanceTextComponentBorder(
SubstanceSizeUtils
.getSpinnerBorderInsets(SubstanceSizeUtils
.getComponentFontSize(this.spinner))));
}
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.basic.BasicSpinnerUI#installListeners()
*/
@Override
protected void installListeners() {
super.installListeners();
this.substancePropertyChangeListener = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
if ("editor".equals(evt.getPropertyName())) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
if (spinner == null)
return;
JComponent editor = spinner.getEditor();
if ((editor != null)
&& (editor instanceof JSpinner.DefaultEditor)) {
JTextField tf = ((JSpinner.DefaultEditor) editor)
.getTextField();
if (tf != null) {
Insets ins = SubstanceSizeUtils
.getSpinnerTextBorderInsets(SubstanceSizeUtils
.getComponentFontSize(spinner));
tf.setBorder(new EmptyBorder(ins.top,
ins.left, ins.bottom, ins.right));
tf.revalidate();
}
}
}
});
}
if ("font".equals(evt.getPropertyName())) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
if (spinner != null) {
spinner.updateUI();
}
}
});
}
if ("background".equals(evt.getPropertyName())) {
JComponent editor = spinner.getEditor();
if ((editor != null)
&& (editor instanceof JSpinner.DefaultEditor)) {
JTextField tf = ((JSpinner.DefaultEditor) editor)
.getTextField();
if (tf != null) {
// Use SubstanceColorResource to distinguish between
// a color set by the application and a color set
// (propagated) by Substance. In the second case we
// can replace that color (even though it's not a
// UIResource).
Color tfBackground = tf.getBackground();
boolean canReplace = SubstanceCoreUtilities
.canReplaceChildBackgroundColor(tfBackground);
// fix for issue 387 - if spinner background
// is null, do nothing
if (spinner.getBackground() == null)
canReplace = false;
if (canReplace) {
tf.setBackground(new SubstanceColorResource(
spinner.getBackground()));
}
}
}
nextButton.setBackground(spinner.getBackground());
prevButton.setBackground(spinner.getBackground());
}
}
};
this.spinner
.addPropertyChangeListener(this.substancePropertyChangeListener);
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.basic.BasicSpinnerUI#uninstallListeners()
*/
@Override
protected void uninstallListeners() {
this.spinner
.removePropertyChangeListener(this.substancePropertyChangeListener);
this.substancePropertyChangeListener = null;
super.uninstallListeners();
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.ComponentUI#paint(java.awt.Graphics,
* javax.swing.JComponent)
*/
@Override
public void paint(Graphics g, JComponent c) {
super.paint(g, c);
Graphics2D graphics = (Graphics2D) g.create();
int width = this.spinner.getWidth();
int height = this.spinner.getHeight();
int componentFontSize = SubstanceSizeUtils
.getComponentFontSize(this.spinner);
int borderDelta = (int) Math.floor(SubstanceSizeUtils
.getBorderStrokeWidth(componentFontSize));
Shape contour = SubstanceOutlineUtilities
.getBaseOutline(
width,
height,
Math.max(
0,
2.0f
* SubstanceSizeUtils
.getClassicButtonCornerRadius(componentFontSize)
- borderDelta), null, borderDelta);
graphics.setColor(SubstanceTextUtilities
.getTextBackgroundFillColor(this.spinner));
graphics.fill(contour);
graphics.dispose();
}
/*
* (non-Javadoc)
*
* @see
* javax.swing.plaf.ComponentUI#getPreferredSize(javax.swing.JComponent)
*/
@Override
public Dimension getPreferredSize(JComponent c) {
Dimension nextD = this.nextButton.getPreferredSize();
Dimension previousD = this.prevButton.getPreferredSize();
Dimension editorD = spinner.getEditor().getPreferredSize();
Dimension size = new Dimension(editorD.width, editorD.height);
size.width += Math.max(nextD.width, previousD.width);
Insets insets = this.spinner.getInsets();
size.width += insets.left + insets.right;
size.height += insets.top + insets.bottom;
return size;
}
/*
* (non-Javadoc)
*
* @see javax.swing.plaf.ComponentUI#update(java.awt.Graphics,
* javax.swing.JComponent)
*/
@Override
public void update(Graphics g, JComponent c) {
SubstanceTextUtilities.paintTextCompBackground(g, c);
this.paint(g, c);
}
@Override
protected LayoutManager createLayout() {
return new SpinnerLayoutManager();
}
/**
* Layout manager for the spinner.
*
* @author Kirill Grouchnikov
*/
protected class SpinnerLayoutManager implements LayoutManager {
public void addLayoutComponent(String name, Component comp) {
}
public void removeLayoutComponent(Component comp) {
}
public Dimension minimumLayoutSize(Container parent) {
return this.preferredLayoutSize(parent);
}
public Dimension preferredLayoutSize(Container parent) {
Dimension nextD = nextButton.getPreferredSize();
Dimension previousD = prevButton.getPreferredSize();
Dimension editorD = spinner.getEditor().getPreferredSize();
/*
* Force the editors height to be a multiple of 2
*/
editorD.height = ((editorD.height + 1) / 2) * 2;
Dimension size = new Dimension(editorD.width, editorD.height);
size.width += Math.max(nextD.width, previousD.width);
Insets insets = parent.getInsets();
size.width += insets.left + insets.right;
size.height += insets.top + insets.bottom;
Insets buttonInsets = SubstanceSizeUtils
.getSpinnerArrowButtonInsets(SubstanceSizeUtils
.getComponentFontSize(spinner));
size.width += (buttonInsets.left + buttonInsets.right);
return size;
}
public void layoutContainer(Container parent) {
int width = parent.getWidth();
int height = parent.getHeight();
Insets insets = parent.getInsets();
Dimension nextD = nextButton.getPreferredSize();
Dimension previousD = prevButton.getPreferredSize();
int buttonsWidth = Math.max(nextD.width, previousD.width);
int editorHeight = height - (insets.top + insets.bottom);
Insets buttonInsets = SubstanceSizeUtils
.getSpinnerArrowButtonInsets(SubstanceSizeUtils
.getComponentFontSize(spinner));
/*
* Deal with the spinner's componentOrientation property.
*/
int editorX, editorWidth, buttonsX;
if (parent.getComponentOrientation().isLeftToRight()) {
editorX = insets.left;
editorWidth = width - insets.left - buttonsWidth;
buttonsX = width - buttonsWidth;// - buttonInsets.right;
} else {
buttonsX = 0;// buttonInsets.left;
editorX = buttonsX + buttonsWidth;
editorWidth = width - editorX - insets.right;
}
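            // The increment (up) button takes the top half of the height, rounded up;
            // the decrement (down) button fills the remainder.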
int nextY = 0;// buttonInsets.top;
int nextHeight = (height / 2) + (height % 2) - nextY;
int previousY = nextHeight;
int previousHeight = height - previousY;// - buttonInsets.bottom;
spinner.getEditor().setBounds(editorX, insets.top, editorWidth,
editorHeight);
nextButton.setBounds(buttonsX, nextY, buttonsWidth, nextHeight);
prevButton.setBounds(buttonsX, previousY, buttonsWidth,
previousHeight);
// System.out.println("next : " + nextButton.getBounds());
// System.out.println("prev : " + prevButton.getBounds());
}
}
}<|fim▁end|> | /*
* Copyright (c) 2005-2010 Substance Kirill Grouchnikov. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without |
<|file_name|>404.js<|end_file_name|><|fim▁begin|>var React = require('react');<|fim▁hole|> render: function () {
return (
<html lang="sv">
<head>
<meta charSet="utf-8" />
<title>{this.props.title}</title>
</head>
<body style={{ width: 300 + 'px', margin: '0 auto'}}>
<h1>{this.props.title}</h1>
<p>Det finns ingen sida med adressen <b>{this.props.url}</b>. Kontrollera adressen igen eller gå till <a href="/">skissaochgissa.se</a>.</p>
</body>
</html>
);
}
});<|fim▁end|> |
module.exports = React.createClass({ |
<|file_name|>install_linux_sysroot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2018 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Various code adapted from:
# https://cs.chromium.org/chromium/src/build/linux/sysroot_scripts/install-sysroot.py
import os
import shutil
import subprocess
import sys
import urllib.request
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Sysroot revision from:
# https://cs.chromium.org/chromium/src/build/linux/sysroot_scripts/sysroots.json
SERVER = 'https://commondatastorage.googleapis.com'
PATH = 'chrome-linux-sysroot/toolchain'
REVISION = '43a87bbebccad99325fdcf34166295b121ee15c7'
FILENAME = 'debian_sid_amd64_sysroot.tar.xz'
def main():
url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME)
sysroot = os.path.join(SCRIPT_DIR, os.pardir, 'third_party', 'linux',
'sysroot')
stamp = os.path.join(sysroot, '.stamp')
if os.path.exists(stamp):
with open(stamp) as s:
if s.read() == url:
return
print('Installing Debian root image from %s' % url)<|fim▁hole|> tarball = os.path.join(sysroot, FILENAME)
print('Downloading %s' % url)
for _ in range(3):
response = urllib.request.urlopen(url)
with open(tarball, 'wb') as f:
f.write(response.read())
break
else:
raise Exception('Failed to download %s' % url)
subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
os.remove(tarball)
with open(stamp, 'w') as s:
s.write(url)
if __name__ == '__main__':
main()
sys.exit(0)<|fim▁end|> |
if os.path.isdir(sysroot):
shutil.rmtree(sysroot)
os.mkdir(sysroot) |
<|file_name|>epubBundle.js<|end_file_name|><|fim▁begin|>/**
* epubBundle
*<|fim▁hole|> *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var util = require('util');
var epubber = require('../epubber');
module.exports = function(grunt) {
grunt.registerTask('epubBundle', function() {
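        // Pull the rendered-output directory and the target EPUB filename from
        // the Grunt config (epubtools.renderTo and epubtools.bookYaml.epub).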
var renderTo = grunt.config.get('epubtools.renderTo');
var epubFileName = grunt.config.get('epubtools.bookYaml.epub');
var done = this.async();
epubber.bundleEPUB(renderTo, epubFileName)
.then(() => { done(); })
.catch(err => { done(err); });
});
};<|fim▁end|> | * Copyright 2015 David Herron
*
* This file is part of epubtools (http://akashacms.com/). |
<|file_name|>test.replace.js<|end_file_name|><|fim▁begin|>const should = require('should');
var Regex = require('../');
var conStr = 'dHello World';
describe( 'replace test', function () {<|fim▁hole|> regex.replace(conStr,'h').should.equal('dhello World');
} );
it( 'middle area', function () {
var regex = new Regex('o\\sW');
regex.replace(conStr,'T').should.equal('dHellTorld');
} );
it( 'right area', function () {
var regex = new Regex('d$');
regex.replace(conStr,'P').should.equal('dHello WorlP');
} );
it( 'more match', function () {
var regex = new Regex('o','ig');
regex.replace(conStr,'').should.equal('dHell Wrld');
} );
} );<|fim▁end|> | it( 'left area', function () {
var regex = new Regex('H'); |
<|file_name|>setup_progress.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
class SetupProgress(Document):
pass
def get_setup_progress():
if not getattr(frappe.local, "setup_progress", None):
frappe.local.setup_progress = frappe.get_doc("Setup Progress", "Setup Progress")
return frappe.local.setup_progress
def get_action_completed_state(action_name):
for d in get_setup_progress().actions:<|fim▁hole|> return d.is_completed
def update_action_completed_state(action_name):
action_table_doc = [d for d in get_setup_progress().actions
if d.action_name == action_name][0]
update_action(action_table_doc)
def update_action(doc):
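	# Mark the action row complete when either the doctype has at least
	# min_doc_count documents, or the configured field is set on the
	# target document.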
doctype = doc.action_doctype
docname = doc.action_document
field = doc.action_field
if not doc.is_completed:
if doc.min_doc_count:
if frappe.db.count(doctype) >= doc.min_doc_count:
doc.is_completed = 1
doc.save()
if docname and field:
d = frappe.get_doc(doctype, docname)
if d.get(field):
doc.is_completed = 1
doc.save()
def update_domain_actions(domain):
for d in get_setup_progress().actions:
domains = json.loads(d.domains)
if domains == [] or domain in domains:
update_action(d)
def get_domain_actions_state(domain):
state = {}
for d in get_setup_progress().actions:
domains = json.loads(d.domains)
if domains == [] or domain in domains:
state[d.action_name] = d.is_completed
return state
@frappe.whitelist()
def set_action_completed_state(action_name):
action_table_doc = [d for d in get_setup_progress().actions
if d.action_name == action_name][0]
action_table_doc.is_completed = 1
action_table_doc.save()<|fim▁end|> | if d.action_name == action_name: |
<|file_name|>keen-tracker.js<|end_file_name|><|fim▁begin|>/* eslint-disable no-unused-vars */
// This helps ember-browserify find npm modules in ember-cli addons
import md5 from 'npm:js-md5';
import config from 'ember-get-config';<|fim▁hole|>export {default} from 'ember-osf/mixins/keen-tracker';<|fim▁end|> | import _get from 'npm:lodash/get';
import Cookie from 'npm:js-cookie';
import keenTracking from 'npm:keen-tracking';
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate bn;
extern crate quickcheck;
<|fim▁hole|><|fim▁end|> | mod ops; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>use std::error::Error as StdError;
use std::fmt;
use std::io;
use std::time::Duration;
pub use BaudRate::*;
pub use CharSize::*;
pub use Parity::*;
pub use StopBits::*;
pub use FlowControl::*;
/// A module that exports traits that are useful to have in scope.
///
/// It is intended to be glob imported:
///
/// ```no_run
/// use serial_core::prelude::*;
/// ```
pub mod prelude {
pub use {SerialPort, SerialPortSettings};
}
/// A type for results generated by interacting with serial ports.
///
/// The `Err` type is hard-wired to [`serial_core::Error`](struct.Error.html).
pub type Result<T> = std::result::Result<T, Error>;
/// Categories of errors that can occur when interacting with serial ports.
///
/// This list is intended to grow over time and it is not recommended to exhaustively match against
/// it.
#[derive(Debug,Clone,Copy,PartialEq,Eq)]
pub enum ErrorKind {
/// The device is not available.
///
/// This could indicate that the device is in use by another process or was disconnected while
/// performing I/O.
NoDevice,
/// A parameter was incorrect.
InvalidInput,
/// An I/O error occurred.
///
/// The type of I/O error is determined by the inner `io::ErrorKind`.
Io(io::ErrorKind),
}
/// An error type for serial port operations.
#[derive(Debug)]
pub struct Error {
kind: ErrorKind,
description: String,
}
impl Error {
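    /// Creates a new error of the given kind with a descriptive message.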
pub fn new<T: Into<String>>(kind: ErrorKind, description: T) -> Self {
Error {
kind: kind,
description: description.into(),
}
}
/// Returns the corresponding `ErrorKind` for this error.
pub fn kind(&self) -> ErrorKind {
self.kind
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> std::result::Result<(), fmt::Error> {
fmt.write_str(&self.description)
}
}
impl StdError for Error {
fn description(&self) -> &str {
&self.description
}
}
impl From<io::Error> for Error {
fn from(io_error: io::Error) -> Error {
Error::new(ErrorKind::Io(io_error.kind()), format!("{}", io_error))
}
}
impl From<Error> for io::Error {
fn from(error: Error) -> io::Error {
let kind = match error.kind {
ErrorKind::NoDevice => io::ErrorKind::NotFound,
ErrorKind::InvalidInput => io::ErrorKind::InvalidInput,
ErrorKind::Io(kind) => kind,
};
io::Error::new(kind, error.description)
}
}
/// Serial port baud rates.
///
/// ## Portability
///
/// The `BaudRate` variants with numeric suffixes, e.g., `Baud9600`, indicate standard baud rates
/// that are widely-supported on many systems. While non-standard baud rates can be set with
/// `BaudOther`, their behavior is system-dependent. Some systems may not support arbitrary baud
/// rates. Using the standard baud rates is more likely to result in portable applications.
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub enum BaudRate {
/// 110 baud.
Baud110,
/// 300 baud.
Baud300,
/// 600 baud.
Baud600,
/// 1200 baud.
Baud1200,
/// 2400 baud.
Baud2400,
/// 4800 baud.
Baud4800,
/// 9600 baud.
Baud9600,
/// 19,200 baud.
Baud19200,
/// 38,400 baud.
Baud38400,
/// 57,600 baud.
Baud57600,
/// 115,200 baud.
Baud115200,
/// Non-standard baud rates.
///
/// `BaudOther` can be used to set non-standard baud rates by setting its member to be the
/// desired baud rate.
///
/// ```no_run
/// serial_core::BaudOther(4_000_000); // 4,000,000 baud
/// ```
///
/// Non-standard baud rates may not be supported on all systems.
BaudOther(usize),
}
impl BaudRate {
/// Creates a `BaudRate` for a particular speed.
///
/// This function can be used to select a `BaudRate` variant from an integer containing the
/// desired baud rate.
///
/// ## Example
///
/// ```
/// # use serial_core::BaudRate;
/// assert_eq!(BaudRate::Baud9600, BaudRate::from_speed(9600));
/// assert_eq!(BaudRate::Baud115200, BaudRate::from_speed(115200));
/// assert_eq!(BaudRate::BaudOther(4000000), BaudRate::from_speed(4000000));
/// ```
pub fn from_speed(speed: usize) -> BaudRate {
match speed {
110 => BaudRate::Baud110,
300 => BaudRate::Baud300,
600 => BaudRate::Baud600,
1200 => BaudRate::Baud1200,
2400 => BaudRate::Baud2400,
4800 => BaudRate::Baud4800,
9600 => BaudRate::Baud9600,
19200 => BaudRate::Baud19200,
38400 => BaudRate::Baud38400,
57600 => BaudRate::Baud57600,
115200 => BaudRate::Baud115200,
n => BaudRate::BaudOther(n),
}
}
/// Returns the baud rate as an integer.
///
/// ## Example
///
/// ```
/// # use serial_core::BaudRate;
/// assert_eq!(9600, BaudRate::Baud9600.speed());
/// assert_eq!(115200, BaudRate::Baud115200.speed());
/// assert_eq!(4000000, BaudRate::BaudOther(4000000).speed());
/// ```
pub fn speed(&self) -> usize {
match *self {
BaudRate::Baud110 => 110,
BaudRate::Baud300 => 300,
BaudRate::Baud600 => 600,
BaudRate::Baud1200 => 1200,
BaudRate::Baud2400 => 2400,
BaudRate::Baud4800 => 4800,
BaudRate::Baud9600 => 9600,
BaudRate::Baud19200 => 19200,
BaudRate::Baud38400 => 38400,
BaudRate::Baud57600 => 57600,
BaudRate::Baud115200 => 115200,
BaudRate::BaudOther(n) => n,
}
}
}
/// Number of bits per character.
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub enum CharSize {
/// 5 bits per character.
Bits5,
/// 6 bits per character.
Bits6,
/// 7 bits per character.
Bits7,
/// 8 bits per character.
Bits8,
}
/// Parity checking modes.
///
/// When parity checking is enabled (`ParityOdd` or `ParityEven`) an extra bit is transmitted with
/// each character. The value of the parity bit is arranged so that the number of 1 bits in the
/// character (including the parity bit) is an even number (`ParityEven`) or an odd number
/// (`ParityOdd`).
///
/// Parity checking is disabled by setting `ParityNone`, in which case parity bits are not
/// transmitted.
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub enum Parity {
/// No parity bit.
ParityNone,
/// Parity bit sets odd number of 1 bits.
ParityOdd,
/// Parity bit sets even number of 1 bits.
ParityEven,
}
/// Number of stop bits.
///
/// Stop bits are transmitted after every character.
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub enum StopBits {
/// One stop bit.
Stop1,
/// Two stop bits.
Stop2,
}
/// Flow control modes.
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub enum FlowControl {
/// No flow control.
FlowNone,
/// Flow control using XON/XOFF bytes.
FlowSoftware,
/// Flow control using RTS/CTS signals.
FlowHardware,
}
/// A trait for implementing serial devices.
///
/// This trait is meant to be used to implement new serial port devices. To use a serial port
/// device, the [`SerialPort`](trait.SerialPort.html) trait should be used instead. Any type that
/// implements the `SerialDevice` trait will automatically implement the `SerialPort` trait as
/// well.
///
/// To implement a new serial port device, it's necessary to define a type that can manipulate the
/// serial port device's settings (baud rate, parity mode, etc). This type is defined by the
/// `Settings` associated type. The current settings should be determined by reading from the
/// hardware or operating system for every call to `read_settings()`. The settings can then be
/// manipulated in memory before being committed to the device with `write_settings()`.
///
/// Types that implement `SerialDevice` must also implement `std::io::Read` and `std::io::Write`.
/// The `read()` and `write()` operations of these traits should honor the timeout that has been
/// set with the most recent successful call to `set_timeout()`. This timeout value should also be
/// accessible by calling the `timeout()` method.
///
/// A serial port device should also provide access to some basic control signals: RTS, DTR, CTS,
/// DSR, RI, and CD. The values for the control signals are represented as boolean values, with
/// `true` indicating that the control signal is active.
///
/// Lastly, types that implement `SerialDevice` should release any acquired resources when dropped.
pub trait SerialDevice: io::Read + io::Write {
/// A type that implements the settings for the serial port device.
///
/// The `Settings` type is used to retrieve and modify the serial port's settings. This type
/// should own any native structures used to manipulate the device's settings, but it should
/// not cause any changes in the underlying hardware until written to the device with
/// `write_settings()`.
type Settings: SerialPortSettings;
/// Returns the device's current settings.
///
/// This function attempts to read the current settings from the hardware. The hardware's
/// current settings may not match the settings that were most recently written to the hardware
/// with `write_settings()`.
///
/// ## Errors
///
/// This function returns an error if the settings could not be read from the underlying
/// hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_settings(&self) -> ::Result<Self::Settings>;
/// Applies new settings to the serial device.
///
/// This function attempts to apply all settings to the serial device. Some settings may not be
/// supported by the underlying hardware, in which case the result is dependent on the
/// implementation. A successful return value does not guarantee that all settings were
/// appliied successfully. To check which settings were applied by a successful write,
/// applications should use the `read_settings()` method to obtain the latest configuration
/// state from the device.
///
/// ## Errors
///
/// This function returns an error if the settings could not be applied to the underlying
/// hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `InvalidInput` if a setting is not compatible with the underlying hardware.
/// * `Io` for any other type of I/O error.
fn write_settings(&mut self, settings: &Self::Settings) -> ::Result<()>;
/// Returns the current timeout.
fn timeout(&self) -> Duration;
/// Sets the timeout for future I/O operations.
fn set_timeout(&mut self, timeout: Duration) -> ::Result<()>;
/// Sets the state of the RTS (Request To Send) control signal.
///
/// Setting a value of `true` asserts the RTS control signal. `false` clears the signal.
///
/// ## Errors
///
/// This function returns an error if the RTS control signal could not be set to the desired
/// state on the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn set_rts(&mut self, level: bool) -> ::Result<()>;
/// Sets the state of the DTR (Data Terminal Ready) control signal.
///
/// Setting a value of `true` asserts the DTR control signal. `false` clears the signal.
///
/// ## Errors
///
/// This function returns an error if the DTR control signal could not be set to the desired
/// state on the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn set_dtr(&mut self, level: bool) -> ::Result<()>;
/// Reads the state of the CTS (Clear To Send) control signal.
///
/// This function returns a boolean that indicates whether the CTS control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the CTS control signal could not be read
/// from the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_cts(&mut self) -> ::Result<bool>;
/// Reads the state of the DSR (Data Set Ready) control signal.
///
/// This function returns a boolean that indicates whether the DSR control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the DSR control signal could not be read
/// from the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_dsr(&mut self) -> ::Result<bool>;
/// Reads the state of the RI (Ring Indicator) control signal.
///
/// This function returns a boolean that indicates whether the RI control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the RI control signal could not be read from
/// the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_ri(&mut self) -> ::Result<bool>;
/// Reads the state of the CD (Carrier Detect) control signal.
///
/// This function returns a boolean that indicates whether the CD control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the CD control signal could not be read from
/// the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_cd(&mut self) -> ::Result<bool>;
}
/// A trait for serial port devices.
///
/// Serial port input and output is implemented through the `std::io::Read` and `std::io::Write`
/// traits. A timeout can be set with the `set_timeout()` method and applies to all subsequent I/O
/// operations.
///
/// The `SerialPort` trait exposes several common control signals. Each control signal is
/// represented as a boolean, where `true` indicates that the signal is asserted.
///
/// The serial port will be closed when the value is dropped.
pub trait SerialPort: io::Read + io::Write {
/// Returns the current timeout.
fn timeout(&self) -> Duration;
/// Sets the timeout for future I/O operations.
fn set_timeout(&mut self, timeout: Duration) -> ::Result<()>;
/// Configures a serial port device.
///
/// ## Errors
///
/// This function returns an error if the settings could not be applied to the underlying
/// hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `InvalidInput` if a setting is not compatible with the underlying hardware.
/// * `Io` for any other type of I/O error.
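    ///
    /// ## Example
    ///
    /// A minimal sketch that applies a fixed configuration to an open port; the
    /// chosen values are illustrative:
    ///
    /// ```no_run
    /// use serial_core::prelude::*;
    ///
    /// fn setup_port<T: SerialPort>(port: &mut T) -> serial_core::Result<()> {
    ///     port.configure(&serial_core::PortSettings {
    ///         baud_rate: serial_core::BaudRate::Baud9600,
    ///         char_size: serial_core::CharSize::Bits8,
    ///         parity: serial_core::Parity::ParityNone,
    ///         stop_bits: serial_core::StopBits::Stop1,
    ///         flow_control: serial_core::FlowControl::FlowNone,
    ///     })
    /// }
    /// ```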
fn configure(&mut self, settings: &PortSettings) -> ::Result<()>;
/// Alter the serial port's configuration.
///
/// This method expects a function, which takes a mutable reference to the serial port's
/// configuration settings. The serial port's current settings, read from the device, are
/// yielded to the provided function. After the function returns, any changes made to the
/// settings object will be written back to the device.
///
/// ## Errors
///
/// This function returns an error if the `setup` function returns an error or if there was an
/// error while reading or writing the device's configuration settings:
///
/// * `NoDevice` if the device was disconnected.
/// * `InvalidInput` if a setting is not compatible with the underlying hardware.
/// * `Io` for any other type of I/O error.
/// * Any error returned by the `setup` function.
///
/// ## Example
///
/// The following is a function that toggles a serial port's settings between one and two stop
/// bits:
///
/// ```no_run
/// use std::io;
/// use serial_core::prelude::*;
///
/// fn toggle_stop_bits<T: SerialPort>(port: &mut T) -> serial_core::Result<()> {
/// port.reconfigure(&|settings| {
/// let stop_bits = match settings.stop_bits() {
/// Some(serial_core::Stop1) => serial_core::Stop2,
/// Some(serial_core::Stop2) | None => serial_core::Stop1,
/// };
///
/// settings.set_stop_bits(stop_bits);
/// Ok(())
/// })
/// }
/// ```
fn reconfigure(&mut self, setup: &Fn(&mut SerialPortSettings) -> ::Result<()>) -> ::Result<()>;
/// Sets the state of the RTS (Request To Send) control signal.
///
/// Setting a value of `true` asserts the RTS control signal. `false` clears the signal.
///
/// ## Errors
///
/// This function returns an error if the RTS control signal could not be set to the desired
/// state on the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn set_rts(&mut self, level: bool) -> ::Result<()>;
/// Sets the state of the DTR (Data Terminal Ready) control signal.
///
/// Setting a value of `true` asserts the DTR control signal. `false` clears the signal.
///
/// ## Errors
///
/// This function returns an error if the DTR control signal could not be set to the desired
/// state on the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn set_dtr(&mut self, level: bool) -> ::Result<()>;
/// Reads the state of the CTS (Clear To Send) control signal.
///
/// This function returns a boolean that indicates whether the CTS control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the CTS control signal could not be read
/// from the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_cts(&mut self) -> ::Result<bool>;
/// Reads the state of the DSR (Data Set Ready) control signal.
///
/// This function returns a boolean that indicates whether the DSR control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the DSR control signal could not be read
/// from the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_dsr(&mut self) -> ::Result<bool>;
/// Reads the state of the RI (Ring Indicator) control signal.
///
/// This function returns a boolean that indicates whether the RI control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the RI control signal could not be read from
/// the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_ri(&mut self) -> ::Result<bool>;
/// Reads the state of the CD (Carrier Detect) control signal.
///
/// This function returns a boolean that indicates whether the CD control signal is asserted.
///
/// ## Errors
///
/// This function returns an error if the state of the CD control signal could not be read from
/// the underlying hardware:
///
/// * `NoDevice` if the device was disconnected.
/// * `Io` for any other type of I/O error.
fn read_cd(&mut self) -> ::Result<bool>;
}
impl<T> SerialPort for T
where T: SerialDevice
{
fn timeout(&self) -> Duration {
T::timeout(self)
}
fn set_timeout(&mut self, timeout: Duration) -> ::Result<()> {
T::set_timeout(self, timeout)
}
fn configure(&mut self, settings: &PortSettings) -> ::Result<()> {
let mut device_settings = try!(T::read_settings(self));
try!(device_settings.set_baud_rate(settings.baud_rate));
device_settings.set_char_size(settings.char_size);
device_settings.set_parity(settings.parity);
device_settings.set_stop_bits(settings.stop_bits);
device_settings.set_flow_control(settings.flow_control);
T::write_settings(self, &device_settings)
}
fn reconfigure(&mut self, setup: &Fn(&mut SerialPortSettings) -> ::Result<()>) -> ::Result<()> {
let mut device_settings = try!(T::read_settings(self));
try!(setup(&mut device_settings));
T::write_settings(self, &device_settings)
}
fn set_rts(&mut self, level: bool) -> ::Result<()> {
T::set_rts(self, level)
}
fn set_dtr(&mut self, level: bool) -> ::Result<()> {
T::set_dtr(self, level)
}
fn read_cts(&mut self) -> ::Result<bool> {
T::read_cts(self)
}
fn read_dsr(&mut self) -> ::Result<bool> {
T::read_dsr(self)
}
fn read_ri(&mut self) -> ::Result<bool> {
T::read_ri(self)
}
fn read_cd(&mut self) -> ::Result<bool> {
T::read_cd(self)
}
}
/// A trait for objects that implement serial port configurations.
pub trait SerialPortSettings {
/// Returns the current baud rate.
///
/// This function returns `None` if the baud rate could not be determined. This may occur if
/// the hardware is in an uninitialized state. Setting a baud rate with `set_baud_rate()`
/// should initialize the baud rate to a supported value.
fn baud_rate(&self) -> Option<BaudRate>;
/// Returns the character size.
///
/// This function returns `None` if the character size could not be determined. This may occur
/// if the hardware is in an uninitialized state or is using a non-standard character size.
    /// Setting a character size with `set_char_size()` should initialize the character size to a
/// supported value.
fn char_size(&self) -> Option<CharSize>;
/// Returns the parity-checking mode.
///
/// This function returns `None` if the parity mode could not be determined. This may occur if
/// the hardware is in an uninitialized state or is using a non-standard parity mode. Setting
/// a parity mode with `set_parity()` should initialize the parity mode to a supported value.
fn parity(&self) -> Option<Parity>;
/// Returns the number of stop bits.
///
/// This function returns `None` if the number of stop bits could not be determined. This may
/// occur if the hardware is in an uninitialized state or is using an unsupported stop bit
    /// configuration. Setting the number of stop bits with `set_stop_bits()` should initialize the
/// stop bits to a supported value.
fn stop_bits(&self) -> Option<StopBits>;
/// Returns the flow control mode.
///
/// This function returns `None` if the flow control mode could not be determined. This may
/// occur if the hardware is in an uninitialized state or is using an unsupported flow control
/// mode. Setting a flow control mode with `set_flow_control()` should initialize the flow
/// control mode to a supported value.
fn flow_control(&self) -> Option<FlowControl>;
/// Sets the baud rate.
///
/// ## Errors
///
/// If the implementation does not support the requested baud rate, this function may return an
/// `InvalidInput` error. Even if the baud rate is accepted by `set_baud_rate()`, it may not be
/// supported by the underlying hardware.
fn set_baud_rate(&mut self, baud_rate: BaudRate) -> ::Result<()>;
/// Sets the character size.
fn set_char_size(&mut self, char_size: CharSize);
/// Sets the parity-checking mode.
fn set_parity(&mut self, parity: Parity);
/// Sets the number of stop bits.
fn set_stop_bits(&mut self, stop_bits: StopBits);
/// Sets the flow control mode.
fn set_flow_control(&mut self, flow_control: FlowControl);
}
/// A device-independent implementation of serial port settings.
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub struct PortSettings {
/// Baud rate.
pub baud_rate: BaudRate,
/// Character size.
pub char_size: CharSize,
/// Parity checking mode.
pub parity: Parity,
/// Number of stop bits.
pub stop_bits: StopBits,
/// Flow control mode.
pub flow_control: FlowControl,
}
impl SerialPortSettings for PortSettings {
fn baud_rate(&self) -> Option<BaudRate> {
Some(self.baud_rate)
}
fn char_size(&self) -> Option<CharSize> {
Some(self.char_size)
}
fn parity(&self) -> Option<Parity> {
Some(self.parity)
}
fn stop_bits(&self) -> Option<StopBits> {
Some(self.stop_bits)
}
fn flow_control(&self) -> Option<FlowControl> {
Some(self.flow_control)
}
fn set_baud_rate(&mut self, baud_rate: BaudRate) -> ::Result<()> {
self.baud_rate = baud_rate;
Ok(())
}
fn set_char_size(&mut self, char_size: CharSize) {
self.char_size = char_size;
}
fn set_parity(&mut self, parity: Parity) {
self.parity = parity;
}
fn set_stop_bits(&mut self, stop_bits: StopBits) {
self.stop_bits = stop_bits;
}
fn set_flow_control(&mut self, flow_control: FlowControl) {
self.flow_control = flow_control;
}
}
#[cfg(test)]
mod tests {
use super::*;
fn default_port_settings() -> PortSettings {
PortSettings {
baud_rate: BaudRate::Baud9600,
char_size: CharSize::Bits8,
parity: Parity::ParityNone,
stop_bits: StopBits::Stop1,
flow_control: FlowControl::FlowNone,
}
}
#[test]
fn port_settings_manipulates_baud_rate() {
let mut settings: PortSettings = default_port_settings();
settings.set_baud_rate(Baud115200).unwrap();
assert_eq!(settings.baud_rate(), Some(Baud115200));
}
#[test]
fn port_settings_manipulates_char_size() {
let mut settings: PortSettings = default_port_settings();
        settings.set_char_size(Bits7);
        assert_eq!(settings.char_size(), Some(Bits7));
}
#[test]
fn port_settings_manipulates_parity() {
let mut settings: PortSettings = default_port_settings();
settings.set_parity(ParityEven);
assert_eq!(settings.parity(), Some(ParityEven));
}
#[test]
fn port_settings_manipulates_stop_bits() {
let mut settings: PortSettings = default_port_settings();
settings.set_stop_bits(Stop2);
assert_eq!(settings.stop_bits(), Some(Stop2));
}
#[test]
fn port_settings_manipulates_flow_control() {
let mut settings: PortSettings = default_port_settings();
settings.set_flow_control(FlowSoftware);
assert_eq!(settings.flow_control(), Some(FlowSoftware));
}
}
<|file_name|>util.py<|end_file_name|>
# -*- coding: utf-8 -*-
from reportlab.lib.colors import Color, CMYKColor, getAllNamedColors, toColor, \
HexColor
from reportlab.lib.enums import TA_LEFT, TA_CENTER, TA_RIGHT, TA_JUSTIFY
from reportlab.lib.units import inch, cm
import base64
import httplib
import logging
import mimetypes
import os.path
import re
import reportlab
import shutil
import string
import sys
import tempfile
import types
import urllib
import urllib2
import urlparse
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
rgb_re = re.compile("^.*?rgb[(]([0-9]+).*?([0-9]+).*?([0-9]+)[)].*?[ ]*$")
if not(reportlab.Version[0] == "2" and reportlab.Version[2] >= "1"):
raise ImportError("Reportlab Version 2.1+ is needed!")
REPORTLAB22 = (reportlab.Version[0] == "2" and reportlab.Version[2] >= "2")
# print "***", reportlab.Version, REPORTLAB22, reportlab.__file__
log = logging.getLogger("xhtml2pdf")
try:
import cStringIO as StringIO
except:
import StringIO
try:
import pyPdf
except:
pyPdf = None
try:
from reportlab.graphics import renderPM
except:
renderPM = None
try:
from reportlab.graphics import renderSVG
except:
renderSVG = None
#===============================================================================
# Memoize decorator
#===============================================================================
class memoized(object):
"""
A kwargs-aware memoizer, better than the one in python :)
Don't pass in too large kwargs, since this turns them into a tuple of tuples
Also, avoid mutable types (as usual for memoizers)
    What this does is to create a dictionary of {(*parameters): return value},
and uses it as a cache for subsequent calls to the same method.
It is especially useful for functions that don't rely on external variables
and that are called often. It's a perfect match for our getSize etc...
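    Example (a doctest-style sketch; the decorated function is illustrative):

    >>> @memoized
    ... def double(x):
    ...     return 2 * x
    >>> double(21)
    42
    >>> double(21)   # second call is served from the cache
    42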
"""
def __init__(self, func):
self.cache = {}
self.func = func
self.__doc__ = self.func.__doc__ # To avoid great confusion
self.__name__ = self.func.__name__ # This also avoids great confusion
def __call__(self, *args, **kwargs):
# Make sure the following line is not actually slower than what you're
# trying to memoize
args_plus = tuple(kwargs.items())
key = (args, args_plus)
if key not in self.cache:
res = self.func(*args, **kwargs)
self.cache[key] = res
return self.cache[key]
def ErrorMsg():
"""
Helper to get a nice traceback as string
"""
import traceback, sys
type = value = tb = limit = None
type, value, tb = sys.exc_info()
list = traceback.format_tb(tb, limit) + traceback.format_exception_only(type, value)
return "Traceback (innermost last):\n" + "%-20s %s" % (
string.join(list[: - 1], ""),
list[ - 1])
def toList(value):
if type(value) not in (types.ListType, types.TupleType):
return [value]
return list(value)
#def _toColor(arg, default=None):
# '''try to map an arbitrary arg to a color instance'''
# if isinstance(arg, Color):
# return arg
# tArg = type(arg)
# if tArg in (types.ListType, types.TupleType):
# assert 3 <= len(arg) <= 4, 'Can only convert 3 and 4 sequences to color'
# assert 0 <= min(arg) and max(arg) <= 1
# return len(arg) == 3 and Color(arg[0], arg[1], arg[2]) or CMYKColor(arg[0], arg[1], arg[2], arg[3])
# elif tArg == types.StringType:
# C = getAllNamedColors()
# s = arg.lower()
# if C.has_key(s): return C[s]
# try:
# return toColor(eval(arg))
# except:
# pass
# try:
# return HexColor(arg)
# except:
# if default is None:
# raise ValueError('Invalid color value %r' % arg)
# return default
@memoized
def getColor(value, default=None):
"""
Convert to color value.
This returns a Color object instance from a text bit.
"""
if isinstance(value, Color):
return value
value = str(value).strip().lower()
if value == "transparent" or value == "none":
return default
if value in COLOR_BY_NAME:
return COLOR_BY_NAME[value]
if value.startswith("#") and len(value) == 4:
value = "#" + value[1] + value[1] + value[2] + value[2] + value[3] + value[3]
elif rgb_re.search(value):
# e.g., value = "<css function: rgb(153, 51, 153)>", go figure:
r, g, b = [int(x) for x in rgb_re.search(value).groups()]
value = "#%02x%02x%02x" % (r, g, b)
else:
# Shrug
pass
return toColor(value, default) # Calling the reportlab function
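# Illustrative inputs accepted by getColor() above (a sketch, not an exhaustive list):
#
#     getColor("red")              # named-color table lookup
#     getColor("#f00")             # short hex form, expanded to "#ff0000"
#     getColor("rgb(255, 0, 0)")   # CSS rgb() form
#     getColor("transparent")      # returns the supplied default (None here)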
def getBorderStyle(value, default=None):
# log.debug(value)
if value and (str(value).lower() not in ("none", "hidden")):
return value
return default
mm = cm / 10.0
dpi96 = (1.0 / 96.0 * inch)
_absoluteSizeTable = {
"1": 50.0 / 100.0,
"xx-small": 50.0 / 100.0,
"x-small": 50.0 / 100.0,
"2": 75.0 / 100.0,
"small": 75.0 / 100.0,
"3": 100.0 / 100.0,
"medium": 100.0 / 100.0,
"4": 125.0 / 100.0,
"large": 125.0 / 100.0,
"5": 150.0 / 100.0,
"x-large": 150.0 / 100.0,
"6": 175.0 / 100.0,
"xx-large": 175.0 / 100.0,
"7": 200.0 / 100.0,
"xxx-large": 200.0 / 100.0,
#"xx-small" : 3./5.,
#"x-small": 3./4.,
#"small": 8./9.,
#"medium": 1./1.,
#"large": 6./5.,
#"x-large": 3./2.,
#"xx-large": 2./1.,
#"xxx-large": 3./1.,
}
_relativeSizeTable = {
"larger": 1.25,
"smaller": 0.75,
"+4": 200.0 / 100.0,
"+3": 175.0 / 100.0,
"+2": 150.0 / 100.0,
"+1": 125.0 / 100.0,
"-1": 75.0 / 100.0,
"-2": 50.0 / 100.0,
"-3": 25.0 / 100.0,
}
MIN_FONT_SIZE = 1.0
@memoized
def getSize(value, relative=0, base=None, default=0.0):
"""
Converts strings to standard sizes.
That is the function taking a string of CSS size ('12pt', '1cm' and so on)
and converts it into a float in a standard unit (in our case, points).
>>> getSize('12pt')
12.0
>>> getSize('1cm')
28.346456692913385
"""
try:
original = value
if value is None:
return relative
elif type(value) is types.FloatType:
return value
elif isinstance(value, int):
return float(value)
elif type(value) in (types.TupleType, types.ListType):
value = "".join(value)
value = str(value).strip().lower().replace(",", ".")
if value[-2:] == 'cm':
return float(value[:-2].strip()) * cm
elif value[-2:] == 'mm':
return (float(value[:-2].strip()) * mm) # 1mm = 0.1cm
elif value[-2:] == 'in':
return float(value[:-2].strip()) * inch # 1pt == 1/72inch
        elif value[-4:] == 'inch':
return float(value[:-4].strip()) * inch # 1pt == 1/72inch
elif value[-2:] == 'pt':
return float(value[:-2].strip())
elif value[-2:] == 'pc':
return float(value[:-2].strip()) * 12.0 # 1pc == 12pt
elif value[-2:] == 'px':
return float(value[:-2].strip()) * dpi96 # XXX W3C says, use 96pdi http://www.w3.org/TR/CSS21/syndata.html#length-units
elif value[-1:] == 'i': # 1pt == 1/72inch
return float(value[:-1].strip()) * inch
elif value in ("none", "0", "auto"):
return 0.0
elif relative:
if value[-2:] == 'em': # XXX
return (float(value[:-2].strip()) * relative) # 1em = 1 * fontSize
elif value[-2:] == 'ex': # XXX
return (float(value[:-2].strip()) * (relative / 2.0)) # 1ex = 1/2 fontSize
elif value[-1:] == '%':
# print "%", value, relative, (relative * float(value[:-1].strip())) / 100.0
return (relative * float(value[:-1].strip())) / 100.0 # 1% = (fontSize * 1) / 100
elif value in ("normal", "inherit"):
return relative
elif _relativeSizeTable.has_key(value):
if base:
return max(MIN_FONT_SIZE, base * _relativeSizeTable[value])
return max(MIN_FONT_SIZE, relative * _relativeSizeTable[value])
elif _absoluteSizeTable.has_key(value):
if base:
return max(MIN_FONT_SIZE, base * _absoluteSizeTable[value])
return max(MIN_FONT_SIZE, relative * _absoluteSizeTable[value])
try:
value = float(value)
except:
log.warn("getSize: Not a float %r", value)
return default #value = 0
return max(0, value)
except Exception:
log.warn("getSize %r %r", original, relative, exc_info=1)
return default
@memoized
def getCoords(x, y, w, h, pagesize):
"""
As a stupid programmer I like to use the upper left
corner of the document as the 0,0 coords therefore
we need to do some fancy calculations
"""
#~ print pagesize
ax, ay = pagesize
if x < 0:
x = ax + x
if y < 0:
y = ay + y
if w != None and h != None:
if w <= 0:
w = (ax - x + w)
if h <= 0:
h = (ay - y + h)
return x, (ay - y - h), w, h
return x, (ay - y)
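# Illustrative mapping (A4 portrait is 595 x 842 pt):
#     getCoords(0, 0, 100, 50, (595, 842)) == (0, 792, 100, 50)
# i.e. a top-left-origin box translated into ReportLab's bottom-left system.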
@memoized
def getBox(box, pagesize):
"""
Parse sizes by corners in the form:
<X-Left> <Y-Upper> <Width> <Height>
    The last two values, when negative, are interpreted as offsets from
    the right and lower border.
"""
box = str(box).split()
if len(box) != 4:
raise Exception, "box not defined right way"
x, y, w, h = [getSize(pos) for pos in box]
return getCoords(x, y, w, h, pagesize)
def getFrameDimensions(data, page_width, page_height):
"""Calculate dimensions of a frame
    Returns left, top, width and height of the frame in points.
    """
    box = data.get("-pdf-frame-box", [])
    if len(box) == 4:
        return [getSize(x) for x in box]
top = getSize(data.get("top", 0))
left = getSize(data.get("left", 0))
bottom = getSize(data.get("bottom", 0))
right = getSize(data.get("right", 0))
if "height" in data:
height = getSize(data["height"])
if "top" in data:
top = getSize(data["top"])
bottom = page_height - (top + height)
elif "bottom" in data:
bottom = getSize(data["bottom"])
top = page_height - (bottom + height)
if "width" in data:
width = getSize(data["width"])
if "left" in data:
left = getSize(data["left"])
right = page_width - (left + width)
elif "right" in data:
right = getSize(data["right"])
left = page_width - (right + width)
top += getSize(data.get("margin-top", 0))
left += getSize(data.get("margin-left", 0))
bottom += getSize(data.get("margin-bottom", 0))
right += getSize(data.get("margin-right", 0))
width = page_width - (left + right)
height = page_height - (top + bottom)
return left, top, width, height
@memoized
def getPos(position, pagesize):
"""
Pair of coordinates
"""
position = str(position).split()
if len(position) != 2:
raise Exception, "position not defined right way"
x, y = [getSize(pos) for pos in position]
return getCoords(x, y, None, None, pagesize)
def getBool(s):
" Is it a boolean? "
return str(s).lower() in ("y", "yes", "1", "true")
_uid = 0
def getUID():
" Unique ID "
global _uid
_uid += 1
return str(_uid)
_alignments = {
"left": TA_LEFT,
"center": TA_CENTER,
"middle": TA_CENTER,
"right": TA_RIGHT,
"justify": TA_JUSTIFY,
}
def getAlign(value, default=TA_LEFT):
return _alignments.get(str(value).lower(), default)
#def getVAlign(value):
# # Unused
# return str(value).upper()
GAE = "google.appengine" in sys.modules
if GAE:
STRATEGIES = (
StringIO.StringIO,
StringIO.StringIO)
else:
STRATEGIES = (
StringIO.StringIO,
tempfile.NamedTemporaryFile)
class pisaTempFile(object):
"""A temporary file implementation that uses memory unless
either capacity is breached or fileno is requested, at which
point a real temporary file will be created and the relevant
details returned
If capacity is -1 the second strategy will never be used.
Inspired by:
http://code.activestate.com/recipes/496744/
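    Example (doctest sketch; the capacity is deliberately tiny to force the
    switch to a real temporary file):

    >>> f = pisaTempFile("abc", capacity=8)
    >>> f.getvalue()
    'abc'
    >>> f.write("0123456789")   # exceeds the capacity, switches strategy
    >>> f.getvalue()
    'abc0123456789'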
"""
STRATEGIES = STRATEGIES
CAPACITY = 10 * 1024
def __init__(self, buffer="", capacity=CAPACITY):
"""Creates a TempFile object containing the specified buffer.
If capacity is specified, we use a real temporary file once the
file gets larger than that size. Otherwise, the data is stored
in memory.
"""
#if hasattr(buffer, "read"):
#shutil.copyfileobj( fsrc, fdst[, length])
self.capacity = capacity
self.strategy = int(len(buffer) > self.capacity)
try:
self._delegate = self.STRATEGIES[self.strategy]()
except:
# Fallback for Google AppEnginge etc.
self._delegate = self.STRATEGIES[0]()
self.write(buffer)
# we must set the file's position for preparing to read
self.seek(0)
def makeTempFile(self):
" Switch to next startegy. If an error occured stay with the first strategy "
if self.strategy == 0:
try:
new_delegate = self.STRATEGIES[1]()
new_delegate.write(self.getvalue())
self._delegate = new_delegate
self.strategy = 1
log.warn("Created temporary file %s", self.name)
except:
self.capacity = - 1
def getFileName(self):
" Get a named temporary file "
self.makeTempFile()
return self.name
def fileno(self):
"""Forces this buffer to use a temporary file as the underlying.
object and returns the fileno associated with it.
"""
self.makeTempFile()
return self._delegate.fileno()
def getvalue(self):
" Get value of file. Work around for second strategy "
if self.strategy == 0:
return self._delegate.getvalue()
self._delegate.flush()
self._delegate.seek(0)
return self._delegate.read()
def write(self, value):
" If capacity != -1 and length of file > capacity it is time to switch "
if self.capacity > 0 and self.strategy == 0:
len_value = len(value)
if len_value >= self.capacity:
needs_new_strategy = True
else:
self.seek(0, 2) # find end of file
needs_new_strategy = \
(self.tell() + len_value) >= self.capacity
if needs_new_strategy:
self.makeTempFile()
self._delegate.write(value)
def __getattr__(self, name):
try:
return getattr(self._delegate, name)
except AttributeError:
# hide the delegation
e = "object '%s' has no attribute '%s'" \
% (self.__class__.__name__, name)
raise AttributeError(e)
_rx_datauri = re.compile("^data:(?P<mime>[a-z]+/[a-z]+);base64,(?P<data>.*)$", re.M | re.DOTALL)
class pisaFileObject:
"""
    Wraps a resource given as a URI (data URI, local path, file:// or
    http(s):// URL) and exposes its content, name, and mime type.
"""
def __init__(self, uri, basepath=None):
self.basepath = basepath
self.mimetype = None
self.file = None
self.data = None
self.uri = None
self.local = None
self.tmp_file = None
uri = str(uri)
log.debug("FileObject %r, Basepath: %r", uri, basepath)
# Data URI
if uri.startswith("data:"):
m = _rx_datauri.match(uri)
self.mimetype = m.group("mime")
self.data = base64.decodestring(m.group("data"))
else:
# Check if we have an external scheme
if basepath and not urlparse.urlparse(uri).scheme:
urlParts = urlparse.urlparse(basepath)
else:
urlParts = urlparse.urlparse(uri)
log.debug("URLParts: %r", urlParts)
if urlParts.scheme == 'file':
if basepath and uri.startswith('/'):
uri = urlparse.urljoin(basepath, uri[1:])
urlResponse = urllib2.urlopen(uri)
self.mimetype = urlResponse.info().get("Content-Type", '').split(";")[0]
self.uri = urlResponse.geturl()
self.file = urlResponse
# Drive letters have len==1 but we are looking for things like http:
elif urlParts.scheme in ('http', 'https'):
# External data
if basepath:
uri = urlparse.urljoin(basepath, uri)
#path = urlparse.urlsplit(url)[2]
#mimetype = getMimeType(path)
# Using HTTPLIB
server, path = urllib.splithost(uri[uri.find("//"):])
if uri.startswith("https://"):
conn = httplib.HTTPSConnection(server)
else:
conn = httplib.HTTPConnection(server)
conn.request("GET", path)
r1 = conn.getresponse()
# log.debug("HTTP %r %r %r %r", server, path, uri, r1)
if (r1.status, r1.reason) == (200, "OK"):
self.mimetype = r1.getheader("Content-Type", '').split(";")[0]
self.uri = uri
if r1.getheader("content-encoding") == "gzip":
import gzip
self.file = gzip.GzipFile(mode="rb", fileobj=r1)
else:
self.file = r1
else:
urlResponse = urllib2.urlopen(uri)
self.mimetype = urlResponse.info().get("Content-Type", '').split(";")[0]
self.uri = urlResponse.geturl()
self.file = urlResponse
else:
# Local data
if basepath:
uri = os.path.normpath(os.path.join(basepath, uri))
if os.path.isfile(uri):
self.uri = uri
self.local = uri
self.setMimeTypeByName(uri)
self.file = open(uri, "rb")
def getFile(self):
if self.file is not None:
return self.file
if self.data is not None:
return pisaTempFile(self.data)
return None
def getNamedFile(self):
if self.notFound():
return None
if self.local:
return str(self.local)
if not self.tmp_file:
self.tmp_file = tempfile.NamedTemporaryFile()
if self.file:
shutil.copyfileobj(self.file, self.tmp_file)
else:
self.tmp_file.write(self.getData())
self.tmp_file.flush()
return self.tmp_file.name
def getData(self):
if self.data is not None:
return self.data
if self.file is not None:
self.data = self.file.read()
return self.data
return None
def notFound(self):
return (self.file is None) and (self.data is None)
def setMimeTypeByName(self, name):
" Guess the mime type "
        mimetype = mimetypes.guess_type(name)[0]
        if mimetype is not None:
            self.mimetype = mimetype.split(";")[0]
def getFile(*a , **kw):
file = pisaFileObject(*a, **kw)
if file.notFound():
return None
return file
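# Illustrative use of getFile(); the path and data URI below are hypothetical:
#
#     f = getFile("img/logo.png", basepath="/srv/site")    # local file
#     f = getFile("data:image/png;base64,iVBORw0KGgo=")    # inline data URI
#     data = f.getData() if f else None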
COLOR_BY_NAME = {
'activeborder': Color(212, 208, 200),
'activecaption': Color(10, 36, 106),
'aliceblue': Color(.941176, .972549, 1),
'antiquewhite': Color(.980392, .921569, .843137),
'appworkspace': Color(128, 128, 128),
'aqua': Color(0, 1, 1),
'aquamarine': Color(.498039, 1, .831373),
'azure': Color(.941176, 1, 1),
'background': Color(58, 110, 165),
'beige': Color(.960784, .960784, .862745),
'bisque': Color(1, .894118, .768627),
'black': Color(0, 0, 0),
'blanchedalmond': Color(1, .921569, .803922),
'blue': Color(0, 0, 1),
'blueviolet': Color(.541176, .168627, .886275),
'brown': Color(.647059, .164706, .164706),
'burlywood': Color(.870588, .721569, .529412),
'buttonface': Color(212, 208, 200),
'buttonhighlight': Color(255, 255, 255),
'buttonshadow': Color(128, 128, 128),
'buttontext': Color(0, 0, 0),
'cadetblue': Color(.372549, .619608, .627451),
'captiontext': Color(255, 255, 255),
'chartreuse': Color(.498039, 1, 0),
'chocolate': Color(.823529, .411765, .117647),
'coral': Color(1, .498039, .313725),
'cornflowerblue': Color(.392157, .584314, .929412),
'cornsilk': Color(1, .972549, .862745),
'crimson': Color(.862745, .078431, .235294),
'cyan': Color(0, 1, 1),
'darkblue': Color(0, 0, .545098),
'darkcyan': Color(0, .545098, .545098),
'darkgoldenrod': Color(.721569, .52549, .043137),
'darkgray': Color(.662745, .662745, .662745),
'darkgreen': Color(0, .392157, 0),
'darkgrey': Color(.662745, .662745, .662745),
'darkkhaki': Color(.741176, .717647, .419608),
'darkmagenta': Color(.545098, 0, .545098),
'darkolivegreen': Color(.333333, .419608, .184314),
'darkorange': Color(1, .54902, 0),
'darkorchid': Color(.6, .196078, .8),
'darkred': Color(.545098, 0, 0),
'darksalmon': Color(.913725, .588235, .478431),
'darkseagreen': Color(.560784, .737255, .560784),
'darkslateblue': Color(.282353, .239216, .545098),
'darkslategray': Color(.184314, .309804, .309804),
'darkslategrey': Color(.184314, .309804, .309804),
'darkturquoise': Color(0, .807843, .819608),
'darkviolet': Color(.580392, 0, .827451),
'deeppink': Color(1, .078431, .576471),
'deepskyblue': Color(0, .74902, 1),
'dimgray': Color(.411765, .411765, .411765),
'dimgrey': Color(.411765, .411765, .411765),
'dodgerblue': Color(.117647, .564706, 1),
'firebrick': Color(.698039, .133333, .133333),
'floralwhite': Color(1, .980392, .941176),
'forestgreen': Color(.133333, .545098, .133333),
'fuchsia': Color(1, 0, 1),
'gainsboro': Color(.862745, .862745, .862745),
'ghostwhite': Color(.972549, .972549, 1),
'gold': Color(1, .843137, 0),
'goldenrod': Color(.854902, .647059, .12549),
'gray': Color(.501961, .501961, .501961),
'graytext': Color(128, 128, 128),
'green': Color(0, .501961, 0),
'greenyellow': Color(.678431, 1, .184314),
'grey': Color(.501961, .501961, .501961),
'highlight': Color(10, 36, 106),
'highlighttext': Color(255, 255, 255),
'honeydew': Color(.941176, 1, .941176),
'hotpink': Color(1, .411765, .705882),
'inactiveborder': Color(212, 208, 200),
'inactivecaption': Color(128, 128, 128),
'inactivecaptiontext': Color(212, 208, 200),
'indianred': Color(.803922, .360784, .360784),
'indigo': Color(.294118, 0, .509804),
'infobackground': Color(255, 255, 225),
'infotext': Color(0, 0, 0),
'ivory': Color(1, 1, .941176),
'khaki': Color(.941176, .901961, .54902),
'lavender': Color(.901961, .901961, .980392),
'lavenderblush': Color(1, .941176, .960784),
'lawngreen': Color(.486275, .988235, 0),
'lemonchiffon': Color(1, .980392, .803922),
'lightblue': Color(.678431, .847059, .901961),
'lightcoral': Color(.941176, .501961, .501961),
'lightcyan': Color(.878431, 1, 1),
'lightgoldenrodyellow': Color(.980392, .980392, .823529),
'lightgray': Color(.827451, .827451, .827451),
'lightgreen': Color(.564706, .933333, .564706),
'lightgrey': Color(.827451, .827451, .827451),
'lightpink': Color(1, .713725, .756863),
'lightsalmon': Color(1, .627451, .478431),
'lightseagreen': Color(.12549, .698039, .666667),
'lightskyblue': Color(.529412, .807843, .980392),
'lightslategray': Color(.466667, .533333, .6),
'lightslategrey': Color(.466667, .533333, .6),
'lightsteelblue': Color(.690196, .768627, .870588),
'lightyellow': Color(1, 1, .878431),
'lime': Color(0, 1, 0),
'limegreen': Color(.196078, .803922, .196078),
'linen': Color(.980392, .941176, .901961),
'magenta': Color(1, 0, 1),
'maroon': Color(.501961, 0, 0),
'mediumaquamarine': Color(.4, .803922, .666667),
'mediumblue': Color(0, 0, .803922),
'mediumorchid': Color(.729412, .333333, .827451),
'mediumpurple': Color(.576471, .439216, .858824),
'mediumseagreen': Color(.235294, .701961, .443137),
'mediumslateblue': Color(.482353, .407843, .933333),
'mediumspringgreen': Color(0, .980392, .603922),
'mediumturquoise': Color(.282353, .819608, .8),
'mediumvioletred': Color(.780392, .082353, .521569),
'menu': Color(212, 208, 200),
'menutext': Color(0, 0, 0),
'midnightblue': Color(.098039, .098039, .439216),
'mintcream': Color(.960784, 1, .980392),
'mistyrose': Color(1, .894118, .882353),
'moccasin': Color(1, .894118, .709804),
'navajowhite': Color(1, .870588, .678431),
'navy': Color(0, 0, .501961),
'oldlace': Color(.992157, .960784, .901961),
'olive': Color(.501961, .501961, 0),
'olivedrab': Color(.419608, .556863, .137255),
'orange': Color(1, .647059, 0),
'orangered': Color(1, .270588, 0),
'orchid': Color(.854902, .439216, .839216),
'palegoldenrod': Color(.933333, .909804, .666667),
'palegreen': Color(.596078, .984314, .596078),
'paleturquoise': Color(.686275, .933333, .933333),
'palevioletred': Color(.858824, .439216, .576471),
'papayawhip': Color(1, .937255, .835294),
'peachpuff': Color(1, .854902, .72549),
'peru': Color(.803922, .521569, .247059),
'pink': Color(1, .752941, .796078),
'plum': Color(.866667, .627451, .866667),
'powderblue': Color(.690196, .878431, .901961),
'purple': Color(.501961, 0, .501961),
'red': Color(1, 0, 0),
'rosybrown': Color(.737255, .560784, .560784),
'royalblue': Color(.254902, .411765, .882353),
'saddlebrown': Color(.545098, .270588, .07451),
'salmon': Color(.980392, .501961, .447059),
'sandybrown': Color(.956863, .643137, .376471),
'scrollbar': Color(212, 208, 200),
'seagreen': Color(.180392, .545098, .341176),
'seashell': Color(1, .960784, .933333),
'sienna': Color(.627451, .321569, .176471),
'silver': Color(.752941, .752941, .752941),
'skyblue': Color(.529412, .807843, .921569),
'slateblue': Color(.415686, .352941, .803922),
'slategray': Color(.439216, .501961, .564706),
'slategrey': Color(.439216, .501961, .564706),
'snow': Color(1, .980392, .980392),
'springgreen': Color(0, 1, .498039),
'steelblue': Color(.27451, .509804, .705882),
'tan': Color(.823529, .705882, .54902),
'teal': Color(0, .501961, .501961),
'thistle': Color(.847059, .74902, .847059),
'threeddarkshadow': Color(64, 64, 64),
'threedface': Color(212, 208, 200),
'threedhighlight': Color(255, 255, 255),
'threedlightshadow': Color(212, 208, 200),
'threedshadow': Color(128, 128, 128),
'tomato': Color(1, .388235, .278431),
'turquoise': Color(.25098, .878431, .815686),
'violet': Color(.933333, .509804, .933333),
'wheat': Color(.960784, .870588, .701961),
'white': Color(1, 1, 1),
'whitesmoke': Color(.960784, .960784, .960784),
'window': Color(255, 255, 255),
'windowframe': Color(0, 0, 0),
'windowtext': Color(0, 0, 0),
'yellow': Color(1, 1, 0),
    'yellowgreen': Color(.603922, .803922, .196078)}
<|file_name|>computer_system_1_0_0_system_type.py<|end_file_name|>
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class ComputerSystem100SystemType(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
ComputerSystem100SystemType - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
}
self.attribute_map = {
}
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
        return not self == other
<|file_name|>MainBigIntCs.java<|end_file_name|>
/*******************************************************************************
* Copyright (c) 2009, 2014 Tim Tiemens.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
*
* Contributors:
* Tim Tiemens - initial API and implementation
*******************************************************************************/
package com.tiemens.secretshare.main.cli;
import java.io.InputStream;
import java.io.PrintStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import com.tiemens.secretshare.exceptions.SecretShareException;
import com.tiemens.secretshare.math.type.BigIntUtilities;
/**
* Main command line for the "bigintcs" utilities - converting to/from bigintcs, bigint, and String.
*
* Takes a mode (bics2bi, bics2s, bi2s, bi2bics, s2bics, s2bi)
* and a list of input strings
* and writes the conversion strings.
*
* @author tiemens
*
*/
public final class MainBigIntCs
{
/**
* @param args from command line
*/
public static void main(String[] args)
{
main(args, System.in, System.out);
}
public static void main(String[] args,
InputStream in,
PrintStream out)
{
try
{
BigIntCsInput input = BigIntCsInput.parse(args);
BigIntCsOutput output = input.output();
output.print(out);
}
catch (SecretShareException e)
{
out.println(e.getMessage());
usage(out);
optionallyPrintStackTrace(args, e, out);
}
}
public static void usage(PrintStream out)
{
out.println("Usage:");
out.println(" bigintcs -h -mode <bics2bi|bics2s|bi2s|bi2bics|s2bics|s2bi> " +
" [-v] [-in <bics|bi|s>] [-out <bics|bi|s>] [-sepSpace|-sepNewline] value [value2 ...]");
out.println(" -h print usage");
out.println(" -in <m> set input mode");
out.println(" s String, converted to array of bytes, constructing a Big Integer [default]");
out.println(" bi String, parsed to Big Integer, used as a Big Integer");
out.println(" bics String, parsed and checksummed to Big Integer Checksum, " +
"then used as a Big Integer");
out.println(" -out <m> set output mode");
out.println(" s Output Big Integer as array of bytes to construct a String");
out.println(" bi Output Big Integer .toString()");
out.println(" bics Output Big Integer Checksum .toString() [default]");
out.println(" -mode <m> set both input and output operation mode");
out.println(" s2bi -in s -out bi");
out.println(" s2bics -in s -out bics [default]");
out.println(" bi2s -in bi -out s");
out.println(" bics2bi -in bics -out bi");<|fim▁hole|> out.println(" Example: s2bi 'a' = 97 (ascii 'a')");
out.println(" Example: bi2s '97' = a");
out.println(" Example: s2bi 'ab' = 24930 (ascii 'a' * 256 + ascii 'b')");
out.println(" Example: s2bi '1' = 49 (NOT '1')");
out.println(" Example: s2bi '123' = 3224115 (NOT '123')");
out.println(" Example: s2bics 'Cat' = bigintcs:436174-7BF975");
out.println(" Example: bics2s 'bigintcs:436174-7BF975' = Cat");
out.println(" Example: s2bi 'Cat' = 4415860");
out.println(" Example: bi2bics '4415860' = bigintcs:436174-7BF975");
}
public static BigInteger parseBigInteger(String argname,
String[] args,
int index)
{
return MainSplit.parseBigInteger(argname, args, index);
}
public static Integer parseInt(String argname,
String[] args,
int index)
{
return MainSplit.parseInt(argname, args, index);
}
public static void checkIndex(String argname,
String[] args,
int index)
{
MainSplit.checkIndex(argname, args, index);
}
public static void optionallyPrintStackTrace(String[] args,
Exception e,
PrintStream out)
{
MainSplit.optionallyPrintStackTrace(args, e, out);
}
private MainBigIntCs()
{
// no instances
}
public static enum Type
{
bics, bi, s;
/**
* @param in type to find
* @param argName to display if an error happens
* @return Type or throw exception
* @throws SecretShareException if 'in' is not found
*/
public static Type findByString(String in, String argName)
{
            try
            {
                return valueOf(in);
            }
            catch (IllegalArgumentException e)
            {
                // valueOf() throws IllegalArgumentException (it never returns null),
                // so translate it into the documented SecretShareException.
                throw new SecretShareException("Type value '" + in + "' not found." +
                                               ((argName != null) ? " Argname=" + argName : ""));
            }
}
}
public static enum Type2Type
{
bics2bics, bics2bi, bics2s,
bi2bics, bi2bi, bi2s,
s2bics, s2bi, s2s;
/**
* @param in combination type2type to find
* @param argName to display if an error happens
* @return Type2Type or throw exception
* @throws SecretShareException if 'in' is not found
*/
public static Type2Type findByString(String in, String argName)
{
            try
            {
                return valueOf(in);
            }
            catch (IllegalArgumentException e)
            {
                // valueOf() throws IllegalArgumentException (it never returns null),
                // so translate it into the documented SecretShareException.
                throw new SecretShareException("Type2Type value '" + in + "' not found." +
                                               ((argName != null) ? " Argname=" + argName : ""));
            }
}
public Type getInputType(String argName)
{
String name = this.name();
return Type.findByString(name.substring(0, name.indexOf('2')), argName);
}
public Type getOutputType(String argName)
{
String name = this.name();
return Type.findByString(name.substring(name.indexOf('2') + 1, name.length()), argName);
}
}
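    // Illustrative decomposition of a mode name by the Type2Type enum above:
    //   Type2Type.s2bics.getInputType("mode")  == Type.s
    //   Type2Type.s2bics.getOutputType("mode") == Type.bics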
public static class BigIntCsInput
{
private static final String SYSTEMLINESEPARATOR =
System.getProperty("line.separator"); // jdk1.7: System.lineSeparator();
// ==================================================
// instance data
// ==================================================
// required arguments:
private final List<String> inputs = new ArrayList<String>();
private Type inputType = Type.s;
private Type outputType = Type.bics;
// optional
private boolean printHeader = false;
private String separator = SYSTEMLINESEPARATOR;
// ==================================================
// constructors
// ==================================================
public static BigIntCsInput parse(String[] args)
{
BigIntCsInput ret = new BigIntCsInput();
for (int i = 0, n = args.length; i < n; i++)
{
if (args[i] == null)
{
continue;
}
if ("-in".equals(args[i]))
{
i++;
ret.inputType = parseType("in", args, i);
}
else if ("-out".equals(args[i]))
{
i++;
ret.outputType = parseType("out", args, i);
}
else if ("-mode".equals(args[i]))
{
i++;
Type2Type t2t = parseType2Type("mode", args, i);
ret.inputType = t2t.getInputType("mode");
ret.outputType = t2t.getOutputType("mode");
}
else if ("-sep".equals(args[i]))
{
i++;
checkIndex("sep", args, i);
ret.separator = args[i];
}
else if ("-sepSpace".equals(args[i]))
{
ret.separator = " ";
}
else if ("-sepNewLine".equals(args[i]))
{
ret.separator = SYSTEMLINESEPARATOR;
}
else if ("-v".equals(args[i]))
{
ret.printHeader = true;
}
else if (args[i].startsWith("-"))
{
String m = "Argument '" + args[i] + "' not understood";
throw new SecretShareException(m);
}
else
{
String v = args[i];
ret.inputs.add(v);
}
}
return ret;
}
public static Type2Type parseType2Type(String argname, String[] args, int index)
{
checkIndex(argname, args, index);
return parseType2Type(argname, args[index]);
}
public static Type2Type parseType2Type(String argname, String value)
{
return Type2Type.findByString(value, argname);
}
public static Type parseType(String argname, String[] args, int index)
{
checkIndex(argname, args, index);
return parseType(argname, args[index]);
}
public static Type parseType(String argname, String value)
{
return Type.findByString(value, argname);
}
// ==================================================
// public methods
// ==================================================
public BigIntCsOutput output()
{
BigIntCsOutput ret = new BigIntCsOutput(this);
return ret;
}
// ==================================================
// non public methods
// ==================================================
}
public static class BigIntCsOutput
{
private final BigIntCsInput bigintcsInput;
private List<String> output;
public BigIntCsOutput(BigIntCsInput inBigIntCsInput)
{
bigintcsInput = inBigIntCsInput;
}
public void print(PrintStream out)
{
output = convert(bigintcsInput.inputs, bigintcsInput.inputType, bigintcsInput.outputType);
if (bigintcsInput.printHeader)
{
printHeaderInfo(out);
}
String sep = "";
if (output.size() > 0)
{
for (String s : output)
{
out.print(sep);
sep = bigintcsInput.separator;
out.print(s);
}
out.print(sep);
}
}
// ==================================================
// instance data
// ==================================================
// ==================================================
// constructors
// ==================================================
// ==================================================
// public methods
// ==================================================
// ==================================================
// non public methods
// ==================================================
public static List<String> convert(List<String> inputs,
Type inputType,
Type outputType)
{
List<String> ret = new ArrayList<String>();
for (String in : inputs)
{
String out = convert(in, inputType, outputType);
ret.add(out);
}
return ret;
}
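        // Illustrative round trips through convert() below; the expected values are
        // taken from the usage() examples earlier in this file:
        //   convert("Cat", Type.s, Type.bics)                    -> "bigintcs:436174-7BF975"
        //   convert("bigintcs:436174-7BF975", Type.bics, Type.s) -> "Cat"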
public static String convert(String in,
Type inputType,
Type outputType)
{
if (Type.s.equals(inputType) && Type.s.equals(outputType))
{
String asbi = BigIntUtilities.Human.createBigInteger(in).toString();
String noop = BigIntUtilities.Human.createHumanString(new BigInteger(asbi));
if (noop.equals(in))
{
// that whole thing was a no-operation; it was just a double-check
return in;
}
else
{
throw new SecretShareException("Programmer error: in='" + in + "' asbi='" +
asbi + "' yet output string was '" + noop + "'");
}
}
else if (Type.s.equals(inputType) && Type.bi.equals(outputType))
{
return BigIntUtilities.Human.createBigInteger(in).toString();
}
else if (Type.s.equals(inputType) && Type.bics.equals(outputType))
{
BigInteger inbi = BigIntUtilities.Human.createBigInteger(in);
return BigIntUtilities.Checksum.createMd5CheckSumString(inbi);
}
else if (Type.bi.equals(inputType))
{
BigInteger inbi = new BigInteger(in);
if (Type.s.equals(outputType))
{
return BigIntUtilities.Human.createHumanString(inbi);
}
else if (Type.bi.equals(outputType))
{
return inbi.toString();
}
else if (Type.bics.equals(outputType))
{
return BigIntUtilities.Checksum.createMd5CheckSumString(inbi);
}
else
{
error("input type bi, output type unknown: " + outputType);
}
}
else if (Type.bics.equals(inputType))
{
BigInteger inbi = BigIntUtilities.Checksum.createBigInteger(in);
if (Type.s.equals(outputType))
{
return BigIntUtilities.Human.createHumanString(inbi);
}
else if (Type.bi.equals(outputType))
{
return inbi.toString();
}
else if (Type.bics.equals(outputType))
{
return BigIntUtilities.Checksum.createMd5CheckSumString(inbi);
}
else
{
return error("input type bics, output type unknown: " + outputType);
}
}
else
{
return error("input type unknown: " + inputType);
}
return error("Programmer Error - fell off if chain");
}
private static String error(String msg)
{
throw new SecretShareException(msg);
}
private void printHeaderInfo(PrintStream out)
{
out.print(Main.getVersionLine());
out.print(bigintcsInput.separator);
}
} // class BigIntCsOutput
}
<|file_name|>test_bond.py<|end_file_name|><|fim▁begin|>import numpy as np
STR_NOBOND = """AU
3 1 2 1
1 0.00000000 0.00000000 0.00000000 -0.66387672 0.00000000 -0.00000000 0.34509720 3.78326969 -0.00000000 -0.00000000 3.96610412 0.00000000 3.52668267 0.00000000 -0.00000000 -2.98430053 0.00000000 -0.00000000 0.00000000 -0.00000000 1.26744725 -0.00000000 2.16730601
1 1.43043000 0.00000000 1.10716000 0.33193836 -0.16057903 -0.00000000 -0.11299312 1.55235099 -0.00000000 1.15495299 0.60859677 -0.00000000 1.21104235 -4.46820475 0.00000000 -4.55909022 -0.05601735 0.00000000 -3.72029878 -0.00000000 0.46039909 -0.00000000 -2.40410436
1 -1.43043000 0.00000000 1.10716000 0.33193836 0.16057903 -0.00000000 -0.11299312 1.55235099 -0.00000000 -1.15495299 0.60859677 0.00000000 1.21104235 4.46820475 -0.00000000 -4.55909022 0.05601735 0.00000000 3.72029878 -0.00000000 0.46039909 -0.00000000 -2.40410436
Time used in Loprop : 0.45 (cpu) 0.11 (wall)
"""
STR_BOND = """AU
5 1 22 1
1 0.00000000 0.00000000 0.00000000 -0.66387672 0.00000000 -0.00000000 0.41788500 1.19165567 0.00000000 0.00000000 2.74891057 0.00000000 1.33653383 0.00000000 0.00000000 4.18425484 0.00000000 -0.00000000 -0.00000000 -0.00000000 0.19037387 0.00000000 5.96033807
1 0.71521500 0.00000000 0.55358000 0.00000000 -0.06567795 -0.00000000 -0.07278780 2.59161403 -0.00000000 1.21719355 1.98015668 -0.00000000 2.19014883 -7.24839104 0.00000000 -7.16855538 0.59534043 0.00000000 -5.74640170 -0.00000000 1.07707338 -0.00000000 -3.79303206
1 1.43043000 0.00000000 1.10716000 0.33193836 -0.12774005 0.00000000 -0.07659922 0.25654398 0.00000000 0.16487465 -0.00000000 -0.00000000 0.11596794 -0.84400923 0.00000000 -0.97481253 -0.35368757 -0.00000000 -0.84709793 0.00000000 -0.07813759 0.00000000 -0.50758833
1 -0.71521500 0.00000000 0.55358000 0.00000000 0.06567795 -0.00000000 -0.07278780 2.59161403 -0.00000000 1.21719355 -1.98015668 0.00000000 2.19014883 7.24839104 -0.00000000 -7.16855538 -0.59534043 0.00000000 5.74640170 -0.00000000 1.07707338 -0.00000000 -3.79303206
1 -1.43043000 0.00000000 1.10716000 0.33193836 0.12774005 0.00000000 -0.07659922 0.25654398 -0.00000000 -0.16487465 0.00000000 0.00000000 0.11596794 0.84400923 -0.00000000 -0.97481253 0.35368757 0.00000000 0.84709793 -0.00000000 -0.07813759 -0.00000000 -0.50758833
Time used in Loprop : 0.45 (cpu) 0.11 (wall)
"""
class TestBondH2O:
"""H2O tests bonded versus non-bonden results"""
def setup(self):
# Read in string that is for no bonds output
lines = [line for line in STR_BOND.split("\n") if len(line.split()) > 10]
a0 = 1.0
self.n_bond = np.array([8.0, 0.0, 1.0, 0.0, 1.0], dtype=float)
self.r_bond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_bond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_bond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_bond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_bond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_bond = np.einsum("ij,i", self.r_bond, self.n_bond) / self.n_bond.sum()
# Read in string that is for bonds output -b
lines = [line for line in STR_NOBOND.split("\n") if len(line.split()) > 10]
self.n_nobond = np.array([8.0, 1.0, 1.0], dtype=float)
self.r_nobond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_nobond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_nobond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_nobond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_nobond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_nobond = (
np.einsum("ij,i", self.r_nobond, self.n_nobond) / self.n_nobond.sum()
)
def test_bond_nobond_properties(self):
"""Center-of-charge equality"""
np.testing.assert_allclose(self.coc_bond, self.coc_nobond)
def test_a(self):
"""Polarizability equality"""
a_tot_bond = np.sum(self.a_bond)
a_tot_nobond = np.sum(self.a_nobond)
np.testing.assert_allclose(a_tot_bond, a_tot_nobond)
def test_b(self):
"""Hyperpolarizability equality"""
b_tot_bond = np.sum(self.b_bond)
b_tot_nobond = np.sum(self.b_nobond)
np.testing.assert_allclose(b_tot_bond, b_tot_nobond)
def test_dip(self):
"""Dipole equality"""
dip_bond = np.einsum(
"ij,i", (self.r_bond - self.coc_bond), self.q_bond
) + self.d_bond.sum(axis=0)
dip_nobond = np.einsum(
"ij,i", (self.r_nobond - self.coc_nobond), self.q_nobond
) + self.d_nobond.sum(axis=0)
np.testing.assert_allclose(dip_bond, dip_nobond)
class TestBondH2S:
"""H2O tests bonded versus non-bonden results"""
def setup(self):
# Read in string that is for no bonds output
lines = [line for line in STR_BOND.split("\n") if len(line.split()) > 10]
a0 = 1.0
self.n_bond = np.array([16.0, 0.0, 1.0, 0.0, 1.0], dtype=float)
self.r_bond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_bond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_bond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_bond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_bond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_bond = np.einsum("ij,i", self.r_bond, self.n_bond) / self.n_bond.sum()
# Read in string that is for bonds output -b
lines = [line for line in STR_NOBOND.split("\n") if len(line.split()) > 10]
self.n_nobond = np.array([16.0, 1.0, 1.0], dtype=float)
self.r_nobond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_nobond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_nobond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_nobond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_nobond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_nobond = (
np.einsum("ij,i", self.r_nobond, self.n_nobond) / self.n_nobond.sum()
)
def test_bond_nobond_properties(self):
"""Center-of-charge equality"""
np.testing.assert_allclose(self.coc_bond, self.coc_nobond)
def test_a(self):
"""Polarizability equality"""
a_tot_bond = np.sum(self.a_bond)
a_tot_nobond = np.sum(self.a_nobond)
np.testing.assert_allclose(a_tot_bond, a_tot_nobond)
def test_b(self):
"""Hyperpolarizability equality"""
b_tot_bond = np.sum(self.b_bond)
b_tot_nobond = np.sum(self.b_nobond)
np.testing.assert_allclose(b_tot_bond, b_tot_nobond)
def test_dip(self):
"""Dipole equality"""
dip_bond = np.einsum(<|fim▁hole|> "ij,i", (self.r_nobond - self.coc_nobond), self.q_nobond
) + self.d_nobond.sum(axis=0)
np.testing.assert_allclose(dip_bond, dip_nobond)<|fim▁end|> | "ij,i", (self.r_bond - self.coc_bond), self.q_bond
) + self.d_bond.sum(axis=0)
dip_nobond = np.einsum( |
<|file_name|>newlambdas-ret-infer2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the lambda kind is inferred correctly as a return
// expression
fn shared() -> @fn() { || () }
fn unique() -> ~fn() { || () }
pub fn main() {
}<|fim▁end|> | |
<|file_name|>ssh_key.rs<|end_file_name|><|fim▁begin|>// id (number): This is a unique identification number for the
// key. This can be used to reference a specific SSH key when you wish to embed
// a key into a Droplet.
// fingerprint (string): This attribute contains the fingerprint value
// that is generated from the public key. This is a unique identifier that will
// differentiate it from other keys using a format that SSH recognizes.
// public_key (string): This attribute contains the entire public key
// string that was uploaded. This is what is embedded into the root user's
// authorized_keys file if you choose to include this SSH key during Droplet
// creation.
// name (string): This is the human-readable display name for the
// given SSH key. This is used to easily identify the SSH keys when they are
// displayed.
use std::fmt;
use std::borrow::Cow;
use response::NamedResponse;
use response;
#[derive(Deserialize, Debug)]
pub struct SshKey {
pub id: f64,
pub fingerprint: String,
pub public_key: String,
pub name: String,
}
impl response::NotArray for SshKey {}
impl NamedResponse for SshKey {
fn name<'a>() -> Cow<'a, str> { "ssh_key".into() }
}
impl fmt::Display for SshKey {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {<|fim▁hole|> Fingerprint: {}\n\
Public Key: {}\n\
Name: {}",
self.id,
self.fingerprint,
self.public_key,
self.name)
}
}
pub type SshKeys = Vec<SshKey>;<|fim▁end|> | write!(f,
"ID: {:.0}\n\ |
<|file_name|>fuzzer-parse_json.cpp<|end_file_name|><|fim▁begin|>/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (fuzz test support)
| | |__ | | | | | | version 2.1.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json
This file implements a parser test suitable for fuzz testing. Given a byte
array data, it performs the following steps:
- j1 = parse(data)
- s1 = serialize(j1)
- j2 = parse(s1)
- s2 = serialize(j2)
- assert(s1 == s2)
The provided function `LLVMFuzzerTestOneInput` can be used in different fuzzer
drivers.
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
*/
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <sstream>
#include <json.hpp>
using json = nlohmann::json;
// see http://llvm.org/docs/LibFuzzer.html
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size)
{
try
{
// step 1: parse input
json j1 = json::parse(data, data + size);
try
{
// step 2: round trip
// first serialization
std::string s1 = j1.dump();
// parse serialization
json j2 = json::parse(s1);
// second serialization<|fim▁hole|> // serializations must match
assert(s1 == s2);
}
catch (const std::invalid_argument&)
{
// parsing a JSON serialization must not fail
assert(false);
}
}
catch (const std::invalid_argument&)
{
// parse errors are ok, because input may be random bytes
}
// return 0 - non-zero return values are reserved for future use
return 0;
}<|fim▁end|> | std::string s2 = j2.dump();
|
<|file_name|>eigen.rs<|end_file_name|><|fim▁begin|>use std::cmp;
use num;
use num::traits::{Float, Signed};
use ApproxEq;
use Matrix;
use internalutil::{alloc_dirty_vec, hypot};
/// Eigenvalues and eigenvectors of a real matrix.
///
/// Ported from JAMA.
///
/// If A is symmetric, then A = V*D*V' where the eigenvalue matrix D is
/// diagonal and the eigenvector matrix V is orthogonal.
/// I.e. A = V * D * V' and V * V' = I.
///
/// If A is not symmetric, then the eigenvalue matrix D is block diagonal
/// with the real eigenvalues in 1-by-1 blocks and any complex eigenvalues,
/// lambda + i*mu, in 2-by-2 blocks, [lambda, mu; -mu, lambda]. The
/// columns of V represent the eigenvectors in the sense that A*V = V*D,
/// The matrix V may be badly conditioned, or even singular, so the validity
/// of the equation A = V * D * V^-1 depends upon V.cond().
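///
/// A minimal usage sketch (mirroring the tests at the bottom of this file;
/// the `m!` matrix macro is assumed to come from this crate):
///
/// ```ignore
/// let a = m!(3.0, 1.0, 6.0; 2.0, 1.0, 0.0; -1.0, 0.0, -3.0);
/// let eig = EigenDecomposition::new(&a);
/// let real_parts = eig.get_real_eigenvalues(); // eigenvalue real parts
/// let d = eig.get_d();                         // block-diagonal eigenvalue matrix
/// ```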
pub struct EigenDecomposition<T> {
n : usize,
d : Vec<T>,
e : Vec<T>,
v : Matrix<T>
}
//impl<T : FloatMath + ApproxEq<T>> EigenDecomposition<T> {
impl<T : Float + ApproxEq<T> + Signed> EigenDecomposition<T> {
// Symmetric Householder reduction to tridiagonal form.
fn tred2(n : usize, ddata : &mut Vec<T>, vdata : &mut Vec<T>, edata : &mut Vec<T>) {
// This is derived from the Algol procedures tred2 by Bowdler, Martin, Reinsch, and Wilkinson, Handbook for
// Auto. Comp., Vol.ii-Linear Algebra, and the corresponding Fortran subroutine in EISPACK.
for j in 0..n {
ddata[j] = vdata[(n - 1) * n + j];
}
// Householder reduction to tridiagonal form.
for i in (1..n).rev() {
// Scale to avoid under/overflow.
let mut scale : T = num::zero();
let mut h : T = num::zero();
for k in 0..i {
scale = scale + num::abs(ddata[k]);
}
if scale == num::zero() {
edata[i] = ddata[i - 1];
for j in 0..i {
ddata[j] = vdata[(i - 1) * n + j];
vdata[i * n + j] = num::zero();
vdata[j * n + i] = num::zero();
}
} else {
// Generate Householder vector.
for k in 0..i {
ddata[k] = ddata[k] / scale;
h = h + ddata[k] * ddata[k];
}
let mut f = ddata[i - 1];
let mut g = h.sqrt();
if f > num::zero() {
g = - g;
}
edata[i] = scale * g;
h = h - f * g;
ddata[i - 1] = f - g;
for j in 0..i {
edata[j] = num::zero();
}
// Apply similarity transformation to remaining columns.
for j in 0..i {
f = ddata[j];
vdata[j * n + i] = f;
g = edata[j] + vdata[j * n + j] * f;
for k in (j + 1)..i {
g = g + vdata[k * n + j] * ddata[k];
edata[k] = edata[k] + vdata[k * n + j] * f;
}
edata[j] = g;
}
f = num::zero();
for j in 0..i {
edata[j] = edata[j] / h;
f = f + edata[j] * ddata[j];
}
let hh = f / (h + h);
for j in 0..i {
edata[j] = edata[j] - hh * ddata[j];
}
for j in 0..i {
f = ddata[j];
g = edata[j];
for k in j..i {
let orig_val = vdata[k * n + j];
vdata[k * n + j] = orig_val - (f * edata[k] + g * ddata[k]);
}
ddata[j] = vdata[(i - 1) * n + j];
vdata[i * n + j] = num::zero();
}
}
ddata[i] = h;
}
// Accumulate transformations.
for i in 0..(n - 1) {
let orig_val = vdata[i * n + i];
vdata[(n - 1) * n + i] = orig_val;
vdata[i * n + i] = num::one();
let h = ddata[i + 1];
if h != num::zero() {
for k in 0..(i + 1) {
ddata[k] = vdata[k * n + (i + 1)] / h;
}
for j in 0..(i + 1) {
let mut g : T = num::zero();
for k in 0..(i + 1) {
g = g + vdata[k * n + (i + 1)] * vdata[k * n + j];
}
for k in 0..(i + 1) {
let orig_val = vdata[k * n + j];
vdata[k * n + j] = orig_val - g * ddata[k];
}
}
}
for k in 0..(i + 1) {
vdata[k * n + (i + 1)] = num::zero();
}
}
for j in 0..n {
ddata[j] = vdata[(n - 1) * n + j];
vdata[(n - 1) * n + j] = num::zero();
}
vdata[(n - 1) * n + (n - 1)] = num::one();
edata[0] = num::zero();
}
// Symmetric tridiagonal QL algorithm.
fn tql2(n : usize, edata : &mut Vec<T>, ddata : &mut Vec<T>, vdata : &mut Vec<T>) {
// This is derived from the Algol procedures tql2, by Bowdler, Martin, Reinsch, and Wilkinson, Handbook for
// Auto. Comp., Vol.ii-Linear Algebra, and the corresponding Fortran subroutine in EISPACK.
for i in 1..n {
edata[i - 1] = edata[i];
}
edata[n - 1] = num::zero();
let mut f : T = num::zero();
let mut tst1 : T = num::zero();
let eps : T = num::cast(2.0f64.powf(-52.0)).unwrap();
for l in 0..n {
// Find small subdiagonal element
tst1 = tst1.max(num::abs(ddata[l]) + num::abs(edata[l]));
let mut m = l;
while m < n {
if num::abs(edata[m]) <= (eps * tst1) {
break;
}
m += 1;
}
// If m == l, d[l] is an eigenvalue, otherwise, iterate.
if m > l {
loop {
// Compute implicit shift
let mut g = ddata[l];
let tmp : T = num::cast(2.0).unwrap();
let mut p = (ddata[l + 1] - g) / (tmp * edata[l]);
let mut r = hypot::<T>(p, num::one());
if p < num::zero() {
r = -r;
}
ddata[l] = edata[l] / (p + r);
ddata[l + 1] = edata[l] * (p + r);
let dl1 = ddata[l + 1];
let mut h = g - ddata[l];
for i in (l + 2)..n {
ddata[i] = ddata[i] - h;
}
f = f + h;
// Implicit QL transformation.
p = ddata[m];
let mut c : T = num::one();
let mut c2 = c;
let mut c3 = c;
let el1 = edata[l + 1];
let mut s : T = num::zero();
let mut s2 = num::zero();
for i in (l..m).rev() {
c3 = c2;
c2 = c;
s2 = s;
g = c * edata[i];
h = c * p;
r = hypot::<T>(p, edata[i]);
edata[i + 1] = s * r;
s = edata[i] / r;
c = p / r;
p = c * ddata[i] - s * g;
ddata[i + 1] = h + s * (c * g + s * ddata[i]);
// Accumulate transformation.
for k in 0..n {
h = vdata[k * n + (i + 1)];
vdata[k * n + (i + 1)] = s * vdata[k * n + i] + c * h;
vdata[k * n + i] = c * vdata[k * n + i] - s * h;
}
}
p = - s * s2 * c3 * el1 * edata[l] / dl1;
edata[l] = s * p;
ddata[l] = c * p;
// Check for convergence.
if num::abs(edata[l]) <= (eps * tst1) {
break;
}
}
}
ddata[l] = ddata[l] + f;
edata[l] = num::zero();
}
    // Selection-sort eigenvalues into descending order and reorder the
    // corresponding vectors.
for i in 0..(n - 1) {
let mut k = i;
let mut p = ddata[i];
for j in (i + 1)..n {
if ddata[j] > p {
k = j;
p = ddata[j];
}
}
if k != i {
// Swap columns k and i of the diagonal and v.
ddata[k] = ddata[i];
ddata[i] = p;
for j in 0..n {
p = vdata[j * n + i];
vdata[j * n + i] = vdata[j * n + k];
vdata[j * n + k] = p;
}
}
}
}
// Nonsymmetric reduction to Hessenberg form.
fn orthes(n : usize, hdata : &mut Vec<T>, vdata : &mut Vec<T>) {
// This is derived from the Algol procedures orthes and ortran, by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding Fortran subroutines in EISPACK.
let mut ort = alloc_dirty_vec(n);
let low = 0;
let high = n - 1;
for m in (low + 1)..high {
// Scale column.
let mut scale : T = num::zero();
for i in m..(high + 1) {
scale = scale + num::abs(hdata[i * n + (m - 1)]);
}
if scale != num::zero() {
// Compute Householder transformation.
let mut h : T = num::zero();
for i in (m..(high + 1)).rev() {
ort[i] = hdata[i * n + (m - 1)] / scale;
h = h + ort[i] * ort[i];
}
let mut g = h.sqrt();
if ort[m] > num::zero() {
g = -g;
}
h = h - ort[m] * g;
ort[m] = ort[m] - g;
// Apply Householder similarity transformation
// H = (I-u*u'/h)*H*(I-u*u')/h)
for j in m..n {
let mut f : T = num::zero();
for i in (m..(high + 1)).rev() {
f = f + ort[i] * hdata[i * n + j];
}
f = f / h;
for i in m..(high + 1) {
hdata[i * n + j] = hdata[i * n + j] - f * ort[i];
}
}
for i in 0..(high + 1) {
let mut f : T = num::zero();
for j in (m..(high + 1)).rev() {
f = f + ort[j] * hdata[i * n + j];
}
f = f / h;
for j in m..(high + 1) {
hdata[i * n + j] = hdata[i * n + j] - f * ort[j];
}
}
ort[m] = scale * ort[m];
hdata[m * n + (m - 1)] = scale * g;
}
}
// Accumulate transformations (Algol's ortran).
for i in 0..n {
for j in 0..n {
vdata[i * n + j] = if i == j { num::one() } else { num::zero() };
}
}
for m in ((low + 1)..high).rev() {
if hdata[m * n + (m - 1)] != num::zero() {
for i in (m + 1)..(high + 1) {
ort[i] = hdata[i * n + (m - 1)];
}
for j in m..(high + 1) {
let mut g : T = num::zero();
for i in m..(high + 1) {
g = g + ort[i] * vdata[i * n + j];
}
// Double division avoids possible underflow
g = (g / ort[m]) / hdata[m * n + (m - 1)];
for i in m..(high + 1) {
vdata[i * n + j] = vdata[i * n + j] + g * ort[i];
}
}
}
}
}
// Complex scalar division.
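  // Computes (xr + i*xi) / (yr + i*yi) with Smith's formula: scaling by the
  // larger-magnitude component of the denominator avoids overflow/underflow
  // in the intermediate products.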
fn cdiv(xr : T, xi : T, yr : T, yi : T) -> (T, T) {
if num::abs(yr) > num::abs(yi) {
let r = yi / yr;
let d = yr + r * yi;
((xr + r * xi) / d, (xi - r * xr) / d)
} else {
let r = yr / yi;
let d = yi + r * yr;
((r * xr + xi) / d, (r * xi - xr) / d)
}
}
// Nonsymmetric reduction from Hessenberg to real Schur form.
fn hqr2(n : usize, ddata : &mut Vec<T>, edata : &mut Vec<T>, hdata : &mut Vec<T>, vdata : &mut Vec<T>) {
// This is derived from the Algol procedure hqr2, by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding Fortran subroutine in EISPACK.
// Initialize
let nn = n as isize;
let mut n = nn - 1;
let low : isize = 0;
let high = nn - 1;
let eps : T = num::cast(2.0f64.powf(-52.0)).unwrap();
let mut exshift = num::zero();
let mut p = num::zero();
let mut q = num::zero();
let mut r = num::zero();
let mut s = num::zero();
let mut z = num::zero();
let mut t;
let mut w;
let mut x;
let mut y;
// Store roots isolated by balanc and compute matrix norm
let mut norm : T = num::zero();
for i in 0..nn {
if (i < low) || (i > high) {
ddata[i as usize] = hdata[(i * nn + i) as usize];
edata[i as usize] = num::zero();
}
for j in cmp::max(i - 1, 0)..nn {
norm = norm + num::abs(hdata[(i * nn + j) as usize]);
}
}
// Outer loop over eigenvalue index
let mut iter = 0;
while n >= low {
// Look for single small sub-diagonal element
let mut l = n;
while l > low {
s = num::abs(hdata[((l - 1) * nn + (l - 1)) as usize]) + num::abs(hdata[(l * nn + l) as usize]);
if s == num::zero() {
s = norm;
}
if num::abs(hdata[(l * nn + (l - 1)) as usize]) < (eps * s) {
break;
}
l -= 1;
}
// Check for convergence.
if l == n {
        // One root found.
hdata[(n * nn + n) as usize] = hdata[(n * nn + n) as usize] + exshift;
ddata[n as usize] = hdata[(n * nn + n) as usize];
edata[n as usize] = num::zero();
n -= 1;
iter = 0;
} else if l == (n - 1) {
// Two roots found
w = hdata[(n * nn + (n - 1)) as usize] * hdata[((n - 1) * nn + n) as usize];
p = (hdata[((n - 1) * nn + (n - 1)) as usize] - hdata[(n * nn + n) as usize]) / num::cast(2.0).unwrap();
q = p * p + w;
z = num::abs(q).sqrt();
hdata[(n * nn + n) as usize] = hdata[(n * nn + n) as usize] + exshift;
hdata[((n - 1) * nn + (n - 1)) as usize] = hdata[((n - 1) * nn + (n - 1)) as usize] + exshift;
x = hdata[(n * nn + n) as usize];
// Real pair
if q >= num::zero() {
z = if p >= num::zero() { p + z } else { p - z };
ddata[(n - 1) as usize] = x + z;
ddata[n as usize] = ddata[(n - 1) as usize];
if z != num::zero() {
ddata[n as usize] = x - w / z;
}
edata[(n - 1) as usize] = num::zero();
edata[n as usize] = num::zero();
x = hdata[(n * nn + (n - 1)) as usize];
s = num::abs(x) + num::abs(z);
p = x / s;
q = z / s;
r = (p * p + q * q).sqrt();
p = p / r;
q = q / r;
// Row modification
for j in (n - 1)..nn {
z = hdata[((n - 1) * nn + j) as usize];
hdata[((n - 1) * nn + j) as usize] = q * z + p * hdata[(n * nn + j) as usize];
hdata[(n * nn + j) as usize] = q * hdata[(n * nn + j) as usize] - p * z;
}
// Column modification
for i in 0..(n + 1) {
z = hdata[(i * nn + (n - 1)) as usize];
hdata[(i * nn + (n - 1)) as usize] = q * z + p * hdata[(i * nn + n) as usize];
hdata[(i * nn + n) as usize] = q * hdata[(i * nn + n) as usize] - p * z;
}
<|fim▁hole|> z = vdata[(i * nn + (n - 1)) as usize];
vdata[(i * nn + (n - 1)) as usize] = q * z + p * vdata[(i * nn + n) as usize];
vdata[(i * nn + n) as usize] = q * vdata[(i * nn + n) as usize] - p * z;
}
} else {
// Complex pair
ddata[(n - 1) as usize] = x + p;
ddata[n as usize] = x + p;
edata[(n - 1) as usize] = z;
edata[n as usize] = - z;
}
n = n - 2;
iter = 0;
} else {
// No convergence yet
// Form shift
x = hdata[(n * nn + n) as usize];
y = num::zero();
w = num::zero();
if l < n {
y = hdata[((n - 1) * nn + (n - 1)) as usize];
w = hdata[(n * nn + (n - 1)) as usize] * hdata[((n - 1) * nn + n) as usize];
}
// Wilkinson's original ad hoc shift
if iter == 10 {
exshift = exshift + x;
for i in low..(n + 1) {
hdata[(i * nn + i) as usize] = hdata[(i * nn + i) as usize] - x;
}
s = num::abs(hdata[(n * nn + (n - 1)) as usize]) + num::abs(hdata[((n - 1) * nn + (n - 2)) as usize]);
let tmp : T = num::cast(0.75).unwrap();
y = tmp * s;
x = y;
let tmp : T = num::cast(-0.4375).unwrap();
w = tmp * s * s;
}
// MATLAB's new ad hoc shift
if iter == 30 {
s = (y - x) / num::cast(2.0).unwrap();
s = s * s + w;
if s > num::zero() {
s = s.sqrt();
if y < x {
s = - s;
}
s = x - w / ((y - x) / num::cast(2.0).unwrap() + s);
for i in low..(n + 1) {
hdata[(i * nn + i) as usize] = hdata[(i * nn + i) as usize] - s;
}
exshift = exshift + s;
w = num::cast(0.964).unwrap();
y = w;
x = y;
}
}
iter += 1;
// Look for two consecutive small sub-diagonal elements
let mut m = n - 2;
while m >= l {
z = hdata[(m * nn + m) as usize];
r = x - z;
s = y - z;
p = (r * s - w) / hdata[((m + 1) * nn + m) as usize] + hdata[(m * nn + (m + 1)) as usize];
q = hdata[((m + 1) * nn + (m + 1)) as usize] - z - r - s;
r = hdata[((m + 2) * nn + (m + 1)) as usize];
s = num::abs(p) + num::abs(q) + num::abs(r);
p = p / s;
q = q / s;
r = r / s;
if m == l {
break;
}
if (num::abs(hdata[(m * nn + (m - 1)) as usize]) * (num::abs(q) + num::abs(r))) <
eps * (num::abs(p) * (num::abs(hdata[((m - 1) * nn + (m - 1)) as usize]) + num::abs(z) + num::abs(hdata[((m + 1) * nn + (m + 1)) as usize]))) {
break;
}
m -= 1;
}
for i in (m + 2)..(n + 1) {
hdata[(i * nn + (i - 2)) as usize] = num::zero();
if i > (m + 2) {
hdata[(i * nn + (i - 3)) as usize] = num::zero();
}
}
// Double QR step involving rows l:n and columns m:n
for k in m..n {
let notlast = k != (n - 1);
if k != m {
p = hdata[(k * nn + (k - 1)) as usize];
q = hdata[((k + 1) * nn + (k - 1)) as usize];
r = if notlast { hdata[((k + 2) * nn + (k - 1)) as usize] } else { num::zero() };
x = num::abs(p) + num::abs(q) + num::abs(r);
if x == num::zero() {
continue;
}
p = p / x;
q = q / x;
r = r / x;
}
s = (p * p + q * q + r * r).sqrt();
if p < num::zero() {
s = - s;
}
if s != num::zero() {
if k != m {
hdata[(k * nn + (k - 1)) as usize] = - s * x;
} else if l != m {
hdata[(k * nn + (k - 1)) as usize] = - hdata[(k * nn + (k - 1)) as usize];
}
p = p + s;
x = p / s;
y = q / s;
z = r / s;
q = q / p;
r = r / p;
// Row modification
for j in k..nn {
p = hdata[(k * nn + j) as usize] + q * hdata[((k + 1) * nn + j) as usize];
if notlast {
p = p + r * hdata[((k + 2) * nn + j) as usize];
hdata[((k + 2) * nn + j) as usize] = hdata[((k + 2) * nn + j) as usize] - p * z;
}
hdata[(k * nn + j) as usize] = hdata[(k * nn + j) as usize] - p * x;
hdata[((k + 1) * nn + j) as usize] = hdata[((k + 1) * nn + j) as usize] - p * y;
}
// Column modification
for i in 0..(cmp::min(n, k + 3) + 1) {
p = x * hdata[(i * nn + k) as usize] + y * hdata[(i * nn + (k + 1)) as usize];
if notlast {
p = p + z * hdata[(i * nn + (k + 2)) as usize];
hdata[(i * nn + (k + 2)) as usize] = hdata[(i * nn + (k + 2)) as usize] - p * r;
}
hdata[(i * nn + k) as usize] = hdata[(i * nn + k) as usize] - p;
hdata[(i * nn + (k + 1)) as usize] = hdata[(i * nn + (k + 1)) as usize] - p * q;
}
// Accumulate transformations
for i in low..(high + 1) {
p = x * vdata[(i * nn + k) as usize] + y * vdata[(i * nn + (k + 1)) as usize];
if notlast {
p = p + z * vdata[(i * nn + (k + 2)) as usize];
vdata[(i * nn + (k + 2)) as usize] = vdata[(i * nn + (k + 2)) as usize] - p * r;
}
vdata[(i * nn + k) as usize] = vdata[(i * nn + k) as usize] - p;
vdata[(i * nn + (k + 1)) as usize] = vdata[(i * nn + (k + 1)) as usize] - p * q;
}
}
}
}
}
// Backsubstitute to find vectors of upper triangular form
if norm == num::zero() {
return;
}
for n in (0..nn).rev() {
p = ddata[n as usize];
q = edata[n as usize];
// Real vector
if q == num::zero() {
let mut l = n;
hdata[(n * nn + n) as usize] = num::one();
for i in (0..n).rev() {
w = hdata[(i * nn + i) as usize] - p;
r = num::zero();
for j in l..(n + 1) {
r = r + hdata[(i * nn + j) as usize] * hdata[(j * nn + n) as usize];
}
if edata[i as usize] < num::zero() {
z = w;
s = r;
} else {
l = i;
if edata[i as usize] == num::zero() {
if w != num::zero() {
hdata[(i * nn + n) as usize] = - r / w;
} else {
hdata[(i * nn + n) as usize] = - r / (eps * norm);
}
} else {
// Solve real equations
x = hdata[(i * nn + (i + 1)) as usize];
y = hdata[((i + 1) * nn + i) as usize];
q = (ddata[i as usize] - p) * (ddata[i as usize] - p) + edata[i as usize] * edata[i as usize];
t = (x * s - z * r) / q;
hdata[(i * nn + n) as usize] = t;
if num::abs(x) > num::abs(z) {
hdata[((i + 1) * nn + n) as usize] = (-r - w * t) / x;
} else {
hdata[((i + 1) * nn + n) as usize] = (-s - y * t) / z;
}
}
// Overflow control
t = num::abs(hdata[(i * nn + n) as usize]);
if (eps * t) * t > num::one() {
for j in i..(n + 1) {
hdata[(j * nn + n) as usize] = hdata[(j * nn + n) as usize] / t;
}
}
}
}
} else if q < num::zero() {
// Complex vector
let mut l = n - 1;
// Last vector component imaginary so matrix is triangular
if num::abs(hdata[(n * nn + (n - 1)) as usize]) > num::abs(hdata[((n - 1) * nn + n) as usize]) {
hdata[((n - 1) * nn + (n - 1)) as usize] = q / hdata[(n * nn + (n - 1)) as usize];
hdata[((n - 1) * nn + n) as usize] = - (hdata[(n * nn + n) as usize] - p) / hdata[(n * nn + (n - 1)) as usize];
} else {
let (cdivr, cdivi) = EigenDecomposition::<T>::cdiv(num::zero(), - hdata[((n - 1) * nn + n) as usize], hdata[((n - 1) * nn + (n - 1)) as usize] - p, q);
hdata[((n - 1) * nn + (n - 1)) as usize] = cdivr;
hdata[((n - 1) * nn + n) as usize] = cdivi;
}
hdata[(n * nn + (n - 1)) as usize] = num::zero();
hdata[(n * nn + n) as usize] = num::one();
for i in (0..(n - 1)).rev() {
let mut ra : T = num::zero();
let mut sa : T = num::zero();
let mut vr;
let vi;
for j in l..(n + 1) {
ra = ra + hdata[(i * nn + j) as usize] * hdata[(j * nn + (n - 1)) as usize];
sa = sa + hdata[(i * nn + j) as usize] * hdata[(j * nn + n) as usize];
}
w = hdata[(i * nn + i) as usize] - p;
if edata[i as usize] < num::zero() {
z = w;
r = ra;
s = sa;
} else {
l = i;
if edata[i as usize] == num::zero() {
let (cdivr, cdivi) = EigenDecomposition::cdiv(- ra, - sa, w, q);
hdata[(i * nn + (n - 1)) as usize] = cdivr;
hdata[(i * nn + n) as usize] = cdivi;
} else {
// Solve complex equations
x = hdata[(i * nn + (i + 1)) as usize];
y = hdata[((i + 1) * nn + i) as usize];
vr = (ddata[i as usize] - p) * (ddata[i as usize] - p) + edata[i as usize] * edata[i as usize] - q * q;
vi = (ddata[i as usize] - p) * num::cast(2.0).unwrap() * q;
if (vr == num::zero()) && (vi == num::zero()) {
vr = eps * norm * (num::abs(w) + num::abs(q) + num::abs(x) + num::abs(y) + num::abs(z));
}
let (cdivr, cdivi) = EigenDecomposition::cdiv(x * r - z * ra + q * sa, x * s - z * sa - q * ra, vr, vi);
hdata[(i * nn + (n - 1)) as usize] = cdivr;
hdata[(i * nn + n) as usize] = cdivi;
if num::abs(x) > (num::abs(z) + num::abs(q)) {
hdata[((i + 1) * nn + (n - 1)) as usize] = (- ra - w * hdata[(i * nn + (n - 1)) as usize] + q * hdata[(i * nn + n) as usize]) / x;
hdata[((i + 1) * nn + n) as usize] = (- sa - w * hdata[(i * nn + n) as usize] - q * hdata[(i * nn + (n - 1)) as usize]) / x;
} else {
let (cdivr, cdivi) = EigenDecomposition::cdiv(- r - y * hdata[(i * nn + (n - 1)) as usize], - s - y * hdata[(i * nn + n) as usize], z, q);
hdata[((i + 1) * nn + (n - 1)) as usize] = cdivr;
hdata[((i + 1) * nn + n) as usize] = cdivi;
}
}
// Overflow control
t = num::abs(hdata[(i * nn + (n - 1)) as usize]).max(num::abs(hdata[(i * nn + n) as usize]));
if (eps * t) * t > num::one() {
for j in i..(n + 1) {
hdata[(j * nn + (n - 1)) as usize] = hdata[(j * nn + (n - 1)) as usize] / t;
hdata[(j * nn + n) as usize] = hdata[(j * nn + n) as usize] / t;
}
}
}
}
}
}
// Vectors of isolated roots
for i in 0..nn {
if (i < low) || (i > high) {
for j in i..nn {
vdata[(i * nn + j) as usize] = hdata[(i * nn + j) as usize];
}
}
}
// Back transformation to get eigenvectors of original matrix
for j in (low..nn).rev() {
for i in low..(high + 1) {
z = num::zero();
for k in low..(cmp::min(j, high) + 1) {
z = z + vdata[(i * nn + k) as usize] * hdata[(k * nn + j) as usize];
}
vdata[(i * nn + j) as usize] = z;
}
}
}
pub fn new(a : &Matrix<T>) -> EigenDecomposition<T> {
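        // Symmetric input takes the Householder-tridiagonalization + QL route
        // (tred2/tql2); anything else is reduced to Hessenberg form and then
        // to real Schur form (orthes/hqr2).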
let n = a.cols();
let mut vdata = alloc_dirty_vec(n * n);
let mut ddata = alloc_dirty_vec(n);
let mut edata = alloc_dirty_vec(n);
let mut issymmetric = true;
let mut j = 0;
while (j < n) && issymmetric {
let mut i = 0;
while (i < n) && issymmetric {
issymmetric = a.get(i, j) == a.get(j, i);
i += 1;
}
j += 1;
}
if issymmetric {
for i in 0..n {
for j in 0..n {
vdata[i * n + j] = a.get(i, j);
}
}
// Tridiagonalize.
EigenDecomposition::tred2(n, &mut ddata, &mut vdata, &mut edata);
// Diagonalize.
EigenDecomposition::tql2(n, &mut edata, &mut ddata, &mut vdata);
EigenDecomposition {
n : n,
d : ddata,
e : edata,
v : Matrix::new(n, n, vdata)
}
} else {
let mut hdata = alloc_dirty_vec(n * n);
for j in 0..n {
for i in 0..n {
hdata[i * n + j] = a.get(i, j);
}
}
// Reduce to Hessenberg form.
EigenDecomposition::orthes(n, &mut hdata, &mut vdata);
// Reduce Hessenberg to real Schur form.
EigenDecomposition::hqr2(n, &mut ddata, &mut edata, &mut hdata, &mut vdata);
EigenDecomposition {
n : n,
d : ddata,
e : edata,
v : Matrix::new(n, n, vdata)
}
}
}
pub fn get_v<'lt>(&'lt self) -> &'lt Matrix<T> { &self.v }
pub fn get_real_eigenvalues<'lt>(&'lt self) -> &'lt Vec<T> { &self.d }
pub fn get_imag_eigenvalues<'lt>(&'lt self) -> &'lt Vec<T> { &self.e }
pub fn get_d(&self) -> Matrix<T> {
let mut ddata = alloc_dirty_vec(self.n * self.n);
for i in 0..self.n {
for j in 0..self.n {
ddata[i * self.n + j] = num::zero();
}
ddata[i * self.n + i] = self.d[i];
if self.e[i] > num::zero() {
ddata[i * self.n + (i + 1)] = self.e[i];
} else if self.e[i] < num::zero() {
ddata[i * self.n + (i - 1)] = self.e[i];
}
}
Matrix::new(self.n, self.n, ddata)
}
}
#[test]
fn eigen_test_symmetric() {
let a = m!(3.0, 1.0, 6.0; 2.0, 1.0, 0.0; -1.0, 0.0, -3.0);
let ata = a.t() * a;
let _eig = EigenDecomposition::new(&ata);
let r = _eig.get_real_eigenvalues();
assert!(Matrix::vector(r.clone()).approx_eq(&m!(56.661209; 4.301868; 0.036923)));
}
#[test]
fn eigen_test_asymmetric() {
let a = m!(3.0, 1.0, 6.0; 2.0, 1.0, 0.0; -1.0, 0.0, -3.0);
let _eig = EigenDecomposition::new(&a);
let r = _eig.get_real_eigenvalues();
assert!(Matrix::vector(r.clone()).approx_eq(&m!(3.0; -1.0; -1.0)));
}<|fim▁end|> | // Accumulate transformations
for i in low..(high + 1) { |
<|file_name|>ModalOverlay.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react';
import Spinner from './Spinner';
const ModalOverlay = (props) => {
const isActive = props.active ? 'active' : '';
const spinner = props.spinner ? <Spinner /> : '';
return (
<div id="modal-overlay" className={isActive}>
{spinner}
</div>
);
};
ModalOverlay.propTypes = {
active: PropTypes.bool,
spinner: PropTypes.bool,
};<|fim▁hole|>export default ModalOverlay;<|fim▁end|> | |
<|file_name|>0007_auto_20161223_1013.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-23 10:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0006_auto_20160321_1527'),
]
operations = [
migrations.CreateModel(
name='Blog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),<|fim▁hole|> ),
migrations.AddField(
model_name='post',
name='blog',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='blog.Blog'),
),
]<|fim▁end|> | ], |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Category, Ratings, Quote<|fim▁end|> | # from django.contrib import admin
# from libraryapp.models import Author, Book, Interest, History, GoogleUser, \ |
<|file_name|>EventManager.py<|end_file_name|><|fim▁begin|>"""
Module defining the Event class which is used to manage collissions and check their validity
"""
from itertools import combinations
from copy import copy
from particle import Particle
class EventParticle(object):
def __init__(self, particle1, particle2):
self.particle1 = particle1
self.particle2 = particle2
self.id = (self.particle1.getCollisionCountAsCopy(), self.particle2.getCollisionCountAsCopy())
self.timeUntilCollision = self.particle1.collideParticle(self.particle2)
def isValid(self):
return self.id == (self.particle1.getCollisionCountAsCopy(), self.particle2.getCollisionCountAsCopy())
def reevaluateCollisionTime(self):
self.id = (self.particle1.getCollisionCountAsCopy(), self.particle2.getCollisionCountAsCopy())
self.timeUntilCollision = self.particle1.collideParticle(self.particle2)
def doCollision(self):
self.particle1.bounceParticle(self.particle2)
<|fim▁hole|> def __init__(self, particle):
self.particle = particle
self.id = self.particle.getCollisionCountAsCopy()
self.timeUntilCollision = self.particle.collidesWallX()
def isValid(self):
return self.id == self.particle.getCollisionCountAsCopy()
def reevaluateCollisionTime(self):
self.id = self.particle.getCollisionCountAsCopy()
self.timeUntilCollision = self.particle.collidesWallX()
def doCollision(self):
self.particle.bounceX()
class EventWallY(object):
def __init__(self, particle):
self.particle = particle
self.id = self.particle.getCollisionCountAsCopy()
self.timeUntilCollision = self.particle.collidesWallY()
def isValid(self):
return self.id == self.particle.getCollisionCountAsCopy()
def reevaluateCollisionTime(self):
self.id = self.particle.getCollisionCountAsCopy()
self.timeUntilCollision = self.particle.collidesWallY()
def doCollision(self):
self.particle.bounceY()
class EventManager(object):
def __init__(self, ListOfParticles):
self.ListOfParticles = ListOfParticles
self.ListOfEvents = []
for (particle1, particle2) in combinations(self.ListOfParticles, 2):
self.ListOfEvents.append(EventParticle(particle1, particle2))
for particle in self.ListOfParticles:
self.ListOfEvents.append(EventWallX(particle))
self.ListOfEvents.append(EventWallY(particle))
self.sortEventList()
def sortEventList(self):
def sorting_closure(event):
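            # Events that never occur (None) or lie in the past sort to the
            # back of the queue via a large sentinel time.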
if event.timeUntilCollision is None or event.timeUntilCollision < 0.0:
return 1.0e7
else:
return event.timeUntilCollision
self.ListOfEvents = sorted(self.ListOfEvents, key=sorting_closure)
def step(self):
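        # Event-driven loop: refresh any invalidated events, re-sort, advance
        # every particle to the earliest collision, resolve it, then shift all
        # pending event times by the elapsed interval.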
for event in self.ListOfEvents:
if not event.isValid():
event.reevaluateCollisionTime()
self.sortEventList()
collTime = copy(self.ListOfEvents[0].timeUntilCollision)
for particle in self.ListOfParticles:
particle.advance(collTime)
self.ListOfEvents[0].doCollision()
for event in self.ListOfEvents:
if event.timeUntilCollision is not None:
event.timeUntilCollision -= collTime
if __name__ == '__main__':
import numpy as np
import pylab as plt
a = Particle(np.array([0.1, 0.5]), np.array([0.01, 0.1]), 0.05, 2.0)
b = Particle(np.array([0.4, 0.5]), np.array([-0.1, 0.01]), 0.05, 2.0)
manager = EventManager([a,b])
for i in range(20):
plt.title(a.t)
plt.scatter([a._x[0], b._x[0]], [a._x[1], b._x[1]])
        print(a._x)
        print(b._x)
plt.xlim([0,1])
plt.ylim([0,1])
plt.show()
manager.step()<|fim▁end|> |
class EventWallX(object):
|
<|file_name|>feed_parse_extractDellstoriesWordpressCom.py<|end_file_name|><|fim▁begin|>def extractDellstoriesWordpressCom(item):
'''
Parser for 'dellstories.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:<|fim▁hole|> return False<|fim▁end|> | return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.3.3
(function() {
var MAXSAMPLES, Point, STAGE_LOBBY, STAGE_PLAYING, baseObject, button, calcAvgTick, canvas, delta, difficulty, enemy, explosion, initialize, lastScore, lasttime, lives, missile, missileFired, mouseDown, mouseX, mouseY, nextspawn, objectlist, origin, point, samples, score, spawnthink, stage, startButton, think, tickindex, ticklist, ticksum, time, toremove,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
MAXSAMPLES = 100;
samples = 0;
tickindex = 0;
ticksum = 0;
ticklist = [];
calcAvgTick = function(newtick) {
ticklist[tickindex] = ticklist[tickindex] || 0;
ticksum -= ticklist[tickindex];
ticksum += newtick;
ticklist[tickindex] = newtick;
tickindex++;
if (tickindex === MAXSAMPLES) {
tickindex = 0;
}
if (samples < MAXSAMPLES) {<|fim▁hole|> };
canvas = $("#canvas");
mouseX = 0;
mouseY = 0;
mouseDown = false;
time = (new Date()).getTime();
lasttime = time;
delta = 0;
objectlist = [];
toremove = [];
difficulty = 0;
score = 0;
lives = 3;
lastScore = null;
STAGE_LOBBY = 0;
STAGE_PLAYING = 1;
stage = STAGE_LOBBY;
Point = (function() {
function Point(x, y) {
this.x = x;
this.y = y;
}
Point.prototype.add = function(b) {
return new Point(this.x + b.x, this.y + b.y);
};
Point.prototype.sub = function(b) {
return new Point(this.x - b.x, this.y - b.y);
};
Point.prototype.mul = function(b) {
if (typeof b === "number") {
return new Point(this.x * b, this.y * b);
} else {
return new Point(this.x * b.x, this.y * b.y);
}
};
Point.prototype.div = function(b) {
if (typeof b === "number") {
return new Point(this.x / b, this.y / b);
} else {
return new Point(this.x / b.x, this.y / b.y);
}
};
Point.prototype.distance = function(b) {
return Math.sqrt(Math.pow(b.x - this.x, 2) + Math.pow(b.y - this.y, 2));
};
Point.prototype.norm = function() {
return this.div(this.distance(origin));
};
return Point;
})();
point = function(x, y) {
return new Point(x, y);
};
origin = point(0, 0);
Math.rand = function(a, b) {
var c;
c = a;
if (b > a) {
c = b;
b = a;
}
return b + Math.round(Math.random() * (c - b));
};
Math.randD = function(a, b) {
var c;
c = a;
if (b > a) {
c = b;
b = a;
}
return b + Math.random() * (c - b);
};
baseObject = (function() {
function baseObject() {
objectlist.push(this);
this.timestamp = time;
this.initialize();
}
baseObject.prototype.initialize = function() {};
baseObject.prototype.think = function() {};
baseObject.prototype.render = function() {
return canvas.drawArc({
fillStyle: "black",
x: this.x,
y: this.y,
radius: 4
});
};
baseObject.prototype.remove = function() {
if (toremove.indexOf(this) < 0) {
return toremove.push(this);
}
};
baseObject.prototype._remove = function() {
objectlist.splice(objectlist.indexOf(this), 1);
return delete this;
};
baseObject.prototype.x = 0;
baseObject.prototype.y = 0;
baseObject.prototype.setPos = function(point, b) {
if (typeof point === "object") {
this.x = point.x;
return this.y = point.y;
} else {
this.x = point;
return this.y = b;
}
};
baseObject.prototype.getPos = function() {
return point(this.x, this.y);
};
return baseObject;
})();
enemy = (function(_super) {
__extends(enemy, _super);
function enemy() {
return enemy.__super__.constructor.apply(this, arguments);
}
enemy.prototype.speed = 0.05;
enemy.prototype.initialize = function() {
return this.speed = 0.05 + difficulty / 100;
};
enemy.prototype.isEnemy = true;
enemy.prototype.setOrigin = function(origin) {
this.origin = origin;
return this.setPos(origin);
};
enemy.prototype.setTarget = function(target) {
return this.target = target;
};
enemy.prototype.think = function() {
var pos, z;
pos = this.getPos().add((this.target.sub(this.getPos())).norm().mul(delta * this.speed));
this.setPos(pos);
if (delta * this.speed > this.getPos().distance(this.target)) {
z = new explosion;
z.setPos(this.getPos());
this.remove();
lives--;
if (lives < 1) {
lastScore = score;
return initialize();
}
}
};
enemy.prototype.render = function() {
canvas.drawLine({
strokeStyle: "silver",
strokeWidth: 1,
x1: this.x,
y1: this.y,
x2: this.origin.x,
y2: this.origin.y
});
return enemy.__super__.render.apply(this, arguments);
};
return enemy;
})(baseObject);
missileFired = false;
missile = (function(_super) {
__extends(missile, _super);
function missile() {
return missile.__super__.constructor.apply(this, arguments);
}
missile.prototype.speed = 0.15;
missile.prototype.initialize = function() {
return this.speed = 0.15 + difficulty / 75;
};
missile.prototype.setTarget = function(target) {
return this.target = target;
};
missile.prototype.think = function() {
var pos, z;
pos = this.getPos().add((this.target.sub(this.getPos())).norm().mul(delta * this.speed));
this.setPos(pos);
if (delta * this.speed > this.getPos().distance(this.target)) {
z = new explosion;
z.radius = 500;
z.setPos(this.getPos());
missileFired = false;
return this.remove();
}
};
return missile;
})(baseObject);
explosion = (function(_super) {
__extends(explosion, _super);
function explosion() {
return explosion.__super__.constructor.apply(this, arguments);
}
explosion.prototype.radius = 250;
explosion.prototype.initialize = function() {};
explosion.prototype.think = function() {
var e, z, _i, _len;
for (_i = 0, _len = objectlist.length; _i < _len; _i++) {
e = objectlist[_i];
if (e.isEnemy != null) {
if (e.getPos().distance(this.getPos()) <= (time - this.timestamp) / 10) {
z = new explosion;
z.setPos(e.getPos());
e.remove();
score++;
}
}
}
if ((time - this.timestamp) > this.radius) {
return this.remove();
}
};
explosion.prototype.render = function() {
var a;
a = Math.round(255 * (time - this.timestamp) / this.radius);
return canvas.drawArc({
strokeStyle: "rgb(255 , " + a + ", " + a + ")",
strokeWidth: 2,
x: this.x,
y: this.y,
radius: (time - this.timestamp) / 10
});
};
return explosion;
})(baseObject);
button = (function(_super) {
__extends(button, _super);
function button() {
return button.__super__.constructor.apply(this, arguments);
}
button.prototype.speed = 0.05;
button.prototype.hovered = false;
button.prototype.width = 100;
button.prototype.height = 100;
button.prototype.lastMouseDown = false;
button.prototype.clicking = false;
button.prototype.text = 'Nothing';
button.prototype.initialize = function() {};
button.prototype.setText = function(text) {
return this.text = text;
};
button.prototype.remove = function() {
return button.__super__.remove.apply(this, arguments);
};
button.prototype.think = function() {
this.hovered = !(mouseX < this.x || mouseY < this.y || mouseX > this.x + this.width || mouseY > this.y + this.height);
if (this.hovered) {
canvas.css('cursor', 'pointer');
} else {
canvas.css('cursor', 'auto');
}
if (this.hovered && mouseDown && !this.lastMouseDown) {
this.clicking = true;
}
if (!mouseDown) {
if (this.clicking && this.hovered) {
this.onclick();
}
this.clicking = false;
}
return this.lastMouseDown = mouseDown;
};
button.prototype.onclick = function() {
return canvas.css('cursor', 'auto');
};
button.prototype.render = function() {
var fill;
fill = "#555555";
if (this.hovered) {
fill = "#888888";
if (mouseDown) {
fill = "#333333";
}
}
return canvas.drawRect({
fillStyle: fill,
x: this.x,
y: this.y,
width: this.width,
height: this.height,
fromCenter: false
}).drawText({
fillStyle: "#FFF",
x: this.x + this.width / 2,
y: this.y + this.height / 2,
font: "14px sans-serif",
text: this.text,
fromCenter: true
});
};
return button;
})(baseObject);
think = function() {
var object, text, _i, _j, _k, _len, _len1, _len2;
time = (new Date()).getTime();
delta = time - lasttime;
lasttime = time;
for (_i = 0, _len = objectlist.length; _i < _len; _i++) {
object = objectlist[_i];
if (object != null) {
object.think();
}
}
canvas.clearCanvas();
for (_j = 0, _len1 = objectlist.length; _j < _len1; _j++) {
object = objectlist[_j];
object.render();
}
for (_k = 0, _len2 = toremove.length; _k < _len2; _k++) {
object = toremove[_k];
object._remove();
}
if (stage === STAGE_PLAYING) {
canvas.drawText({
fillStyle: "#000",
x: 200,
y: 10,
font: "12px Arial, sans-serif",
text: "Lives: " + lives
}).drawText({
fillStyle: "#000",
x: 100,
y: 10,
font: "12px Arial, sans-serif",
text: "Score: " + score
}).drawRect({
fillStyle: "#000",
x: 0,
y: 350,
width: 400,
height: 50,
fromCenter: false
});
difficulty += delta / 10000;
spawnthink();
} else {
canvas.drawText({
fillStyle: "#000",
x: 200,
y: 10,
font: "16px Arial, sans-serif",
text: "Defend the Earth from asteroids!"
}).drawText({
fillStyle: "#000",
x: 200,
y: 26,
font: "12px Arial, sans-serif",
text: "Use the left mouse button and shoot down the asteroids"
}).drawText({
fillStyle: "#000",
x: 200,
y: 39,
font: "12px Arial, sans-serif",
text: "before they fall!"
});
if (lastScore !== null) {
text = "Uh, you scored " + lastScore + " points...";
if (lastScore > 5) {
text = "Cool, you scored " + lastScore + " points...";
}
if (lastScore > 10) {
text = "Whoa! You scored " + lastScore + " points!";
}
if (lastScore > 25) {
text = "Awesome! You scored " + lastScore + " points!";
}
if (lastScore > 50) {
text = "HOLY SHIT! You scored " + lastScore + " points!";
}
if (lastScore > 100) {
text = "you gotta be cheating now; " + lastScore + " points?";
}
canvas.drawText({
fillStyle: "#000",
weight: "bold",
x: 200,
y: 100,
font: "18px Arial, sans-serif",
text: text
});
}
}
return toremove = [];
};
nextspawn = 0;
spawnthink = function() {
var e, _ref;
if (time > nextspawn) {
      nextspawn = time + 2000 - (difficulty > 7.5 ? 1500 : difficulty * 200);
e = new enemy();
e.setTarget(point(Math.rand(0, 400), 350));
return e.setOrigin(point(Math.rand(0, 400), -10));
}
};
setInterval(think, 0);
canvas.mousemove(function(e) {
mouseX = e.offsetX;
return mouseY = e.offsetY;
}).click(function(e) {
var z;
if (stage === STAGE_PLAYING) {
if (!missileFired) {
z = new missile();
z.setTarget(point(e.offsetX, e.offsetY));
z.setPos(point(200, 350));
return missileFired = true;
}
}
}).mousedown(function(e) {
console.log(e.offsetX);
mouseX = e.offsetX;
mouseDown = true;
return think();
}).mouseup(function(e) {
mouseY = e.offsetY;
mouseDown = false;
return think();
});
startButton = null;
initialize = function() {
nextspawn = 0;
objectlist = [];
toremove = [];
stage = STAGE_LOBBY;
startButton = new button();
startButton.setText("Play");
startButton.setPos(point(150, 150));
startButton.onclick = function() {
stage = STAGE_PLAYING;
startButton.remove();
return canvas.css('cursor', 'auto');
};
difficulty = 0;
score = 0;
lives = 3;
return missileFired = false;
};
initialize();
}).call(this);<|fim▁end|> | samples++;
return 0;
}
return ticksum / MAXSAMPLES; |
<|file_name|>parity_signing.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! ParitySigning rpc interface.
use jsonrpc_core::Error;
use futures::BoxFuture;
use v1::types::{U256, H160, Bytes, ConfirmationResponse, TransactionRequest, Either};
<|fim▁hole|>build_rpc_trait! {
/// Signing methods implementation.
pub trait ParitySigning {
type Metadata;
/// Posts sign request asynchronously.
/// Will return a confirmation ID for later use with check_transaction.
#[rpc(meta, name = "parity_postSign")]
fn post_sign(&self, Self::Metadata, H160, Bytes) -> BoxFuture<Either<U256, ConfirmationResponse>, Error>;
/// Posts transaction asynchronously.
/// Will return a transaction ID for later use with check_transaction.
#[rpc(meta, name = "parity_postTransaction")]
fn post_transaction(&self, Self::Metadata, TransactionRequest) -> BoxFuture<Either<U256, ConfirmationResponse>, Error>;
/// Checks the progress of a previously posted request (transaction/sign).
/// Should be given a valid send_transaction ID.
#[rpc(name = "parity_checkRequest")]
fn check_request(&self, U256) -> Result<Option<ConfirmationResponse>, Error>;
/// Decrypt some ECIES-encrypted message.
/// First parameter is the address with which it is encrypted, second is the ciphertext.
#[rpc(meta, name = "parity_decryptMessage")]
fn decrypt_message(&self, Self::Metadata, H160, Bytes) -> BoxFuture<Bytes, Error>;
}
}<|fim▁end|> | |
<|file_name|>RTreeIndexExtensionCreateTest.java<|end_file_name|><|fim▁begin|>package mil.nga.geopackage.extension.rtree;
import org.junit.Test;
import java.sql.SQLException;
import mil.nga.geopackage.CreateGeoPackageTestCase;
/**
* Test RTree Extension from a created database
*
* @author osbornb
*/
public class RTreeIndexExtensionCreateTest extends CreateGeoPackageTestCase {
/**
* Constructor
*/<|fim▁hole|> }
/**
* Test RTree
*
* @throws SQLException upon error
*/
@Test
public void testRTree() throws SQLException {
RTreeIndexExtensionUtils.testRTree(geoPackage);
}
@Override
public boolean allowEmptyFeatures() {
return false;
}
}<|fim▁end|> | public RTreeIndexExtensionCreateTest() {
|
<|file_name|>test_del_group.py<|end_file_name|><|fim▁begin|>def test_delite_group(app):
app.session.login( username="admin", password="secret")<|fim▁hole|> pytest.main('test_del_group.py')<|fim▁end|> | app.group.delete_first_group()
app.session.logout()
if __name__ == '__main__': |
<|file_name|>s0095.go<|end_file_name|><|fim▁begin|>/*
The proper divisors of a number are all the divisors excluding the number
itself. For example, the proper divisors of 28 are 1, 2, 4, 7, and 14. As the
sum of these divisors is equal to 28, we call it a perfect number.
Interestingly the sum of the proper divisors of 220 is 284 and the sum of the
proper divisors of 284 is 220, forming a chain of two numbers. For this reason,
220 and 284 are called an amicable pair.
Perhaps less well known are longer chains. For example, starting with 12496, we
form a chain of five numbers:
12496 → 14288 → 15472 → 14536 → 14264 (→ 12496 → ...)
Since this chain returns to its starting point, it is called an amicable chain.
Find the smallest member of the longest amicable chain with no element
exceeding one million.
*/
package s0095
import "github.com/peterstace/project-euler/number"
func Answer() interface{} {
const limit = 1e6 + 1
sumProperDivisors := generateSumProperDivisors(limit)
var maxLength int
var smallestMember int
for i := 2; i < limit; i++ {
chainLen, smallestInChain, ok := findChain(i, sumProperDivisors)
if !ok {
continue
}
if chainLen > maxLength {
maxLength = chainLen
smallestMember = smallestInChain
}
}
return smallestMember
}
func generateSumProperDivisors(n int) []int {
s := number.PrimeFactorisationSieve(n)
d := make([]int, n)
for i := 1; i < len(s); i++ {
d[i] = s[i].SumDivisors() - i
}
return d
}
func findChain(x int, sumProperDivisors []int) (chainLength, smallestInChain int, ok bool) {
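	// Phase 1: iterate s(x) until the value either exceeds the table (no
	// chain) or repeats, marking entry into a cycle. Phase 2 walks that
	// cycle once to measure its length and record its smallest member.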
nums := make(map[int]struct{})
for {
nums[x] = struct{}{}
x = sumProperDivisors[x]
if x >= len(sumProperDivisors) {
return 0, 0, false<|fim▁hole|> }
var count int
min := x
start := x
for {
x = sumProperDivisors[x]
count++
if x < min {
min = x
}
if x == start {
break
}
}
return count, min, true
}<|fim▁end|> | }
if _, ok := nums[x]; ok {
break
} |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react'
import { FormControl } from 'react-bootstrap'
import './@FilterListInput.css'
const FilterListInput = ({onFilter, searchValue}) => {
let handleFilter = e => {
onFilter(e.target.value)<|fim▁hole|>}
export default FilterListInput<|fim▁end|> | }
return (<FormControl className='FilterListInput' type='text' defaultValue={searchValue} placeholder='Search within this list...' onChange={handleFilter.bind(this)} />) |
<|file_name|>ball.js<|end_file_name|><|fim▁begin|>function drawBall() {
ctx.beginPath();
ctx.arc(x, y, ballRadius, 0, Math.PI*2);
ctx.fillStyle = color;
ctx.strokeStyle = "#FF0000";
ctx.stroke();
ctx.fill();
ctx.closePath();
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>leak-box-as-tydesc.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>fn leaky<T>(t: T) { }
pub fn main() { let x = @10; leaky::<@int>(x); }<|fim▁end|> | |
<|file_name|>full_inspiration.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding: utf-8
"""
Copyright 2015 SYSTRAN Software, Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class FullInspiration(object):
"""
NOTE: This class is auto generated by the systran code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Systran model
:param dict systran_types: The key is attribute name and the value is attribute type.
:param dict attribute_map: The key is attribute name and the value is json key in definition.
"""
self.systran_types = {
'id': 'str',
'location': 'FullLocation',
'type': 'str',
'title': 'str',
'introduction': 'str',
'content': 'str',
'photos': 'list[Photo]',
'videos': 'list[Video]'
}
self.attribute_map = {
'id': 'id',
'location': 'location',
'type': 'type',
'title': 'title',
'introduction': 'introduction',
'content': 'content',
'photos': 'photos',
'videos': 'videos'
}
# Inspiration Identifier
self.id = None # str
# Location
self.location = None # FullLocation
# Inspiration type
self.type = None # str
# Title
self.title = None # str
# Introduction
self.introduction = None # str
# Content
self.content = None # str
# Array of Photos
self.photos = None # list[Photo]
# Array of Videos
self.videos = None # list[Video]
def __repr__(self):
properties = []
for p in self.__dict__:<|fim▁hole|>
return '<{name} {props}>'.format(name=__name__, props=' '.join(properties))<|fim▁end|> | if p != 'systran_types' and p != 'attribute_map':
properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p])) |
<|file_name|>mind.rs<|end_file_name|><|fim▁begin|>use std::f32::{INFINITY, NEG_INFINITY};
use std::cmp::Ordering;
use std::default::Default;
use std::fmt;
use std::hash::BuildHasherDefault;
use std::mem;
use std::sync::Arc;
use std::sync::mpsc;
use std::thread;
use std::time::Duration;
use time;
use lru_cache::LruCache;
use parking_lot;
use twox_hash::XxHash;
use fnv;
use identity::{Agent, JobDescription, Team};
use life::{Commit, Patch, WorldState};
use landmark::{CENTER_OF_THE_WORLD, HIGH_COLONELCY, HIGH_SEVENTH_HEAVEN,
LOW_COLONELCY, LOW_SEVENTH_HEAVEN, FILES};
use space::{Pinfield, Locale};
use substrate::Bytes;
const REWARD_FOR_INITIATIVE: f32 = 0.5;
pub fn orientation(team: Team) -> f32 {
match team {
Team::Orange => 1.0,
Team::Blue => -1.0,
}
}
pub fn figurine_valuation(agent: Agent) -> f32 {
let value = match agent.job_description {
// en.wikipedia.org/wiki/
// Chess_piece_relative_value#Hans_Berliner.27s_system
JobDescription::Servant => 1.0,
JobDescription::Pony => 3.2,
JobDescription::Scholar => 3.3,
JobDescription::Cop => 5.1,
JobDescription::Princess => 8.8,
JobDescription::Figurehead => 20000.0,
};
orientation(agent.team) * value
}
pub fn score(world: WorldState) -> f32 {
let mut valuation = 0.0;
valuation += REWARD_FOR_INITIATIVE * orientation(world.initiative);
for team in Team::league() {
for agent in &Agent::dramatis_personæ(team) {
valuation += f32::from(world.agent_to_pinfield_ref(*agent)
.pincount()) *
figurine_valuation(*agent);
}
// breadth of scholarship bonus
if world.agent_to_pinfield_ref(Agent {
team,
job_description: JobDescription::Scholar,
}).pincount() >= 2 {
valuation += orientation(team) * 0.5
}
}
// ponies and servants want to be in the center of the world's action
let center = Pinfield(CENTER_OF_THE_WORLD);
// cast to signed to avoid overflow
let orange_centerism: i8 = world.orange_servants
.union(world.orange_ponies)
.intersection(center)
.pincount() as i8;
let blue_centerism: i8 = world.blue_servants
.union(world.blue_ponies)
.intersection(center)
.pincount() as i8;
valuation += 0.1 * f32::from(orange_centerism - blue_centerism);
// a cop's favorite beat is the seventh rank
let high_seventh = Pinfield(HIGH_SEVENTH_HEAVEN);
let orange_beat = world.orange_cops.intersection(high_seventh).pincount();
valuation += 0.5 * f32::from(orange_beat);
let low_seventh = Pinfield(LOW_SEVENTH_HEAVEN);
let blue_beat = world.blue_cops.intersection(low_seventh).pincount();
valuation -= 0.5 * f32::from(blue_beat);
// servants who walk behind other servants to hide must be punished
for raw_file in &FILES {
let file = Pinfield(*raw_file);
let orange_servants_in_line = world.orange_servants
.intersection(file)
.pincount();
// Putting a precise number on how bad extra servants on a file are
// seems to be quite hard, and a smarter engine might choose more
// dynamically, but half-a-point is OK, I think.
// Wikipedia has examples where a doubled servant is worth anywhere
// from .3 to .75 points.
if orange_servants_in_line > 1 {
valuation -= 0.5 * f32::from(orange_servants_in_line - 1);
}
let blue_servants_in_line = world.blue_servants
.intersection(file)
.pincount();
if blue_servants_in_line > 1 {
valuation += 0.5 * f32::from(blue_servants_in_line - 1);
}
}
// servants should aspire to something more in life someday
let orange_subascendants = world.orange_servants
.intersection(high_seventh)
.pincount();
valuation += 1.8 * f32::from(orange_subascendants);
let high_colonelcy = Pinfield(HIGH_COLONELCY);
let orange_subsubascendants = world.orange_servants
.intersection(high_colonelcy)
.pincount();
valuation += 0.6 * f32::from(orange_subsubascendants);
let blue_subascendants = world.blue_servants
.intersection(low_seventh)
.pincount();
valuation -= 1.8 * f32::from(blue_subascendants);
let low_colonelcy = Pinfield(LOW_COLONELCY);
let blue_subsubascendants = world.blue_servants
.intersection(low_colonelcy)
.pincount();
valuation -= 0.6 * f32::from(blue_subsubascendants);
// secret service eligbility has option value
if world.orange_west_service_eligibility() ||
world.orange_east_service_eligibility() {
valuation += 0.1
}
if world.blue_west_service_eligibility() || world.blue_east_service_eligibility() {
valuation -= 0.1
}
valuation
}
fn mvv_lva_heuristic(commit: &Commit) -> f32 {
// https://chessprogramming.wikispaces.com/MVV-LVA
match commit.hospitalization {
Some(patient) => {
(figurine_valuation(patient) - figurine_valuation(commit.patch.star))
}
None => 0.0,
}
}
fn order_movements_intuitively(
experience: &fnv::FnvHashMap<Patch, u32>,
commits: &mut Vec<Commit>) -> Vec<Commit> {
let mut sorted: Vec<(Commit, Option<&u32>, f32)> = Vec::with_capacity(commits.len());
for c in commits {
sorted.push((*c, experience.get(&c.patch), mvv_lva_heuristic(&c)));
}
sorted.sort_unstable_by(|a, b| {
match b.1.cmp(&a.1) {
Ordering::Equal => b.2.partial_cmp(&a.2).unwrap_or(Ordering::Equal),
other => other,
}
});
sorted.iter().map(|c| { c.0 }).collect()
}
pub type Variation = Vec<Patch>;
#[allow(ptr_arg)]
pub fn pagan_variation_format(variation: &Variation) -> String {
variation.iter()
.map(|p| p.abbreviated_pagan_movement_rune())
.collect::<Vec<_>>()
.join(" ")
}
pub trait Memory: Clone + Send {
fn recombine(&mut self, other: Self);
fn flash(patch: Patch) -> Self;
fn blank() -> Self;
fn readable(&self) -> String;
}
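// The two `Memory` instantiations below trade detail for speed: remembering
// only a `Patch` (the single movement) keeps table entries small, while
// remembering a whole `Variation` recovers the full line of play at some
// cloning cost.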
impl Memory for Patch {
fn recombine(&mut self, other: Self) {
self.star = other.star;
self.whence = other.whence;
self.whither = other.whither;
}
fn flash(patch: Patch) -> Self {
patch
}
fn blank() -> Self {
// deliberately illegal hyperspace warp from the Figurehead; possibly useful for debugging.
// a "blank" commit isn't really a thing.
Patch {
star: Agent::new(Team::Orange, JobDescription::Figurehead),
whence: Locale::new(0, 0),
whither: Locale::new(7, 7),
}
}
fn readable(&self) -> String {
self.abbreviated_pagan_movement_rune()
}
}
impl Memory for Variation {
fn recombine(&mut self, other: Self) {
self.extend(other);
}
fn flash(patch: Patch) -> Self {
vec![patch]
}
fn blank() -> Self {
vec![]
}
fn readable(&self) -> String {
pagan_variation_format(&self)
}
}
#[derive(Clone)]
pub struct Lodestar<T: Memory> {
    pub score: f32,
pub memory: T,
}
impl<T: Memory> Lodestar<T> {
fn new(score: f32, memory: T) -> Self {
Self {
score,
memory,
}
}
}
impl<T: Memory> fmt::Debug for Lodestar<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f,
"Lodestar {{ score: {}, memory: {} }}",
self.score,
self.memory.readable())
}
}
#[derive(Eq,PartialEq,Hash)]
pub struct SpaceTime {
world_state: WorldState,
instant: i8,
}
impl SpaceTime {
fn new(world_state: WorldState, instant: i8) -> Self {
Self { world_state, instant }
}
}
#[allow(too_many_arguments)]
pub fn α_β_negamax_search<T: Memory>(
world: WorldState, depth: i8, mut α: f32, β: f32,
memory_bank: Arc<parking_lot::Mutex<LruCache<SpaceTime, Lodestar<T>,
BuildHasherDefault<XxHash>>>>,
intuition_bank: Arc<parking_lot::Mutex<fnv::FnvHashMap<Patch, u32>>>,
quiet: Option<u8>)
-> Lodestar<T> {
let mut premonitions = world.reckless_lookahead();
let mut optimum = NEG_INFINITY;
let mut optimand = T::blank();
if depth <= 0 || premonitions.is_empty() {
let potential_score = orientation(world.initiative) * score(world);
match quiet {
None => {
return Lodestar::new(potential_score, T::blank());
},
Some(extension) => {
if depth.abs() >= extension as i8 {
return Lodestar::new(potential_score, T::blank());
}
premonitions = premonitions.into_iter()
.filter(|c| c.hospitalization.is_some())
.collect::<Vec<_>>();
if premonitions.is_empty() {
return Lodestar::new(potential_score, T::blank())
} else {
optimum = potential_score;
}
}
}
};
// Note: if sorting by heuristic were sufficiently expensive, it would, on balance, be better
    // to do so only at the higher levels of the tree. From some minor empirical testing, though,
// sorting only at depth >= 1 has no performance impact, and at depth >=2 has a negative
// performance impact. So that's not the way to go.
{
let experience = intuition_bank.lock();
premonitions = order_movements_intuitively(&experience, &mut premonitions)
}
for premonition in premonitions {
let mut value = NEG_INFINITY; // can't hurt to be pessimistic
let mut memory: T = T::flash(premonition.patch);
let cached: bool;
let space_time = SpaceTime::new(premonition.tree, depth);
{
let mut open_vault = memory_bank.lock();
let remembered_lodestar_maybe = open_vault.get_mut(&space_time);
match remembered_lodestar_maybe {
Some(remembered_lodestar) => {
cached = true;
value = remembered_lodestar.score;
memory.recombine(remembered_lodestar.memory.clone());
}
None => { cached = false; }
};
}
if !cached {
let mut lodestar = α_β_negamax_search(
premonition.tree, depth - 1,
-β, -α,
memory_bank.clone(), intuition_bank.clone(),
quiet
);
lodestar.score *= -1.; // nega-
value = lodestar.score;
memory.recombine(lodestar.memory.clone());
memory_bank.lock().insert(
space_time,
lodestar,
);
}
if value > optimum {
optimum = value;
optimand = memory;
}
if value > α {
α = value;
}
if α >= β {
if depth > 0 { // not a quietness extension
let mut open_vault = intuition_bank.lock();
let intuition = open_vault.entry(premonition.patch)
.or_insert(0);
*intuition += 2u32.pow(depth as u32);
}
break; // cutoff!
}
}
Lodestar::new(optimum, optimand)
}
pub fn déjà_vu_table_size_bound<T: Memory>(gib: f32) -> usize {
let bound = usize::from(Bytes::gibi(gib)) /
(mem::size_of::<SpaceTime>() + mem::size_of::<Lodestar<T>>());
bound
}
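// Sanity check of the bound (struct sizes are assumptions for illustration,
// not measurements): with gib = 1.0, a 48-byte `SpaceTime`, and a 16-byte
// `Lodestar<Patch>`, the table would be capped at 2^30 / 64 = 16,777,216
// entries.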
pub fn potentially_timebound_kickoff<T: 'static + Memory>(
world: &WorldState, depth: u8,
extension_maybe: Option<u8>,
nihilistically: bool,
deadline_maybe: Option<time::Timespec>,
intuition_bank: Arc<parking_lot::Mutex<fnv::FnvHashMap<Patch, u32>>>,
déjà_vu_bound: f32)
-> Option<Vec<(Commit, f32, T)>> {
let déjà_vu_table: LruCache<SpaceTime, Lodestar<T>,
BuildHasherDefault<XxHash>> =
LruCache::with_hash_state(déjà_vu_table_size_bound::<T>(déjà_vu_bound),
Default::default());
let memory_bank = Arc::new(parking_lot::Mutex::new(déjà_vu_table));
let mut premonitions = if nihilistically {
world.reckless_lookahead()
} else {
world.lookahead()
};
{
let experience = intuition_bank.lock();
premonitions = order_movements_intuitively(&experience, &mut premonitions)
}
let mut forecasts = Vec::with_capacity(40);
let mut time_radios: Vec<(Commit, mpsc::Receiver<Lodestar<T>>)> = Vec::new();
for &premonition in &premonitions {
let travel_memory_bank = memory_bank.clone();
let travel_intuition_bank = intuition_bank.clone();
let (tx, rx) = mpsc::channel();
let explorer_radio = tx.clone();
time_radios.push((premonition, rx));
thread::spawn(move || {
let search_hit: Lodestar<T> = α_β_negamax_search(
premonition.tree, (depth - 1) as i8,
NEG_INFINITY, INFINITY,
travel_memory_bank, travel_intuition_bank,
extension_maybe
);
explorer_radio.send(search_hit).ok();
});
}
while !time_radios.is_empty() { // polling for results
if let Some(deadline) = deadline_maybe {
if time::get_time() > deadline {
return None;
}
}
// iterate over indices so that we can use swap_remove during the loop
for i in (0..time_radios.len()).rev() {
let premonition = time_radios[i].0;
if let Ok(search_hit) = time_radios[i].1.try_recv() {
let value = -search_hit.score;
let mut full_variation = T::flash(premonition.patch);
full_variation.recombine(search_hit.memory);
forecasts.push((premonition, value, full_variation));
time_radios.swap_remove(i);
}
}
thread::sleep(Duration::from_millis(2));
debug!("waiting for {} of {} first-movement search threads",
time_radios.len(), premonitions.len())
}
forecasts.sort_unstable_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(Ordering::Equal));
Some(forecasts)
}
pub fn kickoff<T: 'static + Memory>(world: &WorldState, depth: u8, extension: Option<u8>,
nihilistically: bool, déjà_vu_bound: f32)
-> Vec<(Commit, f32, T)> {
let experience_table: fnv::FnvHashMap<Patch, u32> = fnv::FnvHashMap::default();
let intuition_bank = Arc::new(parking_lot::Mutex::new(experience_table));
potentially_timebound_kickoff::<T>(world, depth, extension, nihilistically, None,
intuition_bank, déjà_vu_bound).unwrap()
}
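// e.g., mirroring the benchmarks in the test module below:
//
//     let tops = kickoff::<Variation>(&WorldState::new(), 3, None, true, 1.0);
//
// returns the first movements sorted best-first, each with its score and
// anticipated variation.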
pub fn iterative_deepening_kickoff<T: 'static + Memory>(world: &WorldState, timeout: time::Duration,
nihilistically: bool, déjà_vu_bound: f32)
-> (Vec<(Commit, f32, T)>, u8) {
let deadline = time::get_time() + timeout;
let mut depth = 1;
let experience_table = fnv::FnvHashMap::default();
let intuition_bank = Arc::new(parking_lot::Mutex::new(experience_table));
let mut forecasts = potentially_timebound_kickoff(
world, depth, None, nihilistically, None,
intuition_bank.clone(),
déjà_vu_bound).unwrap();
while let Some(prophecy) = potentially_timebound_kickoff::<T>(
world, depth, None, nihilistically, Some(deadline),
intuition_bank.clone(), déjà_vu_bound) {
forecasts = prophecy;
depth += 1;
}
(forecasts, depth-1)
}
#[allow(needless_pass_by_value)] // `depth_sequence`
pub fn fixed_depth_sequence_kickoff<T: 'static + Memory>(world: &WorldState, depth_sequence: Vec<u8>,
nihilistically: bool, déjà_vu_bound: f32)
-> Vec<(Commit, f32, T)> {
let mut depths = depth_sequence.iter();
let experience_table = fnv::FnvHashMap::default();
let intuition_bank = Arc::new(parking_lot::Mutex::new(experience_table));
let mut forecasts = potentially_timebound_kickoff::<T>(
world, *depths.next().expect("`depth_sequence` should be nonempty"),
None, nihilistically, None, intuition_bank.clone(),
déjà_vu_bound
).unwrap();
for &depth in depths {
forecasts = potentially_timebound_kickoff::<T>(
world, depth, None, nihilistically, None,
intuition_bank.clone(), déjà_vu_bound).unwrap();
}
forecasts
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use time;
use super::{REWARD_FOR_INITIATIVE, kickoff, score, SpaceTime, Variation};
use space::Locale;
use life::{WorldState, Patch};
use fnv;
use twox_hash::XxHash;
use std::hash::Hash;
use std::collections::hash_map;
use identity::{Agent, JobDescription, Team};
const MOCK_DÉJÀ_VU_BOUND: f32 = 2.0;
impl WorldState {
fn no_castling_at_all(&mut self) {
self.clear_orange_east_service_eligibility();
self.clear_orange_west_service_eligibility();
self.clear_blue_east_service_eligibility();
self.clear_blue_west_service_eligibility();
}
}
#[bench]
fn benchmark_hashing_spacetime_fnv(b: &mut Bencher) {
let w = WorldState::new();
let st = SpaceTime::new(w, 3);
let mut hasher = fnv::FnvHasher::default();
b.iter(|| {
for _ in 0..1000 {
st.hash(&mut hasher);
}
});
}
#[bench]
fn benchmark_hashing_spacetime_xx(b: &mut Bencher) {
let w = WorldState::new();
let mut hasher = XxHash::default();
let st = SpaceTime::new(w, 3);
b.iter(|| {
for _ in 0..1000 {
st.hash(&mut hasher);
}
});
}
#[bench]
fn benchmark_hashing_spacetime_sip(b: &mut Bencher) {
let w = WorldState::new();
let mut hasher = hash_map::DefaultHasher::new();
let st = SpaceTime::new(w, 3);
b.iter(|| {
for _ in 0..1000 {
st.hash(&mut hasher);
}
});
}
#[bench]
fn benchmark_hashing_patch_fnv(b: &mut Bencher) {
let mut hasher = fnv::FnvHasher::default();
let p = Patch {
star: Agent {
team: Team::Orange,
job_description: JobDescription::Figurehead,
},
whence: Locale::new(1, 2),
whither: Locale::new(3, 4)
};
b.iter(|| {
for _ in 0..1000 {
p.hash(&mut hasher);
}
});
}
#[bench]
fn benchmark_hashing_patch_xx(b: &mut Bencher) {
let mut hasher = XxHash::default();
let p = Patch {
star: Agent {
team: Team::Orange,
job_description: JobDescription::Figurehead,
},
whence: Locale::new(1, 2),
whither: Locale::new(3, 4)
};
b.iter(|| {
for _ in 0..1000 {
p.hash(&mut hasher);
}
});
}
#[bench]
fn benchmark_hashing_patch_sip(b: &mut Bencher) {
let mut hasher = hash_map::DefaultHasher::new();
let p = Patch {
star: Agent {
team: Team::Orange,
job_description: JobDescription::Figurehead,
},
whence: Locale::new(1, 2),
whither: Locale::new(3, 4)
};
b.iter(|| {
for _ in 0..1000 {
p.hash(&mut hasher);
}
});
}
#[bench]
fn benchmark_scoring(b: &mut Bencher) {
b.iter(|| score(WorldState::new()));
}
#[bench]
fn benchmark_kickoff_depth_1(b: &mut Bencher) {
let ws = WorldState::new();
b.iter(|| kickoff::<Patch>(&ws, 1, None, true, MOCK_DÉJÀ_VU_BOUND));
}
#[bench]
fn benchmark_kickoff_depth_2_arbys(b: &mut Bencher) {
let ws = WorldState::new();
b.iter(|| kickoff::<Patch>(&ws, 2, None, true, MOCK_DÉJÀ_VU_BOUND));
}
#[bench]
fn benchmark_kickoff_depth_2_carefully(b: &mut Bencher) {
let ws = WorldState::new();
b.iter(|| kickoff::<Patch>(&ws, 2, None, false, MOCK_DÉJÀ_VU_BOUND));
}
#[bench]
fn benchmark_kickoff_depth_3(b: &mut Bencher) {
let ws = WorldState::new();
b.iter(|| kickoff::<Patch>(&ws, 3, None, true, MOCK_DÉJÀ_VU_BOUND));
}
#[test]
#[ignore] // more research is needed
fn concerning_short_circuiting_upon_finding_critical_endangerment() {
        let ws = WorldState::reconstruct("7K/r7/1r6/8/8/8/8/7k b - -");
let start = time::get_time();
kickoff::<Variation>(&ws, 30, None, true, MOCK_DÉJÀ_VU_BOUND);
let duration = time::get_time() - start;
assert!(duration.num_seconds() < 20);
}
#[test]
#[allow(float_cmp)]
fn concerning_fairness_of_the_initial_position() {
// It's okay to assume this is really 0.0. Floats may be imprecise,
// but they do have well-defined behavior.
assert_eq!(0.0, score(WorldState::new()) - REWARD_FOR_INITIATIVE);
}
#[test]
fn concerning_servant_ascension_choices() {
        let ws = WorldState::reconstruct("8/q1P1k3/8/8/8/8/6PP/7K w - -");
// looking ahead 3 movements allows the Leafline AI to catch the
// split, whereby transforming into a pony (rather than
// transitioning into a princess, as would usually be
// expected) endangers both the blue princess and figurehead
let tops = kickoff::<Variation>(&ws, 3, None, true, MOCK_DÉJÀ_VU_BOUND);
let best_move = tops[0].0;
let score = tops[0].1;
println!("{:?}", best_move);
assert!(score > 0.0);
assert_eq!(best_move.tree.preserve(), "2N5/q3k3/8/8/8/8/6PP/7K b - -");
}
#[test]
fn experimentally_about_kickoff() {
let mut world = WorldState::new_except_empty();
// SCENARIO: let's imagine Orange (to move) has separate attacks against
// Blue's pony and servant, against which Blue has no defense but
// to run away. We predict that Orange will take the pony, and
// then Blue will move the servant out of the way.
// scholar endangers pony
world.blue_ponies = world.blue_ponies.alight(Locale::new(0, 0));
world.orange_scholars = world.orange_scholars.alight(Locale::new(2, 2));
// pony endangers servant
world.blue_servants = world.blue_servants.alight(Locale::new(7, 1));
world.orange_ponies = world.orange_ponies.alight(Locale::new(5, 2));
// Blue has another servant sitting nowhere interesting
world.blue_servants = world.blue_servants.alight(Locale::new(3, 6));
world.no_castling_at_all();
let depth = 2;
let advisory = kickoff::<Variation>(&world, depth, None, true, MOCK_DÉJÀ_VU_BOUND);
// taking the pony is the right thing to do
assert_eq!(Locale::new(0, 0), advisory[0].0.patch.whither);
// And, furthermore, the answer should be the same if we face the
// same situation with the colors reversed
//
// XXX this would be tidier and less copy-pastey if I had more
// general figurine-placing functions that were three rather than
// two levels of abstraction above twiddling bits on an unsigned
// int ... oh, well
let mut negaworld = WorldState::new_except_empty();
negaworld.initiative = Team::Blue;
// scholar endangers pony
negaworld.orange_ponies = negaworld.orange_ponies.alight(Locale::new(0, 0));
negaworld.blue_scholars = negaworld.blue_scholars.alight(Locale::new(2, 2));
// pony endangers servant
negaworld.orange_servants = negaworld.orange_servants
.alight(Locale::new(7, 1));
negaworld.blue_ponies = negaworld.blue_ponies.alight(Locale::new(5, 2));
// Orange has another servant sitting nowhere interesting
negaworld.orange_servants = negaworld.orange_servants
.alight(Locale::new(3, 6));
negaworld.initiative = Team::Blue;
negaworld.no_castling_at_all();
let negadvisory = kickoff::<Variation>(&negaworld, depth, None, true, MOCK_DÉJÀ_VU_BOUND);
// taking the pony is still the right thing to do, even in the
// negaworld
assert_eq!(Locale::new(0, 0), negadvisory[0].0.patch.whither);
}
#[ignore] // really slow
#[test]
fn concerning_fortune_favoring_the_bold() {
// It would be nice if scores at even and odd plies were comparable,
// rather than lurching wildly with parity due to the tempo
// difference. We can try to compensate for this by accounting for
// initiative in scoring world-states, but it's important to have a
// test to demonstrate that the magnitude of that correction is
// sane. ... although this is actually a pretty subtle problem where we
// should be wary of making things worse.
//
// some "representative" scenarios ...
let world_runesets = vec![
// initial position
"rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1",
// 1. e4
"rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1",
// princess's gambit declined I
"rnbqkbnr/ppp2ppp/4p3/3p4/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3",
// princess's gambit declined II
"rnbqkbnr/ppp2ppp/4p3/3p4/2PP4/2N5/PP2PPPP/R1BQKBNR b KQkq - 1 3",
// powder keg I
"r2q1rk1/2p1ppbp/1p4p1/p2p1b2/NnPPn3/PP2PN1P/1B2BPP1/R2Q1RK1 b - - 0 14",
// powder keg II
"r2q1rk1/2p1ppbp/1pn3p1/p2p1b2/N1PPn3/PP2PN1P/1B2BPP1/R2Q1RK1 w - - 1 15"
];
let mut tempo_lurches: Vec<f32> = Vec::new();
for world_runeset in world_runesets {
let world = WorldState::reconstruct(world_runeset);
let mut previously = None;
for &depth in &[2, 3, 4] {
let premonitions = kickoff::<Variation>(&world, depth, None, true, 1.0);
let mut top_showings = 0.;
for showing in &premonitions[0..10] {
top_showings += showing.1; // (_commit, score, _variation)
}
let club_score = top_showings / 10.;
if let Some(previous_score) = previously {
let orienting_factor =
if depth % 2 == 0 { -1. } else { 1. };
let lurch = orienting_factor * (club_score - previous_score);
tempo_lurches.push(lurch);
}
previously = Some(club_score);
}
}
let average_tempo_lurch =
tempo_lurches.iter().sum::<f32>()/tempo_lurches.len() as f32;
println!("tempo lurches were {:?}, average was {}",
tempo_lurches, average_tempo_lurch);
assert_eq_within_ε!(REWARD_FOR_INITIATIVE, average_tempo_lurch, 0.8);
}
#[test]
fn concerning_lazy_servants() {
let orange_doubled = WorldState::reconstruct("k7/pp6/8/8/8/P7/P7/K7 w - -");
let orange_not_doubled = WorldState::reconstruct("k7/pp6/8/8/8/8/PP6/K7 w - -");
assert!(score(orange_doubled) < score(orange_not_doubled));
}
}
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import os
from common_helper_files import get_dir_of_file
from pluginbase import PluginBase
import logging
class FilterSystem():
FILTER_TYPE = None
def __init__(self, selected_filters):
self._init_plugins()
if selected_filters == 'all':
self._set_all_filters()
else:
self._set_filters_to_apply(selected_filters)
self._setup_counters()
def _init_plugins(self):
self.plugin_base = PluginBase(package='filter_plugins.{}'.format(self.FILTER_TYPE))
self.filter_plugins = dict()
self.plugin_source = self.plugin_base.make_plugin_source(searchpath=[os.path.join(get_dir_of_file(__file__), '../filter_plugins/{}'.format(self.FILTER_TYPE))])
plugin_list = self.plugin_source.list_plugins()
for item in plugin_list:
plugin = self.plugin_source.load_plugin(item)
plugin.setup(self)
def register_plugin(self, name, filter_function):
self.filter_plugins[name] = filter_function
def _set_all_filters(self):
self.filters_to_apply = list(self.filter_plugins.keys())
def _setup_counters(self):
self.counter = dict()
        for item in self.filters_to_apply:
            self.counter[item] = 0
def _set_filters_to_apply(self, filter_list):
self.filters_to_apply = list()
for item in filter_list:
if item in self.filter_plugins:
self.filters_to_apply.append(item)
else:
                logging.error('Filter "{}" is not available!'.format(item))
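# Hypothetical concrete subclass (illustrative sketch, not part of this
# module): a subclass pins FILTER_TYPE so plugins load from the matching
# directory, e.g.
#
#     class DownloadFilterSystem(FilterSystem):
#         FILTER_TYPE = 'download'
#
#     fs = DownloadFilterSystem('all')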
<|file_name|>seg_queue.rs<|end_file_name|><|fim▁begin|>use std::sync::atomic::Ordering::{Acquire, Release, Relaxed};
use std::sync::atomic::{AtomicBool, AtomicUsize};
use std::{ptr, mem};
use std::cmp;
use std::cell::UnsafeCell;
use mem::epoch::{self, Atomic, Owned};
const SEG_SIZE: usize = 32;
/// A Michael-Scott queue that allocates "segments" (arrays of nodes)
/// for efficiency.
///
/// Usable with any number of producers and consumers.
pub struct SegQueue<T> {
head: Atomic<Segment<T>>,
tail: Atomic<Segment<T>>,
}
struct Segment<T> {
low: AtomicUsize,
data: [UnsafeCell<T>; SEG_SIZE],
ready: [AtomicBool; SEG_SIZE],
high: AtomicUsize,
next: Atomic<Segment<T>>,
}
unsafe impl<T> Sync for Segment<T> {}
impl<T> Segment<T> {
fn new() -> Segment<T> {
Segment {
data: unsafe { mem::uninitialized() },
ready: unsafe { mem::transmute([0usize; SEG_SIZE]) },
low: AtomicUsize::new(0),
high: AtomicUsize::new(0),
next: Atomic::null(),
}
}
}
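// Invariants of the segment scheme sketched above: `low` is the next slot to
// pop and `high` the next slot to push; `ready[i]` flips to true only after
// `data[i]` has been fully written, so a consumer never observes a
// half-initialized cell.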
impl<T> SegQueue<T> {
    /// Create a new, empty queue.
pub fn new() -> SegQueue<T> {
let q = SegQueue {
head: Atomic::null(),
tail: Atomic::null(),
};
let sentinel = Owned::new(Segment::new());
let guard = epoch::pin();
let sentinel = q.head.store_and_ref(sentinel, Relaxed, &guard);
q.tail.store_shared(Some(sentinel), Relaxed);
q
}
/// Add `t` to the back of the queue.
pub fn push(&self, t: T) {
let guard = epoch::pin();
loop {
let tail = self.tail.load(Acquire, &guard).unwrap();
if tail.high.load(Relaxed) >= SEG_SIZE { continue }
let i = tail.high.fetch_add(1, Relaxed);
unsafe {
if i < SEG_SIZE {
*(*tail).data.get_unchecked(i).get() = t;
tail.ready.get_unchecked(i).store(true, Release);
if i + 1 == SEG_SIZE {
let tail = tail.next.store_and_ref(Owned::new(Segment::new()), Release, &guard);
self.tail.store_shared(Some(tail), Release);
}
return
}
}
}
}
/// Attempt to dequeue from the front.
///
/// Returns `None` if the queue is observed to be empty.
pub fn pop(&self) -> Option<T> {
let guard = epoch::pin();
loop {
let head = self.head.load(Acquire, &guard).unwrap();
loop {
let low = head.low.load(Relaxed);
if low >= cmp::min(head.high.load(Relaxed), SEG_SIZE) { break }
if head.low.compare_and_swap(low, low+1, Relaxed) == low {
loop {
if unsafe { head.ready.get_unchecked(low).load(Acquire) } { break }
}
if low + 1 == SEG_SIZE {
loop {
if let Some(next) = head.next.load(Acquire, &guard) {
self.head.store_shared(Some(next), Release);
break
}
}
}
return Some(unsafe { ptr::read((*head).data.get_unchecked(low).get()) })
}
}
if head.next.load(Relaxed, &guard).is_none() { return None }
}
}
}
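// Minimal single-threaded usage sketch:
//
//     let q: SegQueue<i32> = SegQueue::new();
//     q.push(1);
//     q.push(2);
//     assert_eq!(q.pop(), Some(1));
//     assert_eq!(q.pop(), Some(2));
//     assert_eq!(q.pop(), None);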
#[cfg(test)]
mod test {
const CONC_COUNT: i64 = 1000000;
use std::io::stderr;
use std::io::prelude::*;
use mem::epoch;
use scope;
use super::*;
#[test]
fn smoke_queue() {
let q: SegQueue<i64> = SegQueue::new();
}
#[test]
fn push_pop_1() {
let q: SegQueue<i64> = SegQueue::new();
q.push(37);
assert_eq!(q.pop(), Some(37));
}
#[test]
fn push_pop_2() {
let q: SegQueue<i64> = SegQueue::new();
q.push(37);
q.push(48);
assert_eq!(q.pop(), Some(37));
assert_eq!(q.pop(), Some(48));
}
#[test]
fn push_pop_many_seq() {
let q: SegQueue<i64> = SegQueue::new();
for i in 0..200 {
q.push(i)
}
writeln!(stderr(), "done pushing");
for i in 0..200 {
assert_eq!(q.pop(), Some(i));
}
}
#[test]
fn push_pop_many_spsc() {
let q: SegQueue<i64> = SegQueue::new();
scope(|scope| {
scope.spawn(|| {
let mut next = 0;
while next < CONC_COUNT {
if let Some(elem) = q.pop() {
assert_eq!(elem, next);
next += 1;
}
}
});
for i in 0..CONC_COUNT {
q.push(i)
}
});
}
#[test]
fn push_pop_many_spmc() {
use std::time::Duration;
fn recv(t: i32, q: &SegQueue<i64>) {
let mut cur = -1;
for i in 0..CONC_COUNT {
            if let Some(elem) = q.pop() {
                if elem <= cur {
                    writeln!(stderr(), "{}: {} <= {}", t, elem, cur);
                }
                assert!(elem > cur);
                cur = elem;
if cur == CONC_COUNT - 1 { break }
}
if i % 10000 == 0 {
//writeln!(stderr(), "{}: {} @ {}", t, i, cur);
}
}
}
let q: SegQueue<i64> = SegQueue::new();
let qr = &q;
scope(|scope| {
for i in 0..3 {
scope.spawn(move || recv(i, qr));
}
scope.spawn(|| {
for i in 0..CONC_COUNT {
q.push(i);
if i % 10000 == 0 {
//writeln!(stderr(), "Push: {}", i);
}
}
})
});
}
#[test]
fn push_pop_many_mpmc() {
enum LR { Left(i64), Right(i64) }
let q: SegQueue<LR> = SegQueue::new();
scope(|scope| {
for _t in 0..2 {
scope.spawn(|| {
for i in CONC_COUNT-1..CONC_COUNT {
q.push(LR::Left(i))
}
});
scope.spawn(|| {
for i in CONC_COUNT-1..CONC_COUNT {
q.push(LR::Right(i))
}
});
scope.spawn(|| {
let mut vl = vec![];
let mut vr = vec![];
for _i in 0..CONC_COUNT {
match q.pop() {
Some(LR::Left(x)) => vl.push(x),
Some(LR::Right(x)) => vr.push(x),
_ => {}
}
}
let mut vl2 = vl.clone();
let mut vr2 = vr.clone();
vl2.sort();
vr2.sort();
assert_eq!(vl, vl2);
assert_eq!(vr, vr2);
});
}
});
}
}
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>// This is a part of rust-encoding.
// Copyright (c) 2013-2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Internal utilities.
#![macro_escape]
use std::{str, mem};
use std::default::Default;
use types;
/// Unchecked conversion to `char`.
pub fn as_char<T:Int+NumCast>(ch: T) -> char {
unsafe { mem::transmute(ch.to_u32().unwrap()) }
}
/// External iterator for a string's characters with its corresponding byte offset range.
pub struct StrCharIndexIterator<'r> {
index: uint,
string: &'r str,
}
impl<'r> Iterator<((uint,uint), char)> for StrCharIndexIterator<'r> {
#[inline]
fn next(&mut self) -> Option<((uint,uint), char)> {
if self.index < self.string.len() {
let str::CharRange {ch, next} = self.string.char_range_at(self.index);
let prev = self.index;
self.index = next;
Some(((prev, next), ch))
} else {
None
}
}
}
/// A trait providing an `index_iter` method.
pub trait StrCharIndex<'r> {
fn index_iter(&self) -> StrCharIndexIterator<'r>;
}
impl<'r> StrCharIndex<'r> for &'r str {
/// Iterates over each character with corresponding byte offset range.
fn index_iter(&self) -> StrCharIndexIterator<'r> {
StrCharIndexIterator { index: 0, string: *self }
}
}
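// Illustrative iteration: for the two-scalar string "aé", `"aé".index_iter()`
// yields ((0, 1), 'a') and then ((1, 3), 'é'), since 'é' occupies two bytes
// in UTF-8.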
/// A helper struct for the stateful decoder DSL.
pub struct StatefulDecoderHelper<'a, St> {
/// The current buffer.
pub buf: &'a [u8],
/// The current index to the buffer.
pub pos: uint,
/// The output buffer.
pub output: &'a mut types::StringWriter,
/// The last codec error. The caller will later collect this.
pub err: Option<types::CodecError>,
}
impl<'a, St:Default> StatefulDecoderHelper<'a, St> {
/// Reads one byte from the buffer if any.
#[inline(always)]
pub fn read(&mut self) -> Option<u8> {
match self.buf.get(self.pos) {
Some(&c) => { self.pos += 1; Some(c) }
None => None
}
}
/// Resets back to the initial state.
/// This should be the last expr in the rules.
#[inline(always)]
pub fn reset(&self) -> St {
Default::default()
}
/// Writes one Unicode scalar value to the output.
/// There is intentionally no check for `c`, so the caller should ensure that it's valid.
/// If this is the last expr in the rules, also resets back to the initial state.
#[inline(always)]
pub fn emit(&mut self, c: u32) -> St {
self.output.write_char(unsafe {mem::transmute(c)});
Default::default()
}
/// Writes a Unicode string to the output.
/// If this is the last expr in the rules, also resets back to the initial state.
#[inline(always)]
pub fn emit_str(&mut self, s: &str) -> St {
self.output.write_str(s);
Default::default()
}
/// Issues a codec error with given message at the current position.
/// If this is the last expr in the rules, also resets back to the initial state.
#[inline(always)]
pub fn err(&mut self, msg: &'static str) -> St {
self.err = Some(types::CodecError { upto: self.pos, cause: msg.into_maybe_owned() });
Default::default()
}
/// Issues a codec error with given message at the current position minus `backup` bytes.
/// If this is the last expr in the rules, also resets back to the initial state.
///
/// This should be used to implement "prepending byte to the stream" in the Encoding spec,
/// which corresponds to `ctx.backup_and_err(1, ...)`.
#[inline(always)]
pub fn backup_and_err(&mut self, backup: uint, msg: &'static str) -> St {
// XXX we should eventually handle a negative `upto`
let upto = if self.pos < backup {0} else {self.pos - backup};
self.err = Some(types::CodecError { upto: upto, cause: msg.into_maybe_owned() });
Default::default()
}
}
/// Defines a stateful decoder from given state machine.
macro_rules! stateful_decoder(
(
$(#[$decmeta:meta])*
struct $dec:ident;
module $stmod:ident; // should be unique from other existing identifiers
ascii_compatible $asciicompat:expr;
$(internal $item:item)* // will only be visible from state functions
initial state $inist:ident($inictx:ident) {
$(case $($inilhs:pat)|+ => $($inirhs:expr),+;)+
final => $($inifin:expr),+;
}
$(checkpoint state $ckst:ident($ckctx:ident $(, $ckarg:ident: $ckty:ty)*) {
$(case $($cklhs:pat)|+ => $($ckrhs:expr),+;)+
final => $($ckfin:expr),+;
})*
$(state $st:ident($ctx:ident $(, $arg:ident: $ty:ty)*) {
$(case $($lhs:pat)|+ => $($rhs:expr),+;)+
final => $($fin:expr),+;
})*
) => (
$(#[$decmeta])*
pub struct $dec {
st: $stmod::State
}
#[allow(non_snake_case_functions)]
mod $stmod {
#[deriving(PartialEq,Clone)]
pub enum State {
$inist,
$(
$ckst(() $(, $ckty)*),
)*
$(
$st(() $(, $ty)*),
)*
}
impl ::std::default::Default for State {
#[inline(always)] fn default() -> State { $inist }
}
pub mod internal {
pub type Context<'a> = ::util::StatefulDecoderHelper<'a, super::State>;
$($item)*
}
pub mod start {
use super::internal::*;
#[inline(always)]
pub fn $inist($inictx: &mut Context) -> super::State {
                    // prohibits all kinds of recursion, including self-recursion
#[allow(unused_imports)] use super::transient::*;
match $inictx.read() {
None => super::$inist,
                        Some(c) => match c { $($($inilhs)|+ => { $($inirhs);+ })+ },
                    }
                }
                $(
                    #[inline(always)]
pub fn $ckst($ckctx: &mut Context $(, $ckarg: $ckty)*) -> super::State {
                        // prohibits all kinds of recursion, including self-recursion
#[allow(unused_imports)] use super::transient::*;
match $ckctx.read() {
None => super::$ckst(() $(, $ckarg)*),
Some(c) => match c { $($($cklhs)|+ => { $($ckrhs);+ })+ },
}
}
)*
}
pub mod transient {
use super::internal::*;
#[inline(always)]
#[allow(dead_code)]
pub fn $inist(_: &mut Context) -> super::State {
super::$inist // do not recurse further
}
$(
#[inline(always)]
#[allow(dead_code)]
pub fn $ckst(_: &mut Context $(, $ckarg: $ckty)*) -> super::State {
super::$ckst(() $(, $ckarg)*) // do not recurse further
}
)*
$(
#[inline(always)]
pub fn $st($ctx: &mut Context $(, $arg: $ty)*) -> super::State {
match $inictx.read() {
None => super::$st(() $(, $arg)*),
Some(c) => match c { $($($lhs)|+ => { $($rhs);+ })+ },
}
}
)*
}
}
impl $dec {
pub fn new() -> Box<Decoder> { box $dec { st: $stmod::$inist } as Box<Decoder> }
}
impl Decoder for $dec {
fn from_self(&self) -> Box<Decoder> { $dec::new() }
fn is_ascii_compatible(&self) -> bool { $asciicompat }
fn raw_feed(&mut self, input: &[u8],
output: &mut StringWriter) -> (uint, Option<CodecError>) {
use self::$stmod::{start, transient};
output.writer_hint(input.len());
let mut ctx = ::util::StatefulDecoderHelper {
buf: input, pos: 0, output: output, err: None
};
let mut processed = 0;
let mut st = self.st;
let st_ = match st {
$stmod::$inist => $stmod::$inist,
$(
$stmod::$ckst(() $(, $ckarg)*) => start::$ckst(&mut ctx $(, $ckarg)*),
)*
$(
$stmod::$st(() $(, $arg)*) => transient::$st(&mut ctx $(, $arg)*),
)*
};
match (ctx.err.take(), st_) {
(None, $stmod::$inist) $(| (None, $stmod::$ckst(..)))* =>
{ st = st_; processed = ctx.pos; }
// XXX splitting the match case improves the performance somehow, but why?
(None, _) => { self.st = st_; return (processed, None); }
(Some(err), _) => { self.st = st_; return (processed, Some(err)); }
}
while ctx.pos < ctx.buf.len() {
let st_ = match st {
$stmod::$inist => start::$inist(&mut ctx),
$(
$stmod::$ckst(() $(, $ckarg)*) => start::$ckst(&mut ctx $(, $ckarg)*),
)*
_ => unreachable!(),
};
match (ctx.err.take(), st_) {
(None, $stmod::$inist) $(| (None, $stmod::$ckst(..)))* =>
{ st = st_; processed = ctx.pos; }
// XXX splitting the match case improves the performance somehow, but why?
(None, _) => { self.st = st_; return (processed, None); }
(Some(err), _) => { self.st = st_; return (processed, Some(err)); }
}
}
self.st = st;
(processed, None)
}
fn raw_finish(&mut self, output: &mut StringWriter) -> Option<CodecError> {
#![allow(unused_mut, unused_variable)]
let mut ctx = ::util::StatefulDecoderHelper {
buf: &[], pos: 0, output: output, err: None
};
self.st = match ::std::mem::replace(&mut self.st, $stmod::$inist) {
$stmod::$inist => { let $inictx = &mut ctx; $($inifin);+ },
$(
$stmod::$ckst(() $(, $ckarg)*) => { let $ckctx = &mut ctx; $($ckfin);+ },
)*
$(
$stmod::$st(() $(, $arg)*) => { let $ctx = &mut ctx; $($fin);+ },
)*
};
ctx.err.take()
}
}
)
)
/// Defines an ASCII-compatible stateful decoder from given state machine.
macro_rules! ascii_compatible_stateful_decoder(
(
$(#[$decmeta:meta])*
struct $dec:ident;
module $stmod:ident; // should be unique from other existing identifiers
$(internal $item:item)* // will only be visible from state functions
initial state $inist:ident($inictx:ident) {
$(case $($inilhs:pat)|+ => $($inirhs:expr),+;)+
}
$(state $st:ident($ctx:ident $(, $arg:ident: $ty:ty)*) {
$(case $($lhs:pat)|+ => $($rhs:expr),+;)+
})*
) => (
stateful_decoder!(
$(#[$decmeta])*
struct $dec;
module $stmod;
ascii_compatible true;
$(internal $item)*
initial state $inist($inictx) {
$(case $($inilhs)|+ => $($inirhs),+;)+
final => $inictx.reset();
}
$(state $st($ctx $(, $arg: $ty)*) {
$(case $($lhs)|+ => $($rhs),+;)+
final => $ctx.err("incomplete sequence");
})*
)
)
)
<|file_name|>pad.py<|end_file_name|><|fim▁begin|>import numbers
import numpy
import cupy
###############################################################################
# Private utility functions.
def _round_if_needed(arr, dtype):
"""Rounds arr inplace if the destination dtype is an integer.
"""
if cupy.issubdtype(dtype, cupy.integer):
        arr.round(out=arr)  # rounds in place; cupy/cupy#2330 tracks a bug in round
def _slice_at_axis(sl, axis):
"""Constructs a tuple of slices to slice an array in the given dimension.
Args:
sl(slice): The slice for the given dimension.
axis(int): The axis to which `sl` is applied. All other dimensions are
left "unsliced".
Returns:
tuple of slices: A tuple with slices matching `shape` in length.
"""
return (slice(None),) * axis + (sl,) + (Ellipsis,)
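# For example (illustrative): _slice_at_axis(slice(2, -2), 1) returns
# (slice(None), slice(2, -2), Ellipsis), which indexes as arr[:, 2:-2, ...].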
def _view_roi(array, original_area_slice, axis):
"""Gets a view of the current region of interest during iterative padding.
    When padding multiple dimensions iteratively, corner values are
unnecessarily overwritten multiple times. This function reduces the
working area for the first dimensions so that corners are excluded.
Args:
array(cupy.ndarray): The array with the region of interest.
original_area_slice(tuple of slices): Denotes the area with original
values of the unpadded array.
axis(int): The currently padded dimension assuming that `axis` is padded
before `axis` + 1.
    Returns:
        cupy.ndarray: A view of `array` with the already-padded dimensions
        before `axis` restricted to the original area, so that corners are
        excluded from the working area.
    """
axis += 1
sl = (slice(None),) * axis + original_area_slice[axis:]
return array[sl]
def _pad_simple(array, pad_width, fill_value=None):
"""Pads an array on all sides with either a constant or undefined values.
Args:
array(cupy.ndarray): Array to grow.
pad_width(sequence of tuple[int, int]): Pad width on both sides for each
dimension in `arr`.
fill_value(scalar, optional): If provided the padded area is
filled with this value, otherwise the pad area left undefined.
(Default value = None)
"""
# Allocate grown array
new_shape = tuple(
left + size + right
for size, (left, right) in zip(array.shape, pad_width)
)
order = 'F' if array.flags.fnc else 'C' # Fortran and not also C-order
padded = cupy.empty(new_shape, dtype=array.dtype, order=order)
if fill_value is not None:
padded.fill(fill_value)
# Copy old array into correct space
original_area_slice = tuple(
slice(left, left + size)
for size, (left, right) in zip(array.shape, pad_width)
)
padded[original_area_slice] = array
return padded, original_area_slice
def _set_pad_area(padded, axis, width_pair, value_pair):
"""Set an empty-padded area in given dimension.
"""
left_slice = _slice_at_axis(slice(None, width_pair[0]), axis)
padded[left_slice] = value_pair[0]
right_slice = _slice_at_axis(
slice(padded.shape[axis] - width_pair[1], None), axis
)
padded[right_slice] = value_pair[1]
def _get_edges(padded, axis, width_pair):
"""Retrieves edge values from an empty-padded array along a given axis.
Args:
padded(cupy.ndarray): Empty-padded array.
axis(int): Dimension in which the edges are considered.
width_pair((int, int)): Pair of widths that mark the pad area on both
sides in the given dimension.
"""
left_index = width_pair[0]
left_slice = _slice_at_axis(slice(left_index, left_index + 1), axis)
left_edge = padded[left_slice]
right_index = padded.shape[axis] - width_pair[1]
right_slice = _slice_at_axis(slice(right_index - 1, right_index), axis)
right_edge = padded[right_slice]
return left_edge, right_edge
def _get_linear_ramps(padded, axis, width_pair, end_value_pair):
"""Constructs linear ramps for an empty-padded array along a given axis.
Args:
padded(cupy.ndarray): Empty-padded array.
axis(int): Dimension in which the ramps are constructed.
width_pair((int, int)): Pair of widths that mark the pad area on both
sides in the given dimension.
end_value_pair((scalar, scalar)): End values for the linear ramps which
form the edge of the fully padded array. These values are included in
the linear ramps.
"""
edge_pair = _get_edges(padded, axis, width_pair)
left_ramp = cupy.linspace(
start=end_value_pair[0],
# squeeze axis replaced by linspace
stop=edge_pair[0].squeeze(axis),
num=width_pair[0],
endpoint=False,
dtype=padded.dtype,
axis=axis,
)
right_ramp = cupy.linspace(
start=end_value_pair[1],
# squeeze axis replaced by linspace
stop=edge_pair[1].squeeze(axis),
num=width_pair[1],
endpoint=False,
dtype=padded.dtype,
axis=axis,
)
# Reverse linear space in appropriate dimension
right_ramp = right_ramp[_slice_at_axis(slice(None, None, -1), axis)]
return left_ramp, right_ramp
def _get_stats(padded, axis, width_pair, length_pair, stat_func):
"""Calculates a statistic for an empty-padded array along a given axis.
Args:
padded(cupy.ndarray): Empty-padded array.
axis(int): Dimension in which the statistic is calculated.
width_pair((int, int)): Pair of widths that mark the pad area on both
sides in the given dimension.
length_pair(2-element sequence of None or int): Gives the number of
values in valid area from each side that is taken into account when
calculating the statistic. If None the entire valid area in `padded`
is considered.
stat_func(function): Function to compute statistic. The expected
signature is
``stat_func(x: ndarray, axis: int, keepdims: bool) -> ndarray``.
"""
# Calculate indices of the edges of the area with original values
left_index = width_pair[0]
right_index = padded.shape[axis] - width_pair[1]
# as well as its length
max_length = right_index - left_index
# Limit stat_lengths to max_length
left_length, right_length = length_pair
if left_length is None or max_length < left_length:
left_length = max_length
if right_length is None or max_length < right_length:
right_length = max_length
# Calculate statistic for the left side
left_slice = _slice_at_axis(
slice(left_index, left_index + left_length), axis
)
left_chunk = padded[left_slice]
left_stat = stat_func(left_chunk, axis=axis, keepdims=True)
_round_if_needed(left_stat, padded.dtype)
if left_length == right_length == max_length:
# return early as right_stat must be identical to left_stat
return left_stat, left_stat
# Calculate statistic for the right side
right_slice = _slice_at_axis(
slice(right_index - right_length, right_index), axis
)
right_chunk = padded[right_slice]
right_stat = stat_func(right_chunk, axis=axis, keepdims=True)
_round_if_needed(right_stat, padded.dtype)
return left_stat, right_stat
def _set_reflect_both(padded, axis, width_pair, method, include_edge=False):
"""Pads an `axis` of `arr` using reflection.
Args:
padded(cupy.ndarray): Input array of arbitrary shape.
axis(int): Axis along which to pad `arr`.
width_pair((int, int)): Pair of widths that mark the pad area on both
sides in the given dimension.
method(str): Controls method of reflection; options are 'even' or 'odd'.
include_edge(bool, optional): If true, edge value is included in
reflection, otherwise the edge value forms the symmetric axis to the
reflection. (Default value = False)
"""
left_pad, right_pad = width_pair
old_length = padded.shape[axis] - right_pad - left_pad
if include_edge:
# Edge is included, we need to offset the pad amount by 1
edge_offset = 1
else:
edge_offset = 0 # Edge is not included, no need to offset pad amount
old_length -= 1 # but must be omitted from the chunk
if left_pad > 0:
# Pad with reflected values on left side:
# First limit chunk size which can't be larger than pad area
chunk_length = min(old_length, left_pad)
# Slice right to left, stop on or next to edge, start relative to stop
stop = left_pad - edge_offset
start = stop + chunk_length
left_slice = _slice_at_axis(slice(start, stop, -1), axis)
left_chunk = padded[left_slice]
if method == 'odd':
# Negate chunk and align with edge
edge_slice = _slice_at_axis(slice(left_pad, left_pad + 1), axis)
left_chunk = 2 * padded[edge_slice] - left_chunk
# Insert chunk into padded area
start = left_pad - chunk_length
stop = left_pad
pad_area = _slice_at_axis(slice(start, stop), axis)
padded[pad_area] = left_chunk
# Adjust pointer to left edge for next iteration
left_pad -= chunk_length
if right_pad > 0:
# Pad with reflected values on right side:
# First limit chunk size which can't be larger than pad area
chunk_length = min(old_length, right_pad)
# Slice right to left, start on or next to edge, stop relative to start
start = -right_pad + edge_offset - 2
stop = start - chunk_length
right_slice = _slice_at_axis(slice(start, stop, -1), axis)
right_chunk = padded[right_slice]
if method == 'odd':
# Negate chunk and align with edge
edge_slice = _slice_at_axis(
slice(-right_pad - 1, -right_pad), axis
)
right_chunk = 2 * padded[edge_slice] - right_chunk
# Insert chunk into padded area
start = padded.shape[axis] - right_pad
stop = start + chunk_length
pad_area = _slice_at_axis(slice(start, stop), axis)
padded[pad_area] = right_chunk
# Adjust pointer to right edge for next iteration
right_pad -= chunk_length
return left_pad, right_pad
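# Illustrative trace (even reflection, include_edge=False): padding the
# vector [1, 2, 3] with width_pair (2, 1) fills the left pad with [3, 2] and
# the right pad with [2], mirroring around the edge values without repeating
# them; this matches cupy.pad(cupy.array([1, 2, 3]), (2, 1), 'reflect').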
def _set_wrap_both(padded, axis, width_pair):
"""Pads an `axis` of `arr` with wrapped values.
Args:
padded(cupy.ndarray): Input array of arbitrary shape.
axis(int): Axis along which to pad `arr`.
width_pair((int, int)): Pair of widths that mark the pad area on both
sides in the given dimension.
"""
left_pad, right_pad = width_pair
period = padded.shape[axis] - right_pad - left_pad
# If the current dimension of `arr` doesn't contain enough valid values
# (not part of the undefined pad area) we need to pad multiple times.
# Each time the pad area shrinks on both sides which is communicated with
# these variables.
new_left_pad = 0
new_right_pad = 0
if left_pad > 0:
# Pad with wrapped values on left side
# First slice chunk from right side of the non-pad area.
# Use min(period, left_pad) to ensure that chunk is not larger than
# pad area
right_slice = _slice_at_axis(
slice(
-right_pad - min(period, left_pad),
-right_pad if right_pad != 0 else None,
),
axis,
)
right_chunk = padded[right_slice]
if left_pad > period:
# Chunk is smaller than pad area
pad_area = _slice_at_axis(slice(left_pad - period, left_pad), axis)
new_left_pad = left_pad - period
else:
# Chunk matches pad area
pad_area = _slice_at_axis(slice(None, left_pad), axis)
padded[pad_area] = right_chunk
if right_pad > 0:
# Pad with wrapped values on right side
# First slice chunk from left side of the non-pad area.
# Use min(period, right_pad) to ensure that chunk is not larger than
# pad area
left_slice = _slice_at_axis(
slice(left_pad, left_pad + min(period, right_pad)), axis
)
left_chunk = padded[left_slice]
if right_pad > period:
# Chunk is smaller than pad area
pad_area = _slice_at_axis(
slice(-right_pad, -right_pad + period), axis
)
new_right_pad = right_pad - period
else:
# Chunk matches pad area
pad_area = _slice_at_axis(slice(-right_pad, None), axis)
padded[pad_area] = left_chunk
return new_left_pad, new_right_pad
def _as_pairs(x, ndim, as_index=False):
"""Broadcasts `x` to an array with shape (`ndim`, 2).
A helper function for `pad` that prepares and validates arguments like
`pad_width` for iteration in pairs.
Args:
x(scalar or array-like, optional): The object to broadcast to the shape
(`ndim`, 2).
ndim(int): Number of pairs the broadcasted `x` will have.
as_index(bool, optional): If `x` is not None, try to round each
element of `x` to an integer (dtype `cupy.intp`) and ensure every
element is positive. (Default value = False)
Returns:
nested iterables, shape (`ndim`, 2): The broadcasted version of `x`.
"""
if x is None:
# Pass through None as a special case, otherwise cupy.round(x) fails
# with an AttributeError
return ((None, None),) * ndim
elif isinstance(x, numbers.Number):
if as_index:
x = round(x)
return ((x, x),) * ndim
x = numpy.array(x)
if as_index:
x = numpy.asarray(numpy.round(x), dtype=numpy.intp)
if x.ndim < 3:
# Optimization: Possibly use faster paths for cases where `x` has
# only 1 or 2 elements. `numpy.broadcast_to` could handle these as well
# but is currently slower
if x.size == 1:
# x was supplied as a single value
x = x.ravel() # Ensure x[0] works for x.ndim == 0, 1, 2
if as_index and x < 0:
raise ValueError("index can't contain negative values")
return ((x[0], x[0]),) * ndim
if x.size == 2 and x.shape != (2, 1):
# x was supplied with a single value for each side
# but except case when each dimension has a single value
# which should be broadcasted to a pair,
# e.g. [[1], [2]] -> [[1, 1], [2, 2]] not [[1, 2], [1, 2]]
x = x.ravel() # Ensure x[0], x[1] works
if as_index and (x[0] < 0 or x[1] < 0):
raise ValueError("index can't contain negative values")
return ((x[0], x[1]),) * ndim
if as_index and x.min() < 0:
raise ValueError("index can't contain negative values")
# Converting the array with `tolist` seems to improve performance
# when iterating and indexing the result (see usage in `pad`)
x_view = x.view()
x_view.shape = (ndim, 2)
return x_view.tolist()
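# Broadcast behavior sketch (values illustrative):
#   _as_pairs(3, 2)                 -> ((3, 3), (3, 3))
#   _as_pairs((1, 2), 2)            -> ((1, 2), (1, 2))
#   _as_pairs(((1, 2), (3, 4)), 2)  -> [[1, 2], [3, 4]]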
# def _pad_dispatcher(array, pad_width, mode=None, **kwargs):
# return (array,)
###############################################################################
# Public functions
# @array_function_dispatch(_pad_dispatcher, module='numpy')
def pad(array, pad_width, mode='constant', **kwargs):
"""Pads an array with specified widths and values.
Args:
array(cupy.ndarray): The array to pad.
pad_width(sequence, array_like or int): Number of values padded to the
edges of each axis. ((before_1, after_1), ... (before_N, after_N))
unique pad widths for each axis. ((before, after),) yields same
before and after pad for each axis. (pad,) or int is a shortcut for
before = after = pad width for all axes. You cannot specify
``cupy.ndarray``.
mode(str or function, optional): One of the following string values or a
user supplied function
'constant' (default)
Pads with a constant value.
'edge'
Pads with the edge values of array.
'linear_ramp'
Pads with the linear ramp between end_value and the array edge
value.
'maximum'
Pads with the maximum value of all or part of the vector along
each axis.
'mean'
Pads with the mean value of all or part of the vector along each
axis.
'median'
Pads with the median value of all or part of the vector along
each axis. (Not Implemented)
'minimum'
Pads with the minimum value of all or part of the vector along
each axis.
'reflect'
Pads with the reflection of the vector mirrored on the first and
last values of the vector along each axis.
'symmetric'
Pads with the reflection of the vector mirrored along the edge
of the array.
'wrap'
Pads with the wrap of the vector along the axis. The first
values are used to pad the end and the end values are used to
pad the beginning.
'empty'
Pads with undefined values.
<function>
Padding function, see Notes.
stat_length(sequence or int, optional): Used in 'maximum', 'mean',
'median', and 'minimum'. Number of values at edge of each axis used
to calculate the statistic value.
((before_1, after_1), ... (before_N, after_N)) unique statistic
lengths for each axis. ((before, after),) yields same before and
after statistic lengths for each axis. (stat_length,) or int is a
shortcut for before = after = statistic length for all axes.
Default is ``None``, to use the entire axis. You cannot specify
``cupy.ndarray``.
constant_values(sequence or scalar, optional): Used in 'constant'. The
values to set the padded values for each axis.
((before_1, after_1), ... (before_N, after_N)) unique pad constants
for each axis.
((before, after),) yields same before and after constants for each
axis.
(constant,) or constant is a shortcut for before = after = constant
for all axes.
Default is 0. You cannot specify ``cupy.ndarray``.
end_values(sequence or scalar, optional): Used in 'linear_ramp'. The
values used for the ending value of the linear_ramp and that will
form the edge of the padded array.
((before_1, after_1), ... (before_N, after_N)) unique end values
for each axis.
((before, after),) yields same before and after end
values for each axis.
(constant,) or constant is a shortcut for before = after = constant
for all axes.
Default is 0. You cannot specify ``cupy.ndarray``.
reflect_type({'even', 'odd'}, optional): Used in 'reflect', and
'symmetric'. The 'even' style is the default with an unaltered
reflection around the edge value. For the 'odd' style, the extended
part of the array is created by subtracting the reflected values from
two times the edge value.
Returns:
cupy.ndarray: Padded array with shape extended by ``pad_width``.
.. note::
For an array with rank greater than 1, some of the padding of later
axes is calculated from padding of previous axes. This is easiest to
think about with a rank 2 array where the corners of the padded array
are calculated by using padded values from the first axis.
The padding function, if used, should modify a rank 1 array in-place.
It has the following signature:
``padding_func(vector, iaxis_pad_width, iaxis, kwargs)``
where
vector (cupy.ndarray)
A rank 1 array already padded with zeros. Padded values are
``vector[:iaxis_pad_width[0]]`` and
``vector[-iaxis_pad_width[1]:]``.
iaxis_pad_width (tuple)
A 2-tuple of ints, ``iaxis_pad_width[0]`` represents the number of
values padded at the beginning of vector where
``iaxis_pad_width[1]`` represents the number of values padded at
the end of vector.
iaxis (int)
The axis currently being calculated.
kwargs (dict)
Any keyword arguments the function requires.
Examples
--------
>>> a = cupy.array([1, 2, 3, 4, 5])
>>> cupy.pad(a, (2, 3), 'constant', constant_values=(4, 6))
array([4, 4, 1, ..., 6, 6, 6])
>>> cupy.pad(a, (2, 3), 'edge')
array([1, 1, 1, ..., 5, 5, 5])
>>> cupy.pad(a, (2, 3), 'linear_ramp', end_values=(5, -4))
array([ 5, 3, 1, 2, 3, 4, 5, 2, -1, -4])
>>> cupy.pad(a, (2,), 'maximum')
array([5, 5, 1, 2, 3, 4, 5, 5, 5])
>>> cupy.pad(a, (2,), 'mean')
array([3, 3, 1, 2, 3, 4, 5, 3, 3])
>>> a = cupy.array([[1, 2], [3, 4]])
>>> cupy.pad(a, ((3, 2), (2, 3)), 'minimum')
array([[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1],
[3, 3, 3, 4, 3, 3, 3],
[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1]])
>>> a = cupy.array([1, 2, 3, 4, 5])
>>> cupy.pad(a, (2, 3), 'reflect')
array([3, 2, 1, 2, 3, 4, 5, 4, 3, 2])
>>> cupy.pad(a, (2, 3), 'reflect', reflect_type='odd')
array([-1, 0, 1, 2, 3, 4, 5, 6, 7, 8])
>>> cupy.pad(a, (2, 3), 'symmetric')
array([2, 1, 1, 2, 3, 4, 5, 5, 4, 3])
>>> cupy.pad(a, (2, 3), 'symmetric', reflect_type='odd')
array([0, 1, 1, 2, 3, 4, 5, 5, 6, 7])
>>> cupy.pad(a, (2, 3), 'wrap')
array([4, 5, 1, 2, 3, 4, 5, 1, 2, 3])
>>> def pad_with(vector, pad_width, iaxis, kwargs):
... pad_value = kwargs.get('padder', 10)
... vector[:pad_width[0]] = pad_value
... vector[-pad_width[1]:] = pad_value
>>> a = cupy.arange(6)
>>> a = a.reshape((2, 3))
>>> cupy.pad(a, 2, pad_with)
    array([[10, 10, 10, 10, 10, 10, 10],
           [10, 10, 10, 10, 10, 10, 10],
[10, 10, 0, 1, 2, 10, 10],
[10, 10, 3, 4, 5, 10, 10],
[10, 10, 10, 10, 10, 10, 10],
[10, 10, 10, 10, 10, 10, 10]])
>>> cupy.pad(a, 2, pad_with, padder=100)
array([[100, 100, 100, 100, 100, 100, 100],
[100, 100, 100, 100, 100, 100, 100],
[100, 100, 0, 1, 2, 100, 100],
[100, 100, 3, 4, 5, 100, 100],
[100, 100, 100, 100, 100, 100, 100],
[100, 100, 100, 100, 100, 100, 100]])
"""
if isinstance(pad_width, numbers.Integral):
pad_width = ((pad_width, pad_width),) * array.ndim
else:
pad_width = numpy.asarray(pad_width)
if not pad_width.dtype.kind == 'i':
raise TypeError('`pad_width` must be of integral type.')
# Broadcast to shape (array.ndim, 2)
pad_width = _as_pairs(pad_width, array.ndim, as_index=True)
if callable(mode):
# Old behavior: Use user-supplied function with numpy.apply_along_axis
function = mode
# Create a new zero padded array
padded, _ = _pad_simple(array, pad_width, fill_value=0)
# And apply along each axis
for axis in range(padded.ndim):
# Iterate using ndindex as in apply_along_axis, but assuming that
# function operates inplace on the padded array.
# view with the iteration axis at the end
view = cupy.moveaxis(padded, axis, -1)
# compute indices for the iteration axes, and append a trailing
# ellipsis to prevent 0d arrays decaying to scalars (gh-8642)
inds = numpy.ndindex(view.shape[:-1])
inds = (ind + (Ellipsis,) for ind in inds)
for ind in inds:
function(view[ind], pad_width[axis], axis, kwargs)
return padded
# Make sure that no unsupported keywords were passed for the current mode
allowed_kwargs = {
'empty': [],
'edge': [],
'wrap': [],
'constant': ['constant_values'],
'linear_ramp': ['end_values'],
'maximum': ['stat_length'],
'mean': ['stat_length'],
# 'median': ['stat_length'],
'minimum': ['stat_length'],
'reflect': ['reflect_type'],
'symmetric': ['reflect_type'],
}
try:
unsupported_kwargs = set(kwargs) - set(allowed_kwargs[mode])
except KeyError:
raise ValueError("mode '{}' is not supported".format(mode))
if unsupported_kwargs:
raise ValueError(
"unsupported keyword arguments for mode '{}': {}".format(
mode, unsupported_kwargs
)
)
if mode == 'constant':
values = kwargs.get('constant_values', 0)
if isinstance(values, numbers.Number) and values == 0 and (
array.ndim == 1 or array.size < 4e6):
# faster path for 1d arrays or small n-dimensional arrays
return _pad_simple(array, pad_width, 0)[0]
stat_functions = {
'maximum': cupy.max,
'minimum': cupy.min,
'mean': cupy.mean,
# 'median': cupy.median,
}
# Create array with final shape and original values
# (padded area is undefined)
padded, original_area_slice = _pad_simple(array, pad_width)
# And prepare iteration over all dimensions
# (zipping may be more readable than using enumerate)
axes = range(padded.ndim)
if mode == 'constant':
values = _as_pairs(values, padded.ndim)
for axis, width_pair, value_pair in zip(axes, pad_width, values):
roi = _view_roi(padded, original_area_slice, axis)
_set_pad_area(roi, axis, width_pair, value_pair)
elif mode == 'empty':
pass # Do nothing as _pad_simple already returned the correct result
elif array.size == 0:
# Only modes 'constant' and 'empty' can extend empty axes, all other
# modes depend on `array` not being empty
# -> ensure every empty axis is only 'padded with 0'
for axis, width_pair in zip(axes, pad_width):
if array.shape[axis] == 0 and any(width_pair):
raise ValueError(
"can't extend empty axis {} using modes other than "
"'constant' or 'empty'".format(axis)
)
# passed, don't need to do anything more as _pad_simple already
# returned the correct result
elif mode == 'edge':
for axis, width_pair in zip(axes, pad_width):
roi = _view_roi(padded, original_area_slice, axis)
edge_pair = _get_edges(roi, axis, width_pair)
_set_pad_area(roi, axis, width_pair, edge_pair)
elif mode == 'linear_ramp':
end_values = kwargs.get('end_values', 0)
end_values = _as_pairs(end_values, padded.ndim)
for axis, width_pair, value_pair in zip(axes, pad_width, end_values):
roi = _view_roi(padded, original_area_slice, axis)
ramp_pair = _get_linear_ramps(roi, axis, width_pair, value_pair)
_set_pad_area(roi, axis, width_pair, ramp_pair)
elif mode in stat_functions:
func = stat_functions[mode]
length = kwargs.get('stat_length', None)
length = _as_pairs(length, padded.ndim, as_index=True)
for axis, width_pair, length_pair in zip(axes, pad_width, length):
roi = _view_roi(padded, original_area_slice, axis)
stat_pair = _get_stats(roi, axis, width_pair, length_pair, func)
_set_pad_area(roi, axis, width_pair, stat_pair)
elif mode in {'reflect', 'symmetric'}:
method = kwargs.get('reflect_type', 'even')
include_edge = True if mode == 'symmetric' else False
for axis, (left_index, right_index) in zip(axes, pad_width):
if array.shape[axis] == 1 and (left_index > 0 or right_index > 0):
# Extending singleton dimension for 'reflect' is legacy
# behavior; it really should raise an error.
edge_pair = _get_edges(padded, axis, (left_index, right_index))
_set_pad_area(
padded, axis, (left_index, right_index), edge_pair
)
continue
roi = _view_roi(padded, original_area_slice, axis)
while left_index > 0 or right_index > 0:
# Iteratively pad until dimension is filled with reflected
# values. This is necessary if the pad area is larger than
# the length of the original values in the current dimension.
left_index, right_index = _set_reflect_both(
roi, axis, (left_index, right_index), method, include_edge
)
elif mode == 'wrap':
for axis, (left_index, right_index) in zip(axes, pad_width):
roi = _view_roi(padded, original_area_slice, axis)
while left_index > 0 or right_index > 0:
# Iteratively pad until dimension is filled with wrapped
# values. This is necessary if the pad area is larger than
# the length of the original values in the current dimension.
left_index, right_index = _set_wrap_both(
roi, axis, (left_index, right_index)
)
    return padded
<|file_name|>data_meeting_io.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow input/output utilities."""
import collections
import json
import math
import os
import numpy as np
import tensorflow.compat.v1 as tf
class Features(object):
"""Feature keys."""
# Waveform(s) of audio observed at receiver(s).
RECEIVER_AUDIO = 'receiver_audio'
# Images of each source at each microphone, including reverberation.
# Images are real valued with shape [sources, microphones, length].
SOURCE_IMAGES = 'source_images'
  # Boolean diarization labels of shape (sources, length) which indicate
  # whether a source is active or not. For a nonexistent source, it is all zeros.
DIARIZATION_LABELS = 'diarization_labels'
# Speaker indices (global indices which are contiguous over all training data
# starting with 0) that are present in this meeting or meeting chunk with
# shape (sources,). If number of speakers present in the meeting is less
# than sources, for a non-existing speaker/source, the speaker index is
# set to -1. Note that, for a meeting sub-block, we still have all the
# speaker indices in the meeting even if not all the speakers are present
# in that meeting sub-block.
SPEAKER_INDEX = 'speaker_indices'
def get_inference_spec(num_receivers=1,
num_samples=None):
"""Returns a specification of features in tf.Examples in roomsim format."""
spec = {}
spec[Features.RECEIVER_AUDIO] = tf.FixedLenFeature(
[num_receivers, num_samples], tf.float32)
return spec
def get_roomsim_spec(num_sources,
num_receivers,
num_samples):
"""Returns a specification of features in tf.Examples in roomsim format.
Args:
num_sources: Expected number of sources.
num_receivers: Number of microphones in array.
num_samples: Expected length of sources in samples. 'None' for variable.
Returns:
Feature specifications suitable to pass to tf.parse_example.
"""
spec = {}
spec[Features.RECEIVER_AUDIO] = tf.FixedLenFeature(
[num_receivers, num_samples], tf.float32)
spec[Features.SOURCE_IMAGES] = tf.FixedLenFeature(
[num_sources, num_receivers, num_samples], tf.float32)
return spec
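# Minimal usage sketch (assumed batch of serialized tf.Examples; the docstring
# above notes the spec is meant for tf.parse_example):
#
#   spec = get_roomsim_spec(num_sources=2, num_receivers=1, num_samples=16000)
#   parsed = tf.parse_example(serialized_batch, spec)
#   # parsed[Features.SOURCE_IMAGES] has shape [batch, 2, 1, 16000].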
def placeholders_from_spec(feature_spec):
"""Returns placeholders compatible with a given feature spec."""
placeholders = {}
for key, feature in feature_spec.items():
placeholders[key] = tf.placeholder(dtype=feature.dtype,
shape=[1] + feature.shape,
name=key)
return placeholders
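# Minimal usage sketch (assumed single-receiver shapes):
#
#   spec = get_inference_spec(num_receivers=1, num_samples=16000)
#   placeholders = placeholders_from_spec(spec)
#   # placeholders['receiver_audio'] -> tf.placeholder of shape [1, 1, 16000],
#   # i.e. a batch of one [num_receivers, num_samples] waveform.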
def _read_meeting_list(meeting_list, meeting_length_type):
"""Reads meeting list from json file to get necessary information.
Args:
meeting_list: A meeting list read from a json file.
meeting_length_type: One of 'maximum', 'minimum' or 'average'.
Since typically meeting lengths are not fixed, we can
set the training/eval length to the maximum, minimum or average meeting
length in the json file based on the value of this argument. We
eventually pad or clip individual meetings to attain the desired constant
meeting length in our data reading pipeline.
Returns:
num_meetings: Number of meetings.
max_num_spk_per_meeting: Maximum number of speakers in a meeting.
max_num_utt_per_spk: Maximum number of utterances per speaker.
max_dia_seg_per_utt: Maximum diarization segments per utterance.
max_utt_length: Maximum utterance length.
meeting_length: Meeting length that will be used.
speaker_ids: A list of speaker ids that appear in meetings.
"""
max_num_spk_per_meeting = 0
max_num_utt_per_meeting = 0
meeting_lengths = []
speaker_id_to_count = collections.defaultdict(int)
num_meetings = len(meeting_list)
total_spk = 0
total_utt = 0
max_utt_length = 0
max_num_utt_per_spk = 0
max_dia_seg_per_utt = 0
for one_meeting in meeting_list:
sources_start_end = one_meeting['utterance_start_end']
meeting_length = int(one_meeting['duration'])
num_utt_in_meeting = len(sources_start_end)
max_num_utt_per_meeting = max(max_num_utt_per_meeting, num_utt_in_meeting)
utt2spk = []
spk2wavs = collections.defaultdict(list)
spk_utt_idx = collections.defaultdict(int)
for start, end, spkid, wav_path in sources_start_end:
max_utt_length = max(max_utt_length, end - start)
utt2spk.append(spkid)
spk2wavs[spkid].append(wav_path)
speaker_id_to_count[spkid] += 1
spk_utt_idx[spkid] += 1
diarization_info = \
one_meeting['diarization_label'][spkid][spk_utt_idx[spkid] - 1]
num_seg_in_utt = len(diarization_info)
max_dia_seg_per_utt = max(max_dia_seg_per_utt, num_seg_in_utt)
speakers_in_meeting = list(set(utt2spk))
num_spk = len(speakers_in_meeting)
for spkid in speakers_in_meeting:
max_num_utt_per_spk = max(max_num_utt_per_spk,
len(set(spk2wavs[spkid])))
max_num_spk_per_meeting = max(max_num_spk_per_meeting, num_spk)
total_spk += num_spk
total_utt += num_utt_in_meeting
meeting_lengths.append(meeting_length)
if meeting_length_type == 'maximum':
meeting_length = int(math.ceil(np.max(meeting_lengths)))
elif meeting_length_type == 'minimum':
meeting_length = int(math.floor(np.min(meeting_lengths)))
elif meeting_length_type == 'average':
meeting_length = int(round(np.mean(meeting_lengths)))
elif isinstance(meeting_length_type, int):
meeting_length = meeting_length_type
else:
raise ValueError(f'Unknown meeting_length_type={meeting_length_type}')
speaker_ids = sorted(speaker_id_to_count.keys())
tf.logging.info('Read %s meetings from json file.', num_meetings)
tf.logging.info('Average number of speakers per meeting = %f.',
total_spk / num_meetings)
tf.logging.info('Average number of utterances per speaker = %f.',
total_utt / total_spk)
return (num_meetings, max_num_spk_per_meeting, max_num_utt_per_spk,
max_dia_seg_per_utt, max_utt_length,
meeting_length, speaker_ids)
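# Illustrative sketch of the meeting_length_type choices, assuming meeting
# durations of [48000, 64000, 80000] samples:
#   'maximum' -> 80000, 'minimum' -> 48000, 'average' -> 64000,
#   an int such as 32000 -> used verbatim; anything else raises ValueError.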
def _pad_mics_tf(signal, new_mics):
"""Pads new mic channels to an input tensor and returns the updated tensor.
Args:
signal: A tf.tensor of shape (input_mics, samples)
new_mics: The number of new mic channels to be added (integer scalar tensor)
Returns:
padded_signal: A tf.tensor of shape (input_mics + new_mics, samples)
"""
# Take first new_mics channels and shift them by 1 sample.
new_inputs = tf.roll(signal[:new_mics, :], shift=1, axis=-1)
# Add noise 1e-3 times the RMS value in the signal.
noise_scale = 1e-3 * tf.sqrt(tf.reduce_mean(tf.square(new_inputs)))
new_inputs += noise_scale * tf.random.normal(tf.shape(new_inputs))
return tf.concat((signal, new_inputs), axis=0)
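# Shape sketch (hypothetical input, illustrating the contract above):
#
#   signal = tf.zeros((2, 16000))     # 2 recorded mic channels
#   padded = _pad_mics_tf(signal, 2)  # -> shape (4, 16000)
#
# The synthetic channels are copies of the first two, shifted by one sample
# and perturbed with low-level Gaussian noise so they are not exact duplicates.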
def json_to_dataset(json_file,
batch_size,
parallel_readers=tf.data.experimental.AUTOTUNE,
randomize_order=False,
num_examples=-1,
prefetch_buffer_size=tf.data.experimental.AUTOTUNE,
shuffle_buffer_size=5,
repeat=True,
num_mics=1,
sample_rate=16000,
use_relative_path=True,
meeting_length_type='maximum',
num_meeting_subdivisions=1,
sensor_noise_range=(0.0, 0.0)):
r"""Fetches features from a dictionary and source .wav files.
Args:
json_file: A json file containing meeting information.
batch_size: The number of examples to read.
parallel_readers: Number of dataset.map operations that should happen in
parallel.
randomize_order: Whether to randomly shuffle features.
num_examples: Limit number of examples to this value. Unlimited if -1.
    prefetch_buffer_size: How many batches to prefetch.
shuffle_buffer_size: The size of the shuffle buffer.
repeat: If True, repeat the dataset.
num_mics: The expected number of mics in source wav files.
sample_rate: Sample rate of wav files read.
use_relative_path: If True, the path for .wav files is relative to the
json file, otherwise, the paths are absolute.
meeting_length_type: 'maximum', 'minimum' or 'average'. Can also specify
an integer value which is the length in samples, which will be used.
num_meeting_subdivisions: If > 1, chop the meeting in time into this
many chunks.
sensor_noise_range: Range of standard deviation for sensor noise. If
sensor_noise_range[1] <= 0.0, then no sensor noise is added. Otherwise,
white Gaussian sensor noise with uniformly random standard deviation
from the provided range is added as the first reference signal.
Returns:
A batch_size number of features constructed from wav files.
Raises:
    ValueError: if `meeting_length_type` is not one of the supported values.
"""
tf.logging.info('Reading %s.', json_file)
with open(json_file, 'r') as f:
meeting_list = json.load(f)
(num_meetings, max_num_spk, max_num_utt_per_spk, max_dia_seg_per_utt,
max_utt_length, samples, speaker_id_list) = _read_meeting_list(
meeting_list, meeting_length_type)
tf.logging.info('Maximum number of speakers per meeting = %s', max_num_spk)
tf.logging.info('Maximum number of utterances per speaker = %s',
max_num_utt_per_spk)
tf.logging.info('Maximum diarization segments per utterance = %s',
max_dia_seg_per_utt)
tf.logging.info('Maximum utterance length in seconds = %s',
max_utt_length/sample_rate)
tf.logging.info('Used meeting length in seconds = %s', samples/sample_rate)
tf.logging.info('Number of speakers seen in all meetings = %s',
len(speaker_id_list))
tf.logging.info('Using %s parallel readers.', parallel_readers)
tf.logging.info('shuffle_buffer=%s, prefetch_buffer=%s, num_mics=%s, '
'randomize=%s.', shuffle_buffer_size, prefetch_buffer_size,
num_mics, randomize_order)
if use_relative_path:
base_path = os.path.dirname(json_file)
spkid2idx = {key: idx for idx, key in enumerate(speaker_id_list)}
  def utterance_info_generator():
    """Yields utterance information from each meeting.
Utterance info is in the form of a 6-tuple:
wav_path, diarization, spkidx, meeting_scale, start, gain.
"""
default_diarization = np.zeros((max_dia_seg_per_utt, 2), dtype=np.int32)
default_utt = ('0', default_diarization, -1, 0.0, 0, 0.0)
for one_meeting in meeting_list:
meeting_info = collections.defaultdict(list)
sources_start_end = one_meeting['utterance_start_end']
num_utt_in_meeting = len(sources_start_end)
spk_num_in_meeting = {}
new_spknum = 0
spkids_in_meeting = []
spk_utt_idx = collections.defaultdict(int)
meeting_scale = float(one_meeting['meeting_scale'])
for utt_idx in range(num_utt_in_meeting):
start, end, spkid, wav_path = sources_start_end[utt_idx]
spkidx = spkid2idx[spkid]
if start >= samples:
continue
if end >= samples:
end = samples
if spkidx in spk_num_in_meeting:
spknum = spk_num_in_meeting[spkidx]
else:
spknum = new_spknum
if spknum > max_num_spk:
continue
spkids_in_meeting.append(spkidx)
spk_num_in_meeting[spkidx] = spknum
new_spknum += 1
if use_relative_path:
wav_path = os.path.join(base_path, wav_path)
gain = one_meeting['utterance_gain'][utt_idx]
# Make diarization_labels array.
diarization = np.zeros((max_dia_seg_per_utt, 2), dtype=np.int32)
spk_utt_idx[spknum] += 1
diarization_info = \
one_meeting['diarization_label'][spkid][spk_utt_idx[spknum] - 1]
# Go over diarization segments in utterance.
for i, segment_st_end in enumerate(diarization_info):
segment_start, segment_end = segment_st_end
if segment_start >= samples:
continue
if segment_end > samples:
segment_end = samples
adjusted_start = segment_start - start
adjusted_end = segment_end - start
diarization[i, 0] = adjusted_start
diarization[i, 1] = adjusted_end
meeting_info[spknum].append((wav_path, diarization, spkidx,
meeting_scale, start, gain))
for spknum in range(max_num_spk):
if spknum in meeting_info:
for utt in range(max_num_utt_per_spk):
if utt < len(meeting_info[spknum]):
yield meeting_info[spknum][utt]
else:
yield default_utt
else:
for utt in range(max_num_utt_per_spk):
yield default_utt
utterance_info_list = list(utterance_info_generator())
# No need for the original meeting_list from now on.
del meeting_list
num_utterances = len(utterance_info_list)
tensor_shape = [(num_utterances, 1),
(num_utterances, max_dia_seg_per_utt, 2),
(num_utterances, 1),
(num_utterances, 1),
(num_utterances, 1),
(num_utterances, 1)]
tensor_type = [np.string_, np.int32, np.int32, np.float32,
np.int32, np.float32]
(wav_paths, diarizations, spkindices, meeting_scales, start_samples,
utterance_gains) = [np.reshape(
tensor, tensor_shape[i]).astype(tensor_type[i]) for i, tensor in
enumerate(list(zip(*utterance_info_list)))]
<|fim▁hole|> utterance_gains))
if repeat:
dataset = dataset.repeat()
if randomize_order:
# Randomize meeting order for each epoch through the dataset.
dataset = dataset.batch(max_num_spk * max_num_utt_per_spk)
dataset = dataset.shuffle(num_meetings)
dataset = dataset.flat_map(
lambda w, d, s, m, t, u: tf.data.Dataset.from_tensor_slices(
(w, d, s, m, t, u)))
# Read in wav files.
def decode_wav(wav):
audio_bytes = tf.read_file(wav)
waveform, _ = tf.audio.decode_wav(audio_bytes,
desired_samples=max_utt_length)
waveform = tf.transpose(waveform)
num_read_mics = tf.shape(waveform)[0]
waveform = tf.cond(num_read_mics >= num_mics,
lambda: waveform[:num_mics, :],
lambda: _pad_mics_tf(waveform, num_mics - num_read_mics))
waveform = tf.reshape(waveform, (num_mics, max_utt_length))
return waveform
def decode_wav_or_return_zeros(wav, gain=1.0):
return tf.cond(
tf.equal(wav, '0'),
lambda: tf.zeros((num_mics, max_utt_length), dtype=tf.float32),
lambda: gain * decode_wav(wav))
def utterance_reader(wav_path, diarization, spkidx, meet_scale, start, gain):
"""Reads wave file for utterance and scale it."""
utt_tensor = decode_wav_or_return_zeros(wav_path[0], gain=gain)
return utt_tensor, diarization, spkidx, meet_scale, start
  # Sandwich the heavy I/O part between prefetches.
dataset = dataset.prefetch(parallel_readers)
dataset = dataset.map(utterance_reader,
num_parallel_calls=parallel_readers)
dataset = dataset.prefetch(parallel_readers)
def pad_utterance(utt_tensor, diarization, spkidx, meeting_scale, start):
"""Pads utterance to meeting length.
Args:
utt_tensor: Utterance with shape (num_mics, max_utt_length).
diarization: Diarization with shape (max_dia_seg_per_utt, 2).
spkidx: Speaker index (global) for the utterance.
meeting_scale: Target meeting scale.
start: Start index of utterance in the meeting.
Returns:
utt_tensor_padded: Padded utt tensor (num_mics, samples + max_utt_length)
diarization_padded: Diarization updated using the start index.
spkidx: Speaker index passed unchanged.
meeting_scale: Target meeting scale passed unchanged.
"""
start = start[0]
end_paddings = samples - start
utt_tensor_padded = tf.pad(utt_tensor, ((0, 0), (start, end_paddings)))
diarization_padded = start + diarization
return utt_tensor_padded, diarization_padded, spkidx, meeting_scale
dataset = dataset.map(pad_utterance,
num_parallel_calls=parallel_readers)
dataset = dataset.batch(max_num_utt_per_spk)
def make_reference(utt_tensor, diarization, spkidx, meeting_scale):
"""Makes a reference from fixed length utterance tensors.
Args:
utt_tensor: Utterances with shape
(max_num_utt_per_spk, num_mics, samples + max_utt_len)
diarization: Diarization ranges with shape
(max_num_utt_per_spk, max_dia_seg_per_utt, 2).
spkidx: Speaker indices (repeated) with shape (max_num_utt_per_spk)
meeting_scale: Target meeting scale (repeated).
Returns:
reference: Meeting audio with shape (num_mics, samples)
diarization_labels: tf.bool with shape (samples)
spkidx: Scalar speaker index.
meeting_scale: Target meeting scale.
"""
reference_waveform = tf.reduce_sum(utt_tensor, axis=0)
reference_waveform = reference_waveform[:, :samples]
diarization = tf.reshape(diarization,
(max_num_utt_per_spk * max_dia_seg_per_utt, 2))
active_samples_list = [
tf.range(diarization[i, 0], diarization[i, 1]) for i in
range(max_num_utt_per_spk * max_dia_seg_per_utt)]
active_samples = tf.reshape(
tf.concat(active_samples_list, axis=0), (-1, 1))
dia_full_init = tf.zeros((samples + max_utt_length, 1), dtype=tf.int32)
dia_full = tf.tensor_scatter_add(
dia_full_init, active_samples, tf.ones(tf.shape(active_samples),
dtype=tf.int32))
dia_full = tf.cast(dia_full[:samples, 0], dtype=tf.bool)
spkidx = spkidx[0]
meeting_scale = meeting_scale[0]
return reference_waveform, dia_full, spkidx, meeting_scale
dataset = dataset.map(make_reference,
num_parallel_calls=parallel_readers)
dataset = dataset.batch(max_num_spk)
# If num_meeting_subdivisions > 1, split time-dependent meeting data in time
# into num_meeting_subdivisions equal chunks. Note that speaker ids and
# meeting_scale are repeated for each chunk.
if num_meeting_subdivisions > 1:
def chop_meeting_data(reference_waveforms, diarization_labels, speaker_ids,
meeting_scale, nsplit=num_meeting_subdivisions):
samples = tf.shape(reference_waveforms)[-1]
new_samples = nsplit * (samples // nsplit)
reference_waveforms = tf.stack(
tf.split(reference_waveforms[..., :new_samples],
nsplit, axis=-1), axis=0)
diarization_labels = tf.stack(
tf.split(diarization_labels[..., :new_samples],
nsplit, axis=-1), axis=0)
speaker_ids = tf.reshape(speaker_ids, (1, max_num_spk))
speaker_ids = tf.broadcast_to(speaker_ids, (nsplit, max_num_spk))
meeting_scale = meeting_scale[0] * tf.ones((nsplit, max_num_spk))
return tf.data.Dataset.from_tensor_slices((reference_waveforms,
diarization_labels,
speaker_ids,
meeting_scale))
dataset = dataset.flat_map(chop_meeting_data)
samples = (samples // num_meeting_subdivisions)
# Build mixture and sources waveforms.
def combine_mixture_and_sources(reference_waveforms, diarization_labels,
speaker_ids, meeting_scale):
# waveforms has shape (num_sources, num_mics, num_samples).
speaker_ids = tf.reshape(speaker_ids, (max_num_spk,))
meeting_scale = meeting_scale[0]
mixture_waveform = tf.reduce_sum(reference_waveforms, axis=0)
current_mixture_scale = tf.reduce_max(tf.abs(mixture_waveform))
    # Note that when meetings are chopped, we cannot apply a meeting-level
    # scale. Instead, we apply the scale at the chunk level so that each
    # chunk has a maximum scale equal to the meeting_scale. However, we should
    # not apply any gain to an all-noise chunk, to avoid amplifying the noise,
    # so we skip scaling such chunks by checking the current_mixture_scale
    # value.
scale_refs = tf.cond(current_mixture_scale > 0.005,
lambda: meeting_scale / current_mixture_scale,
lambda: 1.0)
reference_waveforms *= scale_refs
num_sources = max_num_spk
if sensor_noise_range[1] > 0.0:
num_sources += 1
sensor_noise_gain = tf.random.uniform((), minval=sensor_noise_range[0],
maxval=sensor_noise_range[1])
sensor_noise = sensor_noise_gain * tf.random.normal(
(1, num_mics, samples))
reference_waveforms = tf.concat(
(sensor_noise, reference_waveforms), axis=0)
mixture_waveform = tf.reduce_sum(reference_waveforms, axis=0)
reference_waveforms.set_shape((num_sources, num_mics, samples))
mixture_waveform.set_shape((num_mics, samples))
diarization_labels.set_shape((max_num_spk, samples))
speaker_ids.set_shape((max_num_spk,))
return {'receiver_audio': mixture_waveform,
'source_images': reference_waveforms,
'diarization_labels': diarization_labels,
'speaker_indices': speaker_ids,
}
dataset = dataset.map(combine_mixture_and_sources,
num_parallel_calls=parallel_readers)
if randomize_order and num_meeting_subdivisions > 1:
# It would be good to shuffle examples to avoid having all examples
# coming from a single meeting when we split a meeting.
dataset = dataset.shuffle(shuffle_buffer_size * num_meeting_subdivisions)
dataset = dataset.prefetch(prefetch_buffer_size)
dataset = dataset.take(num_examples)
dataset = dataset.batch(batch_size, drop_remainder=True)
iterator = dataset.make_one_shot_iterator()
return iterator.get_next()
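# Minimal usage sketch (hypothetical paths and values, not from this repo):
#
#   features = json_to_dataset('meetings/train.json', batch_size=4,
#                              num_mics=1, meeting_length_type='minimum')
#   # features is a dict of batched tensors keyed by Features.RECEIVER_AUDIO,
#   # Features.SOURCE_IMAGES, Features.DIARIZATION_LABELS and
#   # Features.SPEAKER_INDEX.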
def input_fn(params):
"""An input function that uses params['feature_spec'].
Args:
params: A dictionary of experiment params.
Returns:
Features specified by params['feature_spec']. If 'inference' exists and is
True in params, then placeholders will be returned based on the spec in
params['inference_spec'], otherwise a dataset of examples read from
params['input_data'] will be returned.
"""
if params.get('inference', False):
feature_spec = params['inference_spec']
with tf.variable_scope('input_audio'):
return placeholders_from_spec(feature_spec)
else:
json_file = params.get('input_data', None)
io_params = params.get('io_params', {})
batch_size = params.get('batch_size', None)
randomize_order = params.get('randomize_order', False)
io_params['randomize_order'] = randomize_order
return json_to_dataset(json_file,
batch_size,
**io_params)<|fim▁end|> | dataset = tf.data.Dataset.from_tensor_slices(
(wav_paths, diarizations, spkindices, meeting_scales, start_samples, |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils import log
from distutils.core import setup
from distutils.core import Command
from distutils.command.build_py import build_py
from distutils.command.install import install
import os
import zeroinstall
class adjust_scripts_for_home(Command):
"""setup.py install --home puts libraries in ~/lib/python, but Python doesn't look there.
If we're installing with --home, modify the scripts to add this to sys.path.
Don't do this otherwise; the system copy mustn't conflict with the copy in $HOME.
"""
description = "(used internally when using --home)"
user_options = [
('scripts-dir=', 'd', "directory to install scripts to"),
('lib-dir=', 'd', "directory libraries install to"),
]
def initialize_options (self):
self.scripts_dir = None
self.lib_dir = None
def finalize_options (self):
self.set_undefined_options('install',
('install_scripts', 'scripts_dir'),
('install_lib', 'lib_dir'),
)
def run(self):
for script in self.distribution.scripts:
outfile = os.path.join(self.scripts_dir, os.path.basename(script))
stream = open(outfile)
code = stream.read()
stream.close()
code = code.replace('## PATH ##', '''
import os, sys
sys.path.insert(0, %s)''' % repr(self.lib_dir))
stream = open(outfile, 'w')
stream.write(code)<|fim▁hole|>
class build_with_data(build_py):
"""Python < 2.4 doesn't support package_data_files, so add it manually."""
package_data_files = [
"zeroinstall/gtkui/desktop.ui",
]
def run(self):
old = log.set_threshold(log.ERROR) # Avoid "__init__.py not found" warning
# Copy .py files and build, as usual
build_py.run(self)
log.set_threshold(old)
# Copy data files
for data_file in self.package_data_files:
outfile = os.path.join(self.build_lib, data_file)
self.copy_file(data_file, outfile, preserve_mode=0)
executable = (os.stat(data_file).st_mode & 0o111) != 0
if executable:
os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
class my_install(install):
def run(self):
install.run(self) # super.run()
if self.home:
self.run_command('adjust_scripts_for_home')
setup(name="zeroinstall-injector",
version=zeroinstall.version,
description="The Zero Install Injector (0launch)",
author="Thomas Leonard",
author_email="[email protected]",
url="http://0install.net",
scripts=['0install-python-fallback', '0alias'],
license='LGPL',
cmdclass={
'build_py': build_with_data,
'adjust_scripts_for_home': adjust_scripts_for_home,
'install': my_install,
},
long_description="""\
A running process is created by combining many different libraries (and other
components). In the Zero Install world, we have all versions of each library
available at all times. The problem then is how to choose which versions to
use.
The injector solves this problem by selecting components to meet a program's
requirements, according to a policy you give it. The injector finds out which
versions are available, and downloads and runs the ones you choose.""",
packages=["zeroinstall", "zeroinstall.support", "zeroinstall.injector", "zeroinstall.gtkui", "zeroinstall.cmd"])<|fim▁end|> | stream.close() |
<|file_name|>morestatus.py<|end_file_name|><|fim▁begin|># Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
"""make status give a bit more context
This extension will wrap the status command to make it show more context about
the state of the repo
"""
import math
import os
from edenscm.mercurial import (
commands,
hbisect,
merge as mergemod,
node as nodeutil,
pycompat,
registrar,
scmutil,
)
from edenscm.mercurial.error import Abort
from edenscm.mercurial.extensions import wrapcommand
from edenscm.mercurial.i18n import _
UPDATEARGS = "updateargs"
configtable = {}
configitem = registrar.configitem(configtable)
configitem("morestatus", "show", default=False)
def prefixlines(raw):
    """Surround lines with a comment char and a new line"""
lines = raw.splitlines()
commentedlines = ["# %s" % line for line in lines]
return "\n".join(commentedlines) + "\n"
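# Illustrative sketch of the helper's output:
#   prefixlines("line one\nline two") == "# line one\n# line two\n"
# so every status hint below renders as comment lines in the command output.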
def conflictsmsg(repo, ui):
mergestate = mergemod.mergestate.read(repo)
if not mergestate.active():
return
m = scmutil.match(repo[None])
unresolvedlist = [f for f in mergestate if m(f) and mergestate[f] == "u"]
if unresolvedlist:
mergeliststr = "\n".join(
[
" %s"
% os.path.relpath(os.path.join(repo.root, path), pycompat.getcwd())
for path in unresolvedlist
]
)
msg = (
_(
"""Unresolved merge conflicts:
%s
To mark files as resolved: hg resolve --mark FILE"""
)<|fim▁hole|> msg = _("No unresolved merge conflicts.")
ui.warn(prefixlines(msg))
def helpmessage(ui, continuecmd, abortcmd):
msg = _("To continue: %s\n" "To abort: %s") % (
continuecmd,
abortcmd,
)
ui.warn(prefixlines(msg))
def rebasemsg(repo, ui):
helpmessage(ui, "hg rebase --continue", "hg rebase --abort")
def histeditmsg(repo, ui):
helpmessage(ui, "hg histedit --continue", "hg histedit --abort")
def unshelvemsg(repo, ui):
helpmessage(ui, "hg unshelve --continue", "hg unshelve --abort")
def updatecleanmsg(dest=None):
warning = _("warning: this will discard uncommitted changes")
return "hg update --clean %s (%s)" % (dest or ".", warning)
def graftmsg(repo, ui):
# tweakdefaults requires `update` to have a rev hence the `.`
helpmessage(ui, "hg graft --continue", updatecleanmsg())
def updatemsg(repo, ui):
previousargs = repo.localvfs.tryreadutf8(UPDATEARGS)
if previousargs:
continuecmd = "hg " + previousargs
else:
continuecmd = "hg update " + repo.localvfs.readutf8("updatestate")[:12]
abortcmd = updatecleanmsg(repo._activebookmark)
helpmessage(ui, continuecmd, abortcmd)
def updatemergemsg(repo, ui):
helpmessage(ui, "hg update --continue", updatecleanmsg())
def mergemsg(repo, ui):
# tweakdefaults requires `update` to have a rev hence the `.`
helpmessage(ui, "hg commit", updatecleanmsg())
def bisectmsg(repo, ui):
msg = _(
"To mark the changeset good: hg bisect --good\n"
"To mark the changeset bad: hg bisect --bad\n"
"To abort: hg bisect --reset\n"
)
state = hbisect.load_state(repo)
bisectstatus = _(
"""Current bisect state: {} good commit(s), {} bad commit(s), {} skip commit(s)"""
).format(len(state["good"]), len(state["bad"]), len(state["skip"]))
ui.write_err(prefixlines(bisectstatus))
if len(state["good"]) > 0 and len(state["bad"]) > 0:
try:
nodes, commitsremaining, searching, badnode, goodnode = hbisect.bisect(
repo, state
)
searchesremaining = (
int(math.ceil(math.log(commitsremaining, 2)))
if commitsremaining > 0
else 0
)
bisectstatus = _(
"""
Current Tracker: bad commit current good commit
{}...{}...{}
Commits remaining: {}
Estimated bisects remaining: {}
"""
).format(
nodeutil.short(badnode),
nodeutil.short(nodes[0]),
nodeutil.short(goodnode),
commitsremaining,
searchesremaining,
)
ui.write_err(prefixlines(bisectstatus))
except Abort:
# ignore the output if bisect() fails
pass
ui.warn(prefixlines(msg))
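# Illustrative sketch (assumed numbers): bisection halves the remaining range,
# so the estimate above is ceil(log2(commitsremaining)); e.g. with 100 commits
# remaining, ceil(log2(100)) == 7 bisect steps at most.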
def fileexistspredicate(filename):
return lambda repo: repo.localvfs.exists(filename)
def mergepredicate(repo):
return len(repo[None].parents()) > 1
STATES = (
# (state, predicate to detect states, helpful message function)
("histedit", fileexistspredicate("histedit-state"), histeditmsg),
("bisect", fileexistspredicate("bisect.state"), bisectmsg),
("graft", fileexistspredicate("graftstate"), graftmsg),
("unshelve", fileexistspredicate("unshelverebasestate"), unshelvemsg),
("rebase", fileexistspredicate("rebasestate"), rebasemsg),
# 'update --merge'. Unlike the 'update' state below, this can be
# continued.
("update", fileexistspredicate("updatemergestate"), updatemergemsg),
# The merge and update states are part of a list that will be iterated over.
# They need to be last because some of the other unfinished states may also
# be in a merge or update state (eg. rebase, histedit, graft, etc).
# We want those to have priority.
("merge", mergepredicate, mergemsg),
# Sometimes you end up in a merge state when update completes, because you
# ran `hg update --merge`. We should inform you that you can still use the
# full suite of resolve tools to deal with conflicts in this state.
("merge", fileexistspredicate("merge/state"), None),
# If there were no conflicts, you may still be in an interrupted update
# state. Ideally, we should expand this update state to include the merge
# updates mentioned above, so there's a way to "continue" and finish the
# update.
("update", fileexistspredicate("updatestate"), updatemsg),
)
def extsetup(ui):
if ui.configbool("morestatus", "show") and not ui.plain():
wrapcommand(commands.table, "status", statuscmd)
# Write down `hg update` args to show the continue command in
# interrupted update state.
ui.setconfig("hooks", "pre-update.morestatus", saveupdateargs)
ui.setconfig("hooks", "post-update.morestatus", cleanupdateargs)
def saveupdateargs(repo, args, **kwargs):
# args is a string containing all flags and arguments
with repo.wlock():
repo.localvfs.writeutf8(UPDATEARGS, args)
def cleanupdateargs(repo, **kwargs):
with repo.wlock():
repo.localvfs.tryunlink(UPDATEARGS)
def statuscmd(orig, ui, repo, *pats, **opts):
"""
Wrap the status command to barf out the state of the repository. States
being mid histediting, mid bisecting, grafting, merging, etc.
Output is to stderr to avoid breaking scripts.
"""
ret = orig(ui, repo, *pats, **opts)
statetuple = getrepostate(repo)
if statetuple:
state, statedetectionpredicate, helpfulmsg = statetuple
statemsg = _("The repository is in an unfinished *%s* state.") % state
ui.warn("\n" + prefixlines(statemsg))
conflictsmsg(repo, ui)
if helpfulmsg:
helpfulmsg(repo, ui)
# TODO(cdelahousse): check to see if current bookmark needs updating. See
# scmprompt.
return ret
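# Illustrative sketch of the extra stderr output (hypothetical rebase state):
#
#   # The repository is in an unfinished *rebase* state.
#   # To continue:    hg rebase --continue
#   # To abort:       hg rebase --abort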
def getrepostate(repo):
# experimental config: morestatus.skipstates
skip = set(repo.ui.configlist("morestatus", "skipstates", []))
for state, statedetectionpredicate, msgfn in STATES:
if state in skip:
continue
if statedetectionpredicate(repo):
return (state, statedetectionpredicate, msgfn)<|fim▁end|> | % mergeliststr
)
else: |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(ascii)]
#![feature(as_unsafe_cell)]
#![feature(borrow_state)]
#![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(fnbox)]
#![feature(hashmap_hasher)]
#![feature(iter_arith)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(peekable_is_empty)]
#![feature(plugin)]
#![feature(slice_patterns)]
#![feature(str_utf16)]
#![feature(unicode)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(plugins)]
extern crate angle;
extern crate app_units;
#[macro_use]
extern crate bitflags;
extern crate canvas;
extern crate canvas_traits;
extern crate caseless;
extern crate core;
extern crate cssparser;
extern crate devtools_traits;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate html5ever;
extern crate hyper;
extern crate image;
extern crate ipc_channel;
extern crate js;
extern crate libc;
#[macro_use]
extern crate log;
extern crate msg;
extern crate net_traits;
extern crate num;
extern crate offscreen_gl_context;
#[macro_use]
extern crate profile_traits;
extern crate rand;
extern crate ref_slice;
extern crate rustc_serialize;
extern crate rustc_unicode;
extern crate script_traits;
#[macro_use(state_pseudo_classes)] extern crate selectors;
extern crate serde;
extern crate smallvec;
#[macro_use(atom, ns)] extern crate string_cache;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate tendril;
extern crate time;
extern crate unicase;
extern crate url;
#[macro_use]
extern crate util;
extern crate uuid;
extern crate websocket;
extern crate xml5ever;
pub mod clipboard_provider;
pub mod cors;
mod devtools;
pub mod document_loader;
#[macro_use]
pub mod dom;
pub mod layout_interface;
mod mem;
mod network_listener;
pub mod page;
pub mod parse;
pub mod reporter;
#[allow(unsafe_code)]
pub mod script_task;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use js::jsapi::SetDOMProxyInformation;
use std::ptr;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
#[allow(unsafe_code)]<|fim▁hole|> unsafe {
assert_eq!(js::jsapi::JS_Init(), true);
SetDOMProxyInformation(ptr::null(), 0, Some(script_task::shadow_check_callback));
}
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
perform_platform_specific_initialization();
}<|fim▁end|> | pub fn init() { |
<|file_name|>SkillTree.js<|end_file_name|><|fim▁begin|>angular.module('starter.controllers')
.controller('skillTreeControl', function ($scope, $storageServices, $ionicModal, $analytics, $window) {
$scope.showSkillMap = true;
$analytics.trackView('Skill Tree');
$scope.ratings = 0;
$scope.isInfinite = false;
$scope.learnedSkills = [];
$scope.modal = null;
$scope.openSkillsModal = function () {<|fim▁hole|> $analytics.trackView('All Skills');
$ionicModal.fromTemplateUrl('templates/skills/my_skills.html', {
id: 'skills',
scope: $scope,
animation: 'slide-in-up'
}).then(function (modal) {
modal.show();
$scope.modal = modal;
});
};
$scope.closeSkillsModal = function () {
$scope.modal.hide();
};
$scope.$on('$ionicView.enter', function () {
// clear badge
$storageServices.set('badgePoints', 0);
var flareChild = {};
angular.forEach(ALL_SKILLS, function (skills, index) {
var skillFlareChild = {};
angular.forEach(skills, function (skill) {
$storageServices.get(skill.text, function (result) {
var rating = parseInt(result);
if (rating) {
$scope.showSkillMap = true;
skillFlareChild[skill.text] = [rating];
$scope.ratings = $scope.ratings + rating;
if (rating >= 0) {
$scope.learnedSkills.push({
skill: skill.text,
rating: rating
});
}
var MAX_SKILL_POINTS = 250;
if ($scope.ratings > MAX_SKILL_POINTS) {
$scope.isInfinite = true;
}
}
});
if (skillFlareChild) {
flareChild[index] = skillFlareChild
}
});
$storageServices.set('points', $scope.ratings);
});
if ($scope.ratings > 0) {
RenderSkillTree($window, {
"Skill": flareChild
});
RenderBubble($storageServices, $window);
} else {
$scope.showSkillMap = false;
}
});
});<|fim▁end|> | |
<|file_name|>shift-management.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import {MdDialog, MdDialogRef} from '@angular/material';
import { Shift } from './shift';
import { ShiftService } from './shift.service';
import { CreateShiftDialog } from './create-shift-dialog.component';
import { EditShiftDialog } from './edit-shift-dialog.component';
@Component({
selector: 'shift-management',
templateUrl: './shift-management.component.html',
styleUrls: [ './shift-management.component.css' ]
})
export class ShiftManagementComponent implements OnInit {
// Clients local Shift[] array for displaying
shifts: Shift[] = [];
date = new Date().toISOString();
// Listener for all shifts
getShiftsConnection;
selectedOption: string;
constructor(private ShiftService: ShiftService, public dialog: MdDialog) { }
changeDate(newDate) {
this.date = newDate + "T12:00:00.000Z";
this.shifts = [];
this.ShiftService.getShiftByDay(this.date);
}
addShift(startDate, startTime, endDate, endTime,
startLoc, endLoc, route, driverID, busID): void {
let startD = startDate + "T" + startTime + ":00Z";
let endD = endDate + "T" + endTime + ":00Z";
this.ShiftService.addShift(startD, endD, startLoc, endLoc, route, driverID, busID);
this.ShiftService.getShiftByDay(this.date);
}
ngOnInit(): void {
this.ShiftService.connect();
this.ShiftService.getShiftByDay(this.date);
this.getShiftsConnection = this.ShiftService.getShifts()
.subscribe(array => {
for (let i in array) {
// create a Shift object in the front end for each shift JSON.
var shift = array[i];
let inShiftArr = false;
// If this shift is already in the array, just update the data.
this.shifts.forEach(function(s) {
if (s.id === shift.id) {
inShiftArr = true;
s.updateData(shift.start_time, shift.end_time,
shift.start_location, shift.end_location,
shift.route, shift.driver_id, shift.driver_name, shift.bus_id);
}
});
var currDateStr = new Date(shift.start_time).toLocaleDateString();
var shiftDateStr = new Date(this.date).toLocaleDateString();
          // Insert if we didn't find it and it's for the proper date.
if (inShiftArr === false && currDateStr === shiftDateStr) {
this.shifts.push(new Shift(shift.id, shift.start_time,
shift.end_time, shift.start_location, shift.end_location,
shift.route, shift.driver_id, shift.driver_name, shift.bus_id));
}
}
})
}
ngOnDestroy() {
this.getShiftsConnection.unsubscribe();
}
openCreateNewDialog() {
let dialogRef = this.dialog.open(CreateShiftDialog);<|fim▁hole|> if(res) {
this.addShift(res[0],res[1],res[2],res[3],res[4],res[5],res[6],res[7],res[8]);
}
});
}
openEditDialog(shift) {
let dialogRef = this.dialog.open(EditShiftDialog, {
data: shift
});
dialogRef.afterClosed().subscribe(res => {
console.log(shift);
this.ShiftService.editShift(shift);
});
}
}<|fim▁end|> | dialogRef.afterClosed().subscribe(res => { |
<|file_name|>tpd_graph_xml2dot.py<|end_file_name|><|fim▁begin|>##############################################################################<|fim▁hole|># http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Balasko, Jeno
# Delic, Adam
#
##############################################################################
import xml.etree.ElementTree as ET
tree = ET.parse('project_hierarchy_graph.xml')
root = tree.getroot()
f = open('project_hierarchy_graph.dot', 'w')
f.write("digraph PROJECT_HIERARCHY_GRAPH {\n")
for project in root:
for reference in project:
f.write(project.attrib['name'])
f.write(" -> ")
f.write(reference.attrib['name'])
f.write(";\n")
f.write("}\n")
f.close()
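# Illustrative sketch of the XML shape this script assumes (hypothetical
# project names):
#
#   <project_hierarchy_graph>
#     <project name="A">
#       <reference name="B"/>
#     </project>
#   </project_hierarchy_graph>
#
# which emits the dot edge:  A -> B;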
# use this to generate graph:
# > dot -Tpng project_hierarchy_graph.dot -o project_hierarchy_graph.png<|fim▁end|> | # Copyright (c) 2000-2016 Ericsson Telecom AB
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at |
<|file_name|>routerscheduler.py<|end_file_name|><|fim▁begin|># Copyright 2015 Cisco Systems.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
from networking_cisco._i18n import _
from neutronclient.common import extension
from neutronclient.neutron import v2_0 as neutronV20
from neutronclient.neutron.v2_0 import router
from networking_cisco.neutronclient import hostingdevice
R_RESOURCE = 'router'
DEVICE_L3_ROUTERS = '/hosting-device-l3-routers'
class RoutersOnHostingDevice(extension.NeutronClientExtension):
resource = R_RESOURCE
resource_plural = '%ss' % resource
object_path = '/%s' % resource_plural
resource_path = '/%s/%%s' % resource_plural
versions = ['2.0']
allow_names = True
class AddRouterToHostingDevice(extension.ClientExtensionCreate,
RoutersOnHostingDevice):
"""Add a router to hosting device."""
shell_command = 'cisco-hosting-device-router-add'
def get_parser(self, prog_name):
parser = super(AddRouterToHostingDevice, self).get_parser(prog_name)
parser.add_argument(
'hosting_device',
help=_('Name or id of the hosting device.'))
parser.add_argument(
'router',
help=_('Name or id of router to add.'))
return parser
def execute(self, parsed_args):
self.log.debug('run(%s)' % parsed_args)
neutron_client = self.get_client()
neutron_client.format = parsed_args.request_format
_id_hd = neutronV20.find_resourceid_by_name_or_id(
neutron_client, 'hosting_device', parsed_args.hosting_device)
_id_r = neutronV20.find_resourceid_by_name_or_id(
neutron_client, 'router', parsed_args.router)
self.add_router_to_hosting_device(neutron_client, _id_hd,
{'router_id': _id_r})
print(_('Added router \'%(router)s\' to hosting device \'%(hd)s\'') % {
'router': parsed_args.router, 'hd': parsed_args.hosting_device},
file=self.app.stdout, end='')
return [], []
def add_router_to_hosting_device(self, client, hosting_device_id, body):
"""Adds a router to hosting device."""
res_path = hostingdevice.HostingDevice.resource_path
return client.post((res_path + DEVICE_L3_ROUTERS) %
hosting_device_id, body=body)
class RemoveRouterFromHostingDevice(extension.ClientExtensionCreate,
RoutersOnHostingDevice):<|fim▁hole|> def get_parser(self, prog_name):
parser = super(RemoveRouterFromHostingDevice, self).get_parser(
prog_name)
parser.add_argument(
'hosting_device',
help=_('Name or id of the hosting device.'))
parser.add_argument(
'router',
help=_('Name or id of router to remove.'))
return parser
def execute(self, parsed_args):
self.log.debug('run(%s)' % parsed_args)
neutron_client = self.get_client()
neutron_client.format = parsed_args.request_format
_id_hd = neutronV20.find_resourceid_by_name_or_id(
neutron_client, 'hosting_device', parsed_args.hosting_device)
_id_r = neutronV20.find_resourceid_by_name_or_id(
neutron_client, 'router', parsed_args.router)
self.remove_router_from_hosting_device(neutron_client, _id_hd, _id_r)
print(_('Removed router \'%(router)s\' from hosting device \'%(hd)s\'')
% {'router': parsed_args.router,
'hd': parsed_args.hosting_device}, file=self.app.stdout,
end='')
return [], []
def remove_router_from_hosting_device(self, client, hosting_device_id,
router_id):
"""Remove a router from hosting_device."""
res_path = hostingdevice.HostingDevice.resource_path
return client.delete((res_path + DEVICE_L3_ROUTERS + "/%s") % (
hosting_device_id, router_id))
class RoutersOnHostingDeviceList(extension.ClientExtensionList,
RoutersOnHostingDevice):
shell_command = 'cisco-hosting-device-list-hosted-routers'
_formatters = {'external_gateway_info':
router._format_external_gateway_info}
list_columns = ['id', 'name', 'external_gateway_info']
def get_parser(self, prog_name):
parser = super(RoutersOnHostingDeviceList, self).get_parser(prog_name)
parser.add_argument(
'hosting_device',
help=_('Name or id of the hosting device to query.'))
return parser
def call_server(self, neutron_client, search_opts, parsed_args):
_id = neutronV20.find_resourceid_by_name_or_id(
neutron_client, 'hosting_device', parsed_args.hosting_device)
data = self.list_routers_on_hosting_device(neutron_client, _id,
**search_opts)
return data
def list_routers_on_hosting_device(self, client, hosting_device_id,
**_params):
"""Fetches a list of routers hosted on a hosting device."""
res_path = hostingdevice.HostingDevice.resource_path
return client.get((res_path + DEVICE_L3_ROUTERS) %
hosting_device_id, params=_params)
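# Illustrative CLI sketch (hypothetical device name): the shell command
# declared above would be invoked as
#   neutron cisco-hosting-device-list-hosted-routers my-asr-device
# to list the routers currently hosted on 'my-asr-device'.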
HD_RESOURCE = 'hosting_device'
L3_ROUTER_DEVICES = '/l3-router-hosting-devices'
class HostingDeviceHostingRouter(extension.NeutronClientExtension):
resource = HD_RESOURCE
resource_plural = '%ss' % resource
object_path = '/%s' % resource_plural
resource_path = '/%s/%%s' % resource_plural
versions = ['2.0']
allow_names = True
class HostingDeviceHostingRouterList(extension.ClientExtensionList,
HostingDeviceHostingRouter):
shell_command = 'cisco-router-list-hosting-devices'
list_columns = ['id', 'name', 'status', 'admin_state_up', 'template_id']
def get_parser(self, prog_name):
parser = super(HostingDeviceHostingRouterList, self).get_parser(
prog_name)
parser.add_argument('router',
help=_('Name or id of router to query.'))
return parser
def call_server(self, neutron_client, search_opts, parsed_args):
_id = neutronV20.find_resourceid_by_name_or_id(
neutron_client, 'router', parsed_args.router)
data = self.list_hosting_devices_hosting_routers(neutron_client, _id,
**search_opts)
return data
def list_hosting_devices_hosting_routers(self, client, router_id,
**_params):
"""Fetches a list of hosting devices hosting a router."""
return client.get((client.router_path + L3_ROUTER_DEVICES) %
router_id, params=_params)<|fim▁end|> | """Remove a router from Hosting Device."""
shell_command = 'cisco-hosting-device-router-remove'
|
<|file_name|>gutters.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------
# Copyright (c) 2010, Enthought Inc
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD license.
#
# Author: Enthought Inc
# Description: <Enthought pyface code editor>
#------------------------------------------------------------------------------
import math
from pyface.qt import QtCore, QtGui
class GutterWidget(QtGui.QWidget):
min_width = 5
background_color = QtGui.QColor(220, 220, 220)
def sizeHint(self):
return QtCore.QSize(self.min_width, 0)
def paintEvent(self, event):
""" Paint the line numbers.
"""
painter = QtGui.QPainter(self)
painter.fillRect(event.rect(), QtCore.Qt.lightGray)
def wheelEvent(self, event):
""" Delegate mouse wheel events to parent for seamless scrolling.
"""
self.parent().wheelEvent(event)
class StatusGutterWidget(GutterWidget):
""" Draws status markers
"""
def __init__(self, *args, **kw):
super(StatusGutterWidget, self).__init__(*args, **kw)
self.error_lines = []
self.warn_lines = []
self.info_lines = []
def sizeHint(self):
return QtCore.QSize(10, 0)
def paintEvent(self, event):
""" Paint the line numbers.
"""
painter = QtGui.QPainter(self)
painter.fillRect(event.rect(), self.background_color)
cw = self.parent()
pixels_per_block = self.height()/float(cw.blockCount())
for line in self.info_lines:
painter.fillRect(QtCore.QRect(0, line*pixels_per_block, self.width(), 3),
QtCore.Qt.green)
for line in self.warn_lines:
painter.fillRect(QtCore.QRect(0, line*pixels_per_block, self.width(), 3),
QtCore.Qt.yellow)
for line in self.error_lines:
painter.fillRect(QtCore.QRect(0, line*pixels_per_block, self.width(), 3),
QtCore.Qt.red)
class LineNumberWidget(GutterWidget):
""" Draw line numbers.
"""
min_char_width = 4
def fontMetrics(self):
        # QWidget's fontMetrics method does not provide up-to-date font
        # metrics, just ones corresponding to the initial font.
return QtGui.QFontMetrics(self.font)
def set_font(self, font):
self.font = font
def digits_width(self):
nlines = max(1, self.parent().blockCount())
ndigits = max(self.min_char_width,
int(math.floor(math.log10(nlines) + 1)))
width = max(self.fontMetrics().width(u'0' * ndigits) + 3,
self.min_width)
return width
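    # Illustrative sketch (assumed counts): for a 1234-line document,
    # ndigits = max(min_char_width, floor(log10(1234)) + 1) = 4, so the gutter
    # is as wide as '0000' plus 3px of padding, and never below min_width.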
def sizeHint(self):
return QtCore.QSize(self.digits_width(), 0)
def paintEvent(self, event):
""" Paint the line numbers.
"""
painter = QtGui.QPainter(self)
painter.setFont(self.font)<|fim▁hole|> block = cw.firstVisibleBlock()
blocknum = block.blockNumber()
top = cw.blockBoundingGeometry(block).translated(
cw.contentOffset()).top()
bottom = top + int(cw.blockBoundingRect(block).height())
while block.isValid() and top <= event.rect().bottom():
if block.isVisible() and bottom >= event.rect().top():
painter.setPen(QtCore.Qt.black)
painter.drawText(0, top, self.width() - 2,
self.fontMetrics().height(),
QtCore.Qt.AlignRight, str(blocknum + 1))
block = block.next()
top = bottom
bottom = top + int(cw.blockBoundingRect(block).height())
blocknum += 1<|fim▁end|> | painter.fillRect(event.rect(), self.background_color)
cw = self.parent() |
<|file_name|>vulkan_ctx.rs<|end_file_name|><|fim▁begin|>extern crate vulkano;
use grid;
use scene;
use std::path::Path;
use std::sync::Arc;
use tracers::{RaycastingShader, Tracer};
pub struct VulkanCtx<'a> {
pub physical: vulkano::instance::PhysicalDevice<'a>,
pub device: Arc<vulkano::device::Device>,
pub queue: Arc<vulkano::device::Queue>,
pub scene_buffers: scene::ModelBuffers,
pub grid_builder: grid::GridBuilder,
pub tracer: Tracer<RaycastingShader>,
}
impl<'a> VulkanCtx<'a> {
pub fn new<P>(
instance: &'a Arc<vulkano::instance::Instance>,<|fim▁hole|> ) -> (VulkanCtx<'a>, Box<vulkano::sync::GpuFuture>)
where
for<'r> P: FnMut(&'r vulkano::instance::QueueFamily) -> bool,
{
let physical = vulkano::instance::PhysicalDevice::enumerate(instance)
.next()
.expect("no device available");
println!(
"Using device: {} (type: {:?})",
physical.name(),
physical.ty()
);
let queue = physical
.queue_families()
.find(predicate)
.expect("couldn't find a graphical queue family");
let device_ext = vulkano::device::DeviceExtensions {
khr_swapchain: true,
..vulkano::device::DeviceExtensions::none()
};
let (device, mut queues) = vulkano::device::Device::new(
physical,
physical.supported_features(),
&device_ext,
[(queue, 0.5)].iter().cloned(),
).expect("failed to create device");
let queue = queues.next().unwrap();
let (scene_buffers, load_future) =
scene::ModelBuffers::from_obj(model_path, device.clone(), queue.clone())
.expect("failed to load model");
let tracer = Tracer::new(device.clone(), &scene_buffers, RaycastingShader {}).unwrap();
let grid_builder = grid::GridBuilder::new(
queue.clone(),
scene_buffers.positions.clone(),
scene_buffers.indices.clone(),
scene_buffers.triangle_count,
);
(
VulkanCtx {
physical,
device,
queue,
scene_buffers,
grid_builder,
tracer,
},
load_future,
)
}
}<|fim▁end|> | model_path: &Path,
predicate: P, |
<|file_name|>next_back.rs<|end_file_name|><|fim▁begin|>#![feature(core, unboxed_closures)]
extern crate core;
#[cfg(test)]
mod tests {
use core::str::pattern::ReverseSearcher;
use core::str::pattern::SearchStep::{Match, Reject, Done};
use core::str::pattern::CharPredicateSearcher;
use core::str::pattern::Pattern;
use core::ops::FnMut;
use core::ops::FnOnce;
// #[derive(Copy, Clone, Eq, PartialEq, Debug)]
// pub enum SearchStep {
// /// Expresses that a match of the pattern has been found at
// /// `haystack[a..b]`.
// Match(usize, usize),
// /// Expresses that `haystack[a..b]` has been rejected as a possible match
// /// of the pattern.
// ///
// /// Note that there might be more than one `Reject` between two `Match`es,
// /// there is no requirement for them to be combined into one.
// Reject(usize, usize),
    // /// Expresses that every byte of the haystack has been visited, ending
// /// the iteration.
// Done
// }
// #[derive(Clone)]
// pub struct CharPredicateSearcher<'a, F>(<CharEqPattern<F> as Pattern<'a>>::Searcher)
// where F: FnMut(char) -> bool;
// pub trait Pattern<'a>: Sized {
// /// Associated searcher for this pattern
// type Searcher: Searcher<'a>;
//
// /// Constructs the associated searcher from
// /// `self` and the `haystack` to search in.
// fn into_searcher(self, haystack: &'a str) -> Self::Searcher;
//
// /// Checks whether the pattern matches anywhere in the haystack
// #[inline]
// fn is_contained_in(self, haystack: &'a str) -> bool {
// self.into_searcher(haystack).next_match().is_some()
// }
//
// /// Checks whether the pattern matches at the front of the haystack
// #[inline]
// fn is_prefix_of(self, haystack: &'a str) -> bool {
// match self.into_searcher(haystack).next() {
// SearchStep::Match(0, _) => true,
// _ => false,
// }
// }
//
// /// Checks whether the pattern matches at the back of the haystack
// // #[inline]
// fn is_suffix_of(self, haystack: &'a str) -> bool
// where Self::Searcher: ReverseSearcher<'a>
// {
// match self.into_searcher(haystack).next_back() {
// SearchStep::Match(_, j) if haystack.len() == j => true,
// _ => false,
// }
// }
// }
// macro_rules! pattern_methods {
// ($t:ty, $pmap:expr, $smap:expr) => {
// type Searcher = $t;
//
// #[inline]
// fn into_searcher(self, haystack: &'a str) -> $t {
// ($smap)(($pmap)(self).into_searcher(haystack))
// }
//
// #[inline]
// fn is_contained_in(self, haystack: &'a str) -> bool {
// ($pmap)(self).is_contained_in(haystack)
// }
//
// #[inline]
// fn is_prefix_of(self, haystack: &'a str) -> bool {
// ($pmap)(self).is_prefix_of(haystack)
// }
//
// #[inline]
// fn is_suffix_of(self, haystack: &'a str) -> bool
// where $t: ReverseSearcher<'a>
// {
// ($pmap)(self).is_suffix_of(haystack)
// }
// }
// }
// impl<'a, F> Pattern<'a> for F where F: FnMut(char) -> bool {
// pattern_methods!(CharPredicateSearcher<'a, F>, CharEqPattern, CharPredicateSearcher);
// }
// macro_rules! searcher_methods {
// (forward) => {
// #[inline]
// fn haystack(&self) -> &'a str {
// self.0.haystack()
// }
// #[inline]
// fn next(&mut self) -> SearchStep {
// self.0.next()
// }
// #[inline]
// fn next_match(&mut self) -> Option<(usize, usize)> {
// self.0.next_match()
// }<|fim▁hole|> // }
// };
// (reverse) => {
// #[inline]
// fn next_back(&mut self) -> SearchStep {
// self.0.next_back()
// }
// #[inline]
// fn next_match_back(&mut self) -> Option<(usize, usize)> {
// self.0.next_match_back()
// }
// #[inline]
// fn next_reject_back(&mut self) -> Option<(usize, usize)> {
// self.0.next_reject_back()
// }
// }
// }
// unsafe impl<'a, F> ReverseSearcher<'a> for CharPredicateSearcher<'a, F>
// where F: FnMut(char) -> bool
// {
// searcher_methods!(reverse);
// }
struct F { c: char }
type Args = (char,);
impl FnOnce<Args> for F {
type Output = bool;
extern "rust-call" fn call_once(mut self, (c,): Args) -> Self::Output {
self.call_mut((c,))
}
}
impl FnMut<Args> for F {
extern "rust-call" fn call_mut(&mut self, (c,): Args) -> Self::Output {
self.c == c
}
}
#[test]
fn next_back_test1() {
let f: F = F { c: '而' };
let haystack: &str = "我能吞下玻璃而不傷身體。";
let mut searcher: CharPredicateSearcher<F> = f.into_searcher(haystack);
assert_eq!(searcher.next_back(), Reject(33, 36));
assert_eq!(searcher.next_back(), Reject(30, 33));
assert_eq!(searcher.next_back(), Reject(27, 30));
assert_eq!(searcher.next_back(), Reject(24, 27));
assert_eq!(searcher.next_back(), Reject(21, 24));
assert_eq!(searcher.next_back(), Match(18, 21));
assert_eq!(searcher.next_back(), Reject(15, 18));
assert_eq!(searcher.next_back(), Reject(12, 15));
assert_eq!(searcher.next_back(), Reject(9, 12));
assert_eq!(searcher.next_back(), Reject(6, 9));
assert_eq!(searcher.next_back(), Reject(3, 6));
assert_eq!(searcher.next_back(), Reject(0, 3));
assert_eq!(searcher.next_back(), Done);
}
}<|fim▁end|> | // #[inline]
// fn next_reject(&mut self) -> Option<(usize, usize)> {
// self.0.next_reject() |
<|file_name|>list.py<|end_file_name|><|fim▁begin|>#coding:utf-8
LIST_NUM = [(1,4),(5,1),(2,3),(6,9),(7,1)]
'''
用max函数获取到元素的最大值,然后用冒泡进行排序
'''
for j in range(len(LIST_NUM) -1):
for i in range(len(LIST_NUM) -1):<|fim▁hole|> LIST_NUM[i] = LIST_NUM[i + 1]
LIST_NUM[i + 1] = A
print LIST_NUM<|fim▁end|> | if max(LIST_NUM[i]) > max(LIST_NUM[i + 1]):
A = LIST_NUM[i] |
<|file_name|>Customer1794Repository.java<|end_file_name|><|fim▁begin|>package example.repo;
import example.model.Customer1794;
import java.util.List;<|fim▁hole|>import org.springframework.data.repository.CrudRepository;
public interface Customer1794Repository extends CrudRepository<Customer1794, Long> {
List<Customer1794> findByLastName(String lastName);
}<|fim▁end|> | |
<|file_name|>home_languages_persian.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along<|fim▁hole|> with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Languages_Persian():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["Channels",
"Events", "Live", "Movies", "Sports", "TVShows"],
languages=["Persian"]))<|fim▁end|> | |
<|file_name|>event.test.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { Event, Emitter, EventBufferer, EventMultiplexer, AsyncEmitter, IWaitUntil } from 'vs/base/common/event';
import { IDisposable } from 'vs/base/common/lifecycle';
import * as Errors from 'vs/base/common/errors';
import { timeout } from 'vs/base/common/async';
namespace Samples {
export class EventCounter {
count = 0;
reset() {
this.count = 0;
}
onEvent() {
this.count += 1;
}
}
export class Document3 {
private _onDidChange = new Emitter<string>();
onDidChange: Event<string> = this._onDidChange.event;
setText(value: string) {
//...
this._onDidChange.fire(value);
}
}
}
suite('Event', function () {
const counter = new Samples.EventCounter();
setup(() => counter.reset());
test('Emitter plain', function () {
let doc = new Samples.Document3();
document.createElement('div').onclick = function () { };
let subscription = doc.onDidChange(counter.onEvent, counter);
doc.setText('far');
doc.setText('boo');
// unhook listener
subscription.dispose();
doc.setText('boo');
assert.equal(counter.count, 2);
});
test('Emitter, bucket', function () {
let bucket: IDisposable[] = [];
let doc = new Samples.Document3();
let subscription = doc.onDidChange(counter.onEvent, counter, bucket);
doc.setText('far');
doc.setText('boo');
// unhook listener
while (bucket.length) {
bucket.pop().dispose();
}
// noop
subscription.dispose();
doc.setText('boo');
assert.equal(counter.count, 2);
});
test('onFirstAdd|onLastRemove', () => {
let firstCount = 0;
let lastCount = 0;
let a = new Emitter({
onFirstListenerAdd() { firstCount += 1; },
onLastListenerRemove() { lastCount += 1; }
});
assert.equal(firstCount, 0);
assert.equal(lastCount, 0);
let subscription = a.event(function () { });
assert.equal(firstCount, 1);
assert.equal(lastCount, 0);
subscription.dispose();
assert.equal(firstCount, 1);
assert.equal(lastCount, 1);
subscription = a.event(function () { });
assert.equal(firstCount, 2);
assert.equal(lastCount, 1);
});
test('throwingListener', () => {
const origErrorHandler = Errors.errorHandler.getUnexpectedErrorHandler();
Errors.setUnexpectedErrorHandler(() => null);
try {
let a = new Emitter();
let hit = false;
a.event(function () {
throw 9;
});
a.event(function () {
hit = true;
});
a.fire(undefined);
assert.equal(hit, true);
} finally {
Errors.setUnexpectedErrorHandler(origErrorHandler);
}
});
test('reusing event function and context', function () {
let counter = 0;
function listener() {
counter += 1;
}
const context = {};
let emitter = new Emitter();
let reg1 = emitter.event(listener, context);
let reg2 = emitter.event(listener, context);
emitter.fire();
assert.equal(counter, 2);
reg1.dispose();
emitter.fire();
assert.equal(counter, 3);
reg2.dispose();
emitter.fire();
assert.equal(counter, 3);
});
test('Debounce Event', function (done: () => void) {
let doc = new Samples.Document3();
let onDocDidChange = Event.debounce(doc.onDidChange, (prev: string[], cur) => {
if (!prev) {
prev = [cur];
} else if (prev.indexOf(cur) < 0) {
prev.push(cur);
}
return prev;
}, 10);
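		// The merge function above accumulates the distinct values seen during
		// each 10ms debounce window into an array, delivered as a single event.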
let count = 0;
onDocDidChange(keys => {
count++;
			assert.ok(keys, 'expected keys to be set.');
if (count === 1) {
doc.setText('4');
assert.deepEqual(keys, ['1', '2', '3']);
} else if (count === 2) {
assert.deepEqual(keys, ['4']);
done();
}
});
doc.setText('1');
doc.setText('2');
doc.setText('3');
});
test('Debounce Event - leading', async function () {
const emitter = new Emitter<void>();
let debounced = Event.debounce(emitter.event, (l, e) => e, 0, /*leading=*/true);
let calls = 0;
debounced(() => {
calls++;
});
// If the source event is fired once, the debounced (on the leading edge) event should be fired only once
emitter.fire();
await timeout(1);
assert.equal(calls, 1);
});
	test('Debounce Event - leading (multiple events)', async function () {
const emitter = new Emitter<void>();
let debounced = Event.debounce(emitter.event, (l, e) => e, 0, /*leading=*/true);
let calls = 0;
debounced(() => {
calls++;
});
// If the source event is fired multiple times, the debounced (on the leading edge) event should be fired twice
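		// (one firing on the leading edge, plus one trailing firing for the
		// events accumulated during the debounce window)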
emitter.fire();
emitter.fire();
emitter.fire();
await timeout(1);
assert.equal(calls, 2);
});
test('Emitter - In Order Delivery', function () {
const a = new Emitter<string>();
const listener2Events: string[] = [];
a.event(function listener1(event) {
if (event === 'e1') {
a.fire('e2');
// assert that all events are delivered at this point
assert.deepEqual(listener2Events, ['e1', 'e2']);
}
});
a.event(function listener2(event) {
listener2Events.push(event);<|fim▁hole|> a.fire('e1');
// assert that all events are delivered in order
assert.deepEqual(listener2Events, ['e1', 'e2']);
});
});
suite('AsyncEmitter', function () {
test('event has waitUntil-function', async function () {
interface E extends IWaitUntil {
foo: boolean;
bar: number;
}
let emitter = new AsyncEmitter<E>();
emitter.event(e => {
assert.equal(e.foo, true);
assert.equal(e.bar, 1);
assert.equal(typeof e.waitUntil, 'function');
});
emitter.fireAsync(thenables => ({
foo: true,
bar: 1,
waitUntil(t: Promise<void>) { thenables.push(t); }
}));
emitter.dispose();
});
test('sequential delivery', async function () {
interface E extends IWaitUntil {
foo: boolean;
}
let globalState = 0;
let emitter = new AsyncEmitter<E>();
emitter.event(e => {
e.waitUntil(timeout(10).then(_ => {
assert.equal(globalState, 0);
globalState += 1;
}));
});
emitter.event(e => {
e.waitUntil(timeout(1).then(_ => {
assert.equal(globalState, 1);
globalState += 1;
}));
});
await emitter.fireAsync(thenables => ({
foo: true,
waitUntil(t) {
thenables.push(t);
}
}));
assert.equal(globalState, 2);
});
test('sequential, in-order delivery', async function () {
interface E extends IWaitUntil {
foo: number;
}
let events: number[] = [];
let done = false;
let emitter = new AsyncEmitter<E>();
// e1
emitter.event(e => {
e.waitUntil(timeout(10).then(async _ => {
if (e.foo === 1) {
await emitter.fireAsync(thenables => ({
foo: 2,
waitUntil(t) {
thenables.push(t);
}
}));
assert.deepEqual(events, [1, 2]);
done = true;
}
}));
});
// e2
emitter.event(e => {
events.push(e.foo);
e.waitUntil(timeout(7));
});
await emitter.fireAsync(thenables => ({
foo: 1,
waitUntil(t) {
thenables.push(t);
}
}));
assert.ok(done);
});
});
suite('Event utils', () => {
suite('EventBufferer', () => {
test('should not buffer when not wrapped', () => {
const bufferer = new EventBufferer();
const counter = new Samples.EventCounter();
const emitter = new Emitter<void>();
const event = bufferer.wrapEvent(emitter.event);
const listener = event(counter.onEvent, counter);
assert.equal(counter.count, 0);
emitter.fire();
assert.equal(counter.count, 1);
emitter.fire();
assert.equal(counter.count, 2);
emitter.fire();
assert.equal(counter.count, 3);
listener.dispose();
});
test('should buffer when wrapped', () => {
const bufferer = new EventBufferer();
const counter = new Samples.EventCounter();
const emitter = new Emitter<void>();
const event = bufferer.wrapEvent(emitter.event);
const listener = event(counter.onEvent, counter);
assert.equal(counter.count, 0);
emitter.fire();
assert.equal(counter.count, 1);
bufferer.bufferEvents(() => {
emitter.fire();
assert.equal(counter.count, 1);
emitter.fire();
assert.equal(counter.count, 1);
});
assert.equal(counter.count, 3);
emitter.fire();
assert.equal(counter.count, 4);
listener.dispose();
});
test('once', () => {
const emitter = new Emitter<void>();
let counter1 = 0, counter2 = 0, counter3 = 0;
const listener1 = emitter.event(() => counter1++);
const listener2 = Event.once(emitter.event)(() => counter2++);
const listener3 = Event.once(emitter.event)(() => counter3++);
assert.equal(counter1, 0);
assert.equal(counter2, 0);
assert.equal(counter3, 0);
listener3.dispose();
emitter.fire();
assert.equal(counter1, 1);
assert.equal(counter2, 1);
assert.equal(counter3, 0);
emitter.fire();
assert.equal(counter1, 2);
assert.equal(counter2, 1);
assert.equal(counter3, 0);
listener1.dispose();
listener2.dispose();
});
});
suite('fromPromise', () => {
test('should emit when done', async () => {
let count = 0;
const event = Event.fromPromise(Promise.resolve(null));
event(() => count++);
assert.equal(count, 0);
await timeout(10);
assert.equal(count, 1);
});
test('should emit when done - setTimeout', async () => {
let count = 0;
const promise = timeout(5);
const event = Event.fromPromise(promise);
event(() => count++);
assert.equal(count, 0);
await promise;
assert.equal(count, 1);
});
});
suite('stopwatch', () => {
test('should emit', () => {
const emitter = new Emitter<void>();
const event = Event.stopwatch(emitter.event);
return new Promise((c, e) => {
event(duration => {
try {
assert(duration > 0);
} catch (err) {
e(err);
}
c(void 0);
});
setTimeout(() => emitter.fire(), 10);
});
});
});
suite('buffer', () => {
test('should buffer events', () => {
const result: number[] = [];
const emitter = new Emitter<number>();
const event = emitter.event;
const bufferedEvent = Event.buffer(event);
emitter.fire(1);
emitter.fire(2);
emitter.fire(3);
assert.deepEqual(result, []);
const listener = bufferedEvent(num => result.push(num));
assert.deepEqual(result, [1, 2, 3]);
emitter.fire(4);
assert.deepEqual(result, [1, 2, 3, 4]);
listener.dispose();
emitter.fire(5);
assert.deepEqual(result, [1, 2, 3, 4]);
});
test('should buffer events on next tick', async () => {
const result: number[] = [];
const emitter = new Emitter<number>();
const event = emitter.event;
const bufferedEvent = Event.buffer(event, true);
emitter.fire(1);
emitter.fire(2);
emitter.fire(3);
assert.deepEqual(result, []);
const listener = bufferedEvent(num => result.push(num));
assert.deepEqual(result, []);
await timeout(10);
emitter.fire(4);
assert.deepEqual(result, [1, 2, 3, 4]);
listener.dispose();
emitter.fire(5);
assert.deepEqual(result, [1, 2, 3, 4]);
});
test('should fire initial buffer events', () => {
const result: number[] = [];
const emitter = new Emitter<number>();
const event = emitter.event;
const bufferedEvent = Event.buffer(event, false, [-2, -1, 0]);
emitter.fire(1);
emitter.fire(2);
emitter.fire(3);
assert.deepEqual(result, []);
bufferedEvent(num => result.push(num));
assert.deepEqual(result, [-2, -1, 0, 1, 2, 3]);
});
});
suite('echo', () => {
test('should echo events', () => {
const result: number[] = [];
const emitter = new Emitter<number>();
const event = emitter.event;
const echoEvent = Event.echo(event);
emitter.fire(1);
emitter.fire(2);
emitter.fire(3);
assert.deepEqual(result, []);
const listener = echoEvent(num => result.push(num));
assert.deepEqual(result, [1, 2, 3]);
emitter.fire(4);
assert.deepEqual(result, [1, 2, 3, 4]);
listener.dispose();
emitter.fire(5);
assert.deepEqual(result, [1, 2, 3, 4]);
});
test('should echo events for every listener', () => {
const result1: number[] = [];
const result2: number[] = [];
const emitter = new Emitter<number>();
const event = emitter.event;
const echoEvent = Event.echo(event);
emitter.fire(1);
emitter.fire(2);
emitter.fire(3);
assert.deepEqual(result1, []);
assert.deepEqual(result2, []);
const listener1 = echoEvent(num => result1.push(num));
assert.deepEqual(result1, [1, 2, 3]);
assert.deepEqual(result2, []);
emitter.fire(4);
assert.deepEqual(result1, [1, 2, 3, 4]);
assert.deepEqual(result2, []);
const listener2 = echoEvent(num => result2.push(num));
assert.deepEqual(result1, [1, 2, 3, 4]);
assert.deepEqual(result2, [1, 2, 3, 4]);
emitter.fire(5);
assert.deepEqual(result1, [1, 2, 3, 4, 5]);
assert.deepEqual(result2, [1, 2, 3, 4, 5]);
listener1.dispose();
listener2.dispose();
emitter.fire(6);
assert.deepEqual(result1, [1, 2, 3, 4, 5]);
assert.deepEqual(result2, [1, 2, 3, 4, 5]);
});
});
suite('EventMultiplexer', () => {
test('works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
m.event(r => result.push(r));
const e1 = new Emitter<number>();
m.add(e1.event);
assert.deepEqual(result, []);
e1.fire(0);
assert.deepEqual(result, [0]);
});
test('multiplexer dispose works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
m.event(r => result.push(r));
const e1 = new Emitter<number>();
m.add(e1.event);
assert.deepEqual(result, []);
e1.fire(0);
assert.deepEqual(result, [0]);
m.dispose();
assert.deepEqual(result, [0]);
e1.fire(0);
assert.deepEqual(result, [0]);
});
test('event dispose works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
m.event(r => result.push(r));
const e1 = new Emitter<number>();
m.add(e1.event);
assert.deepEqual(result, []);
e1.fire(0);
assert.deepEqual(result, [0]);
e1.dispose();
assert.deepEqual(result, [0]);
e1.fire(0);
assert.deepEqual(result, [0]);
});
		test('multiplexer event dispose works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
m.event(r => result.push(r));
const e1 = new Emitter<number>();
const l1 = m.add(e1.event);
assert.deepEqual(result, []);
e1.fire(0);
assert.deepEqual(result, [0]);
l1.dispose();
assert.deepEqual(result, [0]);
e1.fire(0);
assert.deepEqual(result, [0]);
});
test('hot start works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
m.event(r => result.push(r));
const e1 = new Emitter<number>();
m.add(e1.event);
const e2 = new Emitter<number>();
m.add(e2.event);
const e3 = new Emitter<number>();
m.add(e3.event);
e1.fire(1);
e2.fire(2);
e3.fire(3);
assert.deepEqual(result, [1, 2, 3]);
});
test('cold start works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
const e1 = new Emitter<number>();
m.add(e1.event);
const e2 = new Emitter<number>();
m.add(e2.event);
const e3 = new Emitter<number>();
m.add(e3.event);
m.event(r => result.push(r));
e1.fire(1);
e2.fire(2);
e3.fire(3);
assert.deepEqual(result, [1, 2, 3]);
});
test('late add works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
const e1 = new Emitter<number>();
m.add(e1.event);
const e2 = new Emitter<number>();
m.add(e2.event);
m.event(r => result.push(r));
e1.fire(1);
e2.fire(2);
const e3 = new Emitter<number>();
m.add(e3.event);
e3.fire(3);
assert.deepEqual(result, [1, 2, 3]);
});
test('add dispose works', () => {
const result: number[] = [];
const m = new EventMultiplexer<number>();
const e1 = new Emitter<number>();
m.add(e1.event);
const e2 = new Emitter<number>();
m.add(e2.event);
m.event(r => result.push(r));
e1.fire(1);
e2.fire(2);
const e3 = new Emitter<number>();
const l3 = m.add(e3.event);
e3.fire(3);
assert.deepEqual(result, [1, 2, 3]);
l3.dispose();
e3.fire(4);
assert.deepEqual(result, [1, 2, 3]);
e2.fire(4);
e1.fire(5);
assert.deepEqual(result, [1, 2, 3, 4, 5]);
});
});
test('latch', () => {
const emitter = new Emitter<number>();
const event = Event.latch(emitter.event);
const result: number[] = [];
const listener = event(num => result.push(num));
assert.deepEqual(result, []);
emitter.fire(1);
assert.deepEqual(result, [1]);
emitter.fire(2);
assert.deepEqual(result, [1, 2]);
emitter.fire(2);
assert.deepEqual(result, [1, 2]);
emitter.fire(1);
assert.deepEqual(result, [1, 2, 1]);
emitter.fire(1);
assert.deepEqual(result, [1, 2, 1]);
emitter.fire(3);
assert.deepEqual(result, [1, 2, 1, 3]);
emitter.fire(3);
assert.deepEqual(result, [1, 2, 1, 3]);
emitter.fire(3);
assert.deepEqual(result, [1, 2, 1, 3]);
listener.dispose();
});
});<|fim▁end|> | }); |
<|file_name|>0002_auto_20191121_1640.py<|end_file_name|><|fim▁begin|># Generated by Django 2.2.7 on 2019-11-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('smmapdfs_edit', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='pdfsandwichemailconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichfontconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichtypeconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>Constants.ts<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2017 The xterm.js authors. All rights reserved.
* @license MIT
*/
export const INVERTED_DEFAULT_COLOR = 257;
export const DIM_OPACITY = 0.5;<|fim▁hole|>
export const CHAR_ATLAS_CELL_SPACING = 1;<|fim▁end|> | |
<|file_name|>extractor.py<|end_file_name|><|fim▁begin|>import json, codecs, re
from abc import ABCMeta, abstractmethod
from PIL import Image, ExifTags
from witica.util import throw, sstr, suni
#regular expressions regarding item ids
RE_METAFILE = r'^meta\/[^\n]+$'
RE_FIRST_ITEMID = r'(?!meta\/)[^\n?@.]+'
RE_ITEMFILE_EXTENSION = r'[^\n?@\/]+'
RE_ITEMID = r'^' + RE_FIRST_ITEMID + '$'
RE_ITEMFILE = r'^' + RE_FIRST_ITEMID + '\.' + RE_ITEMFILE_EXTENSION + '$'
RE_ITEM_SPLIT_ITEMID_EXTENSION = r'^(' + RE_FIRST_ITEMID + ')\.(' + RE_ITEMFILE_EXTENSION + ')$'
RE_ITEM_REFERENCE = r'^!(?:.\/)?' + RE_FIRST_ITEMID + '$'
#regular expressions to be used for md files parsing
RE_MD_SPLIT_JSON_MD = "^\s*({[\s\S]*?})?[\s]*([^}\s][\s\S]*)$" #splits md file into the json metadata and markdown sections as capture groups
RE_MD_SPLIT_TITLE_BODY = "^(?:#(?!#)[\t ]*([\S][^\n\r]*)(?:\n|\r\n?|$))?([\s\S]*)$" #splits markdown section into title and body sections as capture groups
RE_MD_NOBRACKET = r'[^\]\[]*'
RE_MD_BRK = ( r'\[('
+ (RE_MD_NOBRACKET + r'(\[')*6
+ (RE_MD_NOBRACKET+ r'\])*')*6
+ RE_MD_NOBRACKET + r')\]' )
RE_MD_IMAGE_LINK = r'\!' + RE_MD_BRK + r'\s*\((?!\!)(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
#RE_MD_ITEM_LINK = r'\!' + RE_MD_BRK + r'\s*\(\!(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
RE_MD_ITEM_LINK = r'!({[\s\S]*?})?\((![\s\S]+?)\)'
# !{renderparametersjson}(!itemid)
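# Illustrative example (not from the original source): the item-link syntax
# above would match e.g. !{"width": 300}(!gallery/photo1), where the optional
# JSON object carries render parameters and "gallery/photo1" is the item id.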
registered_extractors = [];
def register(extension, extractor):
"""Register new metadata extractor for file extension"""
for (ext,extr) in registered_extractors:
if extension == ext:
raise ValueError("A metadata extractor for extension '" + extension + "' is already registered.")
#TODO: check type of extractor
registered_extractors.append((extension,extractor))
#print("registered: " + extension + " " + sstr(extractor))
def register_default_extractors():
register("item", JSONExtractor)
register("json", JSONExtractor)
register("md", MDExtractor)
register("txt", MDExtractor)
register("jpg", ImageExtractor)
register("jpeg", ImageExtractor)
def is_supported(extension):
for (ext,extractor) in registered_extractors:
if extension == ext:
return True
return False
def extract_metadata(filename):
extension = filename.rpartition(".")[2]
for (ext,extractor) in registered_extractors:
if extension == ext:
return extractor().extract_metadata(filename)
raise ValueError("Could not extract metadata, because a metadata extractor for extension '" + extension + "' is not registered.")
class MetadataExtractor(object):
__metaclass__ = ABCMeta
"""Abstract class representing a metadata extractor"""
supported_extensions = [];
def __init__(self):
pass
@abstractmethod
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
pass
class JSONExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from item or json file"""
supported_extensions = ["item", "json"];
def __init__(self):
pass
<|fim▁hole|> """Extract metadata from filename and return metadata as json"""
f = codecs.open(filename, mode="r", encoding="utf-8")
return json.loads(f.read())
class MDExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["md", "txt"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {}
#split into json and markdown part
f = codecs.open(filename, mode="r", encoding="utf-8")
match = re.match(RE_MD_SPLIT_JSON_MD,f.read())
f.close()
if not match:
raise IOError("Extracting metadata from file '" + sstr(filename) + "' failed. Could not split JSON and markdown parts.")
jsonstr, mdstr = match.groups()
#get title string (first heading in markdown string) if available
title = re.match(RE_MD_SPLIT_TITLE_BODY,mdstr).group(1)
if not title == None:
meta["title"] = title
#update with explicit json
if not jsonstr == None:
meta.update(json.loads(jsonstr))
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
class ImageExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["jpg", "jpeg"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {"type": "image"}
img = Image.open(filename)
exif = {
ExifTags.TAGS[k]: v
for k, v in img._getexif().items()
if k in ExifTags.TAGS
}
if ("ImageDescription" in exif or "UserComment" in exif):
if "UserComment" in exif:
meta["title"] = exif["UserComment"]
if "ImageDescription" in exif:
meta["title"] = exif["ImageDescription"]
if ("Make" in exif or "Model" in exif):
meta["camera"] = (exif["Make"] if "Make" in exif else "") + " " + (exif["Model"] if "Model" in exif else "")
if ("Orientation" in exif):
meta["orientation"] = exif["Orientation"]
if ("Artist" in exif):
meta["author"] = exif["Artist"]
if ("DateTimeOriginal" in exif):
meta["created"] = exif["DateTimeOriginal"] #TODO: convert to unix time
if ("Flash" in exif):
meta["flash"] = exif["Flash"]
if ("GPSInfo" in exif):
lat, lon = self.get_lat_lon(exif["GPSInfo"])
if lat and lon:
meta["lat"] = lat
meta["lon"] = lon
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
	# The remaining functions in the ImageExtractor class are originally by Eran Sandler (MIT license), see https://gist.github.com/erans/983821
def _get_if_exist(self, data, key):
if key in data:
return data[key]
return None
def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0)
def get_lat_lon(self, gps_info_exif):
"""Returns the latitude and longitude, if available, from the provided exif_data (obtained through get_exif_data above)"""
lat = None
lon = None
gps_info = {
ExifTags.GPSTAGS[k]: v
for k, v in gps_info_exif.items()
if k in ExifTags.GPSTAGS
}
gps_latitude = self._get_if_exist(gps_info, "GPSLatitude")
gps_latitude_ref = self._get_if_exist(gps_info, 'GPSLatitudeRef')
gps_longitude = self._get_if_exist(gps_info, 'GPSLongitude')
gps_longitude_ref = self._get_if_exist(gps_info, 'GPSLongitudeRef')
if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
lat = self._convert_to_degress(gps_latitude)
if gps_latitude_ref != "N":
lat = 0 - lat
lon = self._convert_to_degress(gps_longitude)
if gps_longitude_ref != "E":
lon = 0 - lon
return lat, lon<|fim▁end|> | def extract_metadata(self, filename): |
<|file_name|>InformationExtractorJdbcDatabaseMetaDataImpl.java<|end_file_name|><|fim▁begin|>/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.extract.internal;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
import org.hibernate.JDBCException;
import org.hibernate.boot.model.TruthValue;
import org.hibernate.boot.model.naming.DatabaseIdentifier;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.compare.EqualsHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.PrimaryKeyInformation;
import org.hibernate.tool.schema.extract.spi.SchemaExtractionException;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.SchemaManagementException;
/**
* Implementation of the SchemaMetaDataExtractor contract which uses the standard JDBC {@link java.sql.DatabaseMetaData}
* API for extraction.
*
* @author Steve Ebersole
*/
public class InformationExtractorJdbcDatabaseMetaDataImpl implements InformationExtractor {
private static final CoreMessageLogger log = CoreLogging.messageLogger( InformationExtractorJdbcDatabaseMetaDataImpl.class );
private final String[] tableTypes;
private String[] extraPhysicalTableTypes;
private final ExtractionContext extractionContext;
public InformationExtractorJdbcDatabaseMetaDataImpl(ExtractionContext extractionContext) {
this.extractionContext = extractionContext;
ConfigurationService configService = extractionContext.getServiceRegistry()
.getService( ConfigurationService.class );
		final String extraPhysicalTableTypesConfig = configService.getSetting(
AvailableSettings.EXTRA_PHYSICAL_TABLE_TYPES,
StandardConverters.STRING,
""
);
if ( !"".equals( extraPhysycalTableTypesConfig.trim() ) ) {
this.extraPhysicalTableTypes = StringHelper.splitTrimmingTokens(
",;",
					extraPhysicalTableTypesConfig,
false
);
}
final String[] tempTableTypes;
if ( ConfigurationHelper.getBoolean( AvailableSettings.ENABLE_SYNONYMS, configService.getSettings(), false ) ) {
tempTableTypes = new String[] {"TABLE", "VIEW", "SYNONYM"};
}
else {
tempTableTypes = new String[] {"TABLE", "VIEW"};
}
if ( this.extraPhysicalTableTypes != null ) {
this.tableTypes = ArrayHelper.join( tempTableTypes, this.extraPhysicalTableTypes );
}
else {
this.tableTypes = tempTableTypes;
}
}
protected IdentifierHelper identifierHelper() {
return extractionContext.getJdbcEnvironment().getIdentifierHelper();
}
protected JDBCException convertSQLException(SQLException sqlException, String message) {
return extractionContext.getJdbcEnvironment().getSqlExceptionHelper().convert( sqlException, message );
}
protected String toMetaDataObjectName(Identifier identifier) {
return extractionContext.getJdbcEnvironment().getIdentifierHelper().toMetaDataObjectName( identifier );
}
@Override
public boolean catalogExists(Identifier catalog) {
try {
final ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getCatalogs();
try {
while ( resultSet.next() ) {
final String existingCatalogName = resultSet.getString( "TABLE_CAT" );
// todo : hmm.. case sensitive or insensitive match...
// for now, match any case...
if ( catalog.getText().equalsIgnoreCase( existingCatalogName ) ) {
return true;
}
}
return false;
}
finally {
try {
resultSet.close();
}
catch (SQLException ignore) {
}
}
}
catch (SQLException sqlException) {
throw convertSQLException( sqlException, "Unable to query DatabaseMetaData for existing catalogs" );
}
}
@Override
public boolean schemaExists(Identifier catalog, Identifier schema) {
try {
final String catalogFilter = determineCatalogFilter( catalog );
final String schemaFilter = determineSchemaFilter( schema );
final ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getSchemas(
catalogFilter,
schemaFilter
);
try {
if ( !resultSet.next() ) {
return false;
}
if ( resultSet.next() ) {
final String catalogName = catalog == null ? "" : catalog.getCanonicalName();
final String schemaName = schema == null ? "" : schema.getCanonicalName();
log.debugf(
"Multiple schemas found with that name [%s.%s]",
catalogName,
schemaName
);
}
return true;
}
finally {
try {
resultSet.close();
}
catch (SQLException ignore) {
}
}
}
catch (SQLException sqlException) {
throw convertSQLException( sqlException, "Unable to query DatabaseMetaData for existing schemas" );
}
}
private String determineCatalogFilter(Identifier catalog) throws SQLException {
Identifier identifierToUse = catalog;
if ( identifierToUse == null ) {
identifierToUse = extractionContext.getDefaultCatalog();
}
return extractionContext.getJdbcEnvironment().getIdentifierHelper().toMetaDataCatalogName( identifierToUse );
}
private String determineSchemaFilter(Identifier schema) throws SQLException {
Identifier identifierToUse = schema;<|fim▁hole|> }
return extractionContext.getJdbcEnvironment().getIdentifierHelper().toMetaDataSchemaName( identifierToUse );
}
public TableInformation extractTableInformation(
Identifier catalog,
Identifier schema,
Identifier name,
ResultSet resultSet) throws SQLException {
if ( catalog == null ) {
catalog = identifierHelper().toIdentifier( resultSet.getString( "TABLE_CAT" ) );
}
if ( schema == null ) {
schema = identifierHelper().toIdentifier( resultSet.getString( "TABLE_SCHEM" ) );
}
if ( name == null ) {
name = identifierHelper().toIdentifier( resultSet.getString( "TABLE_NAME" ) );
}
final QualifiedTableName tableName = new QualifiedTableName( catalog, schema, name );
return new TableInformationImpl(
this,
tableName,
isPhysicalTableType( resultSet.getString( "TABLE_TYPE" ) ),
resultSet.getString( "REMARKS" )
);
}
@Override
public TableInformation getTable(Identifier catalog, Identifier schema, Identifier tableName) {
if ( catalog != null || schema != null ) {
// The table defined an explicit namespace. In such cases we only ever want to look
// in the identified namespace
return locateTableInNamespace( catalog, schema, tableName );
}
else {
// The table did not define an explicit namespace:
// 1) look in current namespace
// 2) look in default namespace
// 3) look in all namespaces - multiple hits is considered an error
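			//    (processGetTableResults below raises a SchemaExtractionException
			//    when a lookup matches more than one table)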
TableInformation tableInfo = null;
// 1) look in current namespace
if ( extractionContext.getJdbcEnvironment().getCurrentCatalog() != null
|| extractionContext.getJdbcEnvironment().getCurrentSchema() != null ) {
tableInfo = locateTableInNamespace(
extractionContext.getJdbcEnvironment().getCurrentCatalog(),
extractionContext.getJdbcEnvironment().getCurrentSchema(),
tableName
);
if ( tableInfo != null ) {
return tableInfo;
}
}
// 2) look in default namespace
if ( extractionContext.getDefaultCatalog() != null || extractionContext.getDefaultSchema() != null ) {
tableInfo = locateTableInNamespace(
						extractionContext.getDefaultCatalog(),
						extractionContext.getDefaultSchema(),
tableName
);
if ( tableInfo != null ) {
return tableInfo;
}
}
// 3) look in all namespaces
try {
final String tableNameFilter = toMetaDataObjectName( tableName );
final ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getTables(
null,
null,
tableNameFilter,
tableTypes
);
try {
return processGetTableResults(
null,
null,
tableName,
resultSet
);
}
finally {
try {
resultSet.close();
}
catch (SQLException ignore) {
}
}
}
catch (SQLException sqlException) {
throw convertSQLException( sqlException, "Error accessing table metadata" );
}
}
}
private TableInformation locateTableInNamespace(
Identifier catalog,
Identifier schema,
Identifier tableName) {
Identifier catalogToUse = null;
Identifier schemaToUse = null;
final String catalogFilter;
final String schemaFilter;
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogToUse = catalog;
catalogFilter = toMetaDataObjectName( catalog );
}
}
else {
catalogFilter = null;
}
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaToUse = schema;
schemaFilter = toMetaDataObjectName( schema );
}
}
else {
schemaFilter = null;
}
final String tableNameFilter = toMetaDataObjectName( tableName );
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getTables(
catalogFilter,
schemaFilter,
tableNameFilter,
tableTypes
);
return processGetTableResults(
catalogToUse,
schemaToUse,
tableName,
resultSet
);
}
catch (SQLException sqlException) {
throw convertSQLException( sqlException, "Error accessing table metadata" );
}
}
private TableInformation processGetTableResults(
Identifier catalog,
Identifier schema,
Identifier tableName,
ResultSet resultSet) throws SQLException {
try {
if ( !resultSet.next() ) {
log.tableNotFound( tableName.render() );
return null;
}
final TableInformation tableInformation = extractTableInformation(
catalog,
schema,
tableName,
resultSet
);
if ( resultSet.next() ) {
log.multipleTablesFound( tableName.render() );
final String catalogName = catalog == null ? "" : catalog.render();
final String schemaName = schema == null ? "" : schema.render();
throw new SchemaExtractionException(
String.format(
Locale.ENGLISH,
"More than one table found in namespace (%s, %s) : %s",
catalogName,
schemaName,
tableName.render()
)
);
}
return tableInformation;
}
finally {
try {
resultSet.close();
}
catch (SQLException ignore) {
}
}
}
protected boolean isPhysicalTableType(String tableType) {
if ( extraPhysicalTableTypes == null ) {
return "TABLE".equalsIgnoreCase( tableType );
}
else {
if ( "TABLE".equalsIgnoreCase( tableType ) ) {
return true;
}
for ( int i = 0; i < extraPhysicalTableTypes.length; i++ ) {
if ( extraPhysicalTableTypes[i].equalsIgnoreCase( tableType ) ) {
return true;
}
}
return false;
}
}
@Override
public ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier) {
final Identifier catalog = tableInformation.getName().getCatalogName();
final Identifier schema = tableInformation.getName().getSchemaName();
final String catalogFilter;
final String schemaFilter;
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogFilter = toMetaDataObjectName( catalog );
}
}
else {
catalogFilter = null;
}
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaFilter = toMetaDataObjectName( schema );
}
}
else {
schemaFilter = null;
}
final String tableFilter = toMetaDataObjectName( tableInformation.getName().getTableName() );
final String columnFilter = toMetaDataObjectName( columnIdentifier );
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getColumns(
catalogFilter,
schemaFilter,
tableFilter,
columnFilter
);
try {
if ( !resultSet.next() ) {
return null;
}
return new ColumnInformationImpl(
tableInformation,
identifierHelper().toIdentifier( resultSet.getString( "COLUMN_NAME" ) ),
resultSet.getInt( "DATA_TYPE" ),
new StringTokenizer( resultSet.getString( "TYPE_NAME" ), "() " ).nextToken(),
resultSet.getInt( "COLUMN_SIZE" ),
resultSet.getInt( "DECIMAL_DIGITS" ),
interpretTruthValue( resultSet.getString( "IS_NULLABLE" ) )
);
}
finally {
resultSet.close();
}
}
catch (SQLException e) {
throw convertSQLException( e, "Error accessing column metadata: " + tableInformation.getName().toString() );
}
}
private TruthValue interpretTruthValue(String nullable) {
if ( "yes".equalsIgnoreCase( nullable ) ) {
return TruthValue.TRUE;
}
else if ( "no".equalsIgnoreCase( nullable ) ) {
return TruthValue.FALSE;
}
return TruthValue.UNKNOWN;
}
@Override
public PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation) {
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getPrimaryKeys(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() )
);
final List<ColumnInformation> pkColumns = new ArrayList<ColumnInformation>();
boolean firstPass = true;
Identifier pkIdentifier = null;
try {
while ( resultSet.next() ) {
final String currentPkName = resultSet.getString( "PK_NAME" );
final Identifier currentPkIdentifier = currentPkName == null
? null
: identifierHelper().toIdentifier( currentPkName );
if ( firstPass ) {
pkIdentifier = currentPkIdentifier;
firstPass = false;
}
else {
if ( !EqualsHelper.equals( pkIdentifier, currentPkIdentifier ) ) {
throw new SchemaExtractionException(
String.format(
"Encountered primary keys differing name on table %s",
tableInformation.getName().toString()
)
);
}
}
final int columnPosition = resultSet.getInt( "KEY_SEQ" );
final String columnName = resultSet.getString( "COLUMN_NAME" );
final Identifier columnIdentifier = identifierHelper().toIdentifier( columnName );
final ColumnInformation column = tableInformation.getColumn( columnIdentifier );
pkColumns.add( columnPosition-1, column );
}
}
finally {
resultSet.close();
}
if ( firstPass ) {
// we did not find any results (no pk)
return null;
}
else {
// validate column list is properly contiguous
for ( int i = 0; i < pkColumns.size(); i++ ) {
if ( pkColumns.get( i ) == null ) {
throw new SchemaExtractionException( "Primary Key information was missing for KEY_SEQ = " + ( i+1) );
}
}
// build the return
return new PrimaryKeyInformationImpl( pkIdentifier, pkColumns );
}
}
catch (SQLException e) {
throw convertSQLException( e, "Error while reading primary key meta data for " + tableInformation.getName().toString() );
}
}
@Override
public Iterable<IndexInformation> getIndexes(TableInformation tableInformation) {
final Map<Identifier, IndexInformationImpl.Builder> builders = new HashMap<Identifier, IndexInformationImpl.Builder>();
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getIndexInfo(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() ),
false, // DO NOT limit to just unique
true // DO require up-to-date results
);
try {
while ( resultSet.next() ) {
if ( resultSet.getShort("TYPE") == DatabaseMetaData.tableIndexStatistic ) {
continue;
}
final Identifier indexIdentifier = identifierHelper().toIdentifier(
resultSet.getString(
"INDEX_NAME"
)
);
IndexInformationImpl.Builder builder = builders.get( indexIdentifier );
if ( builder == null ) {
builder = IndexInformationImpl.builder( indexIdentifier );
builders.put( indexIdentifier, builder );
}
final Identifier columnIdentifier = identifierHelper().toIdentifier( resultSet.getString( "COLUMN_NAME" ) );
final ColumnInformation columnInformation = tableInformation.getColumn( columnIdentifier );
if ( columnInformation == null ) {
// See HHH-10191: this may happen when dealing with Oracle/PostgreSQL function indexes
log.logCannotLocateIndexColumnInformation(
columnIdentifier.getText(),
indexIdentifier.getText()
);
}
builder.addColumn( columnInformation );
}
}
finally {
resultSet.close();
}
}
catch (SQLException e) {
throw convertSQLException(
e,
"Error accessing index information: " + tableInformation.getName().toString()
);
}
final List<IndexInformation> indexes = new ArrayList<IndexInformation>();
for ( IndexInformationImpl.Builder builder : builders.values() ) {
IndexInformationImpl index = builder.build();
indexes.add( index );
}
return indexes;
}
@Override
public Iterable<ForeignKeyInformation> getForeignKeys(TableInformation tableInformation) {
final Map<Identifier, ForeignKeyBuilder> fkBuilders = new HashMap<Identifier, ForeignKeyBuilder>();
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getImportedKeys(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() )
);
// todo : need to account for getCrossReference() as well...
try {
while ( resultSet.next() ) {
// IMPL NOTE : The builder is mainly used to collect the column reference mappings
final Identifier fkIdentifier = identifierHelper().toIdentifier(
resultSet.getString( "FK_NAME" )
);
ForeignKeyBuilder fkBuilder = fkBuilders.get( fkIdentifier );
if ( fkBuilder == null ) {
fkBuilder = generateForeignKeyBuilder( fkIdentifier );
fkBuilders.put( fkIdentifier, fkBuilder );
}
final QualifiedTableName incomingPkTableName = extractKeyTableName( resultSet, "PK" );
final TableInformation pkTableInformation = extractionContext.getDatabaseObjectAccess()
.locateTableInformation( incomingPkTableName );
if ( pkTableInformation == null ) {
// the assumption here is that we have not seen this table already based on fully-qualified name
// during previous step of building all table metadata so most likely this is
// not a match based solely on schema/catalog and that another row in this result set
// should match.
continue;
}
final Identifier fkColumnIdentifier = identifierHelper().toIdentifier(
resultSet.getString( "FKCOLUMN_NAME" )
);
final Identifier pkColumnIdentifier = identifierHelper().toIdentifier(
resultSet.getString( "PKCOLUMN_NAME" )
);
fkBuilder.addColumnMapping(
tableInformation.getColumn( fkColumnIdentifier ),
pkTableInformation.getColumn( pkColumnIdentifier )
);
}
}
finally {
resultSet.close();
}
}
catch (SQLException e) {
throw convertSQLException(
e,
"Error accessing column metadata: " + tableInformation.getName().toString()
);
}
final List<ForeignKeyInformation> fks = new ArrayList<ForeignKeyInformation>();
for ( ForeignKeyBuilder fkBuilder : fkBuilders.values() ) {
ForeignKeyInformation fk = fkBuilder.build();
fks.add( fk );
}
return fks;
}
private ForeignKeyBuilder generateForeignKeyBuilder(Identifier fkIdentifier) {
return new ForeignKeyBuilderImpl( fkIdentifier );
}
protected interface ForeignKeyBuilder {
ForeignKeyBuilder addColumnMapping(ColumnInformation referencing, ColumnInformation referenced);
ForeignKeyInformation build();
}
protected static class ForeignKeyBuilderImpl implements ForeignKeyBuilder {
private final Identifier fkIdentifier;
private final List<ForeignKeyInformation.ColumnReferenceMapping> columnMappingList = new ArrayList<ForeignKeyInformation.ColumnReferenceMapping>();
public ForeignKeyBuilderImpl(Identifier fkIdentifier) {
this.fkIdentifier = fkIdentifier;
}
@Override
public ForeignKeyBuilder addColumnMapping(ColumnInformation referencing, ColumnInformation referenced) {
columnMappingList.add( new ForeignKeyInformationImpl.ColumnReferenceMappingImpl( referencing, referenced ) );
return this;
}
@Override
public ForeignKeyInformationImpl build() {
if ( columnMappingList.isEmpty() ) {
throw new SchemaManagementException(
"Attempt to resolve foreign key metadata from JDBC metadata failed to find " +
"column mappings for foreign key named [" + fkIdentifier.getText() + "]"
);
}
return new ForeignKeyInformationImpl( fkIdentifier, columnMappingList );
}
}
private QualifiedTableName extractKeyTableName(ResultSet resultSet, String prefix) throws SQLException {
final String incomingCatalogName = resultSet.getString( prefix + "TABLE_CAT" );
final String incomingSchemaName = resultSet.getString( prefix + "TABLE_SCHEM" );
final String incomingTableName = resultSet.getString( prefix + "TABLE_NAME" );
final DatabaseIdentifier catalog = DatabaseIdentifier.toIdentifier( incomingCatalogName );
final DatabaseIdentifier schema = DatabaseIdentifier.toIdentifier( incomingSchemaName );
final DatabaseIdentifier table = DatabaseIdentifier.toIdentifier( incomingTableName );
return new QualifiedTableName( catalog, schema, table );
}
}<|fim▁end|> | if ( identifierToUse == null ) {
identifierToUse = extractionContext.getDefaultSchema(); |
<|file_name|>utils_test.py<|end_file_name|><|fim▁begin|>"""This contains the unit tests for treadmill.utils.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import os
import shutil
import signal
import stat
import tempfile
import time
import unittest
# Disable W0402: string deprecated
# pylint: disable=W0402
import string
import mock
import six
if six.PY2 and os.name == 'posix':
import subprocess32 as subprocess # pylint: disable=import-error
else:
import subprocess # pylint: disable=wrong-import-order
from treadmill import exc
from treadmill import utils
from treadmill import yamlwrapper as yaml
class UtilsTest(unittest.TestCase):
"""This contains the treadmill.utils tests."""
def setUp(self):
self.root = tempfile.mkdtemp()
def tearDown(self):
if self.root and os.path.isdir(self.root):
shutil.rmtree(self.root)
@mock.patch('treadmill.subproc.get_aliases', mock.Mock(return_value={}))
def test_create_script(self):
"""this tests the create_script function.
the function creates executable scripts from templates that exist
in the template directory.
"""
script_file = os.path.join(self.root, 'script')
# Function we are testing
utils.create_script(
script_file,
's6.run',
user='testproid',
home='home',
shell='shell',
_alias={
's6_setuidgid': '/test/s6-setuidgid',
}
)
# Read the output from the mock filesystem
with io.open(script_file) as script:
data = script.read()
# Validate that data is what it should be
self.assertTrue(data.index(
'/test/s6-setuidgid testproid') > 0)
# Validate that the file is +x
self.assertEqual(utils.os.stat(script_file).st_mode, 33261)
@mock.patch('treadmill.subproc.get_aliases', mock.Mock(return_value={}))
def test_create_script_perms(self):
"""this tests the create_script function (permissions).
"""
script_file = os.path.join(self.root, 'script')
# Test non-default mode (+x)
mode = (stat.S_IRUSR |
stat.S_IRGRP |
stat.S_IROTH)
utils.create_script(
script_file,
's6.run',
mode=mode,<|fim▁hole|> _alias={
's6_setuidgid': '/test/s6-setuidgid',
}
)
self.assertEqual(utils.os.stat(script_file).st_mode, 33060)
def test_base_n(self):
"""Test to/from_base_n conversions."""
alphabet = (string.digits +
string.ascii_lowercase +
string.ascii_uppercase)
for base in [2, 10, 16, 36, 62]:
for num in [0, 10, 2313, 23134223879243284]:
n_num = utils.to_base_n(num, base=base, alphabet=alphabet)
_num = utils.from_base_n(n_num, base=base, alphabet=alphabet)
self.assertTrue(num == _num)
self.assertEqual(utils.to_base_n(15, base=16), 'f')
self.assertEqual(utils.to_base_n(10, base=2), '1010')
self.assertEqual(
utils.from_base_n('101', base=2),
int('101', base=2),
)
self.assertEqual(
utils.from_base_n('deadbeef', base=16),
int('deadbeef', base=16)
)
def test_ip2int(self):
"""Tests IP string to int representation conversion."""
self.assertEqual(0x40E9BB63, utils.ip2int('64.233.187.99'))
ip = utils.ip2int('192.168.100.1')
self.assertEqual('192.168.100.2', utils.int2ip(ip + 1))
self.assertEqual('192.168.100.0', utils.int2ip(ip - 1))
ip = utils.ip2int('192.168.100.255')
self.assertEqual('192.168.101.0', utils.int2ip(ip + 1))
ip = utils.ip2int('192.168.100.0')
self.assertEqual('192.168.99.255', utils.int2ip(ip - 1))
def test_to_obj(self):
"""Tests dict to namedtuple conversion."""
obj = utils.to_obj({'a': 1, 'b': 2, 'c': 3}, 'foo')
self.assertEqual(1, obj.a)
self.assertEqual(2, obj.b)
self.assertEqual(3, obj.c)
obj = utils.to_obj({'a': 1, 'b': [1, 2, 3], 'c': 3}, 'foo')
self.assertEqual(1, obj.a)
self.assertEqual([1, 2, 3], obj.b)
self.assertEqual(3, obj.c)
obj = utils.to_obj({'a': 1, 'b': {'d': 5}, 'c': 3}, 'foo')
self.assertEqual(1, obj.a)
self.assertEqual(5, obj.b.d)
self.assertEqual(3, obj.c)
obj = utils.to_obj({'a': [1, {'d': 5}, 3], 'b': 33}, 'foo')
self.assertEqual(1, obj.a[0])
self.assertEqual(5, obj.a[1].d)
self.assertEqual(3, obj.a[2])
self.assertEqual(33, obj.b)
def test_kilobytes(self):
"""Test memory/disk size string conversion."""
self.assertEqual(10, utils.kilobytes('10K'))
self.assertEqual(10, utils.kilobytes('10k'))
self.assertRaises(Exception, utils.kilobytes, '10')
self.assertEqual(10 * 1024, utils.kilobytes('10M'))
self.assertEqual(10 * 1024, utils.kilobytes('10m'))
self.assertEqual(10 * 1024 * 1024, utils.kilobytes('10G'))
self.assertEqual(10 * 1024 * 1024, utils.kilobytes('10g'))
def test_size_to_bytes(self):
"""Test conversion of units to bytes."""
self.assertEqual(10, utils.size_to_bytes(10))
self.assertEqual(-10, utils.size_to_bytes(-10))
self.assertEqual(10, utils.size_to_bytes('10'))
self.assertEqual(-10, utils.size_to_bytes('-10'))
self.assertEqual(10 * 1024, utils.size_to_bytes('10K'))
self.assertEqual(-10 * 1024, utils.size_to_bytes('-10K'))
self.assertEqual(-10 * 1024 * 1024, utils.size_to_bytes('-10M'))
def test_cpuunits(self):
"""Test conversion of cpu string to bmips."""
self.assertEqual(10, utils.cpu_units('10%'))
self.assertEqual(10, utils.cpu_units('10'))
def test_validate(self):
"""Tests dictionary validation."""
schema = [
('required', True, str),
('optional', False, str),
]
struct = {'required': 'foo'}
utils.validate(struct, schema)
self.assertNotIn('optional', struct)
struct = {'required': 'foo', 'optional': 'xxx'}
utils.validate(struct, schema)
struct = {'required': 'foo', 'optional': 1234}
self.assertRaises(Exception, utils.validate,
struct, schema)
schema = [
('required', True, list),
('optional', False, list),
]
struct = {'required': ['foo']}
utils.validate(struct, schema)
struct = {'required': 'foo'}
self.assertRaises(Exception, utils.validate,
struct, schema)
def test_to_seconds(self):
"""Tests time interval to seconds conversion."""
self.assertEqual(0, utils.to_seconds('0s'))
self.assertEqual(3, utils.to_seconds('3s'))
self.assertEqual(180, utils.to_seconds('3m'))
self.assertEqual(7200, utils.to_seconds('2h'))
self.assertEqual(259200, utils.to_seconds('3d'))
def test_find_in_path(self):
"""Tests finding program in system path."""
temp_dir = self.root
saved_path = os.environ['PATH']
# xxxx is not in path
self.assertEqual('xxxx', utils.find_in_path('xxxx'))
os.environ['PATH'] = os.environ['PATH'] + ':' + temp_dir
io.open(os.path.join(temp_dir, 'xxxx'), 'w').close()
# xxxx is in path, but not executable.
self.assertEqual('xxxx', utils.find_in_path('xxxx'))
os.chmod(os.path.join(temp_dir, 'xxxx'), int(utils.EXEC_MODE))
self.assertEqual(
os.path.join(temp_dir, 'xxxx'),
utils.find_in_path('xxxx')
)
os.environ['PATH'] = saved_path
def test_humanreadable(self):
"""Tests conversion of values into human readable format."""
self.assertEqual('1.0M', utils.bytes_to_readable(1024, 'K'))
self.assertEqual('1.0G', utils.bytes_to_readable(1024, 'M'))
self.assertEqual(
'2.5T',
utils.bytes_to_readable(1024 * 1024 * 2.5, 'M')
)
self.assertEqual('1.0K', utils.bytes_to_readable(1024, 'B'))
self.assertEqual('2,310', utils.cpu_to_readable(2310))
self.assertEqual('23.10', utils.cpu_to_cores_readable(2310))
def test_tail(self):
"""Tests utils.tail."""
filed, filepath = tempfile.mkstemp()
with os.fdopen(filed, 'w') as f:
for i in six.moves.range(0, 5):
f.write('%d\n' % i)
with io.open(filepath) as f:
lines = utils.tail_stream(f)
self.assertEqual(['0\n', '1\n', '2\n', '3\n', '4\n'], lines)
os.unlink(filepath)
filed, filepath = tempfile.mkstemp()
with os.fdopen(filed, 'w') as f:
for i in six.moves.range(0, 10000):
f.write('%d\n' % i)
with io.open(filepath) as f:
lines = utils.tail_stream(f, 5)
self.assertEqual(
['9995\n', '9996\n', '9997\n', '9998\n', '9999\n'],
lines
)
# Test utils.tail given the file name.
lines = utils.tail(filepath, 5)
self.assertEqual(
['9995\n', '9996\n', '9997\n', '9998\n', '9999\n'],
lines
)
os.unlink(filepath)
self.assertEqual([], utils.tail('/no/such/thing'))
@mock.patch('os.write', mock.Mock())
@mock.patch('os.close', mock.Mock())
def test_report_ready(self):
"""Tests reporting service readyness."""
cwd = os.getcwd()
tmpdir = self.root
os.chdir(tmpdir)
utils.report_ready()
self.assertFalse(os.write.called)
self.assertFalse(os.close.called)
with io.open('notification-fd', 'w') as f:
f.write('300')
utils.report_ready()
os.write.assert_called_with(300, mock.ANY)
os.close.assert_called_with(300)
os.write.reset()
os.close.reset()
with io.open('notification-fd', 'w') as f:
f.write('300\n')
utils.report_ready()
os.write.assert_called_with(300, mock.ANY)
os.close.assert_called_with(300)
os.chdir(cwd)
def test_signal_flag(self):
"""Tests signal flag."""
signalled = utils.make_signal_flag(signal.SIGHUP, signal.SIGTERM)
self.assertFalse(signalled)
os.kill(os.getpid(), signal.SIGHUP)
time.sleep(0.1)
self.assertTrue(signalled)
signalled.clear()
os.kill(os.getpid(), signal.SIGTERM)
time.sleep(0.1)
self.assertTrue(signalled)
def test_to_yaml(self):
"""Tests conversion of dict to yaml representation."""
obj = {
'xxx': u'abcd'
}
self.assertEqual(yaml.dump(obj), u'{xxx: abcd}\n')
@mock.patch('signal.signal', mock.Mock(spec_set=True))
@mock.patch('os.closerange', mock.Mock(spec_set=True))
@mock.patch('os.execvp', mock.Mock(spec_set=True))
def test_sane_execvp(self):
"""Tests sane execvp wrapper.
"""
# do not complain about accessing protected member _SIGNALS
# pylint: disable=W0212
utils.sane_execvp('/bin/sleep', ['sleep', '30'])
os.closerange.assert_called_with(3, subprocess.MAXFD)
signal.signal.assert_has_calls(
[
mock.call(i, signal.SIG_DFL)
for i in utils._SIGNALS
]
)
os.execvp.assert_called_with('/bin/sleep', ['sleep', '30'])
@mock.patch('treadmill.utils.sys_exit', mock.Mock())
def test_decorator_tm_exc(self):
"""Test the `exit_on_unhandled` decorator on `TreadmillError`."""
@utils.exit_on_unhandled
def test_fun():
"""raise exc.TreadmillError('test')."""
raise exc.TreadmillError('test')
test_fun()
utils.sys_exit.assert_called_with(-1)
@mock.patch('treadmill.utils.sys_exit', mock.Mock())
def test_decorator_py_exc(self):
"""Test the `exit_on_unhandled` decorator on Python `Exception`."""
@utils.exit_on_unhandled
def test_fun():
"""raise Exception('test')."""
raise Exception('test')
test_fun()
utils.sys_exit.assert_called_with(-1)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | user='testproid',
home='home',
shell='shell', |
<|file_name|>log.py<|end_file_name|><|fim▁begin|>"""
Logger classes for the ZAP CLI.
.. moduleauthor:: Daniel Grunwell (grunny)
"""
import logging<|fim▁hole|>
class ColorStreamHandler(logging.StreamHandler):
"""
StreamHandler that prints color. This is used by the console client.
"""
level_map = {
logging.DEBUG: ('magenta', ['bold']),
logging.INFO: ('cyan', ['bold']),
logging.WARNING: ('yellow', ['bold']),
logging.ERROR: ('red', ['bold']),
logging.CRITICAL: ('red', ['bold', 'reverse'])
}
@property
def is_tty(self):
"""is the stream a tty?"""
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty()
def emit(self, record):
colorize = 'console' in globals() and getattr(console, 'colorize', False)
if self.is_tty and colorize:
color, attr = self.level_map[record.levelno]
prefix = colored(str('[' + record.levelname + ']').ljust(18), color, attrs=attr)
if hasattr(record, 'highlight') and record.highlight:
record.msg = colored(record.msg, color, attrs=['bold', 'reverse'])
else:
prefix = str('[' + record.levelname + ']').ljust(18)
record.msg = prefix + record.msg
logging.StreamHandler.emit(self, record)
class ConsoleLogger(logging.Logger):
"""Log to the console with some color decorations."""
def __init__(self, name):
super(ConsoleLogger, self).__init__(name)
self.setLevel(logging.DEBUG)
self.addHandler(ColorStreamHandler(sys.stdout))
# Save the current logger
default_logger_class = logging.getLoggerClass()
# Console logging for CLI
logging.setLoggerClass(ConsoleLogger)
console = logging.getLogger('zap')
# Restore the previous logger
logging.setLoggerClass(default_logger_class)<|fim▁end|> | import sys
from termcolor import colored |
<|file_name|>extendingClassFromAliasAndUsageInIndexer.js<|end_file_name|><|fim▁begin|>//// [tests/cases/compiler/extendingClassFromAliasAndUsageInIndexer.ts] ////
//// [extendingClassFromAliasAndUsageInIndexer_backbone.ts]
export class Model {
public someData: string;
}
//// [extendingClassFromAliasAndUsageInIndexer_moduleA.ts]
import Backbone = require("./extendingClassFromAliasAndUsageInIndexer_backbone");
export class VisualizationModel extends Backbone.Model {
// interesting stuff here<|fim▁hole|>export class VisualizationModel extends Backbone.Model {
// different interesting stuff here
}
//// [extendingClassFromAliasAndUsageInIndexer_main.ts]
import Backbone = require("./extendingClassFromAliasAndUsageInIndexer_backbone");
import moduleA = require("./extendingClassFromAliasAndUsageInIndexer_moduleA");
import moduleB = require("./extendingClassFromAliasAndUsageInIndexer_moduleB");
interface IHasVisualizationModel {
VisualizationModel: typeof Backbone.Model;
}
var moduleATyped: IHasVisualizationModel = moduleA;
var moduleMap: { [key: string]: IHasVisualizationModel } = {
"moduleA": moduleA,
"moduleB": moduleB
};
var moduleName: string;
var visModel = new moduleMap[moduleName].VisualizationModel();
//// [extendingClassFromAliasAndUsageInIndexer_backbone.js]
"use strict";
var Model = (function () {
function Model() {
}
return Model;
}());
exports.Model = Model;
//// [extendingClassFromAliasAndUsageInIndexer_moduleA.js]
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Backbone = require("./extendingClassFromAliasAndUsageInIndexer_backbone");
var VisualizationModel = (function (_super) {
__extends(VisualizationModel, _super);
function VisualizationModel() {
_super.apply(this, arguments);
}
return VisualizationModel;
}(Backbone.Model));
exports.VisualizationModel = VisualizationModel;
//// [extendingClassFromAliasAndUsageInIndexer_moduleB.js]
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Backbone = require("./extendingClassFromAliasAndUsageInIndexer_backbone");
var VisualizationModel = (function (_super) {
__extends(VisualizationModel, _super);
function VisualizationModel() {
_super.apply(this, arguments);
}
return VisualizationModel;
}(Backbone.Model));
exports.VisualizationModel = VisualizationModel;
//// [extendingClassFromAliasAndUsageInIndexer_main.js]
"use strict";
var moduleA = require("./extendingClassFromAliasAndUsageInIndexer_moduleA");
var moduleB = require("./extendingClassFromAliasAndUsageInIndexer_moduleB");
var moduleATyped = moduleA;
var moduleMap = {
"moduleA": moduleA,
"moduleB": moduleB
};
var moduleName;
var visModel = new moduleMap[moduleName].VisualizationModel();<|fim▁end|> | }
//// [extendingClassFromAliasAndUsageInIndexer_moduleB.ts]
import Backbone = require("./extendingClassFromAliasAndUsageInIndexer_backbone"); |
<|file_name|>eos_l3_interfaces.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for eos_l3_interfaces
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network',
}
DOCUMENTATION = """
---
module: eos_l3_interfaces
version_added: 2.9
short_description: 'Manages L3 interface attributes of Arista EOS devices.'
description: 'This module provides declarative management of Layer 3 interfaces on Arista EOS devices.'
author: Nathaniel Case (@qalthos)
notes:
- 'Tested against vEOS v4.20.x'
- This module works with connection C(network_cli). See the
L(EOS Platform Options,../network/user_guide/platform_eos.html).
options:
config:
description: A dictionary of Layer 3 interface options
type: list
elements: dict
suboptions:
name:
description:<|fim▁hole|> - Full name of the interface, i.e. Ethernet1.
type: str
required: True
ipv4:
description:
- List of IPv4 addresses to be set for the Layer 3 interface mentioned in I(name) option.
type: list
elements: dict
suboptions:
address:
description:
- IPv4 address to be set in the format <ipv4 address>/<mask>
e.g. 192.0.2.1/24, or C(dhcp) to query DHCP for an IP address.
type: str
secondary:
description:
- Whether or not this address is a secondary address.
type: bool
default: False
ipv6:
description:
- List of IPv6 addresses to be set for the Layer 3 interface mentioned in I(name) option.
type: list
elements: dict
suboptions:
address:
description:
- IPv6 address to be set in the address format is <ipv6 address>/<mask>
e.g. 2001:db8:2201:1::1/64, or C(auto-config) to use SLAAC to choose an address.
type: str
state:
description:
- The state the configuration should be left in
type: str
choices:
- merged
- replaced
- overridden
- deleted
default: merged
"""
EXAMPLES = """
---
# Using deleted
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ip address 192.0.2.12/24
# !
# interface Ethernet2
# ipv6 address 2001:db8::1/64
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
- name: Delete L3 attributes of given interfaces.
eos_l3_interfaces:
config:
- name: Ethernet1
- name: Ethernet2
state: deleted
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# !
# interface Ethernet2
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# Using merged
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ip address 192.0.2.12/24
# !
# interface Ethernet2
# ipv6 address 2001:db8::1/64
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
- name: Merge provided configuration with device configuration.
eos_l3_interfaces:
    config:
      - name: Ethernet1
        ipv4:
          - address: 198.51.100.14/24
      - name: Ethernet2
        ipv4:
          - address: 203.0.113.27/24
    state: merged
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ip address 198.51.100.14/24
# !
# interface Ethernet2
# ip address 203.0.113.27/24
# ipv6 address 2001:db8::1/64
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# Using overridden
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ip address 192.0.2.12/24
# !
# interface Ethernet2
# ipv6 address 2001:db8::1/64
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
- name: Override device configuration of all L3 interfaces on device with provided configuration.
eos_l3_interfaces:
    config:
      - name: Ethernet1
        ipv6:
          - address: 2001:db8:feed::1/96
      - name: Management1
        ipv4:
          - address: dhcp
        ipv6:
          - address: auto-config
    state: overridden
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ipv6 address 2001:db8:feed::1/96
# !
# interface Ethernet2
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# Using replaced
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ip address 192.0.2.12/24
# !
# interface Ethernet2
# ipv6 address 2001:db8::1/64
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
- name: Replace device configuration of specified L3 interfaces with provided configuration.
eos_l3_interfaces:
    config:
      - name: Ethernet2
        ipv4:
          - address: 203.0.113.27/24
    state: replaced
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# ip address 192.0.2.12/24
# !
# interface Ethernet2
# ip address 203.0.113.27/24
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
"""
RETURN = """
before:
description: The configuration prior to the module invocation.
returned: always
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
after:
description: The resulting configuration after module invocation.
returned: when changed
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['interface Ethernet2', 'ip address 192.0.2.12/24']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.eos.argspec.l3_interfaces.l3_interfaces import L3_interfacesArgs
from ansible.module_utils.network.eos.config.l3_interfaces.l3_interfaces import L3_interfaces
def main():
"""
Main entry point for module execution
:returns: the result from module invocation
"""
module = AnsibleModule(argument_spec=L3_interfacesArgs.argument_spec,
supports_check_mode=True)
result = L3_interfaces(module).execute_module()
module.exit_json(**result)
if __name__ == '__main__':
main()<|fim▁end|> | |
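The argument spec imported above is generated code that lives in module_utils; a hypothetical shape consistent with the DOCUMENTATION block is sketched below (field names are inferred from the option tree, not copied from the generated file).

# Hypothetical argument spec mirroring the DOCUMENTATION options above.
argument_spec = {
    'config': {
        'type': 'list',
        'elements': 'dict',
        'options': {
            'name': {'type': 'str', 'required': True},
            'ipv4': {
                'type': 'list',
                'elements': 'dict',
                'options': {
                    'address': {'type': 'str'},
                    'secondary': {'type': 'bool', 'default': False},
                },
            },
            'ipv6': {
                'type': 'list',
                'elements': 'dict',
                'options': {'address': {'type': 'str'}},
            },
        },
    },
    'state': {
        'type': 'str',
        'choices': ['merged', 'replaced', 'overridden', 'deleted'],
        'default': 'merged',
    },
}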
<|file_name|>urwid_tui.py<|end_file_name|><|fim▁begin|>#
# SPDX-FileCopyrightText: 2020 Dmytro Kolomoiets <[email protected]> and contributors.
#
# SPDX-License-Identifier: GPL-3.0-only
#
import pickle
import urwid
import zmq
from ..ifc import *
def tui_client(src_uri, dst_uri, log_uri):
set_current_thread_name()<|fim▁hole|>
ctx = zmq.Context.instance()
# NOTE:(cohesion): connect topology backward :: from dst to src
dst_sock = ctx.socket(zmq.PUSH)
dst_sock.connect(dst_uri)
src_sock = ctx.socket(zmq.SUB)
src_sock.connect(src_uri)
src_sock.setsockopt_string(zmq.SUBSCRIBE, '*') # custom broadcast
src_sock.setsockopt_string(zmq.SUBSCRIBE, 'ui')
try:
## BET: python-urwid
# [Urwid] key capture in different views
# http://lists.excess.org/pipermail/urwid/2011-July/001080.html
body = urwid.Text("<Press ',' to exit>")
view = urwid.Filler(body, 'top')
def unhandled_input(key):
if key in ('esc', ','):
raise urwid.ExitMainLoop()
dst_sock.send_multipart([b'key', pickle.dumps(key)])
_log.info("Press: " + key)
# FIXME: change text only in subscriber
body.set_text(repr(key))
loop = urwid.MainLoop(view, unhandled_input=unhandled_input)
loop.run()
except KeyboardInterrupt:
pass
finally:
# _log.info("Fin: " + threading.current_thread().name)
# dst_sock.send_multipart([b'*', pickle.dumps('quit')])
dst_sock.close()
src_sock.close()
# ERR:(exception): can call only once in main thread
# zmq.Context.instance().term()<|fim▁end|> | _log = getLogger() |
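For context, the client above PUSHes key events to dst_uri and SUBscribes to topics '*' and 'ui' on src_uri, so its peer must PULL and PUBlish on matching sockets. A hedged sketch of such a peer follows; the URIs, framing, and echo behavior are assumptions derived only from the client code.

import pickle
import zmq

def broker(pull_uri='tcp://*:5556', pub_uri='tcp://*:5557'):  # URIs assumed
    ctx = zmq.Context.instance()
    pull = ctx.socket(zmq.PULL)
    pull.bind(pull_uri)  # receives [b'key', pickled key] from the TUI client
    pub = ctx.socket(zmq.PUB)
    pub.bind(pub_uri)  # fans events out to the 'ui' / '*' subscriptions
    while True:
        _topic, payload = pull.recv_multipart()
        key = pickle.loads(payload)
        pub.send_multipart([b'ui', pickle.dumps(key)])  # echo to the UI topic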