prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---
<|file_name|>average.py<|end_file_name|><|fim▁begin|>__author__ = "Guillaume"
__license__ = "MIT"
__copyright__ = "2015, ESRF"
import numpy
from freesas.model import SASModel
class Grid:
"""
This class is used to create a grid which includes all the input models
"""
def __init__(self, inputfiles):
"""
:param inputfiles: list of pdb files needed for averaging
"""
self.inputs = inputfiles
self.size = []
self.nbknots = None
self.radius = None
self.coordknots = []
def __repr__(self):
return "Grid with %i knots"%self.nbknots
def spatial_extent(self):
"""
Calculate the maximal extent of input models
:return self.size: 6-list with x,y,z max and then x,y,z min
"""
atoms = []
models_fineness = []
for files in self.inputs:
m = SASModel(files)
if len(atoms)==0:
atoms = m.atoms
else:
atoms = numpy.append(atoms, m.atoms, axis=0)
models_fineness.append(m.fineness)
mean_fineness = sum(models_fineness) / len(models_fineness)
coordmin = atoms.min(axis=0) - mean_fineness
coordmax = atoms.max(axis=0) + mean_fineness
self.size = [coordmax[0],coordmax[1],coordmax[2],coordmin[0],coordmin[1],coordmin[2]]
return self.size
def calc_radius(self, nbknots=None):
"""
Calculate the radius of each point of a hexagonal close-packed grid,
knowing the total volume and the number of knots in this grid.
:param nbknots: number of knots wanted for the grid
:return radius: the radius of each knot of the grid
"""
if len(self.size)==0:
self.spatial_extent()
nbknots = nbknots if nbknots is not None else 5000
size = self.size
dx = size[0] - size[3]
dy = size[1] - size[4]
dz = size[2] - size[5]
volume = dx * dy * dz
density = numpy.pi / (3*2**0.5)
radius = ((3 /( 4 * numpy.pi)) * density * volume / nbknots)**(1.0/3)
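# Added note: `density` = pi/(3*sqrt(2)) ~ 0.74 is the packing fraction of a
# close-packed (hcp/fcc) lattice, so each of the `nbknots` spheres is allotted
# density * volume / nbknots of the bounding-box volume, and solving
# (4/3)*pi*r**3 = density*volume/nbknots gives the radius computed above.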
self.radius = radius
return radius
def make_grid(self):
"""
Create a grid using the maximal size and the radius previously computed.
The geometry used is a face-centered cubic lattice (fcc).
:return knots: 2d-array, coordinates of each dot of the grid. Saved as self.coordknots.
"""
if len(self.size)==0:
self.spatial_extent()
if self.radius is None:
self.calc_radius()
radius = self.radius
a = numpy.sqrt(2.0)*radius
xmax = self.size[0]
xmin = self.size[3]
ymax = self.size[1]
ymin = self.size[4]
zmax = self.size[2]
zmin = self.size[5]
x = 0.0
y = 0.0
z = 0.0<|fim▁hole|>
xlist = []
ylist = []
zlist = []
knots = numpy.empty((1,4), dtype="float")
while (zmin + z) <= zmax:
zlist.append(z)
z += a
while (ymin + y) <= ymax:
ylist.append(y)
y += a
while (xmin + x) <= xmax:
xlist.append(x)
x += a
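# Added note: the three while-loops above collect candidate offsets along each
# axis with step a = sqrt(2)*radius; the nested loops below then keep only
# every other y position, with the selection shifted between even and odd x
# columns and z layers, which produces the staggered (fcc-like) packing.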
for i in range(len(zlist)):
z = zlist[i]
if i % 2 ==0:
for j in range(len(xlist)):
x = xlist[j]
if j % 2 == 0:
for y in ylist[0:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
else:
for y in ylist[1:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
else:
for j in range(len(xlist)):
x = xlist[j]
if j % 2 == 0:
for y in ylist[1:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
else:
for y in ylist[0:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
knots = numpy.delete(knots, 0, axis=0)
self.nbknots = knots.shape[0]
self.coordknots = knots
return knots
class AverModels():
"""
Provides tools to create an averaged model using several aligned dummy atom models
"""
def __init__(self, inputfiles, grid):
"""
:param inputfiles: list of pdb files of aligned models
:param grid: 2d-array coordinates of each point of a grid, fourth column full of zeros
"""
self.inputfiles = inputfiles
self.models = []
self.header = []
self.radius = None
self.atoms = []
self.grid = grid
def __repr__(self):
return "Average SAS model with %i atoms"%len(self.atoms)
def read_files(self, reference=None):
"""
Read all the pdb files in the inputfiles list, creating SASModels.
The SASModels created are saved in a list; the reference model is the first model of the list.
:param reference: position of the reference model file in the inputfiles list
"""
ref = reference if reference is not None else 0
inputfiles = self.inputfiles
models = []
models.append(SASModel(inputfiles[ref]))
for i in range(len(inputfiles)):
if i==ref:
continue
else:
models.append(SASModel(inputfiles[i]))
self.models = models
return models
def calc_occupancy(self, griddot):
"""
Assign an occupancy and a contribution factor to a point of the grid.
:param griddot: 1d-array, coordinates of a point of the grid
:return tuple: 2-tuple containing (occupancy, contribution)
"""
occ = 0.0
contrib = 0
for model in self.models:
f = model.fineness
for i in range(model.atoms.shape[0]):
dx = model.atoms[i, 0] - griddot[0]
dy = model.atoms[i, 1] - griddot[1]
dz = model.atoms[i, 2] - griddot[2]
dist = dx * dx + dy * dy + dz * dz
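# Added note: dist is the squared distance between this atom and the grid
# point; the contribution below decreases linearly with it and is clipped to
# zero once dist reaches the model fineness f.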
add = max(1 - (dist / f), 0)
if add != 0:
contrib += 1
occ += add
return occ, contrib
def assign_occupancy(self):
"""
For each point of the grid, the total occupancy and the contribution factor are computed and saved.
The grid is then sorted by decreasing occupancy.
The fourth column of the array corresponds to the occupancy of the point and the fifth to
the contribution for this point.
:return sortedgrid: 2d-array, coordinates of each point of the grid
"""
grid = self.grid
nbknots = grid.shape[0]
grid = numpy.append(grid, numpy.zeros((nbknots, 1), dtype="float"), axis=1)
for i in range(nbknots):
occ, contrib = self.calc_occupancy(grid[i, 0:3])
grid[i, 3] = occ
grid[i, 4] = contrib
order = numpy.argsort(grid, axis=0)[:, -2]
sortedgrid = numpy.empty_like(grid)
for i in range(nbknots):
sortedgrid[nbknots - i - 1, :] = grid[order[i], :]
return sortedgrid
def make_header(self):
"""
Create the layout of the pdb file for the averaged model.
"""
header = []
header.append("Number of files averaged : %s\n"%len(self.inputfiles))
for i in self.inputfiles:
header.append(i + "\n")
header.append("Total number of dots in the grid : %s\n"%self.grid.shape[0])
decade = 1
for i in range(self.grid.shape[0]):
line = "ATOM CA ASP 1 20.00 2 201\n"
line = line[:7] + "%4.i"%(i + 1) + line[11:]
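# Added note: the slice above writes the atom serial number (i + 1) into the
# template ATOM line; `decade` below is incremented every ten atoms and
# written into the residue-number field of the same template.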
if not (i + 1) % 10:
decade += 1
line = line[:21] + "%4.i"%decade + line[25:]
header.append(line)
self.header = header
return header
def save_aver(self, filename):
"""
Save the position of each occupied dot of the grid, its occupancy and its contribution
in a pdb file.
:param filename: name of the pdb file to write
"""
if len(self.header) == 0:
self.make_header()
assert self.grid.shape[-1] == 5
nr = 0
with open(filename, "w") as pdbout:
for line in self.header:
if line.startswith("ATOM"):
if nr < self.grid.shape[0] and self.grid[nr, 4] != 0:
coord = "%8.3f%8.3f%8.3f" % tuple(self.grid[nr, 0:3])
occ = "%6.2f" % self.grid[nr, 3]
contrib = "%2.f" % self.grid[nr, 4]
line = line[:30] + coord + occ + line[60:66] + contrib + line[68:]
else:
line = ""
nr += 1
pdbout.write(line)<|fim▁end|> | |
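A minimal usage sketch for the two classes above (not part of the original file; the `freesas.average` module path and the PDB file names are assumptions):

from freesas.average import Grid, AverModels  # assumed import path for the file above

inputfiles = ["model1.pdb", "model2.pdb", "model3.pdb"]  # hypothetical aligned dummy-atom models

grid = Grid(inputfiles)
grid.spatial_extent()                 # bounding box of all models
grid.calc_radius(nbknots=5000)        # knot radius for roughly 5000 grid points
knots = grid.make_grid()              # (N, 4) array, last column zeroed

aver = AverModels(inputfiles, knots)
aver.read_files(reference=0)          # first file is used as the reference model
aver.grid = aver.assign_occupancy()   # (N, 5) array sorted by decreasing occupancy
aver.save_aver("averaged-model.pdb")  # write the occupied knots as a PDB file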
<|file_name|>pprust.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi;
use ast::{P, StaticRegionTyParamBound, OtherRegionTyParamBound,
TraitTyParamBound, Required, Provided};
use ast;
use ast_util;
use owned_slice::OwnedSlice;
use attr::{AttrMetaMethods, AttributeMethods};
use codemap::{CodeMap, BytePos};
use codemap;
use diagnostic;
use parse::classify::expr_is_simple_block;
use parse::token::IdentInterner;
use parse::{comments, token};
use parse;
use print::pp::{break_offset, word, space, zerobreak, hardbreak};
use print::pp::{Breaks, Consistent, Inconsistent, eof};
use print::pp;
use std::cast;
use std::io::{IoResult, MemWriter};
use std::io;
use std::rc::Rc;
use std::str;
use std::strbuf::StrBuf;
pub enum AnnNode<'a> {
NodeBlock(&'a ast::Block),
NodeItem(&'a ast::Item),
NodeExpr(&'a ast::Expr),
NodePat(&'a ast::Pat),
}
pub trait PpAnn {
fn pre(&self, _state: &mut State, _node: AnnNode) -> IoResult<()> { Ok(()) }
fn post(&self, _state: &mut State, _node: AnnNode) -> IoResult<()> { Ok(()) }
}
pub struct NoAnn;
impl PpAnn for NoAnn {}
pub struct CurrentCommentAndLiteral {
cur_cmnt: uint,<|fim▁hole|>pub struct State<'a> {
pub s: pp::Printer,
cm: Option<&'a CodeMap>,
intr: Rc<token::IdentInterner>,
comments: Option<Vec<comments::Comment> >,
literals: Option<Vec<comments::Literal> >,
cur_cmnt_and_lit: CurrentCommentAndLiteral,
boxes: Vec<pp::Breaks>,
ann: &'a PpAnn
}
pub fn rust_printer(writer: Box<io::Writer>) -> State<'static> {
static NO_ANN: NoAnn = NoAnn;
rust_printer_annotated(writer, &NO_ANN)
}
pub fn rust_printer_annotated<'a>(writer: Box<io::Writer>,
ann: &'a PpAnn) -> State<'a> {
State {
s: pp::mk_printer(writer, default_columns),
cm: None,
intr: token::get_ident_interner(),
comments: None,
literals: None,
cur_cmnt_and_lit: CurrentCommentAndLiteral {
cur_cmnt: 0,
cur_lit: 0
},
boxes: Vec::new(),
ann: ann
}
}
pub static indent_unit: uint = 4u;
pub static default_columns: uint = 78u;
// Requires you to pass an input filename and reader so that
// it can scan the input text for comments and literals to
// copy forward.
pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate,
filename: StrBuf,
input: &mut io::Reader,
out: Box<io::Writer>,
ann: &'a PpAnn,
is_expanded: bool) -> IoResult<()> {
let (cmnts, lits) = comments::gather_comments_and_literals(
span_diagnostic,
filename,
input
);
let mut s = State {
s: pp::mk_printer(out, default_columns),
cm: Some(cm),
intr: token::get_ident_interner(),
comments: Some(cmnts),
// If the code is post expansion, don't use the table of
// literals, since it doesn't correspond with the literals
// in the AST anymore.
literals: if is_expanded {
None
} else {
Some(lits)
},
cur_cmnt_and_lit: CurrentCommentAndLiteral {
cur_cmnt: 0,
cur_lit: 0
},
boxes: Vec::new(),
ann: ann
};
try!(s.print_mod(&krate.module, krate.attrs.as_slice()));
try!(s.print_remaining_comments());
eof(&mut s.s)
}
pub fn to_str(f: |&mut State| -> IoResult<()>) -> StrBuf {
let mut s = rust_printer(box MemWriter::new());
f(&mut s).unwrap();
eof(&mut s.s).unwrap();
unsafe {
// FIXME(pcwalton): A nasty function to extract the string from an `io::Writer`
// that we "know" to be a `MemWriter` that works around the lack of checked
// downcasts.
let (_, wr): (uint, Box<MemWriter>) = cast::transmute_copy(&s.s.out);
let result = str::from_utf8_owned(wr.get_ref().to_owned()).unwrap();
cast::forget(wr);
result.to_strbuf()
}
}
pub fn ty_to_str(ty: &ast::Ty) -> StrBuf {
to_str(|s| s.print_type(ty))
}
pub fn pat_to_str(pat: &ast::Pat) -> StrBuf {
to_str(|s| s.print_pat(pat))
}
pub fn expr_to_str(e: &ast::Expr) -> StrBuf {
to_str(|s| s.print_expr(e))
}
pub fn lifetime_to_str(e: &ast::Lifetime) -> StrBuf {
to_str(|s| s.print_lifetime(e))
}
pub fn tt_to_str(tt: &ast::TokenTree) -> StrBuf {
to_str(|s| s.print_tt(tt))
}
pub fn tts_to_str(tts: &[ast::TokenTree]) -> StrBuf {
to_str(|s| s.print_tts(&tts))
}
pub fn stmt_to_str(stmt: &ast::Stmt) -> StrBuf {
to_str(|s| s.print_stmt(stmt))
}
pub fn item_to_str(i: &ast::Item) -> StrBuf {
to_str(|s| s.print_item(i))
}
pub fn generics_to_str(generics: &ast::Generics) -> StrBuf {
to_str(|s| s.print_generics(generics))
}
pub fn ty_method_to_str(p: &ast::TypeMethod) -> StrBuf {
to_str(|s| s.print_ty_method(p))
}
pub fn method_to_str(p: &ast::Method) -> StrBuf {
to_str(|s| s.print_method(p))
}
pub fn fn_block_to_str(p: &ast::FnDecl) -> StrBuf {
to_str(|s| s.print_fn_block_args(p))
}
pub fn path_to_str(p: &ast::Path) -> StrBuf {
to_str(|s| s.print_path(p, false))
}
pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
opt_explicit_self: Option<ast::ExplicitSelf_>,
generics: &ast::Generics) -> StrBuf {
to_str(|s| {
try!(s.print_fn(decl, Some(fn_style), abi::Rust,
name, generics, opt_explicit_self, ast::Inherited));
try!(s.end()); // Close the head box
s.end() // Close the outer box
})
}
pub fn block_to_str(blk: &ast::Block) -> StrBuf {
to_str(|s| {
// containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit));
// head-ibox, will be closed by print-block after {
try!(s.ibox(0u));
s.print_block(blk)
})
}
pub fn meta_item_to_str(mi: &ast::MetaItem) -> StrBuf {
to_str(|s| s.print_meta_item(mi))
}
pub fn attribute_to_str(attr: &ast::Attribute) -> StrBuf {
to_str(|s| s.print_attribute(attr))
}
pub fn lit_to_str(l: &ast::Lit) -> StrBuf {
to_str(|s| s.print_literal(l))
}
pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> StrBuf {
to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
}
pub fn variant_to_str(var: &ast::Variant) -> StrBuf {
to_str(|s| s.print_variant(var))
}
pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> StrBuf {
match vis {
ast::Public => format!("pub {}", s).to_strbuf(),
ast::Inherited => s.to_strbuf()
}
}
impl<'a> State<'a> {
pub fn ibox(&mut self, u: uint) -> IoResult<()> {
self.boxes.push(pp::Inconsistent);
pp::ibox(&mut self.s, u)
}
pub fn end(&mut self) -> IoResult<()> {
self.boxes.pop().unwrap();
pp::end(&mut self.s)
}
pub fn cbox(&mut self, u: uint) -> IoResult<()> {
self.boxes.push(pp::Consistent);
pp::cbox(&mut self.s, u)
}
// "raw box"
pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
self.boxes.push(b);
pp::rbox(&mut self.s, u, b)
}
pub fn nbsp(&mut self) -> IoResult<()> { word(&mut self.s, " ") }
pub fn word_nbsp(&mut self, w: &str) -> IoResult<()> {
try!(word(&mut self.s, w));
self.nbsp()
}
pub fn word_space(&mut self, w: &str) -> IoResult<()> {
try!(word(&mut self.s, w));
space(&mut self.s)
}
pub fn popen(&mut self) -> IoResult<()> { word(&mut self.s, "(") }
pub fn pclose(&mut self) -> IoResult<()> { word(&mut self.s, ")") }
pub fn head(&mut self, w: &str) -> IoResult<()> {
// outer-box is consistent
try!(self.cbox(indent_unit));
// head-box is inconsistent
try!(self.ibox(w.len() + 1));
// keyword that starts the head
if !w.is_empty() {
try!(self.word_nbsp(w));
}
Ok(())
}
pub fn bopen(&mut self) -> IoResult<()> {
try!(word(&mut self.s, "{"));
self.end() // close the head-box
}
pub fn bclose_(&mut self, span: codemap::Span,
indented: uint) -> IoResult<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open (&mut self, span: codemap::Span,
indented: uint, close_box: bool) -> IoResult<()> {
try!(self.maybe_print_comment(span.hi));
try!(self.break_offset_if_not_bol(1u, -(indented as int)));
try!(word(&mut self.s, "}"));
if close_box {
try!(self.end()); // close the outer-box
}
Ok(())
}
pub fn bclose(&mut self, span: codemap::Span) -> IoResult<()> {
self.bclose_(span, indent_unit)
}
pub fn is_begin(&mut self) -> bool {
match self.s.last_token() { pp::Begin(_) => true, _ => false }
}
pub fn is_end(&mut self) -> bool {
match self.s.last_token() { pp::End => true, _ => false }
}
pub fn is_bol(&mut self) -> bool {
self.s.last_token().is_eof() || self.s.last_token().is_hardbreak_tok()
}
pub fn in_cbox(&self) -> bool {
match self.boxes.last() {
Some(&last_box) => last_box == pp::Consistent,
None => false
}
}
pub fn hardbreak_if_not_bol(&mut self) -> IoResult<()> {
if !self.is_bol() {
try!(hardbreak(&mut self.s))
}
Ok(())
}
pub fn space_if_not_bol(&mut self) -> IoResult<()> {
if !self.is_bol() { try!(space(&mut self.s)); }
Ok(())
}
pub fn break_offset_if_not_bol(&mut self, n: uint,
off: int) -> IoResult<()> {
if !self.is_bol() {
break_offset(&mut self.s, n, off)
} else {
if off != 0 && self.s.last_token().is_hardbreak_tok() {
// We do something pretty sketchy here: tuck the nonzero
// offset-adjustment we were going to deposit along with the
// break into the previous hardbreak.
self.s.replace_last_token(pp::hardbreak_tok_offset(off));
}
Ok(())
}
}
// Synthesizes a comment that was not textually present in the original source
// file.
pub fn synth_comment(&mut self, text: StrBuf) -> IoResult<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
try!(word(&mut self.s, text.as_slice()));
try!(space(&mut self.s));
word(&mut self.s, "*/")
}
pub fn commasep<T>(&mut self, b: Breaks, elts: &[T],
op: |&mut State, &T| -> IoResult<()>)
-> IoResult<()> {
try!(self.rbox(0u, b));
let mut first = true;
for elt in elts.iter() {
if first { first = false; } else { try!(self.word_space(",")); }
try!(op(self, elt));
}
self.end()
}
pub fn commasep_cmnt<T>(
&mut self,
b: Breaks,
elts: &[T],
op: |&mut State, &T| -> IoResult<()>,
get_span: |&T| -> codemap::Span) -> IoResult<()> {
try!(self.rbox(0u, b));
let len = elts.len();
let mut i = 0u;
for elt in elts.iter() {
try!(self.maybe_print_comment(get_span(elt).hi));
try!(op(self, elt));
i += 1u;
if i < len {
try!(word(&mut self.s, ","));
try!(self.maybe_print_trailing_comment(get_span(elt),
Some(get_span(&elts[i]).hi)));
try!(self.space_if_not_bol());
}
}
self.end()
}
pub fn commasep_exprs(&mut self, b: Breaks,
exprs: &[@ast::Expr]) -> IoResult<()> {
self.commasep_cmnt(b, exprs, |s, &e| s.print_expr(e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> IoResult<()> {
try!(self.print_inner_attributes(attrs));
for vitem in _mod.view_items.iter() {
try!(self.print_view_item(vitem));
}
for item in _mod.items.iter() {
try!(self.print_item(*item));
}
Ok(())
}
pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod,
attrs: &[ast::Attribute]) -> IoResult<()> {
try!(self.print_inner_attributes(attrs));
for vitem in nmod.view_items.iter() {
try!(self.print_view_item(vitem));
}
for item in nmod.items.iter() {
try!(self.print_foreign_item(*item));
}
Ok(())
}
pub fn print_opt_lifetime(&mut self,
lifetime: &Option<ast::Lifetime>) -> IoResult<()> {
for l in lifetime.iter() {
try!(self.print_lifetime(l));
try!(self.nbsp());
}
Ok(())
}
pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> {
try!(self.maybe_print_comment(ty.span.lo));
try!(self.ibox(0u));
match ty.node {
ast::TyNil => try!(word(&mut self.s, "()")),
ast::TyBot => try!(word(&mut self.s, "!")),
ast::TyBox(ty) => {
try!(word(&mut self.s, "@"));
try!(self.print_type(ty));
}
ast::TyUniq(ty) => {
try!(word(&mut self.s, "~"));
try!(self.print_type(ty));
}
ast::TyVec(ty) => {
try!(word(&mut self.s, "["));
try!(self.print_type(ty));
try!(word(&mut self.s, "]"));
}
ast::TyPtr(ref mt) => {
try!(word(&mut self.s, "*"));
try!(self.print_mt(mt));
}
ast::TyRptr(ref lifetime, ref mt) => {
try!(word(&mut self.s, "&"));
try!(self.print_opt_lifetime(lifetime));
try!(self.print_mt(mt));
}
ast::TyTup(ref elts) => {
try!(self.popen());
try!(self.commasep(Inconsistent, elts.as_slice(),
|s, ty| s.print_type_ref(ty)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.pclose());
}
ast::TyBareFn(f) => {
let generics = ast::Generics {
lifetimes: f.lifetimes.clone(),
ty_params: OwnedSlice::empty()
};
try!(self.print_ty_fn(Some(f.abi), None, &None,
f.fn_style, ast::Many, f.decl, None, &None,
Some(&generics), None));
}
ast::TyClosure(f, ref region) => {
let generics = ast::Generics {
lifetimes: f.lifetimes.clone(),
ty_params: OwnedSlice::empty()
};
try!(self.print_ty_fn(None, Some('&'), region, f.fn_style,
f.onceness, f.decl, None, &f.bounds,
Some(&generics), None));
}
ast::TyProc(f) => {
let generics = ast::Generics {
lifetimes: f.lifetimes.clone(),
ty_params: OwnedSlice::empty()
};
try!(self.print_ty_fn(None, Some('~'), &None, f.fn_style,
f.onceness, f.decl, None, &f.bounds,
Some(&generics), None));
}
ast::TyPath(ref path, ref bounds, _) => {
try!(self.print_bounded_path(path, bounds));
}
ast::TyFixedLengthVec(ty, v) => {
try!(word(&mut self.s, "["));
try!(self.print_type(ty));
try!(word(&mut self.s, ", .."));
try!(self.print_expr(v));
try!(word(&mut self.s, "]"));
}
ast::TyTypeof(e) => {
try!(word(&mut self.s, "typeof("));
try!(self.print_expr(e));
try!(word(&mut self.s, ")"));
}
ast::TyInfer => {
try!(word(&mut self.s, "_"));
}
}
self.end()
}
pub fn print_type_ref(&mut self, ty: &P<ast::Ty>) -> IoResult<()> {
self.print_type(*ty)
}
pub fn print_foreign_item(&mut self,
item: &ast::ForeignItem) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(item.attrs.as_slice()));
match item.node {
ast::ForeignItemFn(decl, ref generics) => {
try!(self.print_fn(decl, None, abi::Rust, item.ident, generics,
None, item.vis));
try!(self.end()); // end head-ibox
try!(word(&mut self.s, ";"));
self.end() // end the outer fn box
}
ast::ForeignItemStatic(t, m) => {
try!(self.head(visibility_qualified(item.vis,
"static").as_slice()));
if m {
try!(self.word_space("mut"));
}
try!(self.print_ident(item.ident));
try!(self.word_space(":"));
try!(self.print_type(t));
try!(word(&mut self.s, ";"));
try!(self.end()); // end the head-ibox
self.end() // end the outer cbox
}
}
}
pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(item.attrs.as_slice()));
try!(self.ann.pre(self, NodeItem(item)));
match item.node {
ast::ItemStatic(ty, m, expr) => {
try!(self.head(visibility_qualified(item.vis,
"static").as_slice()));
if m == ast::MutMutable {
try!(self.word_space("mut"));
}
try!(self.print_ident(item.ident));
try!(self.word_space(":"));
try!(self.print_type(ty));
try!(space(&mut self.s));
try!(self.end()); // end the head-ibox
try!(self.word_space("="));
try!(self.print_expr(expr));
try!(word(&mut self.s, ";"));
try!(self.end()); // end the outer cbox
}
ast::ItemFn(decl, fn_style, abi, ref typarams, body) => {
try!(self.print_fn(
decl,
Some(fn_style),
abi,
item.ident,
typarams,
None,
item.vis
));
try!(word(&mut self.s, " "));
try!(self.print_block_with_attrs(body, item.attrs.as_slice()));
}
ast::ItemMod(ref _mod) => {
try!(self.head(visibility_qualified(item.vis,
"mod").as_slice()));
try!(self.print_ident(item.ident));
try!(self.nbsp());
try!(self.bopen());
try!(self.print_mod(_mod, item.attrs.as_slice()));
try!(self.bclose(item.span));
}
ast::ItemForeignMod(ref nmod) => {
try!(self.head("extern"));
try!(self.word_nbsp(nmod.abi.to_str()));
try!(self.bopen());
try!(self.print_foreign_mod(nmod, item.attrs.as_slice()));
try!(self.bclose(item.span));
}
ast::ItemTy(ty, ref params) => {
try!(self.ibox(indent_unit));
try!(self.ibox(0u));
try!(self.word_nbsp(visibility_qualified(item.vis,
"type").as_slice()));
try!(self.print_ident(item.ident));
try!(self.print_generics(params));
try!(self.end()); // end the inner ibox
try!(space(&mut self.s));
try!(self.word_space("="));
try!(self.print_type(ty));
try!(word(&mut self.s, ";"));
try!(self.end()); // end the outer ibox
}
ast::ItemEnum(ref enum_definition, ref params) => {
try!(self.print_enum_def(
enum_definition,
params,
item.ident,
item.span,
item.vis
));
}
ast::ItemStruct(struct_def, ref generics) => {
if struct_def.is_virtual {
try!(self.word_space("virtual"));
}
try!(self.head(visibility_qualified(item.vis,
"struct").as_slice()));
try!(self.print_struct(struct_def, generics, item.ident, item.span));
}
ast::ItemImpl(ref generics, ref opt_trait, ty, ref methods) => {
try!(self.head(visibility_qualified(item.vis,
"impl").as_slice()));
if generics.is_parameterized() {
try!(self.print_generics(generics));
try!(space(&mut self.s));
}
match opt_trait {
&Some(ref t) => {
try!(self.print_trait_ref(t));
try!(space(&mut self.s));
try!(self.word_space("for"));
}
&None => {}
}
try!(self.print_type(ty));
try!(space(&mut self.s));
try!(self.bopen());
try!(self.print_inner_attributes(item.attrs.as_slice()));
for meth in methods.iter() {
try!(self.print_method(*meth));
}
try!(self.bclose(item.span));
}
ast::ItemTrait(ref generics, ref sized, ref traits, ref methods) => {
try!(self.head(visibility_qualified(item.vis,
"trait").as_slice()));
try!(self.print_ident(item.ident));
try!(self.print_generics(generics));
if *sized == ast::DynSize {
try!(space(&mut self.s));
try!(word(&mut self.s, "for type"));
}
if traits.len() != 0u {
try!(word(&mut self.s, ":"));
for (i, trait_) in traits.iter().enumerate() {
try!(self.nbsp());
if i != 0 {
try!(self.word_space("+"));
}
try!(self.print_path(&trait_.path, false));
}
}
try!(word(&mut self.s, " "));
try!(self.bopen());
for meth in methods.iter() {
try!(self.print_trait_method(meth));
}
try!(self.bclose(item.span));
}
// I think it's reasonable to hide the context here:
ast::ItemMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _),
..}) => {
try!(self.print_visibility(item.vis));
try!(self.print_path(pth, false));
try!(word(&mut self.s, "! "));
try!(self.print_ident(item.ident));
try!(self.cbox(indent_unit));
try!(self.popen());
try!(self.print_tts(&(tts.as_slice())));
try!(self.pclose());
try!(self.end());
}
}
self.ann.post(self, NodeItem(item))
}
fn print_trait_ref(&mut self, t: &ast::TraitRef) -> IoResult<()> {
self.print_path(&t.path, false)
}
pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef,
generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span,
visibility: ast::Visibility) -> IoResult<()> {
try!(self.head(visibility_qualified(visibility, "enum").as_slice()));
try!(self.print_ident(ident));
try!(self.print_generics(generics));
try!(space(&mut self.s));
self.print_variants(enum_definition.variants.as_slice(), span)
}
pub fn print_variants(&mut self,
variants: &[P<ast::Variant>],
span: codemap::Span) -> IoResult<()> {
try!(self.bopen());
for &v in variants.iter() {
try!(self.space_if_not_bol());
try!(self.maybe_print_comment(v.span.lo));
try!(self.print_outer_attributes(v.node.attrs.as_slice()));
try!(self.ibox(indent_unit));
try!(self.print_variant(v));
try!(word(&mut self.s, ","));
try!(self.end());
try!(self.maybe_print_trailing_comment(v.span, None));
}
self.bclose(span)
}
pub fn print_visibility(&mut self, vis: ast::Visibility) -> IoResult<()> {
match vis {
ast::Public => self.word_nbsp("pub"),
ast::Inherited => Ok(())
}
}
pub fn print_struct(&mut self,
struct_def: &ast::StructDef,
generics: &ast::Generics,
ident: ast::Ident,
span: codemap::Span) -> IoResult<()> {
try!(self.print_ident(ident));
try!(self.print_generics(generics));
match struct_def.super_struct {
Some(t) => {
try!(self.word_space(":"));
try!(self.print_type(t));
},
None => {},
}
if ast_util::struct_def_is_tuple_like(struct_def) {
if !struct_def.fields.is_empty() {
try!(self.popen());
try!(self.commasep(
Inconsistent, struct_def.fields.as_slice(),
|s, field| {
match field.node.kind {
ast::NamedField(..) => fail!("unexpected named field"),
ast::UnnamedField(vis) => {
try!(s.print_visibility(vis));
try!(s.maybe_print_comment(field.span.lo));
s.print_type(field.node.ty)
}
}
}
));
try!(self.pclose());
}
try!(word(&mut self.s, ";"));
try!(self.end());
self.end() // close the outer-box
} else {
try!(self.nbsp());
try!(self.bopen());
try!(self.hardbreak_if_not_bol());
for field in struct_def.fields.iter() {
match field.node.kind {
ast::UnnamedField(..) => fail!("unexpected unnamed field"),
ast::NamedField(ident, visibility) => {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(field.span.lo));
try!(self.print_outer_attributes(field.node.attrs.as_slice()));
try!(self.print_visibility(visibility));
try!(self.print_ident(ident));
try!(self.word_nbsp(":"));
try!(self.print_type(field.node.ty));
try!(word(&mut self.s, ","));
}
}
}
self.bclose(span)
}
}
/// This doesn't deserve to be called "pretty" printing, but it should be
/// meaning-preserving. A quick hack that might help would be to look at the
/// spans embedded in the TTs to decide where to put spaces and newlines.
/// But it'd be better to parse these according to the grammar of the
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
match *tt {
ast::TTDelim(ref tts) => self.print_tts(&(tts.as_slice())),
ast::TTTok(_, ref tk) => {
word(&mut self.s, parse::token::to_str(tk).as_slice())
}
ast::TTSeq(_, ref tts, ref sep, zerok) => {
try!(word(&mut self.s, "$("));
for tt_elt in (*tts).iter() {
try!(self.print_tt(tt_elt));
}
try!(word(&mut self.s, ")"));
match *sep {
Some(ref tk) => {
try!(word(&mut self.s,
parse::token::to_str(tk).as_slice()));
}
None => ()
}
word(&mut self.s, if zerok { "*" } else { "+" })
}
ast::TTNonterminal(_, name) => {
try!(word(&mut self.s, "$"));
self.print_ident(name)
}
}
}
pub fn print_tts(&mut self, tts: & &[ast::TokenTree]) -> IoResult<()> {
try!(self.ibox(0));
for (i, tt) in tts.iter().enumerate() {
if i != 0 {
try!(space(&mut self.s));
}
try!(self.print_tt(tt));
}
self.end()
}
pub fn print_variant(&mut self, v: &ast::Variant) -> IoResult<()> {
try!(self.print_visibility(v.node.vis));
match v.node.kind {
ast::TupleVariantKind(ref args) => {
try!(self.print_ident(v.node.name));
if !args.is_empty() {
try!(self.popen());
try!(self.commasep(Consistent,
args.as_slice(),
|s, arg| s.print_type(arg.ty)));
try!(self.pclose());
}
}
ast::StructVariantKind(struct_def) => {
try!(self.head(""));
let generics = ast_util::empty_generics();
try!(self.print_struct(struct_def, &generics, v.node.name, v.span));
}
}
match v.node.disr_expr {
Some(d) => {
try!(space(&mut self.s));
try!(self.word_space("="));
self.print_expr(d)
}
_ => Ok(())
}
}
pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(m.span.lo));
try!(self.print_outer_attributes(m.attrs.as_slice()));
try!(self.print_ty_fn(None,
None,
&None,
m.fn_style,
ast::Many,
m.decl,
Some(m.ident),
&None,
Some(&m.generics),
Some(m.explicit_self.node)));
word(&mut self.s, ";")
}
pub fn print_trait_method(&mut self,
m: &ast::TraitMethod) -> IoResult<()> {
match *m {
Required(ref ty_m) => self.print_ty_method(ty_m),
Provided(m) => self.print_method(m)
}
}
pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(meth.span.lo));
try!(self.print_outer_attributes(meth.attrs.as_slice()));
try!(self.print_fn(meth.decl, Some(meth.fn_style), abi::Rust,
meth.ident, &meth.generics, Some(meth.explicit_self.node),
meth.vis));
try!(word(&mut self.s, " "));
self.print_block_with_attrs(meth.body, meth.attrs.as_slice())
}
pub fn print_outer_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
let mut count = 0;
for attr in attrs.iter() {
match attr.node.style {
ast::AttrOuter => {
try!(self.print_attribute(attr));
count += 1;
}
_ => {/* fallthrough */ }
}
}
if count > 0 {
try!(self.hardbreak_if_not_bol());
}
Ok(())
}
pub fn print_inner_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
let mut count = 0;
for attr in attrs.iter() {
match attr.node.style {
ast::AttrInner => {
try!(self.print_attribute(attr));
count += 1;
}
_ => {/* fallthrough */ }
}
}
if count > 0 {
try!(self.hardbreak_if_not_bol());
}
Ok(())
}
pub fn print_attribute(&mut self, attr: &ast::Attribute) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(attr.span.lo));
if attr.node.is_sugared_doc {
word(&mut self.s, attr.value_str().unwrap().get())
} else {
match attr.node.style {
ast::AttrInner => try!(word(&mut self.s, "#![")),
ast::AttrOuter => try!(word(&mut self.s, "#[")),
}
try!(self.print_meta_item(attr.meta()));
word(&mut self.s, "]")
}
}
pub fn print_stmt(&mut self, st: &ast::Stmt) -> IoResult<()> {
try!(self.maybe_print_comment(st.span.lo));
match st.node {
ast::StmtDecl(decl, _) => {
try!(self.print_decl(decl));
}
ast::StmtExpr(expr, _) => {
try!(self.space_if_not_bol());
try!(self.print_expr(expr));
}
ast::StmtSemi(expr, _) => {
try!(self.space_if_not_bol());
try!(self.print_expr(expr));
try!(word(&mut self.s, ";"));
}
ast::StmtMac(ref mac, semi) => {
try!(self.space_if_not_bol());
try!(self.print_mac(mac));
if semi {
try!(word(&mut self.s, ";"));
}
}
}
if parse::classify::stmt_ends_with_semi(st) {
try!(word(&mut self.s, ";"));
}
self.maybe_print_trailing_comment(st.span, None)
}
pub fn print_block(&mut self, blk: &ast::Block) -> IoResult<()> {
self.print_block_with_attrs(blk, &[])
}
pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> IoResult<()> {
self.print_block_unclosed_indent(blk, indent_unit)
}
pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
indented: uint) -> IoResult<()> {
self.print_block_maybe_unclosed(blk, indented, &[], false)
}
pub fn print_block_with_attrs(&mut self,
blk: &ast::Block,
attrs: &[ast::Attribute]) -> IoResult<()> {
self.print_block_maybe_unclosed(blk, indent_unit, attrs, true)
}
pub fn print_block_maybe_unclosed(&mut self,
blk: &ast::Block,
indented: uint,
attrs: &[ast::Attribute],
close_box: bool) -> IoResult<()> {
match blk.rules {
ast::UnsafeBlock(..) => try!(self.word_space("unsafe")),
ast::DefaultBlock => ()
}
try!(self.maybe_print_comment(blk.span.lo));
try!(self.ann.pre(self, NodeBlock(blk)));
try!(self.bopen());
try!(self.print_inner_attributes(attrs));
for vi in blk.view_items.iter() {
try!(self.print_view_item(vi));
}
for st in blk.stmts.iter() {
try!(self.print_stmt(*st));
}
match blk.expr {
Some(expr) => {
try!(self.space_if_not_bol());
try!(self.print_expr(expr));
try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi)));
}
_ => ()
}
try!(self.bclose_maybe_open(blk.span, indented, close_box));
self.ann.post(self, NodeBlock(blk))
}
fn print_else(&mut self, els: Option<@ast::Expr>) -> IoResult<()> {
match els {
Some(_else) => {
match _else.node {
// "another else-if"
ast::ExprIf(i, t, e) => {
try!(self.cbox(indent_unit - 1u));
try!(self.ibox(0u));
try!(word(&mut self.s, " else if "));
try!(self.print_expr(i));
try!(space(&mut self.s));
try!(self.print_block(t));
self.print_else(e)
}
// "final else"
ast::ExprBlock(b) => {
try!(self.cbox(indent_unit - 1u));
try!(self.ibox(0u));
try!(word(&mut self.s, " else "));
self.print_block(b)
}
// BLEAH, constraints would be great here
_ => {
fail!("print_if saw if with weird alternative");
}
}
}
_ => Ok(())
}
}
pub fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block,
elseopt: Option<@ast::Expr>, chk: bool) -> IoResult<()> {
try!(self.head("if"));
if chk { try!(self.word_nbsp("check")); }
try!(self.print_expr(test));
try!(space(&mut self.s));
try!(self.print_block(blk));
self.print_else(elseopt)
}
pub fn print_mac(&mut self, m: &ast::Mac) -> IoResult<()> {
match m.node {
// I think it's reasonable to hide the ctxt here:
ast::MacInvocTT(ref pth, ref tts, _) => {
try!(self.print_path(pth, false));
try!(word(&mut self.s, "!"));
try!(self.popen());
try!(self.print_tts(&tts.as_slice()));
self.pclose()
}
}
}
pub fn print_expr_vstore(&mut self, t: ast::ExprVstore) -> IoResult<()> {
match t {
ast::ExprVstoreUniq => word(&mut self.s, "box "),
ast::ExprVstoreSlice => word(&mut self.s, "&"),
ast::ExprVstoreMutSlice => {
try!(word(&mut self.s, "&"));
word(&mut self.s, "mut")
}
}
}
fn print_call_post(&mut self, args: &[@ast::Expr]) -> IoResult<()> {
try!(self.popen());
try!(self.commasep_exprs(Inconsistent, args));
self.pclose()
}
pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> {
try!(self.maybe_print_comment(expr.span.lo));
try!(self.ibox(indent_unit));
try!(self.ann.pre(self, NodeExpr(expr)));
match expr.node {
ast::ExprVstore(e, v) => {
try!(self.print_expr_vstore(v));
try!(self.print_expr(e));
},
ast::ExprBox(p, e) => {
try!(word(&mut self.s, "box"));
try!(word(&mut self.s, "("));
try!(self.print_expr(p));
try!(self.word_space(")"));
try!(self.print_expr(e));
}
ast::ExprVec(ref exprs) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
try!(word(&mut self.s, "]"));
try!(self.end());
}
ast::ExprRepeat(element, count) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.print_expr(element));
try!(word(&mut self.s, ","));
try!(word(&mut self.s, ".."));
try!(self.print_expr(count));
try!(word(&mut self.s, "]"));
try!(self.end());
}
ast::ExprStruct(ref path, ref fields, wth) => {
try!(self.print_path(path, true));
try!(word(&mut self.s, "{"));
try!(self.commasep_cmnt(
Consistent,
fields.as_slice(),
|s, field| {
try!(s.ibox(indent_unit));
try!(s.print_ident(field.ident.node));
try!(s.word_space(":"));
try!(s.print_expr(field.expr));
s.end()
},
|f| f.span));
match wth {
Some(expr) => {
try!(self.ibox(indent_unit));
if !fields.is_empty() {
try!(word(&mut self.s, ","));
try!(space(&mut self.s));
}
try!(word(&mut self.s, ".."));
try!(self.print_expr(expr));
try!(self.end());
}
_ => try!(word(&mut self.s, ","))
}
try!(word(&mut self.s, "}"));
}
ast::ExprTup(ref exprs) => {
try!(self.popen());
try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
if exprs.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.pclose());
}
ast::ExprCall(func, ref args) => {
try!(self.print_expr(func));
try!(self.print_call_post(args.as_slice()));
}
ast::ExprMethodCall(ident, ref tys, ref args) => {
let base_args = args.slice_from(1);
try!(self.print_expr(*args.get(0)));
try!(word(&mut self.s, "."));
try!(self.print_ident(ident.node));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(Inconsistent, tys.as_slice(),
|s, ty| s.print_type_ref(ty)));
try!(word(&mut self.s, ">"));
}
try!(self.print_call_post(base_args));
}
ast::ExprBinary(op, lhs, rhs) => {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(self.word_space(ast_util::binop_to_str(op)));
try!(self.print_expr(rhs));
}
ast::ExprUnary(op, expr) => {
try!(word(&mut self.s, ast_util::unop_to_str(op)));
try!(self.print_expr(expr));
}
ast::ExprAddrOf(m, expr) => {
try!(word(&mut self.s, "&"));
try!(self.print_mutability(m));
// Avoid `& &e` => `&&e`.
match (m, &expr.node) {
(ast::MutImmutable, &ast::ExprAddrOf(..)) => try!(space(&mut self.s)),
_ => { }
}
try!(self.print_expr(expr));
}
ast::ExprLit(lit) => try!(self.print_literal(lit)),
ast::ExprCast(expr, ty) => {
try!(self.print_expr(expr));
try!(space(&mut self.s));
try!(self.word_space("as"));
try!(self.print_type(ty));
}
ast::ExprIf(test, blk, elseopt) => {
try!(self.print_if(test, blk, elseopt, false));
}
ast::ExprWhile(test, blk) => {
try!(self.head("while"));
try!(self.print_expr(test));
try!(space(&mut self.s));
try!(self.print_block(blk));
}
ast::ExprForLoop(pat, iter, blk, opt_ident) => {
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(self.word_space(":"));
}
try!(self.head("for"));
try!(self.print_pat(pat));
try!(space(&mut self.s));
try!(self.word_space("in"));
try!(self.print_expr(iter));
try!(space(&mut self.s));
try!(self.print_block(blk));
}
ast::ExprLoop(blk, opt_ident) => {
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(self.word_space(":"));
}
try!(self.head("loop"));
try!(space(&mut self.s));
try!(self.print_block(blk));
}
ast::ExprMatch(expr, ref arms) => {
try!(self.cbox(indent_unit));
try!(self.ibox(4));
try!(self.word_nbsp("match"));
try!(self.print_expr(expr));
try!(space(&mut self.s));
try!(self.bopen());
let len = arms.len();
for (i, arm) in arms.iter().enumerate() {
// I have no idea why this check is necessary, but here it
// is :(
if arm.attrs.is_empty() {
try!(space(&mut self.s));
}
try!(self.cbox(indent_unit));
try!(self.ibox(0u));
try!(self.print_outer_attributes(arm.attrs.as_slice()));
let mut first = true;
for p in arm.pats.iter() {
if first {
first = false;
} else {
try!(space(&mut self.s));
try!(self.word_space("|"));
}
try!(self.print_pat(*p));
}
try!(space(&mut self.s));
match arm.guard {
Some(e) => {
try!(self.word_space("if"));
try!(self.print_expr(e));
try!(space(&mut self.s));
}
None => ()
}
try!(self.word_space("=>"));
match arm.body.node {
ast::ExprBlock(blk) => {
// the block will close the pattern's ibox
try!(self.print_block_unclosed_indent(blk, indent_unit));
}
_ => {
try!(self.end()); // close the ibox for the pattern
try!(self.print_expr(arm.body));
}
}
if !expr_is_simple_block(expr)
&& i < len - 1 {
try!(word(&mut self.s, ","));
}
try!(self.end()); // close enclosing cbox
}
try!(self.bclose_(expr.span, indent_unit));
}
ast::ExprFnBlock(decl, body) => {
// in do/for blocks we don't want to show an empty
// argument list, but at this point we don't know which
// we are inside.
//
// if !decl.inputs.is_empty() {
try!(self.print_fn_block_args(decl));
try!(space(&mut self.s));
// }
if !body.stmts.is_empty() || !body.expr.is_some() {
try!(self.print_block_unclosed(body));
} else {
// we extract the block, so as not to create another set of boxes
match body.expr.unwrap().node {
ast::ExprBlock(blk) => {
try!(self.print_block_unclosed(blk));
}
_ => {
// this is a bare expression
try!(self.print_expr(body.expr.unwrap()));
try!(self.end()); // need to close a box
}
}
}
// a box will be closed by print_expr, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
try!(self.ibox(0));
}
ast::ExprProc(decl, body) => {
// in do/for blocks we don't want to show an empty
// argument list, but at this point we don't know which
// we are inside.
//
// if !decl.inputs.is_empty() {
try!(self.print_proc_args(decl));
try!(space(&mut self.s));
// }
assert!(body.stmts.is_empty());
assert!(body.expr.is_some());
// we extract the block, so as not to create another set of boxes
match body.expr.unwrap().node {
ast::ExprBlock(blk) => {
try!(self.print_block_unclosed(blk));
}
_ => {
// this is a bare expression
try!(self.print_expr(body.expr.unwrap()));
try!(self.end()); // need to close a box
}
}
// a box will be closed by print_expr, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
try!(self.ibox(0));
}
ast::ExprBlock(blk) => {
// containing cbox, will be closed by print-block at }
try!(self.cbox(indent_unit));
// head-box, will be closed by print-block after {
try!(self.ibox(0u));
try!(self.print_block(blk));
}
ast::ExprAssign(lhs, rhs) => {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(self.word_space("="));
try!(self.print_expr(rhs));
}
ast::ExprAssignOp(op, lhs, rhs) => {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(word(&mut self.s, ast_util::binop_to_str(op)));
try!(self.word_space("="));
try!(self.print_expr(rhs));
}
ast::ExprField(expr, id, ref tys) => {
try!(self.print_expr(expr));
try!(word(&mut self.s, "."));
try!(self.print_ident(id));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(
Inconsistent, tys.as_slice(),
|s, ty| s.print_type_ref(ty)));
try!(word(&mut self.s, ">"));
}
}
ast::ExprIndex(expr, index) => {
try!(self.print_expr(expr));
try!(word(&mut self.s, "["));
try!(self.print_expr(index));
try!(word(&mut self.s, "]"));
}
ast::ExprPath(ref path) => try!(self.print_path(path, true)),
ast::ExprBreak(opt_ident) => {
try!(word(&mut self.s, "break"));
try!(space(&mut self.s));
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(space(&mut self.s));
}
}
ast::ExprAgain(opt_ident) => {
try!(word(&mut self.s, "continue"));
try!(space(&mut self.s));
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(space(&mut self.s))
}
}
ast::ExprRet(result) => {
try!(word(&mut self.s, "return"));
match result {
Some(expr) => {
try!(word(&mut self.s, " "));
try!(self.print_expr(expr));
}
_ => ()
}
}
ast::ExprInlineAsm(ref a) => {
if a.volatile {
try!(word(&mut self.s, "__volatile__ asm!"));
} else {
try!(word(&mut self.s, "asm!"));
}
try!(self.popen());
try!(self.print_string(a.asm.get(), a.asm_str_style));
try!(self.word_space(":"));
for &(ref co, o) in a.outputs.iter() {
try!(self.print_string(co.get(), ast::CookedStr));
try!(self.popen());
try!(self.print_expr(o));
try!(self.pclose());
try!(self.word_space(","));
}
try!(self.word_space(":"));
for &(ref co, o) in a.inputs.iter() {
try!(self.print_string(co.get(), ast::CookedStr));
try!(self.popen());
try!(self.print_expr(o));
try!(self.pclose());
try!(self.word_space(","));
}
try!(self.word_space(":"));
try!(self.print_string(a.clobbers.get(), ast::CookedStr));
try!(self.pclose());
}
ast::ExprMac(ref m) => try!(self.print_mac(m)),
ast::ExprParen(e) => {
try!(self.popen());
try!(self.print_expr(e));
try!(self.pclose());
}
}
try!(self.ann.post(self, NodeExpr(expr)));
self.end()
}
pub fn print_local_decl(&mut self, loc: &ast::Local) -> IoResult<()> {
try!(self.print_pat(loc.pat));
match loc.ty.node {
ast::TyInfer => Ok(()),
_ => {
try!(self.word_space(":"));
self.print_type(loc.ty)
}
}
}
pub fn print_decl(&mut self, decl: &ast::Decl) -> IoResult<()> {
try!(self.maybe_print_comment(decl.span.lo));
match decl.node {
ast::DeclLocal(loc) => {
try!(self.space_if_not_bol());
try!(self.ibox(indent_unit));
try!(self.word_nbsp("let"));
try!(self.ibox(indent_unit));
try!(self.print_local_decl(loc));
try!(self.end());
match loc.init {
Some(init) => {
try!(self.nbsp());
try!(self.word_space("="));
try!(self.print_expr(init));
}
_ => {}
}
self.end()
}
ast::DeclItem(item) => self.print_item(item)
}
}
pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
word(&mut self.s, token::get_ident(ident).get())
}
pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
word(&mut self.s, token::get_name(name).get())
}
pub fn print_for_decl(&mut self, loc: &ast::Local,
coll: &ast::Expr) -> IoResult<()> {
try!(self.print_local_decl(loc));
try!(space(&mut self.s));
try!(self.word_space("in"));
self.print_expr(coll)
}
fn print_path_(&mut self,
path: &ast::Path,
colons_before_params: bool,
opt_bounds: &Option<OwnedSlice<ast::TyParamBound>>)
-> IoResult<()> {
try!(self.maybe_print_comment(path.span.lo));
if path.global {
try!(word(&mut self.s, "::"));
}
let mut first = true;
for segment in path.segments.iter() {
if first {
first = false
} else {
try!(word(&mut self.s, "::"))
}
try!(self.print_ident(segment.identifier));
if !segment.lifetimes.is_empty() || !segment.types.is_empty() {
if colons_before_params {
try!(word(&mut self.s, "::"))
}
try!(word(&mut self.s, "<"));
let mut comma = false;
for lifetime in segment.lifetimes.iter() {
if comma {
try!(self.word_space(","))
}
try!(self.print_lifetime(lifetime));
comma = true;
}
if !segment.types.is_empty() {
if comma {
try!(self.word_space(","))
}
try!(self.commasep(
Inconsistent,
segment.types.as_slice(),
|s, ty| s.print_type_ref(ty)));
}
try!(word(&mut self.s, ">"))
}
}
match *opt_bounds {
None => Ok(()),
Some(ref bounds) => self.print_bounds(&None, bounds, true),
}
}
fn print_path(&mut self, path: &ast::Path,
colons_before_params: bool) -> IoResult<()> {
self.print_path_(path, colons_before_params, &None)
}
fn print_bounded_path(&mut self, path: &ast::Path,
bounds: &Option<OwnedSlice<ast::TyParamBound>>)
-> IoResult<()> {
self.print_path_(path, false, bounds)
}
pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> {
try!(self.maybe_print_comment(pat.span.lo));
try!(self.ann.pre(self, NodePat(pat)));
/* Pat isn't normalized, but the beauty of it
is that it doesn't matter */
match pat.node {
ast::PatWild => try!(word(&mut self.s, "_")),
ast::PatWildMulti => try!(word(&mut self.s, "..")),
ast::PatIdent(binding_mode, ref path, sub) => {
match binding_mode {
ast::BindByRef(mutbl) => {
try!(self.word_nbsp("ref"));
try!(self.print_mutability(mutbl));
}
ast::BindByValue(ast::MutImmutable) => {}
ast::BindByValue(ast::MutMutable) => {
try!(self.word_nbsp("mut"));
}
}
try!(self.print_path(path, true));
match sub {
Some(p) => {
try!(word(&mut self.s, "@"));
try!(self.print_pat(p));
}
None => ()
}
}
ast::PatEnum(ref path, ref args_) => {
try!(self.print_path(path, true));
match *args_ {
None => try!(word(&mut self.s, "(..)")),
Some(ref args) => {
if !args.is_empty() {
try!(self.popen());
try!(self.commasep(Inconsistent, args.as_slice(),
|s, &p| s.print_pat(p)));
try!(self.pclose());
}
}
}
}
ast::PatStruct(ref path, ref fields, etc) => {
try!(self.print_path(path, true));
try!(word(&mut self.s, "{"));
try!(self.commasep_cmnt(
Consistent, fields.as_slice(),
|s, f| {
try!(s.cbox(indent_unit));
try!(s.print_ident(f.ident));
try!(s.word_space(":"));
try!(s.print_pat(f.pat));
s.end()
},
|f| f.pat.span));
if etc {
if fields.len() != 0u { try!(self.word_space(",")); }
try!(word(&mut self.s, ".."));
}
try!(word(&mut self.s, "}"));
}
ast::PatTup(ref elts) => {
try!(self.popen());
try!(self.commasep(Inconsistent,
elts.as_slice(),
|s, &p| s.print_pat(p)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.pclose());
}
ast::PatUniq(inner) => {
try!(word(&mut self.s, "box "));
try!(self.print_pat(inner));
}
ast::PatRegion(inner) => {
try!(word(&mut self.s, "&"));
try!(self.print_pat(inner));
}
ast::PatLit(e) => try!(self.print_expr(e)),
ast::PatRange(begin, end) => {
try!(self.print_expr(begin));
try!(space(&mut self.s));
try!(word(&mut self.s, ".."));
try!(self.print_expr(end));
}
ast::PatVec(ref before, slice, ref after) => {
try!(word(&mut self.s, "["));
try!(self.commasep(Inconsistent,
before.as_slice(),
|s, &p| s.print_pat(p)));
for &p in slice.iter() {
if !before.is_empty() { try!(self.word_space(",")); }
match *p {
ast::Pat { node: ast::PatWildMulti, .. } => {
// this case is handled by print_pat
}
_ => try!(word(&mut self.s, "..")),
}
try!(self.print_pat(p));
if !after.is_empty() { try!(self.word_space(",")); }
}
try!(self.commasep(Inconsistent,
after.as_slice(),
|s, &p| s.print_pat(p)));
try!(word(&mut self.s, "]"));
}
}
self.ann.post(self, NodePat(pat))
}
// Returns whether it printed anything
fn print_explicit_self(&mut self,
explicit_self: ast::ExplicitSelf_,
mutbl: ast::Mutability) -> IoResult<bool> {
try!(self.print_mutability(mutbl));
match explicit_self {
ast::SelfStatic => { return Ok(false); }
ast::SelfValue => {
try!(word(&mut self.s, "self"));
}
ast::SelfUniq => {
try!(word(&mut self.s, "~self"));
}
ast::SelfRegion(ref lt, m) => {
try!(word(&mut self.s, "&"));
try!(self.print_opt_lifetime(lt));
try!(self.print_mutability(m));
try!(word(&mut self.s, "self"));
}
}
return Ok(true);
}
pub fn print_fn(&mut self,
decl: &ast::FnDecl,
fn_style: Option<ast::FnStyle>,
abi: abi::Abi,
name: ast::Ident,
generics: &ast::Generics,
opt_explicit_self: Option<ast::ExplicitSelf_>,
vis: ast::Visibility) -> IoResult<()> {
try!(self.head(""));
try!(self.print_fn_header_info(opt_explicit_self, fn_style, abi, vis));
try!(self.nbsp());
try!(self.print_ident(name));
try!(self.print_generics(generics));
self.print_fn_args_and_ret(decl, opt_explicit_self)
}
pub fn print_fn_args(&mut self, decl: &ast::FnDecl,
opt_explicit_self: Option<ast::ExplicitSelf_>)
-> IoResult<()> {
// It is unfortunate to duplicate the commasep logic, but we want the
// self type and the args all in the same box.
try!(self.rbox(0u, Inconsistent));
let mut first = true;
for &explicit_self in opt_explicit_self.iter() {
let m = match explicit_self {
ast::SelfStatic => ast::MutImmutable,
_ => match decl.inputs.get(0).pat.node {
ast::PatIdent(ast::BindByValue(m), _, _) => m,
_ => ast::MutImmutable
}
};
first = !try!(self.print_explicit_self(explicit_self, m));
}
// HACK(eddyb) ignore the separately printed self argument.
let args = if first {
decl.inputs.as_slice()
} else {
decl.inputs.slice_from(1)
};
for arg in args.iter() {
if first { first = false; } else { try!(self.word_space(",")); }
try!(self.print_arg(arg));
}
self.end()
}
pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl,
opt_explicit_self: Option<ast::ExplicitSelf_>)
-> IoResult<()> {
try!(self.popen());
try!(self.print_fn_args(decl, opt_explicit_self));
if decl.variadic {
try!(word(&mut self.s, ", ..."));
}
try!(self.pclose());
try!(self.maybe_print_comment(decl.output.span.lo));
match decl.output.node {
ast::TyNil => Ok(()),
_ => {
try!(self.space_if_not_bol());
try!(self.word_space("->"));
self.print_type(decl.output)
}
}
}
pub fn print_fn_block_args(&mut self,
decl: &ast::FnDecl) -> IoResult<()> {
try!(word(&mut self.s, "|"));
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, "|"));
match decl.output.node {
ast::TyInfer => {}
_ => {
try!(self.space_if_not_bol());
try!(self.word_space("->"));
try!(self.print_type(decl.output));
}
}
self.maybe_print_comment(decl.output.span.lo)
}
pub fn print_proc_args(&mut self, decl: &ast::FnDecl) -> IoResult<()> {
try!(word(&mut self.s, "proc"));
try!(word(&mut self.s, "("));
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, ")"));
match decl.output.node {
ast::TyInfer => {}
_ => {
try!(self.space_if_not_bol());
try!(self.word_space("->"));
try!(self.print_type(decl.output));
}
}
self.maybe_print_comment(decl.output.span.lo)
}
pub fn print_bounds(&mut self,
region: &Option<ast::Lifetime>,
bounds: &OwnedSlice<ast::TyParamBound>,
print_colon_anyway: bool) -> IoResult<()> {
if !bounds.is_empty() || region.is_some() {
try!(word(&mut self.s, ":"));
let mut first = true;
match *region {
Some(ref lt) => {
let token = token::get_name(lt.name);
if token.get() != "static" {
try!(self.nbsp());
first = false;
try!(self.print_lifetime(lt));
}
}
None => {}
}
for bound in bounds.iter() {
try!(self.nbsp());
if first {
first = false;
} else {
try!(self.word_space("+"));
}
try!(match *bound {
TraitTyParamBound(ref tref) => self.print_trait_ref(tref),
StaticRegionTyParamBound => word(&mut self.s, "'static"),
OtherRegionTyParamBound(_) => Ok(())
})
}
Ok(())
} else if print_colon_anyway {
word(&mut self.s, ":")
} else {
Ok(())
}
}
pub fn print_lifetime(&mut self,
lifetime: &ast::Lifetime) -> IoResult<()> {
try!(word(&mut self.s, "'"));
self.print_name(lifetime.name)
}
pub fn print_generics(&mut self,
generics: &ast::Generics) -> IoResult<()> {
let total = generics.lifetimes.len() + generics.ty_params.len();
if total > 0 {
try!(word(&mut self.s, "<"));
let mut ints = Vec::new();
for i in range(0u, total) {
ints.push(i);
}
try!(self.commasep(
Inconsistent, ints.as_slice(),
|s, &idx| {
if idx < generics.lifetimes.len() {
let lifetime = generics.lifetimes.get(idx);
s.print_lifetime(lifetime)
} else {
let idx = idx - generics.lifetimes.len();
let param = generics.ty_params.get(idx);
if param.sized == ast::DynSize {
try!(s.word_space("type"));
}
try!(s.print_ident(param.ident));
try!(s.print_bounds(&None, ¶m.bounds, false));
match param.default {
Some(default) => {
try!(space(&mut s.s));
try!(s.word_space("="));
s.print_type(default)
}
_ => Ok(())
}
}
}));
word(&mut self.s, ">")
} else {
Ok(())
}
}
pub fn print_meta_item(&mut self, item: &ast::MetaItem) -> IoResult<()> {
try!(self.ibox(indent_unit));
match item.node {
ast::MetaWord(ref name) => {
try!(word(&mut self.s, name.get()));
}
ast::MetaNameValue(ref name, ref value) => {
try!(self.word_space(name.get()));
try!(self.word_space("="));
try!(self.print_literal(value));
}
ast::MetaList(ref name, ref items) => {
try!(word(&mut self.s, name.get()));
try!(self.popen());
try!(self.commasep(Consistent,
items.as_slice(),
|s, &i| s.print_meta_item(i)));
try!(self.pclose());
}
}
self.end()
}
pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> IoResult<()> {
match vp.node {
ast::ViewPathSimple(ident, ref path, _) => {
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name != ident.name {
try!(self.print_ident(ident));
try!(space(&mut self.s));
try!(self.word_space("="));
}
self.print_path(path, false)
}
ast::ViewPathGlob(ref path, _) => {
try!(self.print_path(path, false));
word(&mut self.s, "::*")
}
ast::ViewPathList(ref path, ref idents, _) => {
if path.segments.is_empty() {
try!(word(&mut self.s, "{"));
} else {
try!(self.print_path(path, false));
try!(word(&mut self.s, "::{"));
}
try!(self.commasep(Inconsistent, idents.as_slice(), |s, w| {
s.print_ident(w.node.name)
}));
word(&mut self.s, "}")
}
}
}
pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(item.attrs.as_slice()));
try!(self.print_visibility(item.vis));
match item.node {
ast::ViewItemExternCrate(id, ref optional_path, _) => {
try!(self.head("extern crate"));
try!(self.print_ident(id));
for &(ref p, style) in optional_path.iter() {
try!(space(&mut self.s));
try!(word(&mut self.s, "="));
try!(space(&mut self.s));
try!(self.print_string(p.get(), style));
}
}
ast::ViewItemUse(ref vp) => {
try!(self.head("use"));
try!(self.print_view_path(*vp));
}
}
try!(word(&mut self.s, ";"));
try!(self.end()); // end inner head-block
self.end() // end outer head-block
}
pub fn print_mutability(&mut self,
mutbl: ast::Mutability) -> IoResult<()> {
match mutbl {
ast::MutMutable => self.word_nbsp("mut"),
ast::MutImmutable => Ok(()),
}
}
pub fn print_mt(&mut self, mt: &ast::MutTy) -> IoResult<()> {
try!(self.print_mutability(mt.mutbl));
self.print_type(mt.ty)
}
pub fn print_arg(&mut self, input: &ast::Arg) -> IoResult<()> {
try!(self.ibox(indent_unit));
match input.ty.node {
ast::TyInfer => try!(self.print_pat(input.pat)),
_ => {
match input.pat.node {
ast::PatIdent(_, ref path, _) if
path.segments.len() == 1 &&
path.segments.get(0).identifier.name ==
parse::token::special_idents::invalid.name => {
// Do nothing.
}
_ => {
try!(self.print_pat(input.pat));
try!(word(&mut self.s, ":"));
try!(space(&mut self.s));
}
}
try!(self.print_type(input.ty));
}
}
self.end()
}
pub fn print_ty_fn(&mut self,
opt_abi: Option<abi::Abi>,
opt_sigil: Option<char>,
opt_region: &Option<ast::Lifetime>,
fn_style: ast::FnStyle,
onceness: ast::Onceness,
decl: &ast::FnDecl,
id: Option<ast::Ident>,
opt_bounds: &Option<OwnedSlice<ast::TyParamBound>>,
generics: Option<&ast::Generics>,
opt_explicit_self: Option<ast::ExplicitSelf_>)
-> IoResult<()> {
try!(self.ibox(indent_unit));
// Duplicates the logic in `print_fn_header_info()`. This is because that
// function prints the sigil in the wrong place. That should be fixed.
if opt_sigil == Some('~') && onceness == ast::Once {
try!(word(&mut self.s, "proc"));
} else if opt_sigil == Some('&') {
try!(self.print_fn_style(fn_style));
try!(self.print_extern_opt_abi(opt_abi));
try!(self.print_onceness(onceness));
} else {
assert!(opt_sigil.is_none());
try!(self.print_fn_style(fn_style));
try!(self.print_opt_abi_and_extern_if_nondefault(opt_abi));
try!(self.print_onceness(onceness));
try!(word(&mut self.s, "fn"));
}
match id {
Some(id) => {
try!(word(&mut self.s, " "));
try!(self.print_ident(id));
}
_ => ()
}
match generics { Some(g) => try!(self.print_generics(g)), _ => () }
try!(zerobreak(&mut self.s));
if opt_sigil == Some('&') {
try!(word(&mut self.s, "|"));
} else {
try!(self.popen());
}
try!(self.print_fn_args(decl, opt_explicit_self));
if opt_sigil == Some('&') {
try!(word(&mut self.s, "|"));
} else {
if decl.variadic {
try!(word(&mut self.s, ", ..."));
}
try!(self.pclose());
}
opt_bounds.as_ref().map(|bounds| {
self.print_bounds(opt_region, bounds, true)
});
try!(self.maybe_print_comment(decl.output.span.lo));
match decl.output.node {
ast::TyNil => {}
_ => {
try!(self.space_if_not_bol());
try!(self.ibox(indent_unit));
try!(self.word_space("->"));
if decl.cf == ast::NoReturn {
try!(self.word_nbsp("!"));
} else {
try!(self.print_type(decl.output));
}
try!(self.end());
}
}
self.end()
}
pub fn maybe_print_trailing_comment(&mut self, span: codemap::Span,
next_pos: Option<BytePos>)
-> IoResult<()> {
let cm = match self.cm {
Some(cm) => cm,
_ => return Ok(())
};
match self.next_comment() {
Some(ref cmnt) => {
if (*cmnt).style != comments::Trailing { return Ok(()) }
let span_line = cm.lookup_char_pos(span.hi);
let comment_line = cm.lookup_char_pos((*cmnt).pos);
let mut next = (*cmnt).pos + BytePos(1);
match next_pos { None => (), Some(p) => next = p }
if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
span_line.line == comment_line.line {
try!(self.print_comment(cmnt));
self.cur_cmnt_and_lit.cur_cmnt += 1u;
}
}
_ => ()
}
Ok(())
}
pub fn print_remaining_comments(&mut self) -> IoResult<()> {
// If there aren't any remaining comments, then we need to manually
// make sure there is a line break at the end.
if self.next_comment().is_none() {
try!(hardbreak(&mut self.s));
}
loop {
match self.next_comment() {
Some(ref cmnt) => {
try!(self.print_comment(cmnt));
self.cur_cmnt_and_lit.cur_cmnt += 1u;
}
_ => break
}
}
Ok(())
}
pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> {
try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) {
Some(ref ltrl) => {
return word(&mut self.s, (*ltrl).lit.as_slice());
}
_ => ()
}
match lit.node {
ast::LitStr(ref st, style) => self.print_string(st.get(), style),
ast::LitChar(ch) => {
let mut res = StrBuf::from_str("'");
ch.escape_default(|c| res.push_char(c));
res.push_char('\'');
word(&mut self.s, res.into_owned())
}
ast::LitInt(i, t) => {
word(&mut self.s,
ast_util::int_ty_to_str(t, Some(i)).as_slice())
}
ast::LitUint(u, t) => {
word(&mut self.s,
ast_util::uint_ty_to_str(t, Some(u)).as_slice())
}
ast::LitIntUnsuffixed(i) => {
word(&mut self.s, format!("{}", i))
}
ast::LitFloat(ref f, t) => {
word(&mut self.s,
f.get() + ast_util::float_ty_to_str(t).as_slice())
}
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitNil => word(&mut self.s, "()"),
ast::LitBool(val) => {
if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
}
ast::LitBinary(ref arr) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.commasep_cmnt(Inconsistent, arr.as_slice(),
|s, u| word(&mut s.s, format!("{}", *u)),
|_| lit.span));
try!(word(&mut self.s, "]"));
self.end()
}
}
}
pub fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
match self.literals {
Some(ref lits) => {
while self.cur_cmnt_and_lit.cur_lit < lits.len() {
let ltrl = (*(*lits).get(self.cur_cmnt_and_lit.cur_lit)).clone();
if ltrl.pos > pos { return None; }
self.cur_cmnt_and_lit.cur_lit += 1u;
if ltrl.pos == pos { return Some(ltrl); }
}
None
}
_ => None
}
}
pub fn maybe_print_comment(&mut self, pos: BytePos) -> IoResult<()> {
loop {
match self.next_comment() {
Some(ref cmnt) => {
if (*cmnt).pos < pos {
try!(self.print_comment(cmnt));
self.cur_cmnt_and_lit.cur_cmnt += 1u;
} else { break; }
}
_ => break
}
}
Ok(())
}
pub fn print_comment(&mut self,
cmnt: &comments::Comment) -> IoResult<()> {
match cmnt.style {
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut self.s));
try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
zerobreak(&mut self.s)
}
comments::Isolated => {
try!(self.hardbreak_if_not_bol());
for line in cmnt.lines.iter() {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
try!(word(&mut self.s, line.as_slice()));
}
try!(hardbreak(&mut self.s));
}
Ok(())
}
comments::Trailing => {
try!(word(&mut self.s, " "));
if cmnt.lines.len() == 1u {
try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
hardbreak(&mut self.s)
} else {
try!(self.ibox(0u));
for line in cmnt.lines.iter() {
if !line.is_empty() {
try!(word(&mut self.s, line.as_slice()));
}
try!(hardbreak(&mut self.s));
}
self.end()
}
}
comments::BlankLine => {
// We need to do at least one, possibly two hardbreaks.
let is_semi = match self.s.last_token() {
pp::String(s, _) => ";" == s.as_slice(),
_ => false
};
if is_semi || self.is_begin() || self.is_end() {
try!(hardbreak(&mut self.s));
}
hardbreak(&mut self.s)
}
}
}
pub fn print_string(&mut self, st: &str,
style: ast::StrStyle) -> IoResult<()> {
let st = match style {
ast::CookedStr => format!("\"{}\"", st.escape_default()),
ast::RawStr(n) => format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n), string=st)
};
word(&mut self.s, st)
}
pub fn next_comment(&mut self) -> Option<comments::Comment> {
match self.comments {
Some(ref cmnts) => {
if self.cur_cmnt_and_lit.cur_cmnt < cmnts.len() {
Some((*cmnts.get(self.cur_cmnt_and_lit.cur_cmnt)).clone())
} else {
None
}
}
_ => None
}
}
pub fn print_opt_fn_style(&mut self,
opt_fn_style: Option<ast::FnStyle>) -> IoResult<()> {
match opt_fn_style {
Some(fn_style) => self.print_fn_style(fn_style),
None => Ok(())
}
}
pub fn print_opt_abi_and_extern_if_nondefault(&mut self,
opt_abi: Option<abi::Abi>)
-> IoResult<()> {
match opt_abi {
Some(abi::Rust) => Ok(()),
Some(abi) => {
try!(self.word_nbsp("extern"));
self.word_nbsp(abi.to_str())
}
None => Ok(())
}
}
pub fn print_extern_opt_abi(&mut self,
opt_abi: Option<abi::Abi>) -> IoResult<()> {
match opt_abi {
Some(abi) => {
try!(self.word_nbsp("extern"));
self.word_nbsp(abi.to_str())
}
None => Ok(())
}
}
pub fn print_fn_header_info(&mut self,
_opt_explicit_self: Option<ast::ExplicitSelf_>,
opt_fn_style: Option<ast::FnStyle>,
abi: abi::Abi,
vis: ast::Visibility) -> IoResult<()> {
try!(word(&mut self.s, visibility_qualified(vis, "").as_slice()));
try!(self.print_opt_fn_style(opt_fn_style));
if abi != abi::Rust {
try!(self.word_nbsp("extern"));
try!(self.word_nbsp(abi.to_str()));
}
word(&mut self.s, "fn")
}
pub fn print_fn_style(&mut self, s: ast::FnStyle) -> IoResult<()> {
match s {
ast::NormalFn => Ok(()),
ast::UnsafeFn => self.word_nbsp("unsafe"),
}
}
pub fn print_onceness(&mut self, o: ast::Onceness) -> IoResult<()> {
match o {
ast::Once => self.word_nbsp("once"),
ast::Many => Ok(())
}
}
}
#[cfg(test)]
mod test {
use super::*;
use ast;
use ast_util;
use codemap;
use parse::token;
#[test]
fn test_fun_to_str() {
let abba_ident = token::str_to_ident("abba");
let decl = ast::FnDecl {
inputs: Vec::new(),
output: ast::P(ast::Ty {id: 0,
node: ast::TyNil,
span: codemap::DUMMY_SP}),
cf: ast::Return,
variadic: false
};
let generics = ast_util::empty_generics();
assert_eq!(&fun_to_str(&decl, ast::NormalFn, abba_ident,
None, &generics),
&"fn abba()".to_strbuf());
}
#[test]
fn test_variant_to_str() {
let ident = token::str_to_ident("principal_skinner");
let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ {
name: ident,
attrs: Vec::new(),
// making this up as I go.... ?
kind: ast::TupleVariantKind(Vec::new()),
id: 0,
disr_expr: None,
vis: ast::Public,
});
let varstr = variant_to_str(&var);
assert_eq!(&varstr,&"pub principal_skinner".to_strbuf());
}
}<|fim▁end|> | cur_lit: uint,
}
|
<|file_name|>test_sample.py<|end_file_name|><|fim▁begin|>import numpy as np
import pytest
from pandas import (
DataFrame,
Index,
Series,
)
import pandas._testing as tm
import pandas.core.common as com
class TestSample:
@pytest.fixture(params=[Series, DataFrame])
def obj(self, request):
klass = request.param
if klass is Series:
arr = np.random.randn(10)
else:
arr = np.random.randn(10, 10)
return klass(arr, dtype=None)
@pytest.mark.parametrize("test", list(range(10)))
def test_sample(self, test, obj):
# Fixes issue: 2419
# Check behavior of random_state argument
# Check for stability when receives seed or random state -- run 10
# times.
seed = np.random.randint(0, 100)
tm.assert_equal(
obj.sample(n=4, random_state=seed), obj.sample(n=4, random_state=seed)
)
tm.assert_equal(
obj.sample(frac=0.7, random_state=seed),
obj.sample(frac=0.7, random_state=seed),
)
tm.assert_equal(
obj.sample(n=4, random_state=np.random.RandomState(test)),
obj.sample(n=4, random_state=np.random.RandomState(test)),
)
tm.assert_equal(
obj.sample(frac=0.7, random_state=np.random.RandomState(test)),
obj.sample(frac=0.7, random_state=np.random.RandomState(test)),
)
tm.assert_equal(
obj.sample(frac=2, replace=True, random_state=np.random.RandomState(test)),
obj.sample(frac=2, replace=True, random_state=np.random.RandomState(test)),
)
os1, os2 = [], []
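        # Re-seeding the global RNG before each call must also give stable results.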
for _ in range(2):
np.random.seed(test)
os1.append(obj.sample(n=4))
os2.append(obj.sample(frac=0.7))
tm.assert_equal(*os1)
tm.assert_equal(*os2)
def test_sample_lengths(self, obj):
# Check lengths are right
        assert len(obj.sample(n=4)) == 4
        assert len(obj.sample(frac=0.34)) == 3
        assert len(obj.sample(frac=0.36)) == 4
def test_sample_invalid_random_state(self, obj):
# Check for error when random_state argument invalid.
msg = (
"random_state must be an integer, array-like, a BitGenerator, Generator, "
"a numpy RandomState, or None"
)
with pytest.raises(ValueError, match=msg):
obj.sample(random_state="a_string")
def test_sample_wont_accept_n_and_frac(self, obj):
# Giving both frac and N throws error
msg = "Please enter a value for `frac` OR `n`, not both"
with pytest.raises(ValueError, match=msg):
obj.sample(n=3, frac=0.3)
def test_sample_requires_positive_n_frac(self, obj):
with pytest.raises(
ValueError,
match="A negative number of rows requested. Please provide `n` >= 0",
):
obj.sample(n=-3)
with pytest.raises(
ValueError,
match="A negative number of rows requested. Please provide `frac` >= 0",
):
obj.sample(frac=-0.3)
def test_sample_requires_integer_n(self, obj):
# Make sure float values of `n` give error
with pytest.raises(ValueError, match="Only integers accepted as `n` values"):
obj.sample(n=3.2)
def test_sample_invalid_weight_lengths(self, obj):
# Weight length must be right
msg = "Weights and axis to be sampled must be of same length"
with pytest.raises(ValueError, match=msg):
obj.sample(n=3, weights=[0, 1])
with pytest.raises(ValueError, match=msg):
bad_weights = [0.5] * 11
obj.sample(n=3, weights=bad_weights)
with pytest.raises(ValueError, match="Fewer non-zero entries in p than size"):
bad_weight_series = Series([0, 0, 0.2])
obj.sample(n=4, weights=bad_weight_series)
def test_sample_negative_weights(self, obj):
# Check won't accept negative weights
bad_weights = [-0.1] * 10
msg = "weight vector many not include negative values"
with pytest.raises(ValueError, match=msg):
obj.sample(n=3, weights=bad_weights)
def test_sample_inf_weights(self, obj):
# Check inf and -inf throw errors:
weights_with_inf = [0.1] * 10
weights_with_inf[0] = np.inf
msg = "weight vector may not include `inf` values"
with pytest.raises(ValueError, match=msg):
obj.sample(n=3, weights=weights_with_inf)
weights_with_ninf = [0.1] * 10
weights_with_ninf[0] = -np.inf
with pytest.raises(ValueError, match=msg):
obj.sample(n=3, weights=weights_with_ninf)
def test_sample_zero_weights(self, obj):
# All zeros raises errors
zero_weights = [0] * 10
with pytest.raises(ValueError, match="Invalid weights: weights sum to zero"):
obj.sample(n=3, weights=zero_weights)
def test_sample_missing_weights(self, obj):
# All missing weights
nan_weights = [np.nan] * 10
with pytest.raises(ValueError, match="Invalid weights: weights sum to zero"):
obj.sample(n=3, weights=nan_weights)
def test_sample_none_weights(self, obj):
# Check None are also replaced by zeros.
weights_with_None = [None] * 10
weights_with_None[5] = 0.5
tm.assert_equal(
obj.sample(n=1, axis=0, weights=weights_with_None), obj.iloc[5:6]
)
@pytest.mark.parametrize(
"func_str,arg",
[
("np.array", [2, 3, 1, 0]),
("np.random.MT19937", 3),
("np.random.PCG64", 11),
],
)
def test_sample_random_state(self, func_str, arg, frame_or_series):
# GH#32503
obj = DataFrame({"col1": range(10, 20), "col2": range(20, 30)})
if frame_or_series is Series:
obj = obj["col1"]
result = obj.sample(n=3, random_state=eval(func_str)(arg))
expected = obj.sample(n=3, random_state=com.random_state(eval(func_str)(arg)))
tm.assert_equal(result, expected)
def test_sample_generator(self, frame_or_series):
# GH#38100
obj = frame_or_series(np.arange(100))
rng = np.random.default_rng()
# Consecutive calls should advance the seed
result1 = obj.sample(n=50, random_state=rng)
result2 = obj.sample(n=50, random_state=rng)
assert not (result1.index.values == result2.index.values).all()
# Matching generator initialization must give same result
# Consecutive calls should advance the seed
result1 = obj.sample(n=50, random_state=np.random.default_rng(11))
result2 = obj.sample(n=50, random_state=np.random.default_rng(11))
tm.assert_equal(result1, result2)
def test_sample_upsampling_without_replacement(self, frame_or_series):
# GH#27451
obj = DataFrame({"A": list("abc")})
if frame_or_series is Series:
obj = obj["A"]
msg = (
"Replace has to be set to `True` when "
"upsampling the population `frac` > 1."
)
with pytest.raises(ValueError, match=msg):
obj.sample(frac=2, replace=False)
class TestSampleDataFrame:
# Tests which are relevant only for DataFrame, so these are
# as fully parametrized as they can get.
def test_sample(self):
# GH#2419
# additional specific object based tests
# A few dataframe test with degenerate weights.
easy_weight_list = [0] * 10
easy_weight_list[5] = 1
df = DataFrame(
{
"col1": range(10, 20),
"col2": range(20, 30),
"colString": ["a"] * 10,
"easyweights": easy_weight_list,
}
)
sample1 = df.sample(n=1, weights="easyweights")
tm.assert_frame_equal(sample1, df.iloc[5:6])
# Ensure proper error if string given as weight for Series or
# DataFrame with axis = 1.
ser = Series(range(10))
msg = "Strings cannot be passed as weights when sampling from a Series."
with pytest.raises(ValueError, match=msg):
ser.sample(n=3, weights="weight_column")
msg = (
"Strings can only be passed to weights when sampling from rows on a "
"DataFrame"
)
with pytest.raises(ValueError, match=msg):
df.sample(n=1, weights="weight_column", axis=1)
# Check weighting key error
with pytest.raises(
KeyError, match="'String passed to weights not a valid column'"
):
df.sample(n=3, weights="not_a_real_column_name")
        # Check that weights that don't sum to one are re-normalized.
weights_less_than_1 = [0] * 10
weights_less_than_1[0] = 0.5
tm.assert_frame_equal(df.sample(n=1, weights=weights_less_than_1), df.iloc[:1])
###
# Test axis argument
###
# Test axis argument
df = DataFrame({"col1": range(10), "col2": ["a"] * 10})
second_column_weight = [0, 1]
tm.assert_frame_equal(
df.sample(n=1, axis=1, weights=second_column_weight), df[["col2"]]
)
# Different axis arg types
tm.assert_frame_equal(
df.sample(n=1, axis="columns", weights=second_column_weight), df[["col2"]]
)
weight = [0] * 10
weight[5] = 0.5
tm.assert_frame_equal(df.sample(n=1, axis="rows", weights=weight), df.iloc[5:6])
tm.assert_frame_equal(
df.sample(n=1, axis="index", weights=weight), df.iloc[5:6]
)
# Check out of range axis values
msg = "No axis named 2 for object type DataFrame"
with pytest.raises(ValueError, match=msg):
df.sample(n=1, axis=2)
msg = "No axis named not_a_name for object type DataFrame"
with pytest.raises(ValueError, match=msg):
df.sample(n=1, axis="not_a_name")
ser = Series(range(10))
with pytest.raises(ValueError, match="No axis named 1 for object type Series"):
ser.sample(n=1, axis=1)
# Test weight length compared to correct axis
msg = "Weights and axis to be sampled must be of same length"
with pytest.raises(ValueError, match=msg):
df.sample(n=1, axis=1, weights=[0.5] * 10)
def test_sample_axis1(self):
# Check weights with axis = 1
easy_weight_list = [0] * 3
easy_weight_list[2] = 1
df = DataFrame(
{"col1": range(10, 20), "col2": range(20, 30), "colString": ["a"] * 10}
)
sample1 = df.sample(n=1, axis=1, weights=easy_weight_list)
tm.assert_frame_equal(sample1, df[["colString"]])
# Test default axes
tm.assert_frame_equal(
df.sample(n=3, random_state=42), df.sample(n=3, axis=0, random_state=42)
)
def test_sample_aligns_weights_with_frame(self):
# Test that function aligns weights with frame
df = DataFrame({"col1": [5, 6, 7], "col2": ["a", "b", "c"]}, index=[9, 5, 3])
ser = Series([1, 0, 0], index=[3, 5, 9])
tm.assert_frame_equal(df.loc[[3]], df.sample(1, weights=ser))
        # Weights with index values that are not in the sampled DataFrame
        # are dropped
ser2 = Series([0.001, 0, 10000], index=[3, 5, 10])
tm.assert_frame_equal(df.loc[[3]], df.sample(1, weights=ser2))
        # Weights have empty values to be filled with zeros
ser3 = Series([0.01, 0], index=[3, 5])
tm.assert_frame_equal(df.loc[[3]], df.sample(1, weights=ser3))
# No overlap in weight and sampled DataFrame indices
ser4 = Series([1, 0], index=[1, 2])
with pytest.raises(ValueError, match="Invalid weights: weights sum to zero"):
df.sample(1, weights=ser4)
def test_sample_is_copy(self):
# GH#27357, GH#30784: ensure the result of sample is an actual copy and
# doesn't track the parent dataframe / doesn't give SettingWithCopy warnings
df = DataFrame(np.random.randn(10, 3), columns=["a", "b", "c"])
df2 = df.sample(3)
with tm.assert_produces_warning(None):
df2["d"] = 1
def test_sample_ignore_index(self):
# GH 38581
df = DataFrame(
{"col1": range(10, 20), "col2": range(20, 30), "colString": ["a"] * 10}
)
result = df.sample(3, ignore_index=True)
expected_index = Index([0, 1, 2])<|fim▁hole|><|fim▁end|> | tm.assert_index_equal(result.index, expected_index) |
<|file_name|>test_version_subcommand.py<|end_file_name|><|fim▁begin|>from click.testing import CliRunner
from sqlitebiter.__main__ import cmd
from sqlitebiter._const import ExitCode
from .common import print_traceback
class Test_version_subcommand:
def test_smoke(self):
runner = CliRunner()
result = runner.invoke(cmd, ["version"])
print_traceback(result)<|fim▁hole|> assert result.exit_code == ExitCode.SUCCESS<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|><|fim▁hole|># #
# Jun Nie #
# Last modification: 19-09-2017 #
##################################################
import sys, os
import numpy as np
class Config:
"""
    Config class which is used for the FVM solver, MLMC & parallelization.
    TODO: add reading of config parameters from file.
"""
def __init__(self, config_file):
# === fvm solver parameters
self.DIM = 2
self.ORDER = 1
self.case = 'vayu_burgers' # 'vayu_ls89', 'su2_ls89'
self.mesh_ncoarsest = 8+1
self.mesh_nfinest = 128+1
self.mesh_filename = '/home/jun/vayu/TestMatrix/Burgers.Test/mesh/' + \
'cartesian_tube_0009x0009x2.BlockMesh'
# === mlmc parameters
self.eps = 0.
self.alpha = 0.
self.beta = 0.
self.gamma = 0.
self.L = 2 # highest level
self.ML = 8 # number of samples on finest level
self.M = 2 # refinement factor
self.SAMPLES_FACTOR = 1
self.mlmc_convergence_test = True
self.READ_NUMBER_OF_SAMPLES_FROM_FILE = False
self.USE_OPTIMAL_NUMBER_OF_SAMPLES = False
self.USE_EQUIDISTRIBUTED_NUMBER_OF_SAMPLES = True
self.COMPUTE_IN_DIFFERENCE = True
# === qoi
self.STATS = 'MEAN_VAR'
# === parallelization parameters
self.multi = 'mpi' # 'mpi' for parallel, 'single' for serial
self.MULTIN = 1 # number of processes for fvm solver, 1 or multiples of 2
self.MULTIM = 4 # number of samplers (processor group)
self.MULTI_CORES = 0
# === update
self.update(config_file)
def update(self, config_file):
        '''Read the config file and update parameters.'''
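        # Not implemented yet: intended to read values from config_file and
        # override the defaults set in __init__.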
pass
if __name__ == '__main__':
pass<|fim▁end|> | #!/usr/bin/env python
##################################################
# Parallel MLMC: Config class # |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#<|fim▁hole|>##############################################################################
from . import models<|fim▁end|> | # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# |
<|file_name|>table.js<|end_file_name|><|fim▁begin|>/*global _, _s, Backbone*/
define(function(require) {
var Super = require('views/base'),
B = require('bluebird'),
THEAD = require('hbs!./table/thead.tpl'),
TBODY = require('hbs!./table/tbody.tpl'),
TD = require('hbs!./table/td.tpl'),
TR = require('hbs!./table/tr.tpl'),
Template = require('hbs!./table.tpl');
var View = Super.extend({});
View.Columns = Backbone.Collection.extend();
View.prototype.initialize = function(options) {
//super(options)
Super.prototype.initialize.call(this, options);
this.columns = this.getColumns();
};
View.prototype.render = function() {
var that = this;
return B.resolve()
.then(function() {
that.$el.html(Template({
id: that.id
}));
that.mapControls();
that.renderHead();
that.renderBody();
var events = {};
events['click th.sortable'] = 'sortableColumnClickHandler';
that.delegateEvents(events);
that.collection.on('sync add remove', that.renderBody.bind(that));
that.on('sort', that.sortHandler.bind(that));
});
};
View.prototype.sortableColumnClickHandler = function(event) {
var that = this;
var e = $(event.currentTarget);
var field = e.data('id');
var column = that.columns.get(field);
var direction = '';
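        // Reset every other column, then cycle this column's sort direction:
        // '' -> 'asc' -> 'desc' -> ''.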
that.columns.forEach(function(column) {
if (column.id !== field) {
column.set('direction', '');
}
});
switch (column.get('direction')) {
case 'asc':
direction = 'desc';
break;
case 'desc':
direction = '';
break;
default:
direction = 'asc';
}
column.set('direction', direction);
that.trigger('sort');
};
View.prototype.sortHandler = function(event) {
var that = this;
that.renderHead();
};
View.prototype.renderHead = function() {
var that = this;
// console.log('renderHead()', that.columns.toJSON());
that.controls.thead.html(THEAD({
id: that.id,
columns: that.columns.map(function(column, index) {
return that.tranformColumn(column, index);
})
}));
};
View.prototype.getColumns = function() {
return new View.Columns();<|fim▁hole|>
View.prototype.tranformColumn = function(column, index) {
return _.extend(column.toJSON(), {
sortIcon: (function() {
if (column.get('sortable')) {
if (column.get('direction') === 'asc') {
if (column.get('type') === 'number') {
return 'fa-sort-numeric-asc text-success';
}
else {
return 'fa-sort-alpha-asc text-success';
}
}
else if (column.get('direction') === 'desc') {
if (column.get('type') === 'number') {
return 'fa-sort-numeric-desc text-warning';
}
else {
return 'fa-sort-alpha-desc text-warning';
}
}
else {
return 'fa-sort';
}
}
else {
return '';
}
})()
});
};
View.prototype.renderBody = function() {
var that = this;
that.controls.tbody.html(TBODY({
id: that.id,
rows: that.collection.map(function(model, index) {
return that.tranformRow(model, index);
}),
columns: that.columns.toJSON()
}));
};
View.prototype.tranformRow = function(model, index) {
var that = this;
var DefaultRenderer = that.getDefaultRenderer();
var cells = that.columns.map(function(column, columnIndex) {
var value = ' ';
var renderer = column.get('renderer') || DefaultRenderer;
var td = column.get('td') || TD;
if (typeof renderer === 'function') {
try {
value = renderer(model, column, index, columnIndex);
}
catch (e) {
console.error(e);
}
}
if (typeof td === 'function') {
return td({
value: value,
field: column.id,
data: model.toJSON(),
column: column.toJSON(),
rowIndex: index,
columnIndex: columnIndex,
className: column.get('className')
});
}
});
return TR({
data: model.toJSON(),
cells: cells.join('')
});
};
View.prototype.getDefaultRenderer = function() {
return function(model, column, rowIndex, columnIndex) {
return model.get(column.id);
};
};
View.prototype.getSortedColumn = function() {
var that = this;
return that.columns.find(function(column) {
return column.get('direction') === 'asc' || column.get('direction') === 'desc';
});
};
return View;
});<|fim▁end|> | };
|
<|file_name|>EnergyMetersView.js<|end_file_name|><|fim▁begin|>const Marionette = require('backbone.marionette');
const MeterValuesView = require('./MeterValuesView.js');
<|fim▁hole|>
regions() {
return {
metersByPeriod: '.metersByPeriod'
};
}
modelEvents() {
return {change: 'render'};
}
onRender() {
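        // Render one MeterValuesView per period into the metersByPeriod region.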
const metersView = new Marionette.CollectionView({
collection: this.model.metersByPeriod(),
childView: MeterValuesView
});
this.showChildView('metersByPeriod', metersView);
}
}<|fim▁end|> | module.exports = class EnergyMetersView extends Marionette.View {
template = Templates['capabilities/energy/meters'];
className() { return 'energy-meters'; } |
<|file_name|>0005_auto_20161125_1908.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cd_subscription', '0004_auto_20161125_1901'),
]
operations = [
migrations.AlterField(
model_name='cdsubscription',
name='badgelength',
field=models.IntegerField(blank=True, default=0, verbose_name='Length badge image'),
),
migrations.AlterField(
model_name='cdsubscription',
name='emailparent',
field=models.EmailField(blank=True, max_length=40, default='', verbose_name='Email parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='emailplayer',
field=models.EmailField(blank=True, max_length=40, default='', verbose_name='Email player'),
),
migrations.AlterField(
model_name='cdsubscription',
name='fullnameattendant',
field=models.CharField(blank=True, max_length=50, default='', verbose_name='Full name responsible on site'),
),
migrations.AlterField(
model_name='cdsubscription',
name='fullnameparent',
field=models.CharField(blank=True, max_length=50, default='', verbose_name='Full name parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileattendant',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM number responsible on site'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileparent',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileplayer',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM player'),
),
migrations.AlterField(
model_name='cdsubscription',
name='payamount',
field=models.IntegerField(blank=True, default=0, verbose_name='Amount to pay'),<|fim▁hole|> migrations.AlterField(
model_name='cdsubscription',
name='paydate',
field=models.DateField(null=True, verbose_name='Payment date'),
),
]<|fim▁end|> | ), |
<|file_name|>plist.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015 by Rafael Angel Aznar Aparici (rafaaznar at gmail dot com)
*
* openAUSIAS: The stunning micro-library that helps you to develop easily
* AJAX web applications by using Java and jQuery
* openAUSIAS is distributed under the MIT License (MIT)
* Sources at https://github.com/rafaelaznar/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
'use strict';
/* Controllers */
moduloUsuariocursoxusuario.controller('UsuariocursoxusuarioPListController', ['$scope', '$routeParams', 'serverService', '$location', 'redirectService', 'sharedSpaceService', 'checkSessionStorageService',
function ($scope, $routeParams, serverService, $location, redirectService, sharedSpaceService, checkSessionStorageService) {
checkSessionStorageService.isSessionSessionStoraged();
$scope.visibles = {};
$scope.visibles.id = true;
$scope.visibles.titulo = true;
$scope.visibles.descripcion = true;
$scope.visibles.nota = true;
$scope.ob = "usuariocursoxusuario";
$scope.op = "plist";
$scope.title = "Listado de usuariocursoxusuario";
$scope.icon = "fa-file-text-o";
$scope.neighbourhood = 2;
if (!$routeParams.page) {
$routeParams.page = 1;
}
if (!$routeParams.rpp) {
$routeParams.rpp = 999;
}
$scope.numpage = $routeParams.page;
$scope.rpp = $routeParams.rpp;
$scope.predicate = 'id';
$scope.reverse = false;
$scope.orderCliente = function (predicate) {
$scope.predicate = predicate;
$scope.reverse = ($scope.predicate === predicate) ? !$scope.reverse : false;
};
$scope.getListaCursos = function () {
serverService.getDataFromPromise(serverService.promise_getListaCursos()).then(function (data) {
redirectService.checkAndRedirect(data);
if (data.status != 200) {
$scope.statusListaCursos = "Error en la recepción de datos del servidor";
} else {
$scope.listaCursos = data.message;
for (var i = 0; i < $scope.listaCursos.length; i++) {
if (i > 0) {
if ($scope.listaCursos[i].obj_curso.id == $scope.listaCursos[i - 1].obj_curso.id) {
$scope.listaCursos[i - 1].alProfesores.push($scope.listaCursos[i].alProfesores[0]);
$scope.listaCursos.splice(i, 1);
i = i - 1;
}
}
}
}
});
};
//$scope.rppPad = serverService.getNrppBar($scope.ob, $scope.op, $scope.numpage, $scope.rpp);
// $scope.order = $routeParams.order;
// $scope.ordervalue = $routeParams.value;
//
// $scope.filter = $routeParams.filter;
// $scope.filteroperator = $routeParams.filteroperator;
// $scope.filtervalue = $routeParams.filtervalue;
//
// $scope.systemfilter = $routeParams.systemfilter;
// $scope.systemfilteroperator = $routeParams.systemfilteroperator;
// $scope.systemfiltervalue = $routeParams.systemfiltervalue;
$scope.order = "";
$scope.ordervalue = "";
$scope.filter = "id";
$scope.filteroperator = "like";
$scope.filtervalue = "";
$scope.systemfilter = "";
$scope.systemfilteroperator = "";
$scope.systemfiltervalue = "";
$scope.params = "";
$scope.paramsWithoutOrder = "";
$scope.paramsWithoutFilter = "";
$scope.paramsWithoutSystemFilter = "";
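        // Rebuild the query-string fragments for order, filter and system filter
        // from the current route params.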
if ($routeParams.order && $routeParams.ordervalue) {
$scope.order = $routeParams.order;
$scope.ordervalue = $routeParams.ordervalue;
$scope.orderParams = "&order=" + $routeParams.order + "&ordervalue=" + $routeParams.ordervalue;
$scope.paramsWithoutFilter += $scope.orderParams;
$scope.paramsWithoutSystemFilter += $scope.orderParams;
} else {
$scope.orderParams = "";
}
if ($routeParams.filter && $routeParams.filteroperator && $routeParams.filtervalue) {
$scope.filter = $routeParams.filter;
$scope.filteroperator = $routeParams.filteroperator;
$scope.filtervalue = $routeParams.filtervalue;
$scope.filterParams = "&filter=" + $routeParams.filter + "&filteroperator=" + $routeParams.filteroperator + "&filtervalue=" + $routeParams.filtervalue;
$scope.paramsWithoutOrder += $scope.filterParams;
$scope.paramsWithoutSystemFilter += $scope.filterParams;
} else {
$scope.filterParams = "";
}
if ($routeParams.systemfilter && $routeParams.systemfilteroperator && $routeParams.systemfiltervalue) {
$scope.systemFilterParams = "&systemfilter=" + $routeParams.systemfilter + "&systemfilteroperator=" + $routeParams.systemfilteroperator + "&systemfiltervalue=" + $routeParams.systemfiltervalue;
$scope.paramsWithoutOrder += $scope.systemFilterParams;
$scope.paramsWithoutFilter += $scope.systemFilterParams;
} else {
$scope.systemFilterParams = "";
}
$scope.params = ($scope.orderParams + $scope.filterParams + $scope.systemFilterParams);
//$scope.paramsWithoutOrder = $scope.paramsWithoutOrder.replace('&', '?');
//$scope.paramsWithoutFilter = $scope.paramsWithoutFilter.replace('&', '?');
//$scope.paramsWithoutSystemFilter = $scope.paramsWithoutSystemFilter.replace('&', '?');
$scope.params = $scope.params.replace('&', '?');
$scope.getCursosPage = function () {
serverService.getDataFromPromise(serverService.promise_getSomeUsuariocursoXUsuario($scope.ob, $scope.rpp, $scope.numpage, $scope.filterParams, $scope.orderParams, $scope.systemFilterParams)).then(function (data) {
redirectService.checkAndRedirect(data);
if (data.status != 200) {
$scope.status = "Error en la recepción de datos del servidor";
} else {
$scope.pages = data.message.pages.message;
if (parseInt($scope.numpage) > parseInt($scope.pages))
$scope.numpage = $scope.pages;
$scope.page = data.message.page.message;
$scope.registers = data.message.registers.message;
$scope.status = "";
}
});
}
$scope.getCursosPage();
// $scope.pages = serverService.getPages($scope.ob, $scope.rpp, null, null, null, null, null, null).then(function (datos5) {
// $scope.pages = data['data'];
// if (parseInt($scope.page) > parseInt($scope.pages))
// $scope.page = $scope.pages;
// //$location.path( "#/clientes/" +$scope.pages + "/" + $scope.pages);
// });
// $scope.$watch('pages', function () {
// $scope.$broadcast('myApp.construirBotoneraPaginas');
// }, true)
//
$scope.getRangeArray = function (lowEnd, highEnd) {
var rangeArray = [];
for (var i = lowEnd; i <= highEnd; i++) {
rangeArray.push(i);
}
return rangeArray;
};
$scope.evaluateMin = function (lowEnd, highEnd) {
return Math.min(lowEnd, highEnd);
};
$scope.evaluateMax = function (lowEnd, highEnd) {
return Math.max(lowEnd, highEnd);
};
$scope.dofilter = function () {
if ($scope.filter != "" && $scope.filteroperator != "" && $scope.filtervalue != "") {
//console.log('#/' + $scope.ob + '/' + $scope.op + '/' + $scope.numpage + '/' + $scope.rpp + '?filter=' + $scope.filter + '&filteroperator=' + $scope.filteroperator + '&filtervalue=' + $scope.filtervalue + $scope.paramsWithoutFilter);
if ($routeParams.order && $routeParams.ordervalue) {
if ($routeParams.systemfilter && $routeParams.systemfilteroperator) {
$location.path($scope.ob + '/' + $scope.op + '/' + $scope.numpage + '/' + $scope.rpp).search('filter', $scope.filter).search('filteroperator', $scope.filteroperator).search('filtervalue', $scope.filtervalue).search('order', $routeParams.order).search('ordervalue', $routeParams.ordervalue).search('systemfilter', $routeParams.systemfilter).search('systemfilteroperator', $routeParams.systemfilteroperator).search('systemfiltervalue', $routeParams.systemfiltervalue);
} else {
$location.path($scope.ob + '/' + $scope.op + '/' + $scope.numpage + '/' + $scope.rpp).search('filter', $scope.filter).search('filteroperator', $scope.filteroperator).search('filtervalue', $scope.filtervalue).search('order', $routeParams.order).search('ordervalue', $routeParams.ordervalue);
}
} else {
$location.path($scope.ob + '/' + $scope.op + '/' + $scope.numpage + '/' + $scope.rpp).search('filter', $scope.filter).search('filteroperator', $scope.filteroperator).search('filtervalue', $scope.filtervalue);
}
}
return false;
};
//$scope.$on('myApp.construirBotoneraPaginas', function () {
// $scope.botoneraPaginas = serverService.getPaginationBar($scope.ob, $scope.op, $scope.page, $scope.pages, 2, $scope.rpp);
//})
//
// $scope.prettyFieldNames = serverService.getPrettyFieldNames($scope.ob).then(function (datos4) {
// datos4['data'].push('acciones');
// $scope.prettyFieldNames = datos4['data'];
// });
//
// $scope.clientes = serverService.getPage($scope.ob, $scope.page, null, null, $scope.rpp, null, null, null, null, null, null).then(function (datos3) {
// $scope.clientes = datos3['list'];
//
// });
//
// $scope.fieldNames = serverService.getFieldNames($scope.ob).then(function (datos6) {
// $scope.fieldNames = datos6['data'];
// $scope.selectedFilterFieldName = null;
// });
//
//
// $scope.$watch('numPagina', function () {
// $scope.$broadcast('myApp.construirPagina');
// }, true)
//
// $scope.$on('myApp.construirPagina', function () {
//
// $scope.clientes = serverService.getPage($scope.ob, $scope.page, null, null, $scope.rpp, null, null, null, null, null, null).then(function (datos3) {
// $scope.clientes = datos3['list'];
//
// });
//
// })
//
// $scope.filtrar = function () {
// alert("f")
//
//
// };
// $scope.$watch('filteroperator', function () {
// console.log($scope.filter);
// console.log($scope.filteroperator);
// console.log($scope.filtervalue);
// }, true)
$scope.newObj = function (args) {
$scope.objEdit = {};
$scope.position = undefined;
switch (args.strClass) {
case 'curso':
$scope.objEdit.id = 0;
break;
case 'inscribirse':
$scope.mostrarPass={};
if (sharedSpaceService.getFase() == 0) {
serverService.getDataFromPromise(serverService.promise_getOne('curso', args.id)).then(function (data) {
$scope.objEdit = data.message;
$scope.objEdit.password;
$scope.mostrarPass.mostrar=true;
$scope.mostrarPass.idCurso=args.id;
});
} else {
$scope.objEdit = sharedSpaceService.getObject();
sharedSpaceService.setFase(0);
}
break;
}
;
};
$scope.edit = function (args) {
$scope.objEdit = {};
switch (args.strClass) {
case 'curso':
$scope.position = $scope.page.alUsuariocursos.indexOf(args.levelOne);
if (sharedSpaceService.getFase() == 0) {
serverService.getDataFromPromise(serverService.promise_getOne(args.strClass, args.id)).then(function (data) {
$scope.objEdit = data.message;
//date conversion
});
} else {
$scope.objEdit = sharedSpaceService.getObject();
sharedSpaceService.setFase(0);
}
break;
}
;
};
$scope.save = function (type) {
switch (type) {
case 'curso':
serverService.getDataFromPromise(serverService.promise_setCurso('curso', {json: JSON.stringify(serverService.array_identificarArray($scope.objEdit))})).then(function (data) {
redirectService.checkAndRedirect(data);
                        if (data.status == 200) {
$scope.result = data;
if ($scope.position !== undefined) {
$scope.page.alUsuariocursos[$scope.position].obj_curso = $scope.objEdit;
$scope.position = undefined;
} else {
$scope.objFinal = {};
$scope.objFinal.id = $scope.result.message.idUsuarioCurso;
$scope.objFinal.obj_curso = {id: $scope.result.message.idCurso};
$scope.objFinal.obj_curso.titulo = $scope.objEdit.titulo;
$scope.objFinal.obj_curso.descripcion = $scope.objEdit.descripcion;
$scope.page.alUsuariocursos.push($scope.objFinal);
}
}
});<|fim▁hole|> break;
case 'inscribirse':
serverService.getDataFromPromise(serverService.promise_setUsuariocurso('usuariocurso', {json: JSON.stringify(serverService.array_identificarArray($scope.objEdit))})).then(function (data) {
redirectService.checkAndRedirect(data);
                        if (data.status == 200) {
                            $scope.getCursosPage();
                            $scope.mostrarPass.mostrar = undefined;
                            $scope.mostrarPass.idCurso = undefined;
}
});
break;
}
;
};
$scope.remove = function (type, id, idUsuarioCurso) {
switch (type) {
case 'curso':
serverService.getDataFromPromise(serverService.promise_removeCurso(type, id, idUsuarioCurso)).then(function (data) {
redirectService.checkAndRedirect(data);
                        if (data.status == 200) {
$scope.result = data;
$scope.page.alUsuariocursos.splice($scope.position, 1);
$scope.position = undefined;
}
});
break;
}
};
}]);<|fim▁end|> | |
<|file_name|>jsUnitTracer.js<|end_file_name|><|fim▁begin|>var TRACE_LEVEL_NONE = new JsUnitTraceLevel(0, null);
var TRACE_LEVEL_WARNING = new JsUnitTraceLevel(1, "#FF0000");
var TRACE_LEVEL_INFO = new JsUnitTraceLevel(2, "#009966");
var TRACE_LEVEL_DEBUG = new JsUnitTraceLevel(3, "#0000FF");
function JsUnitTracer(testManager) {
this._testManager = testManager;
this._traceWindow = null;
this.popupWindowsBlocked = false;
}
JsUnitTracer.prototype.initialize = function() {
if (this._traceWindow != null && top.testManager.closeTraceWindowOnNewRun.checked)
this._traceWindow.close();
this._traceWindow = null;
}
JsUnitTracer.prototype.finalize = function() {
if (this._traceWindow != null) {
this._traceWindow.document.write('<\/body>\n<\/html>');
this._traceWindow.document.close();
}
}
JsUnitTracer.prototype.warn = function() {
this._trace(arguments[0], arguments[1], TRACE_LEVEL_WARNING);
}
JsUnitTracer.prototype.inform = function() {
this._trace(arguments[0], arguments[1], TRACE_LEVEL_INFO);
}
JsUnitTracer.prototype.debug = function() {
this._trace(arguments[0], arguments[1], TRACE_LEVEL_DEBUG);
}
JsUnitTracer.prototype._trace = function(message, value, traceLevel) {
<|fim▁hole|> if (!top.shouldSubmitResults() && this._getChosenTraceLevel().matches(traceLevel)) {
var traceString = message;
if (value)
traceString += ': ' + value;
var prefix = this._testManager.getTestFileName() + ":" +
this._testManager.getTestFunctionName() + " - ";
this._writeToTraceWindow(prefix, traceString, traceLevel);
}
}
JsUnitTracer.prototype._getChosenTraceLevel = function() {
var levelNumber = eval(top.testManager.traceLevel.value);
return traceLevelByLevelNumber(levelNumber);
}
JsUnitTracer.prototype._writeToTraceWindow = function(prefix, traceString, traceLevel) {
var htmlToAppend = '<p class="jsUnitDefault">' + prefix + '<font color="' + traceLevel.getColor() + '">' + traceString + '</font><\/p>\n';
this._getTraceWindow().document.write(htmlToAppend);
}
JsUnitTracer.prototype._getTraceWindow = function() {
if (this._traceWindow == null && !top.shouldSubmitResults() && !this.popupWindowsBlocked) {
this._traceWindow = window.open('', '', 'width=600, height=350,status=no,resizable=yes,scrollbars=yes');
if (!this._traceWindow)
this.popupWindowsBlocked = true;
else {
var resDoc = this._traceWindow.document;
resDoc.write('<html>\n<head>\n<link rel="stylesheet" href="css/jsUnitStyle.css">\n<title>Tracing - JsUnit<\/title>\n<head>\n<body>');
resDoc.write('<h2>Tracing - JsUnit<\/h2>\n');
resDoc.write('<p class="jsUnitDefault"><i>(Traces are color coded: ');
resDoc.write('<font color="' + TRACE_LEVEL_WARNING.getColor() + '">Warning</font> - ');
resDoc.write('<font color="' + TRACE_LEVEL_INFO.getColor() + '">Information</font> - ');
resDoc.write('<font color="' + TRACE_LEVEL_DEBUG.getColor() + '">Debug</font>');
resDoc.write(')</i></p>');
}
}
return this._traceWindow;
}
if (xbDEBUG.on) {
xbDebugTraceObject('window', 'JsUnitTracer');
}
function JsUnitTraceLevel(levelNumber, color) {
this._levelNumber = levelNumber;
this._color = color;
}
JsUnitTraceLevel.prototype.matches = function(anotherTraceLevel) {
return this._levelNumber >= anotherTraceLevel._levelNumber;
}
JsUnitTraceLevel.prototype.getColor = function() {
return this._color;
}
function traceLevelByLevelNumber(levelNumber) {
switch (levelNumber) {
case 0: return TRACE_LEVEL_NONE;
case 1: return TRACE_LEVEL_WARNING;
case 2: return TRACE_LEVEL_INFO;
case 3: return TRACE_LEVEL_DEBUG;
}
return null;
}<|fim▁end|> | |
<|file_name|>example.js<|end_file_name|><|fim▁begin|>/**
* Example
* @version 0.0.1
*/
var MetaReponse = require('./MetaResponse');
var MetaData = require('./MetaData');
var MetaError = require('./MetaError');
var data1 = new MetaData();
var mr = new MetaReponse();
data1.setId(3);
data1.setType('recipe');
data1.addAttribute('title', 'Risotto al funghi');
data1.addAttribute('time', '45mins');
data1.addLink('self', 'http://www.monsite.com/recipe/3');
data1.addLink('next', 'http://www.monsite.com/recipe/4');
data1.addLink('prev', 'http://www.monsite.com/recipe/2');
mr.addLink('self', 'http://www.monsite.com/');
mr.addLink('next', 'http://www.monsite.com/next');
mr.addData(data1.getResponse());
mr.addData(data1);
console.log(data1.getResponse());
console.log(mr.getResponse());
var error = new MetaError();
error
.setStatusCode(401)<|fim▁hole|>
console.log(response.getResponse().errors);<|fim▁end|> | .setStatus("401 Unauthorized");
var response = new MetaReponse();
response.addError(error); |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Calculate the total cost of the tiles needed to cover a floor
plan of the given width and height, using a per-tile cost entered by the user.
"""
from __future__ import print_function
import argparse
import sys
class App(object):
"""Application."""
def __init__(self, args):
self._raw_args = args
self._args = None
self._argparse = argparse.ArgumentParser(
description="Calculate Fibbonaci numbers ...")
self.prepare_parser()
def prepare_parser(self):
"""Prepare Argument Parser."""
self._argparse.add_argument(
"w", type=int, help="Width")
self._argparse.add_argument(
"h", type=int, help="Height")
self._argparse.add_argument(
"c", type=float, help="Cost of Tile assuming that a tile is 1x1")
def run(self):
"""Run the application."""
self._args = self._argparse.parse_args(self._raw_args)
rez = App.get_cost(self._args.w, self._args.h, self._args.c)
output = "The cost is : {}".format(rez)
print(output)
@staticmethod
def get_cost(widht, height, cost):
"""Compute the cost."""
return (widht * height) * float(cost)<|fim▁hole|> App(sys.argv[1:]).run()<|fim▁end|> |
if __name__ == "__main__": |
<|file_name|>font.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use crossfont::Size as FontSize;
use serde::de::{self, Visitor};
use serde::{Deserialize, Deserializer};
use alacritty_config_derive::ConfigDeserialize;
use crate::config::ui_config::Delta;
/// Font config.
///<|fim▁hole|>#[derive(ConfigDeserialize, Debug, Clone, PartialEq, Eq)]
pub struct Font {
/// Extra spacing per character.
pub offset: Delta<i8>,
/// Glyph offset within character cell.
pub glyph_offset: Delta<i8>,
pub use_thin_strokes: bool,
/// Normal font face.
normal: FontDescription,
/// Bold font face.
bold: SecondaryFontDescription,
/// Italic font face.
italic: SecondaryFontDescription,
/// Bold italic font face.
bold_italic: SecondaryFontDescription,
/// Font size in points.
size: Size,
/// Whether to use the built-in font for box drawing characters.
pub builtin_box_drawing: bool,
}
impl Font {
/// Get a font clone with a size modification.
pub fn with_size(self, size: FontSize) -> Font {
Font { size: Size(size), ..self }
}
#[inline]
pub fn size(&self) -> FontSize {
self.size.0
}
/// Get normal font description.
pub fn normal(&self) -> &FontDescription {
&self.normal
}
/// Get bold font description.
pub fn bold(&self) -> FontDescription {
self.bold.desc(&self.normal)
}
/// Get italic font description.
pub fn italic(&self) -> FontDescription {
self.italic.desc(&self.normal)
}
/// Get bold italic font description.
pub fn bold_italic(&self) -> FontDescription {
self.bold_italic.desc(&self.normal)
}
}
impl Default for Font {
fn default() -> Font {
Self {
builtin_box_drawing: true,
use_thin_strokes: Default::default(),
glyph_offset: Default::default(),
bold_italic: Default::default(),
italic: Default::default(),
offset: Default::default(),
normal: Default::default(),
bold: Default::default(),
size: Default::default(),
}
}
}
/// Description of the normal font.
#[derive(ConfigDeserialize, Debug, Clone, PartialEq, Eq)]
pub struct FontDescription {
pub family: String,
pub style: Option<String>,
}
impl Default for FontDescription {
fn default() -> FontDescription {
FontDescription {
#[cfg(not(any(target_os = "macos", windows)))]
family: "monospace".into(),
#[cfg(target_os = "macos")]
family: "Menlo".into(),
#[cfg(windows)]
family: "Consolas".into(),
style: None,
}
}
}
/// Description of the italic and bold font.
#[derive(ConfigDeserialize, Debug, Default, Clone, PartialEq, Eq)]
pub struct SecondaryFontDescription {
family: Option<String>,
style: Option<String>,
}
impl SecondaryFontDescription {
pub fn desc(&self, fallback: &FontDescription) -> FontDescription {
FontDescription {
family: self.family.clone().unwrap_or_else(|| fallback.family.clone()),
style: self.style.clone(),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
struct Size(FontSize);
impl Default for Size {
fn default() -> Self {
Self(FontSize::new(11.))
}
}
impl<'de> Deserialize<'de> for Size {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct NumVisitor;
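        // The visitor below accepts both integer and float literals for the font size.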
impl<'v> Visitor<'v> for NumVisitor {
type Value = Size;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("f64 or u64")
}
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
Ok(Size(FontSize::new(value as f32)))
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
Ok(Size(FontSize::new(value as f32)))
}
}
deserializer.deserialize_any(NumVisitor)
}
}<|fim▁end|> | /// Defaults are provided at the level of this struct per platform, but not per
/// field in this struct. It might be nice in the future to have defaults for
/// each value independently. Alternatively, maybe erroring when the user
/// doesn't provide complete config is Ok. |
<|file_name|>intcheckers.go<|end_file_name|><|fim▁begin|>// -*- Mode: Go; indent-tabs-mode: t -*-
/*
* Copyright (C) 2015-2018 Canonical Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package testutil
import (
"fmt"
"gopkg.in/check.v1"
)
type intChecker struct {
*check.CheckerInfo
rel string
}
func (checker *intChecker) Check(params []interface{}, names []string) (result bool, error string) {
a, ok := params[0].(int)
if !ok {
return false, "left-hand-side argument must be an int"
}
b, ok := params[1].(int)
if !ok {
return false, "right-hand-side argument must be an int"
}
switch checker.rel {
case "<":
result = a < b
case "<=":
result = a <= b
case "==":
result = a == b
case "!=":
result = a != b
case ">":
result = a > b
case ">=":
result = a >= b
default:
return false, fmt.Sprintf("unexpected relation %q", checker.rel)
}
if !result {
error = fmt.Sprintf("relation %d %s %d is not true", a, checker.rel, b)
}
return result, error
}
// IntLessThan checker verifies that one integer is less than other integer.
//
// For example:
// c.Assert(1, IntLessThan, 2)
var IntLessThan = &intChecker{CheckerInfo: &check.CheckerInfo{Name: "IntLessThan", Params: []string{"a", "b"}}, rel: "<"}
// IntLessEqual checker verifies that one integer is less than or equal to other integer.
//
// For example:
// c.Assert(1, IntLessEqual, 1)
var IntLessEqual = &intChecker{CheckerInfo: &check.CheckerInfo{Name: "IntLessEqual", Params: []string{"a", "b"}}, rel: "<="}
// IntEqual checker verifies that one integer is equal to other integer.
//
// For example:
// c.Assert(1, IntEqual, 1)
var IntEqual = &intChecker{CheckerInfo: &check.CheckerInfo{Name: "IntEqual", Params: []string{"a", "b"}}, rel: "=="}
// IntNotEqual checker verifies that one integer is not equal to other integer.
//
// For example:
// c.Assert(1, IntNotEqual, 2)
var IntNotEqual = &intChecker{CheckerInfo: &check.CheckerInfo{Name: "IntNotEqual", Params: []string{"a", "b"}}, rel: "!="}
// IntGreaterThan checker verifies that one integer is greater than other integer.
//
// For example:
// c.Assert(2, IntGreaterThan, 1)
var IntGreaterThan = &intChecker{CheckerInfo: &check.CheckerInfo{Name: "IntGreaterThan", Params: []string{"a", "b"}}, rel: ">"}
// IntGreaterEqual checker verifies that one integer is greater than or equal to other integer.
//
// For example:
// c.Assert(1, IntGreaterEqual, 2)<|fim▁hole|><|fim▁end|> | var IntGreaterEqual = &intChecker{CheckerInfo: &check.CheckerInfo{Name: "IntGreaterEqual", Params: []string{"a", "b"}}, rel: ">="} |
<|file_name|>task.hpp<|end_file_name|><|fim▁begin|>#ifndef INCLUDE_MODEL_TASK_HPP_
#define INCLUDE_MODEL_TASK_HPP_
#include <unordered_map>
#include "building.hpp"
#include "entity.hpp"
#include "item.hpp"
#include "tile.hpp"
namespace villa
{
/**
* Task data union.
*/
struct taskdata
{
taskdata(std::pair<int, int> target_coords);
taskdata(std::pair<int, int> target_coords, entity* target_entity);
taskdata(std::pair<int, int> target_coords, building* target_building);
taskdata(std::pair<int, int> target_coords, std::pair<entity*, item*> target_item);
taskdata(std::pair<int, int> target_coords, int time);
std::pair<int, int> target_coords;
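        // Only one union member is meaningful for a given task; which one
        // depends on the tasktype the taskdata is paired with.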
union
{
entity* target_entity;
building* target_building;
std::pair<entity*, item*> target_item;
int time;
};
};
/**
* Task type enumeration.
*/
enum class tasktype
{
idle, //!< idle
move, //!< move
build, //!< build
harvest, //!< harvest
take_item, //!< take_item
store_item,//!< store_item
rest //!< rest
};
<|fim▁hole|> /**
* Task class.
* Represents a task that can be carried out by villagers.
*/
class task
{
public:
task(tasktype type, taskdata data);
tasktype get_type();
taskdata get_data();
private:
std::pair<tasktype, taskdata> data;
};
}
#endif /* INCLUDE_MODEL_TASK_HPP_ */<|fim▁end|> | |
<|file_name|>flow_management_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Test the flow_management interface."""
import os
from grr.gui import gui_test_lib
from grr.gui import runtests_test
from grr.lib import action_mocks
from grr.lib import aff4
from grr.lib import flags
from grr.lib import flow
from grr.lib import hunts
from grr.lib import test_lib
from grr.lib.flows.general import filesystem as flows_filesystem
from grr.lib.flows.general import processes as flows_processes
from grr.lib.flows.general import transfer as flows_transfer
from grr.lib.flows.general import webhistory as flows_webhistory
from grr.lib.hunts import standard
from grr.lib.hunts import standard_test
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import paths as rdf_paths
class TestFlowManagement(gui_test_lib.GRRSeleniumTest,
standard_test.StandardHuntTestMixin):
"""Test the flow management GUI."""
def setUp(self):
super(TestFlowManagement, self).setUp()
with self.ACLChecksDisabled():
self.client_id = rdf_client.ClientURN("C.0000000000000001")
with aff4.FACTORY.Open(
self.client_id, mode="rw", token=self.token) as client:
client.Set(client.Schema.HOSTNAME("HostC.0000000000000001"))
self.RequestAndGrantClientApproval(self.client_id)
self.action_mock = action_mocks.FileFinderClientMock()
def testOpeningManageFlowsOfUnapprovedClientRedirectsToHostInfoPage(self):
self.Open("/#/clients/C.0000000000000002/flows/")
# As we don't have an approval for C.0000000000000002, we should be
# redirected to the host info page.
self.WaitUntilEqual("/#/clients/C.0000000000000002/host-info",
self.GetCurrentUrlPath)
self.WaitUntil(self.IsTextPresent,
"You do not have an approval for this client.")
def testPageTitleReflectsSelectedFlow(self):
pathspec = rdf_paths.PathSpec(
path=os.path.join(self.base_path, "test.plist"),
pathtype=rdf_paths.PathSpec.PathType.OS)
flow_urn = flow.GRRFlow.StartFlow(
flow_name=flows_transfer.GetFile.__name__,
client_id=self.client_id,
pathspec=pathspec,
token=self.token)
self.Open("/#/clients/C.0000000000000001/flows/")
self.WaitUntilEqual("GRR | C.0000000000000001 | Flows", self.GetPageTitle)
self.Click("css=td:contains('GetFile')")
self.WaitUntilEqual("GRR | C.0000000000000001 | " + flow_urn.Basename(),
self.GetPageTitle)
def testFlowManagement(self):
"""Test that scheduling flows works."""
self.Open("/")
self.Type("client_query", "C.0000000000000001")
self.Click("client_query_submit")
self.WaitUntilEqual(u"C.0000000000000001", self.GetText,
"css=span[type=subject]")
# Choose client 1
self.Click("css=td:contains('0001')")
# First screen should be the Host Information already.
self.WaitUntil(self.IsTextPresent, "HostC.0000000000000001")
self.Click("css=a[grrtarget='client.launchFlows']")
self.Click("css=#_Processes")
self.Click("link=" + flows_processes.ListProcesses.__name__)
self.WaitUntil(self.IsTextPresent, "C.0000000000000001")
self.WaitUntil(self.IsTextPresent, "List running processes on a system.")
self.Click("css=button.Launch")
self.WaitUntil(self.IsTextPresent, "Launched Flow ListProcesses")
self.Click("css=#_Browser")
# Wait until the tree has expanded.
self.WaitUntil(self.IsTextPresent, flows_webhistory.FirefoxHistory.__name__)
    # Check that we can start a GetFile flow from the Filesystem category.
self.Click("css=#_Filesystem")
# Wait until the tree has expanded.
self.WaitUntil(self.IsTextPresent,
flows_filesystem.UpdateSparseImageChunks.__name__)
self.Click("link=" + flows_transfer.GetFile.__name__)
self.Select("css=.form-group:has(> label:contains('Pathtype')) select",
"OS")
self.Type("css=.form-group:has(> label:contains('Path')) input",
u"/dev/c/msn[1].exe")
self.Click("css=button.Launch")
self.WaitUntil(self.IsTextPresent, "Launched Flow GetFile")
# Test that recursive tests are shown in a tree table.
with self.ACLChecksDisabled():
flow.GRRFlow.StartFlow(
client_id="aff4:/C.0000000000000001",
flow_name=gui_test_lib.RecursiveTestFlow.__name__,
token=self.token)
self.Click("css=a[grrtarget='client.flows']")
# Some rows are present in the DOM but hidden because parent flow row
# wasn't expanded yet. Due to this, we have to explicitly filter rows
# with "visible" jQuery filter.
self.WaitUntilEqual("RecursiveTestFlow", self.GetText,
"css=grr-client-flows-list tr:visible:nth(1) td:nth(2)")
self.WaitUntilEqual("GetFile", self.GetText,
"css=grr-client-flows-list tr:visible:nth(2) td:nth(2)")
# Click on the first tree_closed to open it.
self.Click("css=grr-client-flows-list tr:visible:nth(1) .tree_closed")
self.WaitUntilEqual("RecursiveTestFlow", self.GetText,
"css=grr-client-flows-list tr:visible:nth(2) td:nth(2)")
# Select the requests tab
self.Click("css=td:contains(GetFile)")
self.Click("css=li[heading=Requests]")
self.WaitUntil(self.IsElementPresent,
"css=td:contains(flow:request:00000001)")
# Check that a StatFile client action was issued as part of the GetFile
# flow.
self.WaitUntil(self.IsElementPresent,
"css=.tab-content td.proto_value:contains(StatFile)")
def testOverviewIsShownForNestedFlows(self):
with self.ACLChecksDisabled():
for _ in test_lib.TestFlowHelper(
gui_test_lib.RecursiveTestFlow.__name__,
self.action_mock,
client_id=self.client_id,
token=self.token):
pass
self.Open("/#c=C.0000000000000001")
self.Click("css=a[grrtarget='client.flows']")
# There should be a RecursiveTestFlow in the list. Expand nested flows.
self.Click("css=tr:contains('RecursiveTestFlow') span.tree_branch")
# Click on a nested flow.
self.Click("css=tr:contains('RecursiveTestFlow'):nth(2)")
# Nested flow should have Depth argument set to 1.
self.WaitUntil(self.IsElementPresent,
"css=td:contains('Depth') ~ td:nth(0):contains('1')")
# Check that flow id of this flow has forward slash - i.e. consists of
# 2 components.<|fim▁hole|>
def testOverviewIsShownForNestedHuntFlows(self):
with self.ACLChecksDisabled():
with hunts.GRRHunt.StartHunt(
hunt_name=standard.GenericHunt.__name__,
flow_runner_args=rdf_flows.FlowRunnerArgs(
flow_name=gui_test_lib.RecursiveTestFlow.__name__),
client_rate=0,
token=self.token) as hunt:
hunt.Run()
self.AssignTasksToClients(client_ids=[self.client_id])
self.RunHunt(client_ids=[self.client_id])
self.Open("/#c=C.0000000000000001")
self.Click("css=a[grrtarget='client.flows']")
# There should be a RecursiveTestFlow in the list. Expand nested flows.
self.Click("css=tr:contains('RecursiveTestFlow') span.tree_branch")
# Click on a nested flow.
self.Click("css=tr:contains('RecursiveTestFlow'):nth(2)")
# Nested flow should have Depth argument set to 1.
self.WaitUntil(self.IsElementPresent,
"css=td:contains('Depth') ~ td:nth(0):contains('1')")
# Check that flow id of this flow has forward slash - i.e. consists of
# 2 components.
self.WaitUntil(self.IsTextPresent, "Flow ID")
flow_id = self.GetText("css=dt:contains('Flow ID') ~ dd:nth(0)")
self.assertTrue("/" in flow_id)
def testLogsCanBeOpenedByClickingOnLogsTab(self):
    # FlowWithOneLogStatement doesn't send any results back.
with self.ACLChecksDisabled():
for _ in test_lib.TestFlowHelper(
"FlowWithOneLogStatement",
self.action_mock,
client_id=self.client_id,
token=self.token):
pass
self.Open("/#c=C.0000000000000001")
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('FlowWithOneLogStatement')")
self.Click("css=li[heading=Log]")
self.WaitUntil(self.IsTextPresent, "I do log.")
def testLogTimestampsArePresentedInUTC(self):
with self.ACLChecksDisabled():
with test_lib.FakeTime(42):
for _ in test_lib.TestFlowHelper(
"FlowWithOneLogStatement",
self.action_mock,
client_id=self.client_id,
token=self.token):
pass
self.Open("/#c=C.0000000000000001")
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('FlowWithOneLogStatement')")
self.Click("css=li[heading=Log]")
self.WaitUntil(self.IsTextPresent, "1970-01-01 00:00:42 UTC")
def testResultsAreDisplayedInResultsTab(self):
with self.ACLChecksDisabled():
for _ in test_lib.TestFlowHelper(
"FlowWithOneStatEntryResult",
self.action_mock,
client_id=self.client_id,
token=self.token):
pass
self.Open("/#c=C.0000000000000001")
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('FlowWithOneStatEntryResult')")
self.Click("css=li[heading=Results]")
self.WaitUntil(self.IsTextPresent, "aff4:/some/unique/path")
def testEmptyTableIsDisplayedInResultsWhenNoResults(self):
with self.ACLChecksDisabled():
flow.GRRFlow.StartFlow(
flow_name="FlowWithOneStatEntryResult",
client_id=self.client_id,
sync=False,
token=self.token)
self.Open("/#c=" + self.client_id.Basename())
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('FlowWithOneStatEntryResult')")
self.Click("css=li[heading=Results]")
self.WaitUntil(self.IsElementPresent, "css=#main_bottomPane table thead "
"th:contains('Value')")
def testHashesAreDisplayedCorrectly(self):
with self.ACLChecksDisabled():
for _ in test_lib.TestFlowHelper(
"FlowWithOneHashEntryResult",
self.action_mock,
client_id=self.client_id,
token=self.token):
pass
self.Open("/#c=C.0000000000000001")
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('FlowWithOneHashEntryResult')")
self.Click("css=li[heading=Results]")
self.WaitUntil(self.IsTextPresent,
"9e8dc93e150021bb4752029ebbff51394aa36f069cf19901578"
"e4f06017acdb5")
self.WaitUntil(self.IsTextPresent,
"6dd6bee591dfcb6d75eb705405302c3eab65e21a")
self.WaitUntil(self.IsTextPresent, "8b0a15eefe63fd41f8dc9dee01c5cf9a")
def testChangingTabUpdatesUrl(self):
with self.ACLChecksDisabled():
flow_urn = flow.GRRFlow.StartFlow(
flow_name=gui_test_lib.FlowWithOneStatEntryResult.__name__,
client_id=self.client_id,
token=self.token)
flow_id = flow_urn.Basename()
base_url = "/#/clients/C.0000000000000001/flows/%s" % flow_id
self.Open(base_url)
self.Click("css=li[heading=Requests]")
self.WaitUntilEqual(base_url + "/requests", self.GetCurrentUrlPath)
self.Click("css=li[heading=Results]")
self.WaitUntilEqual(base_url + "/results", self.GetCurrentUrlPath)
self.Click("css=li[heading=Log]")
self.WaitUntilEqual(base_url + "/log", self.GetCurrentUrlPath)
self.Click("css=li[heading='Flow Information']")
self.WaitUntilEqual(base_url, self.GetCurrentUrlPath)
def testDirectLinksToFlowsTabsWorkCorrectly(self):
with self.ACLChecksDisabled():
flow_urn = flow.GRRFlow.StartFlow(
flow_name=gui_test_lib.FlowWithOneStatEntryResult.__name__,
client_id=self.client_id,
token=self.token)
flow_id = flow_urn.Basename()
base_url = "/#/clients/C.0000000000000001/flows/%s" % flow_id
self.Open(base_url + "/requests")
self.WaitUntil(self.IsElementPresent, "css=li.active[heading=Requests]")
self.Open(base_url + "/results")
self.WaitUntil(self.IsElementPresent, "css=li.active[heading=Results]")
self.Open(base_url + "/log")
self.WaitUntil(self.IsElementPresent, "css=li.active[heading=Log]")
# Check that both clients/.../flows/... and clients/.../flows/.../ URLs
# work.
self.Open(base_url)
self.WaitUntil(self.IsElementPresent,
"css=li.active[heading='Flow Information']")
self.Open(base_url + "/")
self.WaitUntil(self.IsElementPresent,
"css=li.active[heading='Flow Information']")
def testCancelFlowWorksCorrectly(self):
"""Tests that cancelling flows works."""
flow.GRRFlow.StartFlow(
client_id=self.client_id,
flow_name=gui_test_lib.RecursiveTestFlow.__name__,
token=self.token)
# Open client and find the flow
self.Open("/")
self.Type("client_query", "C.0000000000000001")
self.Click("client_query_submit")
self.WaitUntilEqual(u"C.0000000000000001", self.GetText,
"css=span[type=subject]")
self.Click("css=td:contains('0001')")
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('RecursiveTestFlow')")
self.Click("css=button[name=cancel_flow]")
# The window should be updated now
self.WaitUntil(self.IsTextPresent, "Cancelled in GUI")
def testGlobalFlowManagement(self):
"""Test that scheduling flows works."""
with self.ACLChecksDisabled():
self.CreateAdminUser(self.token.username)
self.Open("/")
self.Click("css=a[grrtarget=globalFlows]")
self.Click("css=#_Reporting")
self.assertEqual("RunReport", self.GetText("link=RunReport"))
self.Click("link=RunReport")
self.WaitUntil(self.IsTextPresent, "Report name")
def main(argv):
# Run the full test suite
runtests_test.SeleniumTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)<|fim▁end|> | self.WaitUntil(self.IsTextPresent, "Flow ID")
flow_id = self.GetText("css=dt:contains('Flow ID') ~ dd:nth(0)")
self.assertTrue("/" in flow_id) |
<|file_name|>zopetestbrowser.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import re
from lxml.cssselect import CSSSelector
from zope.testbrowser.browser import Browser
from splinter.element_list import ElementList
from splinter.exceptions import ElementDoesNotExist
from splinter.driver import DriverAPI, ElementAPI
from splinter.cookie_manager import CookieManagerAPI
import mimetypes
import lxml.html
import mechanize
import time
class CookieManager(CookieManagerAPI):
def __init__(self, browser_cookies):
self._cookies = browser_cookies
def add(self, cookies):
if isinstance(cookies, list):
for cookie in cookies:
for key, value in cookie.items():
self._cookies[key] = value
return
for key, value in cookies.items():
self._cookies[key] = value
def delete(self, *cookies):
if cookies:
for cookie in cookies:
try:
del self._cookies[cookie]
except KeyError:
pass
else:
self._cookies.clearAll()
def all(self, verbose=False):
cookies = {}
for key, value in self._cookies.items():
cookies[key] = value
return cookies
def __getitem__(self, item):
return self._cookies[item]<|fim▁hole|>
class ZopeTestBrowser(DriverAPI):
driver_name = "zope.testbrowser"
def __init__(self, user_agent=None, wait_time=2):
self.wait_time = wait_time
mech_browser = self._get_mech_browser(user_agent)
self._browser = Browser(mech_browser=mech_browser)
self._cookie_manager = CookieManager(self._browser.cookies)
self._last_urls = []
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def visit(self, url):
self._browser.open(url)
def back(self):
self._last_urls.insert(0, self.url)
self._browser.goBack()
def forward(self):
try:
self.visit(self._last_urls.pop())
except IndexError:
pass
def reload(self):
self._browser.reload()
def quit(self):
pass
@property
def htmltree(self):
return lxml.html.fromstring(self.html.decode('utf-8'))
@property
def title(self):
return self._browser.title
@property
def html(self):
return self._browser.contents
@property
def url(self):
return self._browser.url
def find_option_by_value(self, value):
html = self.htmltree
element = html.xpath('//option[@value="%s"]' % value)[0]
control = self._browser.getControl(element.text)
return ElementList([ZopeTestBrowserOptionElement(control, self)], find_by="value", query=value)
def find_option_by_text(self, text):
html = self.htmltree
element = html.xpath('//option[normalize-space(text())="%s"]' % text)[0]
control = self._browser.getControl(element.text)
return ElementList([ZopeTestBrowserOptionElement(control, self)], find_by="text", query=text)
def find_by_css(self, selector):
xpath = CSSSelector(selector).path
return self.find_by_xpath(xpath, original_find="css", original_selector=selector)
def find_by_xpath(self, xpath, original_find=None, original_selector=None):
html = self.htmltree
elements = []
for xpath_element in html.xpath(xpath):
if self._element_is_link(xpath_element):
return self._find_links_by_xpath(xpath)
elif self._element_is_control(xpath_element):
return self.find_by_name(xpath_element.name)
else:
elements.append(xpath_element)
find_by = original_find or "xpath"
query = original_selector or xpath
return ElementList([ZopeTestBrowserElement(element, self) for element in elements], find_by=find_by, query=query)
def find_by_tag(self, tag):
return self.find_by_xpath('//%s' % tag, original_find="tag", original_selector=tag)
def find_by_value(self, value):
return self.find_by_xpath('//*[@value="%s"]' % value, original_find="value", original_selector=value)
def find_by_id(self, id_value):
return self.find_by_xpath('//*[@id="%s"][1]' % id_value, original_find="id", original_selector=id_value)
def find_by_name(self, name):
elements = []
index = 0
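        # zope.testbrowser exposes same-named controls by index, so keep
        # fetching until a LookupError signals there are no more matches.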
while True:
try:
control = self._browser.getControl(name=name, index=index)
elements.append(control)
index += 1
except LookupError:
break
return ElementList([ZopeTestBrowserControlElement(element, self) for element in elements], find_by="name", query=name)
def find_link_by_text(self, text):
return self._find_links_by_xpath("//a[text()='%s']" % text)
def find_link_by_href(self, href):
return self._find_links_by_xpath("//a[@href='%s']" % href)
def find_link_by_partial_href(self, partial_href):
return self._find_links_by_xpath("//a[contains(@href, '%s')]" % partial_href)
def find_link_by_partial_text(self, partial_text):
return self._find_links_by_xpath("//a[contains(normalize-space(.), '%s')]" % partial_text)
def fill(self, name, value):
self.find_by_name(name=name).first._control.value = value
def fill_form(self, field_values):
for name, value in field_values.items():
element = self.find_by_name(name)
control = element.first._control
if control.type == 'checkbox':
if value:
control.value = control.options
else:
control.value = []
elif control.type == 'radio':
control.value = [option for option in control.options if option == value]
elif control.type == 'select':
control.value = [value]
else:
# text, textarea, password, tel
control.value = value
def choose(self, name, value):
control = self._browser.getControl(name=name)
control.value = [option for option in control.options if option == value]
def check(self, name):
control = self._browser.getControl(name=name)
control.value = control.options
def uncheck(self, name):
control = self._browser.getControl(name=name)
control.value = []
def attach_file(self, name, file_path):
filename = file_path.split('/')[-1]
control = self._browser.getControl(name=name)
content_type, _ = mimetypes.guess_type(file_path)
control.add_file(open(file_path), content_type, filename)
def _find_links_by_xpath(self, xpath):
html = self.htmltree
links = html.xpath(xpath)
return ElementList([ZopeTestBrowserLinkElement(link, self) for link in links], find_by="xpath", query=xpath)
def select(self, name, value):
self.find_by_name(name).first._control.value = [value]
def is_text_present(self, text, wait_time=None):
wait_time = wait_time or self.wait_time
end_time = time.time() + wait_time
while time.time() < end_time:
if self._is_text_present(text):
return True
return False
def _is_text_present(self, text):
try:
body = self.find_by_tag('body').first
return text in body.text
except ElementDoesNotExist:
# This exception will be thrown if the body tag isn't present
# This has occasionally been observed. Assume that the
# page isn't fully loaded yet
return False
def is_text_not_present(self, text, wait_time=None):
wait_time = wait_time or self.wait_time
end_time = time.time() + wait_time
while time.time() < end_time:
if not self._is_text_present(text):
return True
return False
def _element_is_link(self, element):
return element.tag == 'a'
def _element_is_control(self, element):
return hasattr(element, 'type')
def _get_mech_browser(self, user_agent):
mech_browser = mechanize.Browser()
if user_agent is not None:
mech_browser.addheaders = [("User-agent", user_agent), ]
return mech_browser
@property
def cookies(self):
return self._cookie_manager
re_extract_inner_html = re.compile(r'^<[^<>]+>(.*)</[^<>]+>$')
class ZopeTestBrowserElement(ElementAPI):
def __init__(self, element, parent):
self._element = element
self.parent = parent
def __getitem__(self, attr):
return self._element.attrib[attr]
def find_by_css(self, selector):
elements = self._element.cssselect(selector)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_xpath(self, selector):
elements = self._element.xpath(selector)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_name(self, name):
elements = self._element.cssselect('[name="%s"]' % name)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_tag(self, name):
elements = self._element.cssselect(name)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_value(self, value):
elements = self._element.cssselect('[value="%s"]' % value)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_id(self, id):
elements = self._element.cssselect('#%s' % id)
return ElementList([self.__class__(element, self) for element in elements])
@property
def value(self):
return self._element.text_content()
@property
def text(self):
return self.value
@property
def outer_html(self):
return lxml.html.tostring(self._element, encoding='unicode').strip()
@property
def html(self):
return re_extract_inner_html.match(self.outer_html).group(1)
def has_class(self, class_name):
return len(self._element.find_class(class_name)) > 0
class ZopeTestBrowserLinkElement(ZopeTestBrowserElement):
def __init__(self, element, parent):
super(ZopeTestBrowserLinkElement, self).__init__(element, parent)
self._browser = parent._browser
def __getitem__(self, attr):
return super(ZopeTestBrowserLinkElement, self).__getitem__(attr)
def click(self):
return self._browser.open(self["href"])
class ZopeTestBrowserControlElement(ZopeTestBrowserElement):
def __init__(self, control, parent):
self._control = control
self.parent = parent
def __getitem__(self, attr):
return self._control.mech_control.attrs[attr]
@property
def value(self):
return self._control.value
@property
def checked(self):
return bool(self._control.value)
def click(self):
return self._control.click()
def fill(self, value):
self._control.value = value
def select(self, value):
self._control.value = [value]
class ZopeTestBrowserOptionElement(ZopeTestBrowserElement):
def __init__(self, control, parent):
self._control = control
self.parent = parent
def __getitem__(self, attr):
return self._control.mech_item.attrs[attr]
@property
def text(self):
return self._control.mech_item.get_labels()[0]._text
@property
def value(self):
return self._control.optionValue
@property
def selected(self):
return self._control.mech_item._selected<|fim▁end|> |
def __eq__(self, other_object):
if isinstance(other_object, dict):
return dict(self._cookies) == other_object |
<|file_name|>test_list.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2012-2014 Keith Fancher
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import unittest
from list import TodoTxtList
class TestTodoTxtList(unittest.TestCase):
def test_init_from_text(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
self.assertEqual(3, test_list.num_items())
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('Item three', test_list.items[2].text)
self.assertEqual(None, test_list.items[2].priority)
self.assertTrue(test_list.items[2].is_completed)
def test_init_from_file(self):
file_name = 'sample-todo.txt'
test_list = TodoTxtList(file_name)
self.assertEqual(8, test_list.num_items())
self.assertEqual('Do that really important thing', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Summon AppIndicator documentation from my ass', test_list.items[1].text)
self.assertEqual('D', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('This other important thing', test_list.items[2].text)
self.assertEqual('A', test_list.items[2].priority)
self.assertFalse(test_list.items[2].is_completed)
self.assertEqual('Walk the cat', test_list.items[3].text)
self.assertEqual('B', test_list.items[3].priority)
self.assertFalse(test_list.items[3].is_completed)
self.assertEqual('Something with no priority!', test_list.items[4].text)
self.assertEqual(None, test_list.items[4].priority)
self.assertFalse(test_list.items[4].is_completed)
self.assertEqual('Cook the dog', test_list.items[5].text)
self.assertEqual('C', test_list.items[5].priority)
self.assertFalse(test_list.items[5].is_completed)
self.assertEqual('Be annoyed at GTK3 docs', test_list.items[6].text)
self.assertEqual(None, test_list.items[6].priority)
self.assertTrue(test_list.items[6].is_completed)
self.assertEqual('Something I already did', test_list.items[7].text)
self.assertEqual(None, test_list.items[7].priority)
self.assertTrue(test_list.items[7].is_completed)
def test_reload_from_file(self):
test_list = TodoTxtList() # Start with an empty list
test_list.reload_from_file() # Should do nothing
test_list.todo_filename = 'sample-todo.txt'
test_list.reload_from_file()
self.assertEqual(8, test_list.num_items())
self.assertEqual('Do that really important thing', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Summon AppIndicator documentation from my ass', test_list.items[1].text)
self.assertEqual('D', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('This other important thing', test_list.items[2].text)
self.assertEqual('A', test_list.items[2].priority)
self.assertFalse(test_list.items[2].is_completed)
self.assertEqual('Walk the cat', test_list.items[3].text)
self.assertEqual('B', test_list.items[3].priority)
self.assertFalse(test_list.items[3].is_completed)
self.assertEqual('Something with no priority!', test_list.items[4].text)
self.assertEqual(None, test_list.items[4].priority)
self.assertFalse(test_list.items[4].is_completed)
self.assertEqual('Cook the dog', test_list.items[5].text)
self.assertEqual('C', test_list.items[5].priority)
self.assertFalse(test_list.items[5].is_completed)
self.assertEqual('Be annoyed at GTK3 docs', test_list.items[6].text)
self.assertEqual(None, test_list.items[6].priority)
self.assertTrue(test_list.items[6].is_completed)
self.assertEqual('Something I already did', test_list.items[7].text)
self.assertEqual(None, test_list.items[7].priority)
self.assertTrue(test_list.items[7].is_completed)
def test_has_items(self):
test_list = TodoTxtList()
self.assertFalse(test_list.has_items())
test_list = TodoTxtList(None, 'An item')
self.assertTrue(test_list.has_items())
def test_remove_item(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
self.assertEqual(3, test_list.num_items())
test_list.remove_item('Item two')
self.assertEqual(2, test_list.num_items())<|fim▁hole|> self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item three', test_list.items[1].text)
self.assertEqual(None, test_list.items[1].priority)
self.assertTrue(test_list.items[1].is_completed)
def test_remove_completed_items(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
self.assertEqual(3, test_list.num_items())
test_list.remove_completed_items()
self.assertEqual(2, test_list.num_items())
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
def test_mark_item_completed(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
test_list.mark_item_completed('Item two')
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertTrue(test_list.items[1].is_completed)
self.assertEqual('Item three', test_list.items[2].text)
self.assertEqual(None, test_list.items[2].priority)
self.assertTrue(test_list.items[2].is_completed)
def test_mark_item_completed_with_full_text(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
test_list.mark_item_completed_with_full_text('(Z) Item two')
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertTrue(test_list.items[1].is_completed)
self.assertEqual('Item three', test_list.items[2].text)
self.assertEqual(None, test_list.items[2].priority)
self.assertTrue(test_list.items[2].is_completed)
def test_sort_list(self):
todo_text = "x (C) No biggie\n(Z) aaaaa\nNothing\n(B) hey hey\n(Z) bbbbb\n(A) aaaaa\nx Item three\n\nx (B) Done it\n"
test_list = TodoTxtList(None, todo_text)
test_list.sort_list()
self.assertEqual(8, test_list.num_items())
self.assertEqual('aaaaa', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('hey hey', test_list.items[1].text)
self.assertEqual('B', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('aaaaa', test_list.items[2].text)
self.assertEqual('Z', test_list.items[2].priority)
self.assertFalse(test_list.items[2].is_completed)
self.assertEqual('bbbbb', test_list.items[3].text)
self.assertEqual('Z', test_list.items[3].priority)
self.assertFalse(test_list.items[3].is_completed)
self.assertEqual('Nothing', test_list.items[4].text)
self.assertEqual(None, test_list.items[4].priority)
self.assertFalse(test_list.items[4].is_completed)
self.assertEqual('Done it', test_list.items[5].text)
self.assertEqual('B', test_list.items[5].priority)
self.assertTrue(test_list.items[5].is_completed)
self.assertEqual('No biggie', test_list.items[6].text)
self.assertEqual('C', test_list.items[6].priority)
self.assertTrue(test_list.items[6].is_completed)
self.assertEqual('Item three', test_list.items[7].text)
self.assertEqual(None, test_list.items[7].priority)
self.assertTrue(test_list.items[7].is_completed)
def test_to_text(self):
test_list = TodoTxtList()
# Empty list yields empty string:
self.assertEqual('', str(test_list))
todo_text = "(A) Do one thing\n (B) Do another thing\n x One last thing"
expected_output = "(A) Do one thing\n(B) Do another thing\nx One last thing"
test_list.init_from_text(todo_text)
self.assertEqual(expected_output, str(test_list))
def test_write_to_file(self):
todo_text = "(A) Do one thing\n (B) Do another thing\n x One last thing"
expected_output = "(A) Do one thing\n(B) Do another thing\nx One last thing"
test_list = TodoTxtList(None, todo_text)
# Write to a temporary output file:
output_file = tempfile.NamedTemporaryFile(mode='w+')
test_list.todo_filename = output_file.name
test_list.write_to_file()
# Now read the file in and see that it all matches up:
self.assertEqual(expected_output, output_file.read())
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>find_path_sha1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#------------------------------------------------------------
#
# Ciro D. Santilli
#
# Prints a list of paths which are files followed by their inodes and sha1 sums.
#
# Useful to make a backup of paths names before mass renaming them,
# supposing your files are distinct by SHA1 and that SHA1 has not changed,
# or that the inodes have not changed.
#
#------------------------------------------------------------
import os
import os.path
import stat
import hashlib
import sys
SHA1_MAX_BYTES_READ_DEFAULT = float("inf") # defaults to read entire file
def sha1_hex_file(filepath, max_bytes=None):
"""
Returns the SHA1 of a given filepath in hexadecimal.
Opt-args:
* max_bytes. If given, reads at most max_bytes bytes from the file.
"""
sha1 = hashlib.sha1()
f = open(filepath, 'rb')
try:
if max_bytes:
data = f.read(max_bytes)
else:
data = f.read()
sha1.update(data)
finally:
f.close()
return sha1.hexdigest()
if __name__ == '__main__':
import argparse
    parser = argparse.ArgumentParser(description="""Finds files and creates a list of their paths, inodes and sha1 checksums.
Useful to make a backup of filepaths before renaming them, for example before a large number of renames by a script.
SAMPLE CALLS
find_path_sha1.py
#finds, calculates sha1 based on the entire files, and prints path\nsha1 to stdout.
find_path_sha1.py -n 100000
#finds, calculates sha1 based on 100000 bytes
""",
epilog="Report any bugs to [email protected]",
prog='Program')
parser.add_argument('-m', '--max-sha1-bytes',
action="store",
dest="sha1_max_bytes_read",
type=int,
default=SHA1_MAX_BYTES_READ_DEFAULT,
help='Maximum number of bytes to read to calculate SHA1 checksum.'+
'Reading the whole file might be too slow, and unnecessary for some applications.')
args = parser.parse_args(sys.argv[1:])
sha1_max_bytes_read = args.sha1_max_bytes_read
file_output = ""
print "sha1_max_bytes_read"
print sha1_max_bytes_read
print
paths = []
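    # Walk the current directory recursively and collect every file path.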
for root, dirs, files in os.walk('.'):
for bname in files:
paths.append(os.path.join(root,bname))<|fim▁hole|> paths.sort()
for path in paths:
print path
print str(sha1_hex_file(path,sha1_max_bytes_read))
print<|fim▁end|> | |
<|file_name|>MarkWrapper.ts<|end_file_name|><|fim▁begin|>module statoscope.marks {
"use strict";
export class MarkWrapper extends
statoscope.controls.AbstractWrapper<storage.IMarkConfig> {
static sType = "s-mark-wrapper";
private _dayInfo: utils.IDayInfo;
constructor(markConfig: storage.IMarkConfig, dayInfo: utils.IDayInfo) {
this._dayInfo = dayInfo;
super(markConfig);
}
newItem(config: storage.IMarkConfig): AbstractMark {
return createMark(config, this._dayInfo);
}
newEditor(): view.Control {
var editor = new statoscope.controls.EditorBar(this.item.config.title,
getMarkTypes().map(type => ({
value: type,
label: getMarkName(type)
})), this.item.config.type);
editor.onSaveClick.addHandler(() => {
this.item.config.title = editor.titleText;
var typeChanged = this.item.config.type !== editor.selectedType;
this.item.config.type = editor.selectedType;<|fim▁hole|> this.createItem(this.item.config);
}
else {
this.item.update();
}
this.settings = false;
});
editor.onRemoveClick.addHandler(() => {
var result = this.element.classList.contains("new") ||
window.confirm(common.i18n.tr(
"This mark will be deleted from all days."));
if (result) {
var config: storage.IDayConfig = this._dayInfo.dayConfig;
config.marks = config.marks.filter(mark => mark !== this.item.config);
storage.instance().saveDayConfig(this._dayInfo.dayConfig);
this.cleanup();
}
});
return editor;
}
}
}<|fim▁end|> | storage.instance().saveDayConfig(this._dayInfo.dayConfig);
if (typeChanged) { |
<|file_name|>common-tap.js<|end_file_name|><|fim▁begin|>var spawn = require('child_process').spawn
var port = exports.port = 1337
exports.registry = "http://localhost:" + port
exports.run = run
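// run() spawns the current node executable with the given argv, buffers its
// stdout/stderr, and reports the result through cb (or just ends the tap test).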
function run (cmd, t, opts, cb) {
if (!opts)
opts = {}
if (!Array.isArray(cmd))
throw new Error("cmd must be an Array")
if (!t || !t.end)
throw new Error("node-tap instance is missing")
var stdout = ""
, stderr = ""
, node = process.execPath
, child = spawn(node, cmd, opts)
child.stderr.on("data", function (chunk) {
stderr += chunk
})
child.stdout.on("data", function (chunk) {
stdout += chunk
})
child.on("close", function (code) {
if (cb)
cb(t, stdout, stderr, code, { cmd: cmd, opts: opts })
else<|fim▁hole|> })
}<|fim▁end|> | t.end() |
<|file_name|>buttons.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017-2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Button,
Inhibit,
Label,
Window,
WindowType,
prelude::ButtonExt,
prelude::ContainerExt,
prelude::LabelExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm_derive::Msg;
use relm::{connect, Relm, Update, Widget, WidgetTest};
struct Model {
counter: i32,
}
#[derive(Msg)]
enum Msg {
Decrement,
Increment,
Quit,
}
// Create the structure that holds the widgets used in the view.
#[derive(Clone)]
struct Widgets {
counter_label: Label,
minus_button: Button,
plus_button: Button,
window: Window,
}
struct Win {
model: Model,
widgets: Widgets,
}
impl Update for Win {
// Specify the model used for this widget.
type Model = Model;
// Specify the model parameter used to init the model.
type ModelParam = ();
// Specify the type of the messages sent to the update function.
type Msg = Msg;
fn model(_: &Relm<Self>, _: ()) -> Model {
Model {
counter: 0,
}
}
fn update(&mut self, event: Msg) {
let label = &self.widgets.counter_label;
match event {
Msg::Decrement => {
self.model.counter -= 1;
// Manually update the view.
label.set_text(&self.model.counter.to_string());
},
Msg::Increment => {
self.model.counter += 1;
label.set_text(&self.model.counter.to_string());
},
Msg::Quit => gtk::main_quit(),
}
}
}
impl Widget for Win {
// Specify the type of the root widget.
type Root = Window;
// Return the root widget.
fn root(&self) -> Self::Root {
self.widgets.window.clone()
}
fn view(relm: &Relm<Self>, model: Self::Model) -> Self {
// Create the view using the normal GTK+ method calls.
let vbox = gtk::Box::new(Vertical, 0);
let plus_button = Button::with_label("+");
vbox.add(&plus_button);
let counter_label = Label::new(Some("0"));<|fim▁hole|>
let window = Window::new(WindowType::Toplevel);
window.add(&vbox);
window.show_all();
// Send the message Increment when the button is clicked.
connect!(relm, plus_button, connect_clicked(_), Msg::Increment);
connect!(relm, minus_button, connect_clicked(_), Msg::Decrement);
connect!(relm, window, connect_delete_event(_, _), return (Some(Msg::Quit), Inhibit(false)));
Win {
model,
widgets: Widgets {
counter_label,
minus_button,
plus_button,
                window,
},
}
}
}
impl WidgetTest for Win {
type Streams = ();
fn get_streams(&self) -> Self::Streams {
}
type Widgets = Widgets;
fn get_widgets(&self) -> Self::Widgets {
self.widgets.clone()
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
#[cfg(test)]
mod tests {
use gtk::prelude::LabelExt;
use gtk_test::assert_text;
use relm_test::click;
use crate::Win;
#[test]
fn label_change() {
let (_component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let plus_button = &widgets.plus_button;
let minus_button = &widgets.minus_button;
let label = &widgets.counter_label;
assert_text!(label, 0);
click(plus_button);
assert_text!(label, 1);
click(plus_button);
assert_text!(label, 2);
click(plus_button);
assert_text!(label, 3);
click(plus_button);
assert_text!(label, 4);
click(minus_button);
assert_text!(label, 3);
click(minus_button);
assert_text!(label, 2);
click(minus_button);
assert_text!(label, 1);
click(minus_button);
assert_text!(label, 0);
click(minus_button);
assert_text!(label, -1);
}
}<|fim▁end|> | vbox.add(&counter_label);
let minus_button = Button::with_label("-");
vbox.add(&minus_button); |
<|file_name|>setup_ranger_hbase.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.core.logger import Logger
def setup_ranger_hbase(upgrade_type=None, service_name="hbase-master"):
import params
if params.enable_ranger_hbase:
stack_version = None
if upgrade_type is not None:
stack_version = params.version
    if params.retryAble:
      Logger.info("HBase: Setup ranger: command retry enabled thus retrying if ranger admin is down !")
else:
Logger.info("HBase: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
if params.xml_configurations_supported and params.enable_ranger_hbase and params.xa_audit_hdfs_is_enabled and service_name == 'hbase-master' :
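      # Pre-create the HDFS audit directories used by the Ranger HBase plugin
      # (one for the master, one for the regionservers).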
params.HdfsResource("/ranger/audit",
type="directory",
action="create_on_execute",
owner=params.hdfs_user,
group=params.hdfs_user,
mode=0755,
recursive_chmod=True
)
params.HdfsResource("/ranger/audit/hbaseMaster",
type="directory",
action="create_on_execute",
owner=params.hbase_user,
group=params.hbase_user,
mode=0700,
recursive_chmod=True
)
params.HdfsResource("/ranger/audit/hbaseRegional",
type="directory",
action="create_on_execute",
owner=params.hbase_user,
group=params.hbase_user,
mode=0700,
recursive_chmod=True
)
params.HdfsResource(None, action="execute")
if params.xml_configurations_supported:
api_version=None
if params.stack_supports_ranger_kerberos:
api_version='v2'
from resource_management.libraries.functions.adh_setup_ranger_plugin_xml import setup_ranger_plugin
setup_ranger_plugin('hbase', 'hbase', params.previous_jdbc_jar, params.downloaded_custom_connector,
params.driver_curl_source, params.driver_curl_target, params.java64_home,
params.repo_name, params.hbase_ranger_plugin_repo,
params.ranger_env, params.ranger_plugin_properties,
params.policy_user, params.policymgr_mgr_url,
params.enable_ranger_hbase, conf_dict=params.hbase_conf_dir,
component_user=params.hbase_user, component_group=params.user_group, cache_service_list=['hbaseMaster', 'hbaseRegional'],
plugin_audit_properties=params.config['configurations']['ranger-hbase-audit'], plugin_audit_attributes=params.config['configuration_attributes']['ranger-hbase-audit'],
plugin_security_properties=params.config['configurations']['ranger-hbase-security'], plugin_security_attributes=params.config['configuration_attributes']['ranger-hbase-security'],
plugin_policymgr_ssl_properties=params.config['configurations']['ranger-hbase-policymgr-ssl'], plugin_policymgr_ssl_attributes=params.config['configuration_attributes']['ranger-hbase-policymgr-ssl'],
component_list=['hbase'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password,
ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble, api_version=api_version,
is_security_enabled = params.security_enabled,
is_stack_supports_ranger_kerberos = params.stack_supports_ranger_kerberos if params.security_enabled else None,
component_user_principal=params.ranger_hbase_principal if params.security_enabled else None,
component_user_keytab=params.ranger_hbase_keytab if params.security_enabled else None)
else:
from resource_management.libraries.functions.adh_setup_ranger_plugin import setup_ranger_plugin
setup_ranger_plugin('hbase', 'hbase', params.previous_jdbc_jar,<|fim▁hole|> params.driver_curl_target, params.java64_home,
params.repo_name, params.hbase_ranger_plugin_repo,
params.ranger_env, params.ranger_plugin_properties,
params.policy_user, params.policymgr_mgr_url,
params.enable_ranger_hbase, conf_dict=params.hbase_conf_dir,
component_user=params.hbase_user, component_group=params.user_group, cache_service_list=['hbaseMaster', 'hbaseRegional'],
plugin_audit_properties=params.config['configurations']['ranger-hbase-audit'], plugin_audit_attributes=params.config['configuration_attributes']['ranger-hbase-audit'],
plugin_security_properties=params.config['configurations']['ranger-hbase-security'], plugin_security_attributes=params.config['configuration_attributes']['ranger-hbase-security'],
plugin_policymgr_ssl_properties=params.config['configurations']['ranger-hbase-policymgr-ssl'], plugin_policymgr_ssl_attributes=params.config['configuration_attributes']['ranger-hbase-policymgr-ssl'],
component_list=['hbase'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password,
ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble)
else:
Logger.info('Ranger HBase plugin is not enabled')<|fim▁end|> | params.downloaded_custom_connector, params.driver_curl_source, |
<|file_name|>test_temperature_sensors.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
from __future__ import print_function, unicode_literals, division, absolute_import
from enocean.protocol.eep import EEP<|fim▁hole|>eep = EEP()
# profiles = eep.
def test_first_range():
offset = -40
values = range(0x01, 0x0C)
for i in range(len(values)):
minimum = float(i * 10 + offset)
maximum = minimum + 40
profile = eep.find_profile([], 0xA5, 0x02, values[i])
assert minimum == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('min').text)
assert maximum == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('max').text)
def test_second_range():
offset = -60
values = range(0x10, 0x1C)
for i in range(len(values)):
minimum = float(i * 10 + offset)
maximum = minimum + 80
profile = eep.find_profile([], 0xA5, 0x02, values[i])
assert minimum == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('min').text)
assert maximum == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('max').text)
def test_rest():
profile = eep.find_profile([], 0xA5, 0x02, 0x20)
assert -10 == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('min').text)
assert +41.2 == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('max').text)
profile = eep.find_profile([], 0xA5, 0x02, 0x30)
assert -40 == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('min').text)
assert +62.3 == float(profile.find('value', {'shortcut': 'TMP'}).find('scale').find('max').text)<|fim▁end|> | |
<|file_name|>statistic_presenter.js<|end_file_name|><|fim▁begin|><|fim▁hole|>(function() {
var StatisticPresenter = function() {};
BH.Presenters.StatisticPresenter = StatisticPresenter;
})();<|fim▁end|> | |
<|file_name|>type.go<|end_file_name|><|fim▁begin|>// Copyright 2010 The "goconfig" Authors
//
// Use of this source code is governed by the Simplified BSD License
// that can be found in the LICENSE file.
//
// This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
// OR CONDITIONS OF ANY KIND, either express or implied. See the License
// for more details.
package config
import (
"errors"
"strconv"
"strings"
)
// Bool has the same behaviour as String but converts the response to bool.
// See "boolString" for string values converted to bool.
func (self *Config) Bool(section string, option string) (value bool, err error) {
sv, err := self.String(section, option)
if err != nil {
return false, err
}
value, ok := boolString[strings.ToLower(sv)]
if !ok {
return false, errors.New("could not parse bool value: " + sv)
}
return value, nil
}
// Float has the same behaviour as String but converts the response to float.
func (self *Config) Float(section string, option string) (value float64, err error) {
sv, err := self.String(section, option)
if err == nil {
value, err = strconv.ParseFloat(sv, 64)
}
return value, err
}
// Int has the same behaviour as String but converts the response to int.
func (self *Config) Int(section string, option string) (value int, err error) {
sv, err := self.String(section, option)
if err == nil {
value, err = strconv.Atoi(sv)
}
return value, err
}
<|fim▁hole|>// It returns an error if either the section or the option do not exist.
func (self *Config) RawString(section string, option string) (value string, err error) {
if _, ok := self.data[section]; ok {
if tValue, ok := self.data[section][option]; ok {
return tValue.v, nil
}
return "", errors.New(optionError(option).String())
}
return "", errors.New(sectionError(section).String())
}
// String gets the string value for the given option in the section.
// If the value needs to be unfolded (see e.g. %(host)s example in the beginning
// of this documentation), then String does this unfolding automatically, up to
// _DEPTH_VALUES number of iterations.
//
// It returns an error if either the section or the option do not exist, or the
// unfolding cycled.
func (self *Config) String(section string, option string) (value string, err error) {
value, err = self.RawString(section, option)
if err != nil {
return "", err
}
var i int
for i = 0; i < _DEPTH_VALUES; i++ { // keep a sane depth
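		// Grab the next %(name)s style reference; stop when none are left.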
vr := varRegExp.FindString(value)
if len(vr) == 0 {
break
}
// Take off leading '%(' and trailing ')s'
noption := strings.TrimLeft(vr, "%(")
noption = strings.TrimRight(noption, ")s")
// Search variable in default section
nvalue, _ := self.data[_DEFAULT_SECTION][noption]
if _, ok := self.data[section][noption]; ok {
nvalue = self.data[section][noption]
}
if nvalue.v == "" {
return "", errors.New(optionError(noption).String())
}
// substitute by new value and take off leading '%(' and trailing ')s'
value = strings.Replace(value, vr, nvalue.v, -1)
}
if i == _DEPTH_VALUES {
return "", errors.New("possible cycle while unfolding variables: " +
"max depth of " + strconv.Itoa(_DEPTH_VALUES) + " reached")
}
return value, nil
}<|fim▁end|> | // RawString gets the (raw) string value for the given option in the section.
// The raw string value is not subjected to unfolding, which was illustrated in
// the beginning of this documentation.
// |
<|file_name|>test_encoding.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2002-2018, Neo4j
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from unittest import TestCase
from cypy.graph import Node, relationship_type, Path
from cypy.encoding import cypher_repr, cypher_escape
KNOWS = relationship_type("KNOWS")
LOVES = relationship_type("LOVES")
HATES = relationship_type("HATES")
KNOWS_FR = relationship_type(u"CONNAÎT")
class CypherEscapeTestCase(TestCase):
def test_can_write_simple_identifier(self):
escaped = cypher_escape("foo")
assert escaped == "foo"
def test_can_write_identifier_with_odd_chars(self):
escaped = cypher_escape("foo bar")
assert escaped == "`foo bar`"
def test_can_write_identifier_containing_back_ticks(self):
escaped = cypher_escape("foo `bar`")
assert escaped == "`foo ``bar```"
def test_cannot_write_empty_identifier(self):
with self.assertRaises(ValueError):
_ = cypher_escape("")
def test_cannot_write_none_identifier(self):
with self.assertRaises(TypeError):
_ = cypher_escape(None)
class CypherNoneRepresentationTestCase(TestCase):
def test_should_encode_none(self):
encoded = cypher_repr(None)
assert encoded == u"null"
class CypherBooleanRepresentationTestCase(TestCase):
def test_should_encode_true(self):
encoded = cypher_repr(True)
assert encoded == u"true"
def test_should_encode_false(self):
encoded = cypher_repr(False)
assert encoded == u"false"
class CypherIntegerRepresentationTestCase(TestCase):
def test_should_encode_zero(self):
encoded = cypher_repr(0)
assert encoded == u"0"
def test_should_encode_positive_integer(self):
encoded = cypher_repr(123)
assert encoded == u"123"
def test_should_encode_negative_integer(self):
encoded = cypher_repr(-123)
assert encoded == u"-123"
class CypherFloatRepresentationTestCase(TestCase):
def test_should_encode_zero(self):
encoded = cypher_repr(0.0)
assert encoded == u"0.0"
def test_should_encode_positive_float(self):
encoded = cypher_repr(123.456)
assert encoded == u"123.456"
def test_should_encode_negative_float(self):
encoded = cypher_repr(-123.456)
assert encoded == u"-123.456"
class CypherStringRepresentationTestCase(TestCase):
def test_should_encode_bytes(self):
encoded = cypher_repr(b"hello, world")
assert encoded == u"'hello, world'"
def test_should_encode_unicode(self):
encoded = cypher_repr(u"hello, world")
assert encoded == u"'hello, world'"
def test_should_encode_bytes_with_escaped_chars(self):
encoded = cypher_repr(b"hello, 'world'", quote=u"'")
assert encoded == u"'hello, \\'world\\''"
def test_should_encode_unicode_with_escaped_chars(self):
encoded = cypher_repr(u"hello, 'world'", quote=u"'")
assert encoded == u"'hello, \\'world\\''"
def test_should_encode_empty_string(self):
encoded = cypher_repr(u"")
assert encoded == u"''"
def test_should_encode_bell(self):
encoded = cypher_repr(u"\a")
assert encoded == u"'\\u0007'"
def test_should_encode_backspace(self):
encoded = cypher_repr(u"\b")
assert encoded == u"'\\b'"
def test_should_encode_form_feed(self):
encoded = cypher_repr(u"\f")
assert encoded == u"'\\f'"
def test_should_encode_new_line(self):
encoded = cypher_repr(u"\n")
assert encoded == u"'\\n'"
def test_should_encode_carriage_return(self):
encoded = cypher_repr(u"\r")
assert encoded == u"'\\r'"
def test_should_encode_horizontal_tab(self):
encoded = cypher_repr(u"\t")
assert encoded == u"'\\t'"
def test_should_encode_double_quote_when_single_quoted(self):
encoded = cypher_repr(u"\"")
assert encoded == u"'\"'"
def test_should_encode_single_quote_when_single_quoted(self):
encoded = cypher_repr(u"'", quote=u"'")
assert encoded == u"'\\''"
def test_should_encode_double_quote_when_double_quoted(self):
encoded = cypher_repr(u"\"", quote=u"\"")
assert encoded == u'"\\""'
def test_should_encode_single_quote_when_double_quoted(self):
encoded = cypher_repr(u"'", quote=u"\"")
assert encoded == u'"\'"'
def test_should_encode_2_byte_extended_character(self):
encoded = cypher_repr(u"\xAB")
assert encoded == u"'\\u00ab'"
def test_should_encode_4_byte_extended_character(self):
encoded = cypher_repr(u"\uABCD")
assert encoded == u"'\\uabcd'"
def test_should_encode_8_byte_extended_character(self):
encoded = cypher_repr(u"\U0010ABCD")
assert encoded == u"'\\U0010abcd'"
def test_should_encode_complex_sequence(self):
encoded = cypher_repr(u"' '' '''")
assert encoded == u"\"' '' '''\""
class CypherListRepresentationTestCase(TestCase):
def test_should_encode_list(self):
encoded = cypher_repr([1, 2.0, u"three"])
assert encoded == u"[1, 2.0, 'three']"
def test_should_encode_empty_list(self):
encoded = cypher_repr([])
assert encoded == u"[]"
class CypherMapRepresentationTestCase(TestCase):
def test_should_encode_map(self):
encoded = cypher_repr(OrderedDict([("one", 1), ("two", 2.0), ("number three", u"three")]))
assert encoded == u"{one: 1, two: 2.0, `number three`: 'three'}"
def test_should_encode_empty_map(self):
encoded = cypher_repr({})
assert encoded == u"{}"
class CypherNodeRepresentationTestCase(TestCase):
def test_should_encode_empty_node(self):
a = Node()
encoded = cypher_repr(a, node_template="{labels} {properties}")
assert encoded == u"({})"
def test_should_encode_node_with_property(self):
a = Node(name="Alice")
encoded = cypher_repr(a, node_template="{labels} {properties}")
assert encoded == u"({name: 'Alice'})"
def test_should_encode_node_with_label(self):
a = Node("Person")
encoded = cypher_repr(a, node_template="{labels} {properties}")
assert encoded == u"(:Person {})"
def test_should_encode_node_with_label_and_property(self):
a = Node("Person", name="Alice")
encoded = cypher_repr(a, node_template="{labels} {properties}")
assert encoded == u"(:Person {name: 'Alice'})"
class CypherRelationshipRepresentationTestCase(TestCase):
def test_can_encode_relationship(self):
a = Node(name="Alice")
b = Node(name="Bob")
ab = KNOWS(a, b)
encoded = cypher_repr(ab, related_node_template="{property.name}")
self.assertEqual("(Alice)-[:KNOWS {}]->(Bob)", encoded)
def test_can_encode_relationship_with_names(self):
a = Node("Person", name="Alice")
b = Node("Person", name="Bob")
ab = KNOWS(a, b)
encoded = cypher_repr(ab, related_node_template="{property.name}")
self.assertEqual("(Alice)-[:KNOWS {}]->(Bob)", encoded)
def test_can_encode_relationship_with_alternative_names(self):
a = Node("Person", nom=u"Aimée")
b = Node("Person", nom=u"Baptiste")
ab = KNOWS_FR(a, b)
encoded = cypher_repr(ab, related_node_template=u"{property.nom}")
self.assertEqual(u"(Aimée)-[:CONNAÎT {}]->(Baptiste)", encoded)
def test_can_encode_relationship_with_properties(self):
a = Node("Person", name="Alice")
b = Node("Person", name="Bob")
ab = KNOWS(a, b, since=1999)
encoded = cypher_repr(ab, related_node_template="{property.name}")
self.assertEqual("(Alice)-[:KNOWS {since: 1999}]->(Bob)", encoded)
class CypherPathRepresentationTestCase(TestCase):
def test_can_write_path(self):
alice, bob, carol, dave = Node(name="Alice"), Node(name="Bob"), \
Node(name="Carol"), Node(name="Dave")
ab = LOVES(alice, bob)<|fim▁hole|> path = Path(alice, ab, bob, cb, carol, cd, dave)
encoded = cypher_repr(path, related_node_template="{property.name}")
self.assertEqual("(Alice)-[:LOVES {}]->(Bob)<-[:HATES {}]-(Carol)-[:KNOWS {}]->(Dave)", encoded)<|fim▁end|> | cb = HATES(carol, bob)
cd = KNOWS(carol, dave) |
<|file_name|>widget-min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:012c0c10efb1958941ed2fd9f393df39f1ae6f76369bf56e500e39ade0496295<|fim▁hole|><|fim▁end|> | size 8392 |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db.models import CharField
from django.utils.translation import ugettext_lazy as _
from localflavor.deprecation import DeprecatedPhoneNumberField
from . import forms
from .au_states import STATE_CHOICES
from .validators import AUBusinessNumberFieldValidator, AUCompanyNumberFieldValidator, AUTaxFileNumberFieldValidator
class AUStateField(CharField):
"""
A model field that stores the three-letter Australian state abbreviation in the database.
    It is represented with :data:`~localflavor.au.au_states.STATE_CHOICES` choices.
"""
description = _("Australian State")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 3
super(AUStateField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(AUStateField, self).deconstruct()
del kwargs['choices']
return name, path, args, kwargs
class AUPostCodeField(CharField):
"""
A model field that stores the four-digit Australian postcode in the database.
This field is represented by forms as a :class:`~localflavor.au.forms.AUPostCodeField` field.
"""
description = _("Australian Postcode")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 4
super(AUPostCodeField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {'form_class': forms.AUPostCodeField}
defaults.update(kwargs)
return super(AUPostCodeField, self).formfield(**defaults)
class AUPhoneNumberField(CharField, DeprecatedPhoneNumberField):
"""
A model field that checks that the value is a valid Australian phone number (ten digits).
.. deprecated:: 1.4
Use the django-phonenumber-field_ library instead.
.. _django-phonenumber-field: https://github.com/stefanfoulis/django-phonenumber-field
"""
description = _("Australian Phone number")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 20
super(AUPhoneNumberField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {'form_class': forms.AUPhoneNumberField}
defaults.update(kwargs)
return super(AUPhoneNumberField, self).formfield(**defaults)
class AUBusinessNumberField(CharField):
"""
A model field that checks that the value is a valid Australian Business Number (ABN).
.. versionadded:: 1.3
"""
description = _("Australian Business Number")
validators = [AUBusinessNumberFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 11
super(AUBusinessNumberField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {'form_class': forms.AUBusinessNumberField}
defaults.update(kwargs)
return super(AUBusinessNumberField, self).formfield(**defaults)
def to_python(self, value):
"""Ensure the ABN is stored without spaces."""
value = super(AUBusinessNumberField, self).to_python(value)
if value is not None:
return ''.join(value.split())
return value
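# Illustrative sketch (not part of the original module): how the AU fields above
# might be combined on a hypothetical model. The model and field names here are
# assumptions, shown only for context.
#
#     class BusinessContact(models.Model):
#         state = AUStateField()
#         postcode = AUPostCodeField()
#         abn = AUBusinessNumberField()
#
# Because to_python() strips whitespace, an input written as "12 345 678 901"
# (digits shown only to illustrate the format) is normalized to "12345678901"
# before the validators run.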
class AUCompanyNumberField(CharField):
"""
A model field that checks that the value is a valid Australian Company Number (ACN).
.. versionadded:: 1.5
"""
description = _("Australian Company Number")
validators = [AUCompanyNumberFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(AUCompanyNumberField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {'form_class': forms.AUCompanyNumberField}
defaults.update(kwargs)<|fim▁hole|> value = super(AUCompanyNumberField, self).to_python(value)
if value is not None:
return ''.join(value.split())
return value
class AUTaxFileNumberField(CharField):
"""
A model field that checks that the value is a valid Tax File Number (TFN).
A TFN is a number issued to a person by the Commissioner of Taxation and
is used to verify client identity and establish their income levels.
    It is an eight or nine digit number without any embedded meaning.
.. versionadded:: 1.4
"""
description = _("Australian Tax File Number")
validators = [AUTaxFileNumberFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 11
super(AUTaxFileNumberField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {'form_class': forms.AUTaxFileNumberField}
defaults.update(kwargs)
return super(AUTaxFileNumberField, self).formfield(**defaults)
def to_python(self, value):
"""Ensure the TFN is stored without spaces."""
value = super(AUTaxFileNumberField, self).to_python(value)
if value is not None:
return ''.join(value.split())
return value<|fim▁end|> | return super(AUCompanyNumberField, self).formfield(**defaults)
def to_python(self, value):
"""Ensure the ACN is stored without spaces.""" |
<|file_name|>parameterDeclarationStructurePrinterTests.ts<|end_file_name|><|fim▁begin|>import { expect } from "chai";
import { ParameterDeclarationStructurePrinter } from "../../../structurePrinters";
import { OptionalKind, ParameterDeclarationStructure } from "../../../structures";
import { getStructureFactoryAndWriter } from "../../testHelpers";
<|fim▁hole|>describe(nameof(ParameterDeclarationStructurePrinter), () => {
describe(nameof<ParameterDeclarationStructurePrinter>(p => p.printTextsWithParenthesis), () => {
function doTest(structures: OptionalKind<ParameterDeclarationStructure>[], expectedOutput: string) {
const { writer, factory } = getStructureFactoryAndWriter();
factory.forParameterDeclaration().printTextsWithParenthesis(writer, structures);
expect(writer.toString()).to.equal(expectedOutput);
}
it("should print multiple on a single line", () => {
doTest([{ name: "p" }, { name: "p1" }], "(p, p1)");
});
it("should handle when there are newlines in the type", () => {
doTest(
[{ name: "p", type: writer => writer.write("string").newLine().write("| number") }, { name: "p1" }],
"(p: string\n | number, p1)",
);
});
});
});<|fim▁end|> | |
<|file_name|>SkuSaleMappingDaoImpl.java<|end_file_name|><|fim▁begin|>package com.swfarm.biz.product.dao.impl;
import com.swfarm.biz.product.bo.SkuSaleMapping;
import com.swfarm.biz.product.dao.SkuSaleMappingDao;
import com.swfarm.pub.framework.dao.GenericDaoHibernateImpl;
public class SkuSaleMappingDaoImpl extends GenericDaoHibernateImpl<SkuSaleMapping, Long> implements SkuSaleMappingDao {
public SkuSaleMappingDaoImpl(Class<SkuSaleMapping> type) {
<|fim▁hole|>}<|fim▁end|> | super(type);
}
|
<|file_name|>matrixCalculator.ts<|end_file_name|><|fim▁begin|>import * as process from "process";
import { MatrixResult } from "./results";
function createEmptyMatrix(side: number) {
let result = new Array(side);
for (var rowIndex = 0; rowIndex < side; rowIndex++)
result[rowIndex] = new Array(side);
return result;
};
class CurrentHolder {
private startRow: number;
private startColumn: number;
private side: number;
constructor(public row: number, public column: number, side: number) {
this.row = this.startRow = row;
this.column = this.startColumn = column;
this.side = side;
}
actual(value: number) {
if (value < 0)
return value + this.side;
if (value >= this.side)
return value - this.side;
return value;
}
get actualRow() { return this.actual(this.row); }
get actualColumn() { return this.actual(this.column); }
next(count:number) {
if (count % this.side != 0) {
this.row++;
this.column++;
}
else {
this.row = ++this.startRow;
this.column = --this.startColumn;
}
}
}
function testResult(matrix: number[][], expectedSum:number, side:number):boolean {
for (var rowIndex = 0; rowIndex < side; rowIndex++) {
var sum = 0;
for (var columnIndex = 0; columnIndex < side; columnIndex++)
sum += matrix[rowIndex][columnIndex];
if (sum != expectedSum)
return false;
}
for (var columnIndex = 0; columnIndex < side; columnIndex++) {
var sum = 0;
for (var rowIndex = 0; rowIndex < side; rowIndex++)
sum += matrix[rowIndex][columnIndex];
if (sum != expectedSum)
return false;
}
var diagonalSum = 0;
for (var diagonalIndex = 0; diagonalIndex < side; diagonalIndex++)
diagonalSum += matrix[diagonalIndex][diagonalIndex];
if (diagonalSum != expectedSum)
return false;
diagonalSum = 0;
for (var diagonalIndex = 0; diagonalIndex < side; diagonalIndex++)
diagonalSum += matrix[diagonalIndex][side - diagonalIndex - 1];
return diagonalSum == expectedSum;
}
export function calculate(side: number): MatrixResult {
if (side % 2 !== 1)
throw new Error("Side needs to be an odd number.");
let expectedSum = side * (side * side + 1) / 2;
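    // e.g. for side = 5 the magic constant is 5 * (5 * 5 + 1) / 2 = 65,
    // so every row, column and both diagonals must sum to 65 for testResult to pass.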
let matrix = createEmptyMatrix(side);
let current = new CurrentHolder(-(side - 1) / 2, Math.floor(side / 2), side);<|fim▁hole|> current.next(count);
}
return new MatrixResult(matrix, expectedSum, side, testResult(matrix, expectedSum, side));
}<|fim▁end|> |
for (let count = 1; count <= side * side; count++) {
matrix[current.actualRow][current.actualColumn] = count;
|
<|file_name|>create.py<|end_file_name|><|fim▁begin|>import os
from cpenv import api, paths
from cpenv.cli import core
from cpenv.module import parse_module_path
class Create(core.CLI):
'''Create a new Module.'''
def setup_parser(self, parser):
parser.add_argument(
'where',
help='Path to new module',
)
def run(self, args):
where = paths.normalize(args.where)
if os.path.isdir(where):
core.echo()
core.echo('Error: Can not create module in existing directory.')
core.exit(1)
default_name, default_version = parse_module_path(where)
core.echo()
core.echo('This command will guide you through creating a new module.')
core.echo()
name = core.prompt(' Module Name [%s]: ' % default_name)
version = core.prompt(' Version [%s]: ' % default_version.string)
description = core.prompt(' Description []: ')
author = core.prompt(' Author []: ')
email = core.prompt(' Email []: ')
core.echo()
core.echo('- Creating your new Module...', end='')
module = api.create(<|fim▁hole|> description=description,
author=author,
email=email,
)
core.echo('OK!')
core.echo()
core.echo(' ' + module.path)
core.echo()
core.echo('Steps you might take before publishing...')
core.echo()
core.echo(' - Include binaries your module depends on')
core.echo(' - Edit the module.yml file')
core.echo(' - Add variables to the environment section')
core.echo(' - Add other modules to the requires section')
core.echo(' - Add python hooks like post_activate')
core.echo()<|fim▁end|> | where=where,
name=name or default_name,
version=version or default_version.string, |
<|file_name|>x86.rs<|end_file_name|><|fim▁begin|>pub type c_char = i8;
pub type wchar_t = i32;
pub type greg_t = i32;
s! {
pub struct _libc_fpreg {
pub significand: [u16; 4],
pub exponent: u16,
}
pub struct _libc_fpstate {
pub cw: ::c_ulong,
pub sw: ::c_ulong,
pub tag: ::c_ulong,
pub ipoff: ::c_ulong,
pub cssel: ::c_ulong,
pub dataoff: ::c_ulong,
pub datasel: ::c_ulong,
pub _st: [_libc_fpreg; 8],
pub status: ::c_ulong,
}
pub struct user_fpregs_struct {
pub cwd: ::c_long,
pub swd: ::c_long,
pub twd: ::c_long,
pub fip: ::c_long,
pub fcs: ::c_long,
pub foo: ::c_long,
pub fos: ::c_long,
pub st_space: [::c_long; 20],
}
pub struct user_fpxregs_struct {
pub cwd: ::c_ushort,
pub swd: ::c_ushort,
pub twd: ::c_ushort,
pub fop: ::c_ushort,
pub fip: ::c_long,
pub fcs: ::c_long,
pub foo: ::c_long,
pub fos: ::c_long,
pub mxcsr: ::c_long,
__reserved: ::c_long,
pub st_space: [::c_long; 32],
pub xmm_space: [::c_long; 32],
padding: [::c_long; 56],
}
pub struct user_regs_struct {
pub ebx: ::c_long,
pub ecx: ::c_long,
pub edx: ::c_long,
pub esi: ::c_long,
pub edi: ::c_long,
pub ebp: ::c_long,
pub eax: ::c_long,
pub xds: ::c_long,
pub xes: ::c_long,
pub xfs: ::c_long,
pub xgs: ::c_long,
pub orig_eax: ::c_long,
pub eip: ::c_long,
pub xcs: ::c_long,
pub eflags: ::c_long,
pub esp: ::c_long,
pub xss: ::c_long,
}
pub struct user {
pub regs: user_regs_struct,
pub u_fpvalid: ::c_int,
pub i387: user_fpregs_struct,
pub u_tsize: ::c_ulong,
pub u_dsize: ::c_ulong,
pub u_ssize: ::c_ulong,
pub start_code: ::c_ulong,
pub start_stack: ::c_ulong,
pub signal: ::c_long,
__reserved: ::c_int,
pub u_ar0: *mut user_regs_struct,
pub u_fpstate: *mut user_fpregs_struct,
pub magic: ::c_ulong,
pub u_comm: [c_char; 32],
pub u_debugreg: [::c_int; 8],
}
pub struct mcontext_t {
pub gregs: [greg_t; 19],
pub fpregs: *mut _libc_fpstate,
pub oldmask: ::c_ulong,
pub cr2: ::c_ulong,
}
pub struct ucontext_t {
pub uc_flags: ::c_ulong,
pub uc_link: *mut ucontext_t,
pub uc_stack: ::stack_t,
pub uc_mcontext: mcontext_t,
pub uc_sigmask: ::sigset_t,
__private: [u8; 112],
}
pub struct ipc_perm {
pub __key: ::key_t,
pub uid: ::uid_t,
pub gid: ::gid_t,
pub cuid: ::uid_t,
pub cgid: ::gid_t,
pub mode: ::c_ushort,
__pad1: ::c_ushort,
pub __seq: ::c_ushort,
__pad2: ::c_ushort,
__unused1: ::c_ulong,
__unused2: ::c_ulong
}
pub struct stat64 {
pub st_dev: ::dev_t,
__pad1: ::c_uint,
__st_ino: ::ino_t,
pub st_mode: ::mode_t,
pub st_nlink: ::nlink_t,
pub st_uid: ::uid_t,
pub st_gid: ::gid_t,
pub st_rdev: ::dev_t,
__pad2: ::c_uint,
pub st_size: ::off64_t,
pub st_blksize: ::blksize_t,
pub st_blocks: ::blkcnt64_t,
pub st_atime: ::time_t,
pub st_atime_nsec: ::c_long,
pub st_mtime: ::time_t,
pub st_mtime_nsec: ::c_long,
pub st_ctime: ::time_t,
pub st_ctime_nsec: ::c_long,
pub st_ino: ::ino64_t,
}
pub struct shmid_ds {
pub shm_perm: ::ipc_perm,
pub shm_segsz: ::size_t,
pub shm_atime: ::time_t,
__unused1: ::c_ulong,
pub shm_dtime: ::time_t,
__unused2: ::c_ulong,
pub shm_ctime: ::time_t,
__unused3: ::c_ulong,
pub shm_cpid: ::pid_t,
pub shm_lpid: ::pid_t,
pub shm_nattch: ::shmatt_t,
__unused4: ::c_ulong,
__unused5: ::c_ulong
}
pub struct msqid_ds {
pub msg_perm: ::ipc_perm,
pub msg_stime: ::time_t,
__glibc_reserved1: ::c_ulong,
pub msg_rtime: ::time_t,
__glibc_reserved2: ::c_ulong,
pub msg_ctime: ::time_t,
__glibc_reserved3: ::c_ulong,
__msg_cbytes: ::c_ulong,
pub msg_qnum: ::msgqnum_t,
pub msg_qbytes: ::msglen_t,
pub msg_lspid: ::pid_t,
pub msg_lrpid: ::pid_t,
__glibc_reserved4: ::c_ulong,
__glibc_reserved5: ::c_ulong,
}
}
pub const O_DIRECT: ::c_int = 0x4000;
pub const O_DIRECTORY: ::c_int = 0x10000;
pub const O_NOFOLLOW: ::c_int = 0x20000;
pub const O_LARGEFILE: ::c_int = 0o0100000;
pub const MAP_LOCKED: ::c_int = 0x02000;
pub const MAP_NORESERVE: ::c_int = 0x04000;
pub const MAP_32BIT: ::c_int = 0x0040;
pub const EDEADLOCK: ::c_int = 35;
pub const SO_SNDBUFFORCE: ::c_int = 32;
pub const SO_RCVBUFFORCE: ::c_int = 33;
pub const SO_NO_CHECK: ::c_int = 11;
pub const SO_PASSCRED: ::c_int = 16;
pub const SO_PEERCRED: ::c_int = 17;
pub const SO_RCVLOWAT: ::c_int = 18;
pub const SO_SNDLOWAT: ::c_int = 19;
pub const SO_RCVTIMEO: ::c_int = 20;
pub const SO_SNDTIMEO: ::c_int = 21;
pub const FIOCLEX: ::c_ulong = 0x5451;
pub const FIONBIO: ::c_ulong = 0x5421;
pub const PTRACE_GETFPXREGS: ::c_uint = 18;
pub const PTRACE_SETFPXREGS: ::c_uint = 19;
pub const MCL_CURRENT: ::c_int = 0x0001;
pub const MCL_FUTURE: ::c_int = 0x0002;
pub const SIGSTKSZ: ::size_t = 8192;
pub const MINSIGSTKSZ: ::size_t = 2048;
pub const CBAUD: ::tcflag_t = 0o0010017;
pub const TAB1: ::c_int = 0x00000800;
pub const TAB2: ::c_int = 0x00001000;
pub const TAB3: ::c_int = 0x00001800;
pub const CR1: ::c_int = 0x00000200;
pub const CR2: ::c_int = 0x00000400;
pub const CR3: ::c_int = 0x00000600;
pub const FF1: ::c_int = 0x00008000;
pub const BS1: ::c_int = 0x00002000;
pub const VT1: ::c_int = 0x00004000;
pub const VWERASE: usize = 14;
pub const VREPRINT: usize = 12;
pub const VSUSP: usize = 10;
pub const VSTART: usize = 8;
pub const VSTOP: usize = 9;
pub const VDISCARD: usize = 13;
pub const VTIME: usize = 5;
pub const IXON: ::tcflag_t = 0x00000400;
pub const IXOFF: ::tcflag_t = 0x00001000;
pub const ONLCR: ::tcflag_t = 0x4;
pub const CSIZE: ::tcflag_t = 0x00000030;
pub const CS6: ::tcflag_t = 0x00000010;
pub const CS7: ::tcflag_t = 0x00000020;
pub const CS8: ::tcflag_t = 0x00000030;
pub const CSTOPB: ::tcflag_t = 0x00000040;
pub const CREAD: ::tcflag_t = 0x00000080;
pub const PARENB: ::tcflag_t = 0x00000100;
pub const PARODD: ::tcflag_t = 0x00000200;
pub const HUPCL: ::tcflag_t = 0x00000400;
pub const CLOCAL: ::tcflag_t = 0x00000800;
pub const ECHOKE: ::tcflag_t = 0x00000800;
pub const ECHOE: ::tcflag_t = 0x00000010;
pub const ECHOK: ::tcflag_t = 0x00000020;
pub const ECHONL: ::tcflag_t = 0x00000040;
pub const ECHOPRT: ::tcflag_t = 0x00000400;
pub const ECHOCTL: ::tcflag_t = 0x00000200;
pub const ISIG: ::tcflag_t = 0x00000001;
pub const ICANON: ::tcflag_t = 0x00000002;
pub const PENDIN: ::tcflag_t = 0x00004000;
pub const NOFLSH: ::tcflag_t = 0x00000080;
pub const CIBAUD: ::tcflag_t = 0o02003600000;
pub const CBAUDEX: ::tcflag_t = 0o010000;
pub const VSWTC: usize = 7;
pub const OLCUC: ::tcflag_t = 0o000002;
pub const NLDLY: ::tcflag_t = 0o000400;
pub const CRDLY: ::tcflag_t = 0o003000;
pub const TABDLY: ::tcflag_t = 0o014000;
pub const BSDLY: ::tcflag_t = 0o020000;
pub const FFDLY: ::tcflag_t = 0o100000;
pub const VTDLY: ::tcflag_t = 0o040000;
pub const XTABS: ::tcflag_t = 0o014000;
pub const B0: ::speed_t = 0o000000;
pub const B50: ::speed_t = 0o000001;
pub const B75: ::speed_t = 0o000002;
pub const B110: ::speed_t = 0o000003;
pub const B134: ::speed_t = 0o000004;
pub const B150: ::speed_t = 0o000005;
pub const B200: ::speed_t = 0o000006;
pub const B300: ::speed_t = 0o000007;
pub const B600: ::speed_t = 0o000010;
pub const B1200: ::speed_t = 0o000011;
pub const B1800: ::speed_t = 0o000012;
pub const B2400: ::speed_t = 0o000013;
pub const B4800: ::speed_t = 0o000014;
pub const B9600: ::speed_t = 0o000015;
pub const B19200: ::speed_t = 0o000016;
pub const B38400: ::speed_t = 0o000017;
pub const EXTA: ::speed_t = B19200;
pub const EXTB: ::speed_t = B38400;
pub const B57600: ::speed_t = 0o010001;
pub const B115200: ::speed_t = 0o010002;
pub const B230400: ::speed_t = 0o010003;
pub const B460800: ::speed_t = 0o010004;
pub const B500000: ::speed_t = 0o010005;
pub const B576000: ::speed_t = 0o010006;
pub const B921600: ::speed_t = 0o010007;
pub const B1000000: ::speed_t = 0o010010;
pub const B1152000: ::speed_t = 0o010011;
pub const B1500000: ::speed_t = 0o010012;
pub const B2000000: ::speed_t = 0o010013;
pub const B2500000: ::speed_t = 0o010014;
pub const B3000000: ::speed_t = 0o010015;
pub const B3500000: ::speed_t = 0o010016;
pub const B4000000: ::speed_t = 0o010017;
pub const VEOL: usize = 11;
pub const VEOL2: usize = 16;
pub const VMIN: usize = 6;
pub const IEXTEN: ::tcflag_t = 0x00008000;
pub const TOSTOP: ::tcflag_t = 0x00000100;
pub const FLUSHO: ::tcflag_t = 0x00001000;
pub const EXTPROC: ::tcflag_t = 0x00010000;
pub const TCGETS: ::c_ulong = 0x5401;
pub const TCSETS: ::c_ulong = 0x5402;
pub const TCSETSW: ::c_ulong = 0x5403;
pub const TCSETSF: ::c_ulong = 0x5404;
pub const TCGETA: ::c_ulong = 0x5405;
pub const TCSETA: ::c_ulong = 0x5406;
pub const TCSETAW: ::c_ulong = 0x5407;
pub const TCSETAF: ::c_ulong = 0x5408;
pub const TCSBRK: ::c_ulong = 0x5409;
pub const TCXONC: ::c_ulong = 0x540A;
pub const TCFLSH: ::c_ulong = 0x540B;
pub const TIOCINQ: ::c_ulong = 0x541B;
pub const TIOCGPGRP: ::c_ulong = 0x540F;
pub const TIOCSPGRP: ::c_ulong = 0x5410;
pub const TIOCOUTQ: ::c_ulong = 0x5411;
pub const TIOCGWINSZ: ::c_ulong = 0x5413;
pub const TIOCSWINSZ: ::c_ulong = 0x5414;
pub const FIONREAD: ::c_ulong = 0x541B;
// Syscall table
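// These raw i386 Linux syscall numbers are normally handed to the variadic
// `syscall()` wrapper exposed elsewhere in this crate, e.g.
// `unsafe { syscall(SYS_getpid) }` (illustrative sketch only).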
pub const SYS_restart_syscall: ::c_long = 0;
pub const SYS_exit: ::c_long = 1;
pub const SYS_fork: ::c_long = 2;
pub const SYS_read: ::c_long = 3;
pub const SYS_write: ::c_long = 4;
pub const SYS_open: ::c_long = 5;
pub const SYS_close: ::c_long = 6;
pub const SYS_waitpid: ::c_long = 7;
pub const SYS_creat: ::c_long = 8;
pub const SYS_link: ::c_long = 9;
pub const SYS_unlink: ::c_long = 10;
pub const SYS_execve: ::c_long = 11;
pub const SYS_chdir: ::c_long = 12;
pub const SYS_time: ::c_long = 13;
pub const SYS_mknod: ::c_long = 14;
pub const SYS_chmod: ::c_long = 15;
pub const SYS_lchown: ::c_long = 16;
pub const SYS_break: ::c_long = 17;
pub const SYS_oldstat: ::c_long = 18;
pub const SYS_lseek: ::c_long = 19;
pub const SYS_getpid: ::c_long = 20;
pub const SYS_mount: ::c_long = 21;
pub const SYS_umount: ::c_long = 22;
pub const SYS_setuid: ::c_long = 23;
pub const SYS_getuid: ::c_long = 24;
pub const SYS_stime: ::c_long = 25;
pub const SYS_ptrace: ::c_long = 26;
pub const SYS_alarm: ::c_long = 27;
pub const SYS_oldfstat: ::c_long = 28;
pub const SYS_pause: ::c_long = 29;
pub const SYS_utime: ::c_long = 30;
pub const SYS_stty: ::c_long = 31;
pub const SYS_gtty: ::c_long = 32;
pub const SYS_access: ::c_long = 33;
pub const SYS_nice: ::c_long = 34;
pub const SYS_ftime: ::c_long = 35;
pub const SYS_sync: ::c_long = 36;
pub const SYS_kill: ::c_long = 37;
pub const SYS_rename: ::c_long = 38;
pub const SYS_mkdir: ::c_long = 39;
pub const SYS_rmdir: ::c_long = 40;
pub const SYS_dup: ::c_long = 41;
pub const SYS_pipe: ::c_long = 42;
pub const SYS_times: ::c_long = 43;
pub const SYS_prof: ::c_long = 44;
pub const SYS_brk: ::c_long = 45;
pub const SYS_setgid: ::c_long = 46;
pub const SYS_getgid: ::c_long = 47;
pub const SYS_signal: ::c_long = 48;
pub const SYS_geteuid: ::c_long = 49;
pub const SYS_getegid: ::c_long = 50;
pub const SYS_acct: ::c_long = 51;
pub const SYS_umount2: ::c_long = 52;
pub const SYS_lock: ::c_long = 53;
pub const SYS_ioctl: ::c_long = 54;
pub const SYS_fcntl: ::c_long = 55;
pub const SYS_mpx: ::c_long = 56;
pub const SYS_setpgid: ::c_long = 57;
pub const SYS_ulimit: ::c_long = 58;
pub const SYS_oldolduname: ::c_long = 59;
pub const SYS_umask: ::c_long = 60;
pub const SYS_chroot: ::c_long = 61;
pub const SYS_ustat: ::c_long = 62;
pub const SYS_dup2: ::c_long = 63;
pub const SYS_getppid: ::c_long = 64;
pub const SYS_getpgrp: ::c_long = 65;
pub const SYS_setsid: ::c_long = 66;
pub const SYS_sigaction: ::c_long = 67;
pub const SYS_sgetmask: ::c_long = 68;
pub const SYS_ssetmask: ::c_long = 69;
pub const SYS_setreuid: ::c_long = 70;
pub const SYS_setregid: ::c_long = 71;
pub const SYS_sigsuspend: ::c_long = 72;
pub const SYS_sigpending: ::c_long = 73;
pub const SYS_sethostname: ::c_long = 74;
pub const SYS_setrlimit: ::c_long = 75;
pub const SYS_getrlimit: ::c_long = 76;
pub const SYS_getrusage: ::c_long = 77;
pub const SYS_gettimeofday: ::c_long = 78;
pub const SYS_settimeofday: ::c_long = 79;
pub const SYS_getgroups: ::c_long = 80;
pub const SYS_setgroups: ::c_long = 81;
pub const SYS_select: ::c_long = 82;
pub const SYS_symlink: ::c_long = 83;
pub const SYS_oldlstat: ::c_long = 84;
pub const SYS_readlink: ::c_long = 85;
pub const SYS_uselib: ::c_long = 86;
pub const SYS_swapon: ::c_long = 87;
pub const SYS_reboot: ::c_long = 88;
pub const SYS_readdir: ::c_long = 89;
pub const SYS_mmap: ::c_long = 90;
pub const SYS_munmap: ::c_long = 91;
pub const SYS_truncate: ::c_long = 92;
pub const SYS_ftruncate: ::c_long = 93;
pub const SYS_fchmod: ::c_long = 94;
pub const SYS_fchown: ::c_long = 95;
pub const SYS_getpriority: ::c_long = 96;
pub const SYS_setpriority: ::c_long = 97;
pub const SYS_profil: ::c_long = 98;
pub const SYS_statfs: ::c_long = 99;
pub const SYS_fstatfs: ::c_long = 100;
pub const SYS_ioperm: ::c_long = 101;
pub const SYS_socketcall: ::c_long = 102;
pub const SYS_syslog: ::c_long = 103;
pub const SYS_setitimer: ::c_long = 104;
pub const SYS_getitimer: ::c_long = 105;
pub const SYS_stat: ::c_long = 106;
pub const SYS_lstat: ::c_long = 107;
pub const SYS_fstat: ::c_long = 108;
pub const SYS_olduname: ::c_long = 109;
pub const SYS_iopl: ::c_long = 110;
pub const SYS_vhangup: ::c_long = 111;
pub const SYS_idle: ::c_long = 112;
pub const SYS_vm86old: ::c_long = 113;
pub const SYS_wait4: ::c_long = 114;
pub const SYS_swapoff: ::c_long = 115;
pub const SYS_sysinfo: ::c_long = 116;
pub const SYS_ipc: ::c_long = 117;
pub const SYS_fsync: ::c_long = 118;
pub const SYS_sigreturn: ::c_long = 119;
pub const SYS_clone: ::c_long = 120;
pub const SYS_setdomainname: ::c_long = 121;
pub const SYS_uname: ::c_long = 122;
pub const SYS_modify_ldt: ::c_long = 123;
pub const SYS_adjtimex: ::c_long = 124;
pub const SYS_mprotect: ::c_long = 125;
pub const SYS_sigprocmask: ::c_long = 126;
pub const SYS_create_module: ::c_long = 127;
pub const SYS_init_module: ::c_long = 128;
pub const SYS_delete_module: ::c_long = 129;
pub const SYS_get_kernel_syms: ::c_long = 130;
pub const SYS_quotactl: ::c_long = 131;
pub const SYS_getpgid: ::c_long = 132;<|fim▁hole|>pub const SYS_fchdir: ::c_long = 133;
pub const SYS_bdflush: ::c_long = 134;
pub const SYS_sysfs: ::c_long = 135;
pub const SYS_personality: ::c_long = 136;
pub const SYS_afs_syscall: ::c_long = 137;
pub const SYS_setfsuid: ::c_long = 138;
pub const SYS_setfsgid: ::c_long = 139;
pub const SYS__llseek: ::c_long = 140;
pub const SYS_getdents: ::c_long = 141;
pub const SYS__newselect: ::c_long = 142;
pub const SYS_flock: ::c_long = 143;
pub const SYS_msync: ::c_long = 144;
pub const SYS_readv: ::c_long = 145;
pub const SYS_writev: ::c_long = 146;
pub const SYS_getsid: ::c_long = 147;
pub const SYS_fdatasync: ::c_long = 148;
pub const SYS__sysctl: ::c_long = 149;
pub const SYS_mlock: ::c_long = 150;
pub const SYS_munlock: ::c_long = 151;
pub const SYS_mlockall: ::c_long = 152;
pub const SYS_munlockall: ::c_long = 153;
pub const SYS_sched_setparam: ::c_long = 154;
pub const SYS_sched_getparam: ::c_long = 155;
pub const SYS_sched_setscheduler: ::c_long = 156;
pub const SYS_sched_getscheduler: ::c_long = 157;
pub const SYS_sched_yield: ::c_long = 158;
pub const SYS_sched_get_priority_max: ::c_long = 159;
pub const SYS_sched_get_priority_min: ::c_long = 160;
pub const SYS_sched_rr_get_interval: ::c_long = 161;
pub const SYS_nanosleep: ::c_long = 162;
pub const SYS_mremap: ::c_long = 163;
pub const SYS_setresuid: ::c_long = 164;
pub const SYS_getresuid: ::c_long = 165;
pub const SYS_vm86: ::c_long = 166;
pub const SYS_query_module: ::c_long = 167;
pub const SYS_poll: ::c_long = 168;
pub const SYS_nfsservctl: ::c_long = 169;
pub const SYS_setresgid: ::c_long = 170;
pub const SYS_getresgid: ::c_long = 171;
pub const SYS_prctl: ::c_long = 172;
pub const SYS_rt_sigreturn: ::c_long = 173;
pub const SYS_rt_sigaction: ::c_long = 174;
pub const SYS_rt_sigprocmask: ::c_long = 175;
pub const SYS_rt_sigpending: ::c_long = 176;
pub const SYS_rt_sigtimedwait: ::c_long = 177;
pub const SYS_rt_sigqueueinfo: ::c_long = 178;
pub const SYS_rt_sigsuspend: ::c_long = 179;
pub const SYS_pread64: ::c_long = 180;
pub const SYS_pwrite64: ::c_long = 181;
pub const SYS_chown: ::c_long = 182;
pub const SYS_getcwd: ::c_long = 183;
pub const SYS_capget: ::c_long = 184;
pub const SYS_capset: ::c_long = 185;
pub const SYS_sigaltstack: ::c_long = 186;
pub const SYS_sendfile: ::c_long = 187;
pub const SYS_getpmsg: ::c_long = 188;
pub const SYS_putpmsg: ::c_long = 189;
pub const SYS_vfork: ::c_long = 190;
pub const SYS_ugetrlimit: ::c_long = 191;
pub const SYS_mmap2: ::c_long = 192;
pub const SYS_truncate64: ::c_long = 193;
pub const SYS_ftruncate64: ::c_long = 194;
pub const SYS_stat64: ::c_long = 195;
pub const SYS_lstat64: ::c_long = 196;
pub const SYS_fstat64: ::c_long = 197;
pub const SYS_lchown32: ::c_long = 198;
pub const SYS_getuid32: ::c_long = 199;
pub const SYS_getgid32: ::c_long = 200;
pub const SYS_geteuid32: ::c_long = 201;
pub const SYS_getegid32: ::c_long = 202;
pub const SYS_setreuid32: ::c_long = 203;
pub const SYS_setregid32: ::c_long = 204;
pub const SYS_getgroups32: ::c_long = 205;
pub const SYS_setgroups32: ::c_long = 206;
pub const SYS_fchown32: ::c_long = 207;
pub const SYS_setresuid32: ::c_long = 208;
pub const SYS_getresuid32: ::c_long = 209;
pub const SYS_setresgid32: ::c_long = 210;
pub const SYS_getresgid32: ::c_long = 211;
pub const SYS_chown32: ::c_long = 212;
pub const SYS_setuid32: ::c_long = 213;
pub const SYS_setgid32: ::c_long = 214;
pub const SYS_setfsuid32: ::c_long = 215;
pub const SYS_setfsgid32: ::c_long = 216;
pub const SYS_pivot_root: ::c_long = 217;
pub const SYS_mincore: ::c_long = 218;
pub const SYS_madvise: ::c_long = 219;
pub const SYS_getdents64: ::c_long = 220;
pub const SYS_fcntl64: ::c_long = 221;
pub const SYS_gettid: ::c_long = 224;
pub const SYS_readahead: ::c_long = 225;
pub const SYS_setxattr: ::c_long = 226;
pub const SYS_lsetxattr: ::c_long = 227;
pub const SYS_fsetxattr: ::c_long = 228;
pub const SYS_getxattr: ::c_long = 229;
pub const SYS_lgetxattr: ::c_long = 230;
pub const SYS_fgetxattr: ::c_long = 231;
pub const SYS_listxattr: ::c_long = 232;
pub const SYS_llistxattr: ::c_long = 233;
pub const SYS_flistxattr: ::c_long = 234;
pub const SYS_removexattr: ::c_long = 235;
pub const SYS_lremovexattr: ::c_long = 236;
pub const SYS_fremovexattr: ::c_long = 237;
pub const SYS_tkill: ::c_long = 238;
pub const SYS_sendfile64: ::c_long = 239;
pub const SYS_futex: ::c_long = 240;
pub const SYS_sched_setaffinity: ::c_long = 241;
pub const SYS_sched_getaffinity: ::c_long = 242;
pub const SYS_set_thread_area: ::c_long = 243;
pub const SYS_get_thread_area: ::c_long = 244;
pub const SYS_io_setup: ::c_long = 245;
pub const SYS_io_destroy: ::c_long = 246;
pub const SYS_io_getevents: ::c_long = 247;
pub const SYS_io_submit: ::c_long = 248;
pub const SYS_io_cancel: ::c_long = 249;
pub const SYS_fadvise64: ::c_long = 250;
pub const SYS_exit_group: ::c_long = 252;
pub const SYS_lookup_dcookie: ::c_long = 253;
pub const SYS_epoll_create: ::c_long = 254;
pub const SYS_epoll_ctl: ::c_long = 255;
pub const SYS_epoll_wait: ::c_long = 256;
pub const SYS_remap_file_pages: ::c_long = 257;
pub const SYS_set_tid_address: ::c_long = 258;
pub const SYS_timer_create: ::c_long = 259;
pub const SYS_timer_settime: ::c_long = 260;
pub const SYS_timer_gettime: ::c_long = 261;
pub const SYS_timer_getoverrun: ::c_long = 262;
pub const SYS_timer_delete: ::c_long = 263;
pub const SYS_clock_settime: ::c_long = 264;
pub const SYS_clock_gettime: ::c_long = 265;
pub const SYS_clock_getres: ::c_long = 266;
pub const SYS_clock_nanosleep: ::c_long = 267;
pub const SYS_statfs64: ::c_long = 268;
pub const SYS_fstatfs64: ::c_long = 269;
pub const SYS_tgkill: ::c_long = 270;
pub const SYS_utimes: ::c_long = 271;
pub const SYS_fadvise64_64: ::c_long = 272;
pub const SYS_vserver: ::c_long = 273;
pub const SYS_mbind: ::c_long = 274;
pub const SYS_get_mempolicy: ::c_long = 275;
pub const SYS_set_mempolicy: ::c_long = 276;
pub const SYS_mq_open: ::c_long = 277;
pub const SYS_mq_unlink: ::c_long = 278;
pub const SYS_mq_timedsend: ::c_long = 279;
pub const SYS_mq_timedreceive: ::c_long = 280;
pub const SYS_mq_notify: ::c_long = 281;
pub const SYS_mq_getsetattr: ::c_long = 282;
pub const SYS_kexec_load: ::c_long = 283;
pub const SYS_waitid: ::c_long = 284;
pub const SYS_add_key: ::c_long = 286;
pub const SYS_request_key: ::c_long = 287;
pub const SYS_keyctl: ::c_long = 288;
pub const SYS_ioprio_set: ::c_long = 289;
pub const SYS_ioprio_get: ::c_long = 290;
pub const SYS_inotify_init: ::c_long = 291;
pub const SYS_inotify_add_watch: ::c_long = 292;
pub const SYS_inotify_rm_watch: ::c_long = 293;
pub const SYS_migrate_pages: ::c_long = 294;
pub const SYS_openat: ::c_long = 295;
pub const SYS_mkdirat: ::c_long = 296;
pub const SYS_mknodat: ::c_long = 297;
pub const SYS_fchownat: ::c_long = 298;
pub const SYS_futimesat: ::c_long = 299;
pub const SYS_fstatat64: ::c_long = 300;
pub const SYS_unlinkat: ::c_long = 301;
pub const SYS_renameat: ::c_long = 302;
pub const SYS_linkat: ::c_long = 303;
pub const SYS_symlinkat: ::c_long = 304;
pub const SYS_readlinkat: ::c_long = 305;
pub const SYS_fchmodat: ::c_long = 306;
pub const SYS_faccessat: ::c_long = 307;
pub const SYS_pselect6: ::c_long = 308;
pub const SYS_ppoll: ::c_long = 309;
pub const SYS_unshare: ::c_long = 310;
pub const SYS_set_robust_list: ::c_long = 311;
pub const SYS_get_robust_list: ::c_long = 312;
pub const SYS_splice: ::c_long = 313;
pub const SYS_sync_file_range: ::c_long = 314;
pub const SYS_tee: ::c_long = 315;
pub const SYS_vmsplice: ::c_long = 316;
pub const SYS_move_pages: ::c_long = 317;
pub const SYS_getcpu: ::c_long = 318;
pub const SYS_epoll_pwait: ::c_long = 319;
pub const SYS_utimensat: ::c_long = 320;
pub const SYS_signalfd: ::c_long = 321;
pub const SYS_timerfd_create: ::c_long = 322;
pub const SYS_eventfd: ::c_long = 323;
pub const SYS_fallocate: ::c_long = 324;
pub const SYS_timerfd_settime: ::c_long = 325;
pub const SYS_timerfd_gettime: ::c_long = 326;
pub const SYS_signalfd4: ::c_long = 327;
pub const SYS_eventfd2: ::c_long = 328;
pub const SYS_epoll_create1: ::c_long = 329;
pub const SYS_dup3: ::c_long = 330;
pub const SYS_pipe2: ::c_long = 331;
pub const SYS_inotify_init1: ::c_long = 332;
pub const SYS_preadv: ::c_long = 333;
pub const SYS_pwritev: ::c_long = 334;
pub const SYS_rt_tgsigqueueinfo: ::c_long = 335;
pub const SYS_perf_event_open: ::c_long = 336;
pub const SYS_recvmmsg: ::c_long = 337;
pub const SYS_fanotify_init: ::c_long = 338;
pub const SYS_fanotify_mark: ::c_long = 339;
pub const SYS_prlimit64: ::c_long = 340;
pub const SYS_name_to_handle_at: ::c_long = 341;
pub const SYS_open_by_handle_at: ::c_long = 342;
pub const SYS_clock_adjtime: ::c_long = 343;
pub const SYS_syncfs: ::c_long = 344;
pub const SYS_sendmmsg: ::c_long = 345;
pub const SYS_setns: ::c_long = 346;
pub const SYS_process_vm_readv: ::c_long = 347;
pub const SYS_process_vm_writev: ::c_long = 348;
pub const SYS_kcmp: ::c_long = 349;
pub const SYS_finit_module: ::c_long = 350;
pub const SYS_sched_setattr: ::c_long = 351;
pub const SYS_sched_getattr: ::c_long = 352;
pub const SYS_renameat2: ::c_long = 353;
pub const SYS_seccomp: ::c_long = 354;
pub const SYS_getrandom: ::c_long = 355;
pub const SYS_memfd_create: ::c_long = 356;
pub const SYS_bpf: ::c_long = 357;
pub const SYS_execveat: ::c_long = 358;
pub const SYS_socket: ::c_long = 359;
pub const SYS_socketpair: ::c_long = 360;
pub const SYS_bind: ::c_long = 361;
pub const SYS_connect: ::c_long = 362;
pub const SYS_listen: ::c_long = 363;
pub const SYS_accept4: ::c_long = 364;
pub const SYS_getsockopt: ::c_long = 365;
pub const SYS_setsockopt: ::c_long = 366;
pub const SYS_getsockname: ::c_long = 367;
pub const SYS_getpeername: ::c_long = 368;
pub const SYS_sendto: ::c_long = 369;
pub const SYS_sendmsg: ::c_long = 370;
pub const SYS_recvfrom: ::c_long = 371;
pub const SYS_recvmsg: ::c_long = 372;
pub const SYS_shutdown: ::c_long = 373;
pub const SYS_userfaultfd: ::c_long = 374;
pub const SYS_membarrier: ::c_long = 375;
pub const SYS_mlock2: ::c_long = 376;
pub const SYS_copy_file_range: ::c_long = 377;
pub const SYS_preadv2: ::c_long = 378;
pub const SYS_pwritev2: ::c_long = 379;
pub const SYS_pkey_mprotect: ::c_long = 380;
pub const SYS_pkey_alloc: ::c_long = 381;
pub const SYS_pkey_free: ::c_long = 382;
// offsets in user_regs_structs, from sys/reg.h
pub const EBX: ::c_int = 0;
pub const ECX: ::c_int = 1;
pub const EDX: ::c_int = 2;
pub const ESI: ::c_int = 3;
pub const EDI: ::c_int = 4;
pub const EBP: ::c_int = 5;
pub const EAX: ::c_int = 6;
pub const DS: ::c_int = 7;
pub const ES: ::c_int = 8;
pub const FS: ::c_int = 9;
pub const GS: ::c_int = 10;
pub const ORIG_EAX: ::c_int = 11;
pub const EIP: ::c_int = 12;
pub const CS: ::c_int = 13;
pub const EFL: ::c_int = 14;
pub const UESP: ::c_int = 15;
pub const SS: ::c_int = 16;
extern {
pub fn getcontext(ucp: *mut ucontext_t) -> ::c_int;
pub fn setcontext(ucp: *const ucontext_t) -> ::c_int;
pub fn makecontext(ucp: *mut ucontext_t,
func: extern fn (),
argc: ::c_int, ...);
pub fn swapcontext(uocp: *mut ucontext_t,
ucp: *const ucontext_t) -> ::c_int;
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var _ = require('../../util')
var handlers = {
text: require('./text'),
radio: require('./radio'),
select: require('./select'),
checkbox: require('./checkbox')
}
module.exports = {
priority: 800,
twoWay: true,
handlers: handlers,
/**
* Possible elements:
* <select>
* <textarea>
* <input type="*">
* - text
* - checkbox
* - radio
* - number
* - TODO: more types may be supplied as a plugin
*/
bind: function () {
// friendly warning...
this.checkFilters()
if (this.hasRead && !this.hasWrite) {
process.env.NODE_ENV !== 'production' && _.warn(
'It seems you are using a read-only filter with ' +
'v-model. You might want to use a two-way filter ' +
'to ensure correct behavior.'
)
}
var el = this.el
var tag = el.tagName
var handler
if (tag === 'INPUT') {
handler = handlers[el.type] || handlers.text
} else if (tag === 'SELECT') {
handler = handlers.select
} else if (tag === 'TEXTAREA') {
handler = handlers.text
} else {
process.env.NODE_ENV !== 'production' && _.warn(
'v-model does not support element type: ' + tag
)
return
}
handler.bind.call(this)
this.update = handler.update
this.unbind = handler.unbind
},
/**
* Check read/write filter stats.
*/
checkFilters: function () {
var filters = this.filters
if (!filters) return
var i = filters.length
while (i--) {<|fim▁hole|> if (filter.write) {
this.hasWrite = true
}
}
}
}<|fim▁end|> | var filter = _.resolveAsset(this.vm.$options, 'filters', filters[i].name)
if (typeof filter === 'function' || filter.read) {
this.hasRead = true
} |
<|file_name|>u8g_com_HAL_LPC1768_st7920_hw_spi.cpp<|end_file_name|><|fim▁begin|>/**
* Marlin 3D Printer Firmware
* Copyright (C) 2016, 2017 MarlinFirmware [https://github.com/MarlinFirmware/Marlin]
*
* Based on Sprinter and grbl.
* Copyright (C) 2011 Camiel Gubbels / Erik van der Zalm
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
/*
based on u8g_com_LPC1768_st7920_hw_spi.c
Universal 8bit Graphics Library
Copyright (c) 2011, [email protected]
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or other
materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef TARGET_LPC1768
// #include <inttypes.h>
// #include "src/core/macros.h"
// #include "Configuration.h"
#include <U8glib.h>
#define SPI_FULL_SPEED 0
#define SPI_HALF_SPEED 1
#define SPI_QUARTER_SPEED 2
#define SPI_EIGHTH_SPEED 3
#define SPI_SIXTEENTH_SPEED 4
#define SPI_SPEED_5 5
#define SPI_SPEED_6 6
void spiBegin();
void spiInit(uint8_t spiRate);
void spiSend(uint8_t b);
void spiSend(const uint8_t* buf, size_t n);
static uint8_t rs_last_state = 255;
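// ST7920 serial framing, as implemented below: each transfer begins with a sync
// byte -- 0xF8 for a command (RS = 0) or 0xFA for data (RS = 1) -- and the payload
// byte is then sent as two transfers, high nibble first, each nibble left-aligned
// in its own byte.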
static void u8g_com_LPC1768_st7920_write_byte_hw_spi(uint8_t rs, uint8_t val)
{
uint8_t i;
if ( rs != rs_last_state) { // time to send a command/data byte
rs_last_state = rs;
if ( rs == 0 )
/* command */
spiSend(0x0f8);
else
/* data */
spiSend(0x0fa);
for( i = 0; i < 4; i++ ) // give the controller some time to process the data<|fim▁hole|>
spiSend(val & 0x0f0);
spiSend(val << 4);
}
uint8_t u8g_com_HAL_LPC1768_ST7920_hw_spi_fn(u8g_t *u8g, uint8_t msg, uint8_t arg_val, void *arg_ptr)
{
switch(msg)
{
case U8G_COM_MSG_INIT:
u8g_SetPILevel(u8g, U8G_PI_CS, 0);
u8g_SetPIOutput(u8g, U8G_PI_CS);
u8g_Delay(5);
spiBegin();
spiInit(SPI_EIGHTH_SPEED); // ST7920 max speed is about 1.1 MHz
      u8g->pin_list[U8G_PI_A0_STATE] = 0; /* initial RS state: command mode */
break;
case U8G_COM_MSG_STOP:
break;
case U8G_COM_MSG_RESET:
u8g_SetPILevel(u8g, U8G_PI_RESET, arg_val);
break;
case U8G_COM_MSG_ADDRESS: /* define cmd (arg_val = 0) or data mode (arg_val = 1) */
u8g->pin_list[U8G_PI_A0_STATE] = arg_val;
break;
case U8G_COM_MSG_CHIP_SELECT:
u8g_SetPILevel(u8g, U8G_PI_CS, arg_val); //note: the st7920 has an active high chip select
break;
case U8G_COM_MSG_WRITE_BYTE:
u8g_com_LPC1768_st7920_write_byte_hw_spi(u8g->pin_list[U8G_PI_A0_STATE], arg_val);
break;
case U8G_COM_MSG_WRITE_SEQ:
{
uint8_t *ptr = (uint8_t*) arg_ptr;
while( arg_val > 0 )
{
u8g_com_LPC1768_st7920_write_byte_hw_spi(u8g->pin_list[U8G_PI_A0_STATE], *ptr++);
arg_val--;
}
}
break;
case U8G_COM_MSG_WRITE_SEQ_P:
{
uint8_t *ptr = (uint8_t*) arg_ptr;
while( arg_val > 0 )
{
u8g_com_LPC1768_st7920_write_byte_hw_spi(u8g->pin_list[U8G_PI_A0_STATE], *ptr++);
arg_val--;
}
}
break;
}
return 1;
}
#endif // TARGET_LPC1768<|fim▁end|> | u8g_10MicroDelay(); // 2 is bad, 3 is OK, 4 is safe
} |
<|file_name|>sapi.rs<|end_file_name|><|fim▁begin|>// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
//! SAPI 5.4 definitions
use shared::guiddef::GUID;
use shared::minwindef::{BYTE, ULONG, WORD};
use um::unknwnbase::{IUnknown, IUnknownVtbl};
use um::winnt::{HRESULT, LPWSTR, ULONGLONG};
pub use um::sapi53::{
SPDATAKEYLOCATION,
SPDKL_DefaultLocation,
SPDKL_CurrentUser,
SPDKL_LocalMachine,
SPDKL_CurrentConfig,
SPDUI_EngineProperties,
SPDUI_AddRemoveWord,
SPDUI_UserTraining,
SPDUI_MicTraining,
SPDUI_RecoProfileProperties,
SPDUI_AudioProperties,
SPDUI_AudioVolume,
SPDUI_UserEnrollment,
SPDUI_ShareData,
SPDUI_Tutorial,
SPSTREAMFORMAT,
SPSF_Default,
SPSF_NoAssignedFormat,
SPSF_Text,
SPSF_NonStandardFormat,
SPSF_ExtendedAudioFormat,
SPSF_8kHz8BitMono,
SPSF_8kHz8BitStereo,
SPSF_8kHz16BitMono,
SPSF_8kHz16BitStereo,
SPSF_11kHz8BitMono,
SPSF_11kHz8BitStereo,
SPSF_11kHz16BitMono,
SPSF_11kHz16BitStereo,
SPSF_12kHz8BitMono,
SPSF_12kHz8BitStereo,
SPSF_12kHz16BitMono,
SPSF_12kHz16BitStereo,
SPSF_16kHz8BitMono,
SPSF_16kHz8BitStereo,
SPSF_16kHz16BitMono,
SPSF_16kHz16BitStereo,
SPSF_22kHz8BitMono,
SPSF_22kHz8BitStereo,
SPSF_22kHz16BitMono,
SPSF_22kHz16BitStereo,
SPSF_24kHz8BitMono,
SPSF_24kHz8BitStereo,
SPSF_24kHz16BitMono,
SPSF_24kHz16BitStereo,
SPSF_32kHz8BitMono,
SPSF_32kHz8BitStereo,
SPSF_32kHz16BitMono,
SPSF_32kHz16BitStereo,
SPSF_44kHz8BitMono,
SPSF_44kHz8BitStereo,
SPSF_44kHz16BitMono,
SPSF_44kHz16BitStereo,
SPSF_48kHz8BitMono,
SPSF_48kHz8BitStereo,
SPSF_48kHz16BitMono,
SPSF_48kHz16BitStereo,
SPSF_TrueSpeech_8kHz1BitMono,
SPSF_CCITT_ALaw_8kHzMono,
SPSF_CCITT_ALaw_8kHzStereo,
SPSF_CCITT_ALaw_11kHzMono,
SPSF_CCITT_ALaw_11kHzStereo,
SPSF_CCITT_ALaw_22kHzMono,
SPSF_CCITT_ALaw_22kHzStereo,
SPSF_CCITT_ALaw_44kHzMono,
SPSF_CCITT_ALaw_44kHzStereo,
SPSF_CCITT_uLaw_8kHzMono,
SPSF_CCITT_uLaw_8kHzStereo,
SPSF_CCITT_uLaw_11kHzMono,
SPSF_CCITT_uLaw_11kHzStereo,
SPSF_CCITT_uLaw_22kHzMono,
SPSF_CCITT_uLaw_22kHzStereo,
SPSF_CCITT_uLaw_44kHzMono,
SPSF_CCITT_uLaw_44kHzStereo,
SPSF_ADPCM_8kHzMono,
SPSF_ADPCM_8kHzStereo,
SPSF_ADPCM_11kHzMono,
SPSF_ADPCM_11kHzStereo,
SPSF_ADPCM_22kHzMono,
SPSF_ADPCM_22kHzStereo,
SPSF_ADPCM_44kHzMono,
SPSF_ADPCM_44kHzStereo,
SPSF_GSM610_8kHzMono,
SPSF_GSM610_11kHzMono,
SPSF_GSM610_22kHzMono,
SPSF_GSM610_44kHzMono,
SPSF_NUM_FORMATS,
SPDFID_Text,
SPDFID_WaveFormatEx,
SPREG_USER_ROOT,
SPREG_LOCAL_MACHINE_ROOT,
SPCAT_AUDIOOUT,
SPCAT_AUDIOIN,
SPCAT_VOICES,
SPCAT_RECOGNIZERS,
SPCAT_APPLEXICONS,
SPCAT_PHONECONVERTERS,
SPCAT_TEXTNORMALIZERS,
SPCAT_RECOPROFILES,
SPMMSYS_AUDIO_IN_TOKEN_ID,
SPMMSYS_AUDIO_OUT_TOKEN_ID,
SPCURRENT_USER_LEXICON_TOKEN_ID,
SPTOKENVALUE_CLSID,
SPTOKENKEY_FILES,
SPTOKENKEY_UI,
SPTOKENKEY_ATTRIBUTES,
SPTOKENKEY_RETAINEDAUDIO,
SPTOKENKEY_AUDIO_LATENCY_WARNING,
SPTOKENKEY_AUDIO_LATENCY_TRUNCATE,
SPTOKENKEY_AUDIO_LATENCY_UPDATE_INTERVAL,
SPVOICECATEGORY_TTSRATE,
SPPROP_RESOURCE_USAGE,
SPPROP_HIGH_CONFIDENCE_THRESHOLD,
SPPROP_NORMAL_CONFIDENCE_THRESHOLD,
SPPROP_LOW_CONFIDENCE_THRESHOLD,
SPPROP_RESPONSE_SPEED,
SPPROP_COMPLEX_RESPONSE_SPEED,
SPPROP_ADAPTATION_ON,
SPPROP_PERSISTED_BACKGROUND_ADAPTATION,
SPPROP_PERSISTED_LANGUAGE_MODEL_ADAPTATION,
SPPROP_UX_IS_LISTENING,
SPTOPIC_SPELLING,
SPWILDCARD,
SPDICTATION,
SPREG_SAFE_USER_TOKENS,
SPINFDICTATION,
SP_LOW_CONFIDENCE,
SP_NORMAL_CONFIDENCE,
SP_HIGH_CONFIDENCE,
DEFAULT_WEIGHT,
SP_MAX_WORD_LENGTH,
SP_MAX_PRON_LENGTH,
SP_EMULATE_RESULT,
ISpNotifyCallback,
SPNOTIFYCALLBACK,
ISpNotifySource, ISpNotifySourceVtbl,
ISpNotifySink, ISpNotifySinkVtbl,
ISpNotifyTranslator, ISpNotifyTranslatorVtbl,
ISpDataKey, ISpDataKeyVtbl,
ISpRegDataKey, ISpRegDataKeyVtbl,
ISpObjectTokenCategory, ISpObjectTokenCategoryVtbl,
ISpObjectToken, ISpObjectTokenVtbl,
ISpObjectTokenInit, ISpObjectTokenInitVtbl,
IEnumSpObjectTokens, IEnumSpObjectTokensVtbl,
ISpObjectWithToken, ISpObjectWithTokenVtbl,
ISpResourceManager, ISpResourceManagerVtbl,
SPEVENTLPARAMTYPE,
SPET_LPARAM_IS_UNDEFINED,
SPET_LPARAM_IS_TOKEN,
SPET_LPARAM_IS_OBJECT,
SPET_LPARAM_IS_POINTER,
SPET_LPARAM_IS_STRING,
SPEVENTENUM,
SPEI_UNDEFINED,
SPEI_START_INPUT_STREAM,
SPEI_END_INPUT_STREAM,
SPEI_VOICE_CHANGE,
SPEI_TTS_BOOKMARK,
SPEI_WORD_BOUNDARY,
SPEI_PHONEME,
SPEI_SENTENCE_BOUNDARY,
SPEI_VISEME,
SPEI_TTS_AUDIO_LEVEL,
SPEI_TTS_PRIVATE,
SPEI_MIN_TTS,
SPEI_MAX_TTS,
SPEI_END_SR_STREAM,
SPEI_SOUND_START,
SPEI_SOUND_END,
SPEI_PHRASE_START,
SPEI_RECOGNITION,
SPEI_HYPOTHESIS,
SPEI_SR_BOOKMARK,
SPEI_PROPERTY_NUM_CHANGE,
SPEI_PROPERTY_STRING_CHANGE,
SPEI_FALSE_RECOGNITION,
SPEI_INTERFERENCE,
SPEI_REQUEST_UI,
SPEI_RECO_STATE_CHANGE,
SPEI_ADAPTATION,
SPEI_START_SR_STREAM,
SPEI_RECO_OTHER_CONTEXT,
SPEI_SR_AUDIO_LEVEL,
SPEI_SR_RETAINEDAUDIO,
SPEI_SR_PRIVATE,
};
pub const ACTIVE_CATEGORY_CHANGED: SPEVENTENUM = 53;
pub use um::sapi53::{
SPEI_RESERVED5,
SPEI_RESERVED6,
SPEI_MIN_SR,
SPEI_MAX_SR,
SPEI_RESERVED1,
SPEI_RESERVED2,
SPEI_RESERVED3,
SPFEI_FLAGCHECK,
SPFEI_ALL_TTS_EVENTS,
SPFEI_ALL_SR_EVENTS,
SPFEI_ALL_EVENTS,
SPFEI,
SPEVENT,
SPSERIALIZEDEVENT,
SPSERIALIZEDEVENT64,
SPEVENTEX,
SPINTERFERENCE,
SPINTERFERENCE_NONE,
SPINTERFERENCE_NOISE,
SPINTERFERENCE_NOSIGNAL,
SPINTERFERENCE_TOOLOUD,
SPINTERFERENCE_TOOQUIET,
SPINTERFERENCE_TOOFAST,
SPINTERFERENCE_TOOSLOW,
SPINTERFERENCE_LATENCY_WARNING,
SPINTERFERENCE_LATENCY_TRUNCATE_BEGIN,
SPINTERFERENCE_LATENCY_TRUNCATE_END,
SPENDSRSTREAMFLAGS,
SPESF_NONE,
SPESF_STREAM_RELEASED,
SPESF_EMULATED,
SPVFEATURE,
SPVFEATURE_STRESSED,
SPVFEATURE_EMPHASIS,
SPVISEMES,
SP_VISEME_0,
SP_VISEME_1,
SP_VISEME_2,
SP_VISEME_3,
SP_VISEME_4,
SP_VISEME_5,
SP_VISEME_6,
SP_VISEME_7,
SP_VISEME_8,
SP_VISEME_9,
SP_VISEME_10,
SP_VISEME_11,
SP_VISEME_12,
SP_VISEME_13,
SP_VISEME_14,
SP_VISEME_15,
SP_VISEME_16,
SP_VISEME_17,
SP_VISEME_18,
SP_VISEME_19,
SP_VISEME_20,
SP_VISEME_21,
SPEVENTSOURCEINFO,
ISpEventSource, ISpEventSourceVtbl,
ISpEventSource2, ISpEventSource2Vtbl,
ISpEventSink, ISpEventSinkVtbl,
ISpStreamFormat, ISpStreamFormatVtbl,
SPFILEMODE,
SPFM_OPEN_READONLY,
SPFM_OPEN_READWRITE,
SPFM_CREATE,
SPFM_CREATE_ALWAYS,
SPFM_NUM_MODES,
ISpStream, ISpStreamVtbl,
ISpStreamFormatConverter, ISpStreamFormatConverterVtbl,
SPAUDIOSTATE,
SPAS_CLOSED,
SPAS_STOP,
SPAS_PAUSE,
SPAS_RUN,
SPAUDIOSTATUS,
SPAUDIOBUFFERINFO,
ISpAudio, ISpAudioVtbl,
ISpMMSysAudio, ISpMMSysAudioVtbl,
ISpTranscript, ISpTranscriptVtbl,
SPDISPLAYATTRIBUTES,
SPAF_ONE_TRAILING_SPACE,
SPAF_TWO_TRAILING_SPACES,
SPAF_CONSUME_LEADING_SPACES,
SPAF_BUFFER_POSITION,
SPAF_ALL,
SPAF_USER_SPECIFIED,
SPPHONEID,
PSPPHONEID,
PCSPPHONEID,
SPPHRASEELEMENT,
SPPHRASERULE,
SPPHRASEPROPERTYUNIONTYPE,
SPPPUT_UNUSED,
SPPPUT_ARRAY_INDEX,
SPPHRASEPROPERTY,
SPPHRASEREPLACEMENT,
SPSEMANTICERRORINFO,
SPSEMANTICFORMAT,
SPPHRASE_50,
// SPPHRASESIZE_500,
};
pub use um::sapi53::SPPHRASE as SPPHRASE_53;
STRUCT!{struct SPPHRASE {
cbSize: ULONG,
LangID: WORD,
wHomophoneGroupId: WORD,
ullGrammarID: ULONGLONG,
ftStartTime: ULONGLONG,
ullAudioStreamPosition: ULONGLONG,
ulAudioSizeBytes: ULONG,
ulRetainedSizeBytes: ULONG,
ulAudioSizeTime: ULONG,
Rule: SPPHRASERULE,
pProperties: *const SPPHRASEPROPERTY,
pElements: *const SPPHRASEELEMENT,
cReplacements: ULONG,
pReplacements: *const SPPHRASEREPLACEMENT,
SREngineID: GUID,
ulSREnginePrivateDataSize: ULONG,
pSREnginePrivateData: *const BYTE,
pSML: LPWSTR,
pSemanticErrorInfo: *mut SPSEMANTICERRORINFO,
SemanticTagFormat: SPSEMANTICFORMAT,
}}
pub use um::sapi53::{
SPSERIALIZEDPHRASE,
SPRULE,
SPVALUETYPE,
SPDF_PROPERTY,
SPDF_REPLACEMENT,
SPDF_RULE,
SPDF_DISPLAYTEXT,
SPDF_LEXICALFORM ,
SPDF_PRONUNCIATION,
SPDF_AUDIO,
SPDF_ALTERNATES,
SPDF_ALL,
SPBINARYGRAMMAR,
SPPHRASERNG,
SPPR_ALL_ELEMENTS,
SP_GETWHOLEPHRASE,
SPRR_ALL_ELEMENTS,
SPSTATEHANDLE,
SPRECOEVENTFLAGS,
SPREF_AutoPause,
SPREF_Emulated,
SPREF_SMLTimeout,
SPREF_ExtendableParse,
SPREF_ReSent,
SPREF_Hypothesis,
SPREF_FalseRecognition,
SPPARTOFSPEECH,
SPPS_NotOverriden,
SPPS_Unknown,
SPPS_Noun,
SPPS_Verb,
SPPS_Modifier,
SPPS_Function,
SPPS_Interjection,
SPPS_Noncontent,
SPPS_LMA,
SPPS_SuppressWord,
SPLEXICONTYPE,
eLEXTYPE_USER,
eLEXTYPE_APP,
eLEXTYPE_VENDORLEXICON,
eLEXTYPE_LETTERTOSOUND,
eLEXTYPE_MORPHOLOGY,
eLEXTYPE_RESERVED4,
eLEXTYPE_USER_SHORTCUT,
eLEXTYPE_RESERVED6,
eLEXTYPE_RESERVED7,
eLEXTYPE_RESERVED8,
eLEXTYPE_RESERVED9,
eLEXTYPE_RESERVED10,
eLEXTYPE_PRIVATE1,
eLEXTYPE_PRIVATE2,
eLEXTYPE_PRIVATE3,
eLEXTYPE_PRIVATE4,
eLEXTYPE_PRIVATE5,
eLEXTYPE_PRIVATE6,
eLEXTYPE_PRIVATE7,
eLEXTYPE_PRIVATE8,
eLEXTYPE_PRIVATE9,
eLEXTYPE_PRIVATE10,
eLEXTYPE_PRIVATE11,
eLEXTYPE_PRIVATE12,
eLEXTYPE_PRIVATE13,
eLEXTYPE_PRIVATE14,
eLEXTYPE_PRIVATE15,
eLEXTYPE_PRIVATE16,
eLEXTYPE_PRIVATE17,
eLEXTYPE_PRIVATE18,
eLEXTYPE_PRIVATE19,
eLEXTYPE_PRIVATE20,
SPWORDTYPE,
eWORDTYPE_ADDED,
eWORDTYPE_DELETED,
SPPRONUNCIATIONFLAGS,
ePRONFLAG_USED,
SPWORDPRONUNCIATION,
SPWORDPRONUNCIATIONLIST,
SPWORD,
SPWORDLIST,
ISpLexicon, ISpLexiconVtbl,
ISpContainerLexicon, ISpContainerLexiconVtbl,
SPSHORTCUTTYPE,
SPSHT_NotOverriden,
SPSHT_Unknown,
SPSHT_EMAIL,
SPSHT_OTHER,
SPPS_RESERVED1,
SPPS_RESERVED2,
SPPS_RESERVED3,
SPPS_RESERVED4,
SPSHORTCUTPAIR,
SPSHORTCUTPAIRLIST,
ISpShortcut, ISpShortcutVtbl,
ISpPhoneConverter, ISpPhoneConverterVtbl,
ISpPhoneticAlphabetConverter, ISpPhoneticAlphabetConverterVtbl,
ISpPhoneticAlphabetSelection, ISpPhoneticAlphabetSelectionVtbl,
SPVPITCH,
SPVACTIONS,
SPVA_Speak,
SPVA_Silence,
SPVA_Pronounce,
SPVA_Bookmark,
SPVA_SpellOut,
SPVA_Section,
SPVA_ParseUnknownTag,
SPVCONTEXT,
SPVSTATE,
SPRUNSTATE,
SPRS_DONE,
SPRS_IS_SPEAKING,
SPVLIMITS,
SPMIN_VOLUME,
SPMAX_VOLUME,
SPMIN_RATE,
SPMAX_RATE,
SPVPRIORITY,
SPVPRI_NORMAL,
SPVPRI_ALERT,
SPVPRI_OVER,
SPVOICESTATUS,
SPEAKFLAGS,
SPF_DEFAULT,
SPF_ASYNC,
SPF_PURGEBEFORESPEAK,
SPF_IS_FILENAME,
SPF_IS_XML,
SPF_IS_NOT_XML,
SPF_PERSIST_XML,
SPF_NLP_SPEAK_PUNC,
SPF_PARSE_SAPI,
SPF_PARSE_SSML,
SPF_PARSE_AUTODETECT,
SPF_NLP_MASK,
SPF_PARSE_MASK,
SPF_VOICE_MASK,
SPF_UNUSED_FLAGS,
ISpVoice, ISpVoiceVtbl,
ISpPhrase, ISpPhraseVtbl,
ISpPhraseAlt, ISpPhraseAltVtbl,
SPXMLRESULTOPTIONS,
SPXRO_SML,
SPXRO_Alternates_SML,
ISpPhrase2, ISpPhrase2Vtbl,
SPRECORESULTTIMES,
SPSERIALIZEDRESULT,
ISpRecoResult, ISpRecoResultVtbl,
SPCOMMITFLAGS,
SPCF_NONE,
SPCF_ADD_TO_USER_LEXICON,
SPCF_DEFINITE_CORRECTION,
ISpRecoResult2, ISpRecoResult2Vtbl,
ISpXMLRecoResult, ISpXMLRecoResultVtbl,
SPTEXTSELECTIONINFO,
SPWORDPRONOUNCEABLE,
SPWP_UNKNOWN_WORD_UNPRONOUNCEABLE,
SPWP_UNKNOWN_WORD_PRONOUNCEABLE,
SPWP_KNOWN_WORD_PRONOUNCEABLE,
SPGRAMMARSTATE,
SPGS_DISABLED,
SPGS_ENABLED,
SPGS_EXCLUSIVE,
SPCONTEXTSTATE,
SPCS_DISABLED,
SPCS_ENABLED,
SPRULESTATE,
SPRS_INACTIVE,
SPRS_ACTIVE,
SPRS_ACTIVE_WITH_AUTO_PAUSE,
SPWT_LEXICAL_NO_SPECIAL_CHARS,
SPPROPERTYINFO,
SPCFGRULEATTRIBUTES,
SPRAF_TopLevel,
SPRAF_Active,
SPRAF_Export,
SPRAF_Import,
SPRAF_Interpreter,
SPRAF_Dynamic,
SPRAF_Root,
SPRAF_AutoPause,
SPRAF_UserDelimited,
ISpGrammarBuilder, ISpGrammarBuilderVtbl,
SPLOADOPTIONS,
SPLO_STATIC,
SPLO_DYNAMIC,
ISpRecoGrammar, ISpRecoGrammarVtbl,
SPMATCHINGMODE,
AllWords,
Subsequence,
OrderedSubset,
SubsequenceContentRequired,
OrderedSubsetContentRequired,
PHONETICALPHABET,
PA_Ipa,
PA_Ups,
PA_Sapi,
ISpGrammarBuilder2, ISpGrammarBuilder2Vtbl,
SPRP_NORMAL,
ISpRecoGrammar2, ISpRecoGrammar2Vtbl,
ISpeechResourceLoader, ISpeechResourceLoaderVtbl,
SPRECOCONTEXTSTATUS,
SPBOOKMARKOPTIONS,
SPBO_NONE,
SPBO_PAUSE,
SPBO_AHEAD,
SPBO_TIME_UNITS,
SPAUDIOOPTIONS,
SPAO_NONE,
SPAO_RETAIN_AUDIO,
ISpRecoContext, ISpRecoContextVtbl,
SPGRAMMAROPTIONS,
SPGO_SAPI,
SPGO_SRGS,
SPGO_UPS,
SPGO_SRGS_MS_SCRIPT,
SPGO_SRGS_W3C_SCRIPT,
SPGO_SRGS_STG_SCRIPT,
SPGO_SRGS_SCRIPT,
SPGO_FILE,
SPGO_HTTP,
SPGO_RES,
SPGO_OBJECT,
SPGO_DEFAULT,
SPGO_ALL,
SPADAPTATIONSETTINGS,
SPADS_Default,
SPADS_CurrentRecognizer,
SPADS_RecoProfile,
SPADS_Immediate,
SPADS_Reset,
SPADS_HighVolumeDataSource,
SPADAPTATIONRELEVANCE,
SPAR_Unknown,
SPAR_Low,
SPAR_Medium,
SPAR_High,
ISpRecoContext2, ISpRecoContext2Vtbl,
ISpProperties, ISpPropertiesVtbl,
SP_MAX_LANGIDS,
SPRECOGNIZERSTATUS,
SPWAVEFORMATTYPE,
SPWF_INPUT,
SPWF_SRENGINE,
SPSTREAMFORMATTYPE,
SPRECOSTATE,
SPRST_INACTIVE,
SPRST_ACTIVE,
SPRST_ACTIVE_ALWAYS,
SPRST_INACTIVE_WITH_PURGE,
SPRST_NUM_STATES,
ISpRecognizer, ISpRecognizerVtbl,
ISpSerializeState, ISpSerializeStateVtbl,
ISpRecognizer2, ISpRecognizer2Vtbl,
};
ENUM!{enum SPCATEGORYTYPE {
SPCT_COMMAND,
SPCT_DICTATION,
SPCT_SLEEP,
SPCT_SUB_COMMAND,
SPCT_SUB_DICTATION,
}}
RIDL!{#[uuid(0xda0cd0f9, 0x14a2, 0x4f09, 0x8c, 0x2a, 0x85, 0xcc, 0x48, 0x97, 0x93, 0x45)]
interface ISpRecoCategory(ISpRecoCategoryVtbl): IUnknown(IUnknownVtbl) {
fn GetType(
peCategoryType: *mut SPCATEGORYTYPE,
) -> HRESULT,
}}
RIDL!{#[uuid(0xdf1b943c, 0x5838, 0x4aa2, 0x87, 0x06, 0xd7, 0xcd, 0x5b, 0x33, 0x34, 0x99)]
interface ISpRecognizer3(ISpRecognizer3Vtbl): IUnknown(IUnknownVtbl) {
fn GetCategory(
categoryType: SPCATEGORYTYPE,
ppCategory: *mut *mut ISpRecoCategory,
) -> HRESULT,
fn SetActiveCategory(
pCategory: *mut ISpRecoCategory,
) -> HRESULT,
fn GetActiveCategory(
ppCategory: *mut *mut ISpRecoCategory,
) -> HRESULT,
}}
pub use um::sapi53::{
SPNORMALIZATIONLIST,
ISpEnginePronunciation, ISpEnginePronunciationVtbl,
SPDISPLAYTOKEN,
SPDISPLAYPHRASE,
ISpDisplayAlternates, ISpDisplayAlternatesVtbl,
SpeechLanguageId,
DISPID_SpeechDataKey,
DISPID_SDKSetBinaryValue,
DISPID_SDKGetBinaryValue,
DISPID_SDKSetStringValue,
DISPID_SDKGetStringValue,
DISPID_SDKSetLongValue,
DISPID_SDKGetlongValue,
DISPID_SDKOpenKey,
DISPID_SDKCreateKey,
DISPID_SDKDeleteKey,
DISPID_SDKDeleteValue,
DISPID_SDKEnumKeys,
DISPID_SDKEnumValues,
DISPID_SpeechObjectToken,
DISPID_SOTId,
DISPID_SOTDataKey,
DISPID_SOTCategory,
DISPID_SOTGetDescription,
DISPID_SOTSetId,
DISPID_SOTGetAttribute,<|fim▁hole|> DISPID_SOTRemoveStorageFileName,
DISPID_SOTIsUISupported,
DISPID_SOTDisplayUI,
DISPID_SOTMatchesAttributes,
SpeechDataKeyLocation,
SDKLDefaultLocation,
SDKLCurrentUser,
SDKLLocalMachine,
SDKLCurrentConfig,
SpeechTokenContext,
STCInprocServer,
STCInprocHandler ,
STCLocalServer,
STCRemoteServer,
STCAll,
SpeechTokenShellFolder,
STSF_AppData,
STSF_LocalAppData,
STSF_CommonAppData,
STSF_FlagCreate,
DISPID_SpeechObjectTokens,
DISPID_SOTsCount,
DISPID_SOTsItem,
DISPID_SOTs_NewEnum,
DISPID_SpeechObjectTokenCategory,
DISPID_SOTCId,
DISPID_SOTCDefault,
DISPID_SOTCSetId,
DISPID_SOTCGetDataKey,
DISPID_SOTCEnumerateTokens,
SpeechAudioFormatType,
SAFTDefault,
SAFTNoAssignedFormat,
SAFTText,
SAFTNonStandardFormat,
SAFTExtendedAudioFormat,
SAFT8kHz8BitMono,
SAFT8kHz8BitStereo,
SAFT8kHz16BitMono,
SAFT8kHz16BitStereo,
SAFT11kHz8BitMono,
SAFT11kHz8BitStereo,
SAFT11kHz16BitMono,
SAFT11kHz16BitStereo,
SAFT12kHz8BitMono,
SAFT12kHz8BitStereo,
SAFT12kHz16BitMono,
SAFT12kHz16BitStereo,
SAFT16kHz8BitMono,
SAFT16kHz8BitStereo,
SAFT16kHz16BitMono,
SAFT16kHz16BitStereo,
SAFT22kHz8BitMono,
SAFT22kHz8BitStereo,
SAFT22kHz16BitMono,
SAFT22kHz16BitStereo,
SAFT24kHz8BitMono,
SAFT24kHz8BitStereo,
SAFT24kHz16BitMono,
SAFT24kHz16BitStereo,
SAFT32kHz8BitMono,
SAFT32kHz8BitStereo,
SAFT32kHz16BitMono,
SAFT32kHz16BitStereo,
SAFT44kHz8BitMono,
SAFT44kHz8BitStereo,
SAFT44kHz16BitMono,
SAFT44kHz16BitStereo,
SAFT48kHz8BitMono,
SAFT48kHz8BitStereo,
SAFT48kHz16BitMono,
SAFT48kHz16BitStereo,
SAFTTrueSpeech_8kHz1BitMono,
SAFTCCITT_ALaw_8kHzMono,
SAFTCCITT_ALaw_8kHzStereo,
SAFTCCITT_ALaw_11kHzMono,
SAFTCCITT_ALaw_11kHzStereo,
SAFTCCITT_ALaw_22kHzMono,
SAFTCCITT_ALaw_22kHzStereo,
SAFTCCITT_ALaw_44kHzMono,
SAFTCCITT_ALaw_44kHzStereo,
SAFTCCITT_uLaw_8kHzMono,
SAFTCCITT_uLaw_8kHzStereo,
SAFTCCITT_uLaw_11kHzMono,
SAFTCCITT_uLaw_11kHzStereo,
SAFTCCITT_uLaw_22kHzMono,
SAFTCCITT_uLaw_22kHzStereo,
SAFTCCITT_uLaw_44kHzMono,
SAFTCCITT_uLaw_44kHzStereo,
SAFTADPCM_8kHzMono,
SAFTADPCM_8kHzStereo,
SAFTADPCM_11kHzMono,
SAFTADPCM_11kHzStereo,
SAFTADPCM_22kHzMono,
SAFTADPCM_22kHzStereo,
SAFTADPCM_44kHzMono,
SAFTADPCM_44kHzStereo,
SAFTGSM610_8kHzMono,
SAFTGSM610_11kHzMono,
SAFTGSM610_22kHzMono,
SAFTGSM610_44kHzMono,
DISPID_SpeechAudioFormat,
DISPID_SAFType,
DISPID_SAFGuid,
DISPID_SAFGetWaveFormatEx,
DISPID_SAFSetWaveFormatEx,
DISPID_SpeechBaseStream,
DISPID_SBSFormat,
DISPID_SBSRead,
DISPID_SBSWrite,
DISPID_SBSSeek,
SpeechStreamSeekPositionType,
SSSPTRelativeToStart,
SSSPTRelativeToCurrentPosition,
SSSPTRelativeToEnd,
DISPID_SpeechAudio,
DISPID_SAStatus,
DISPID_SABufferInfo,
DISPID_SADefaultFormat,
DISPID_SAVolume,
DISPID_SABufferNotifySize,
DISPID_SAEventHandle,
DISPID_SASetState,
SpeechAudioState,
SASClosed,
SASStop,
SASPause,
SASRun,
DISPID_SpeechMMSysAudio,
DISPID_SMSADeviceId,
DISPID_SMSALineId,
DISPID_SMSAMMHandle,
DISPID_SpeechFileStream,
DISPID_SFSOpen,
DISPID_SFSClose,
SpeechStreamFileMode,
SSFMOpenForRead,
SSFMOpenReadWrite,
SSFMCreate,
SSFMCreateForWrite,
DISPID_SpeechCustomStream,
DISPID_SCSBaseStream,
DISPID_SpeechMemoryStream,
DISPID_SMSSetData,
DISPID_SMSGetData,
DISPID_SpeechAudioStatus,
DISPID_SASFreeBufferSpace,
DISPID_SASNonBlockingIO,
DISPID_SASState,
DISPID_SASCurrentSeekPosition,
DISPID_SASCurrentDevicePosition,
DISPID_SpeechAudioBufferInfo,
DISPID_SABIMinNotification,
DISPID_SABIBufferSize,
DISPID_SABIEventBias,
DISPID_SpeechWaveFormatEx,
DISPID_SWFEFormatTag,
DISPID_SWFEChannels,
DISPID_SWFESamplesPerSec,
DISPID_SWFEAvgBytesPerSec,
DISPID_SWFEBlockAlign,
DISPID_SWFEBitsPerSample,
DISPID_SWFEExtraData,
DISPID_SpeechVoice,
DISPID_SVStatus,
DISPID_SVVoice,
DISPID_SVAudioOutput,
DISPID_SVAudioOutputStream,
DISPID_SVRate,
DISPID_SVVolume,
DISPID_SVAllowAudioOuputFormatChangesOnNextSet,
DISPID_SVEventInterests,
DISPID_SVPriority,
DISPID_SVAlertBoundary,
DISPID_SVSyncronousSpeakTimeout,
DISPID_SVSpeak,
DISPID_SVSpeakStream,
DISPID_SVPause,
DISPID_SVResume,
DISPID_SVSkip,
DISPID_SVGetVoices,
DISPID_SVGetAudioOutputs,
DISPID_SVWaitUntilDone,
DISPID_SVSpeakCompleteEvent,
DISPID_SVIsUISupported,
DISPID_SVDisplayUI,
SpeechVoicePriority,
SVPNormal,
SVPAlert,
SVPOver,
SpeechVoiceSpeakFlags,
SVSFDefault,
SVSFlagsAsync,
SVSFPurgeBeforeSpeak,
SVSFIsFilename,
SVSFIsXML,
SVSFIsNotXML,
SVSFPersistXML,
SVSFNLPSpeakPunc,
SVSFParseSapi,
SVSFParseSsml,
SVSFParseAutodetect,
SVSFNLPMask,
SVSFParseMask,
SVSFVoiceMask,
SVSFUnusedFlags,
SpeechVoiceEvents,
SVEStartInputStream,
SVEEndInputStream,
SVEVoiceChange,
SVEBookmark,
SVEWordBoundary,
SVEPhoneme,
SVESentenceBoundary,
SVEViseme,
SVEAudioLevel,
SVEPrivate,
SVEAllEvents,
DISPID_SpeechVoiceStatus,
DISPID_SVSCurrentStreamNumber,
DISPID_SVSLastStreamNumberQueued,
DISPID_SVSLastResult,
DISPID_SVSRunningState,
DISPID_SVSInputWordPosition,
DISPID_SVSInputWordLength,
DISPID_SVSInputSentencePosition,
DISPID_SVSInputSentenceLength,
DISPID_SVSLastBookmark,
DISPID_SVSLastBookmarkId,
DISPID_SVSPhonemeId,
DISPID_SVSVisemeId,
SpeechRunState,
SRSEDone,
SRSEIsSpeaking,
SpeechVisemeType,
SVP_0,
SVP_1,
SVP_2,
SVP_3,
SVP_4,
SVP_5,
SVP_6,
SVP_7,
SVP_8,
SVP_9,
SVP_10,
SVP_11,
SVP_12,
SVP_13,
SVP_14,
SVP_15,
SVP_16,
SVP_17,
SVP_18,
SVP_19,
SVP_20,
SVP_21,
SpeechVisemeFeature,
SVF_None,
SVF_Stressed,
SVF_Emphasis,
DISPID_SpeechVoiceEvent,
DISPID_SVEStreamStart,
DISPID_SVEStreamEnd,
DISPID_SVEVoiceChange,
DISPID_SVEBookmark,
DISPID_SVEWord,
DISPID_SVEPhoneme,
DISPID_SVESentenceBoundary,
DISPID_SVEViseme,
DISPID_SVEAudioLevel,
DISPID_SVEEnginePrivate,
DISPID_SpeechRecognizer,
DISPID_SRRecognizer,
DISPID_SRAllowAudioInputFormatChangesOnNextSet,
DISPID_SRAudioInput,
DISPID_SRAudioInputStream,
DISPID_SRIsShared,
DISPID_SRState,
DISPID_SRStatus,
DISPID_SRProfile,
DISPID_SREmulateRecognition,
DISPID_SRCreateRecoContext,
DISPID_SRGetFormat,
DISPID_SRSetPropertyNumber,
DISPID_SRGetPropertyNumber,
DISPID_SRSetPropertyString,
DISPID_SRGetPropertyString,
DISPID_SRIsUISupported,
DISPID_SRDisplayUI,
DISPID_SRGetRecognizers,
DISPID_SVGetAudioInputs,
DISPID_SVGetProfiles,
SpeechRecognizerState,
SRSInactive,
SRSActive,
SRSActiveAlways,
SRSInactiveWithPurge,
SpeechDisplayAttributes,
SDA_No_Trailing_Space,
SDA_One_Trailing_Space,
SDA_Two_Trailing_Spaces,
SDA_Consume_Leading_Spaces,
SpeechFormatType,
SFTInput,
SFTSREngine,
SpeechEmulationCompareFlags,
SECFIgnoreCase,
SECFIgnoreKanaType,
SECFIgnoreWidth,
SECFNoSpecialChars,
SECFEmulateResult,
SECFDefault,
DISPID_SpeechRecognizerStatus,
DISPID_SRSAudioStatus,
DISPID_SRSCurrentStreamPosition,
DISPID_SRSCurrentStreamNumber,
DISPID_SRSNumberOfActiveRules,
DISPID_SRSClsidEngine,
DISPID_SRSSupportedLanguages,
DISPID_SpeechRecoContext,
DISPID_SRCRecognizer,
DISPID_SRCAudioInInterferenceStatus,
DISPID_SRCRequestedUIType,
DISPID_SRCVoice,
DISPID_SRAllowVoiceFormatMatchingOnNextSet,
DISPID_SRCVoicePurgeEvent,
DISPID_SRCEventInterests,
DISPID_SRCCmdMaxAlternates,
DISPID_SRCState,
DISPID_SRCRetainedAudio,
DISPID_SRCRetainedAudioFormat,
DISPID_SRCPause,
DISPID_SRCResume,
DISPID_SRCCreateGrammar,
DISPID_SRCCreateResultFromMemory,
DISPID_SRCBookmark,
DISPID_SRCSetAdaptationData,
SpeechRetainedAudioOptions,
SRAONone,
SRAORetainAudio,
SpeechBookmarkOptions,
SBONone,
SBOPause,
SpeechInterference,
SINone,
SINoise,
SINoSignal,
SITooLoud,
SITooQuiet,
SITooFast,
SITooSlow,
SpeechRecoEvents,
SREStreamEnd,
SRESoundStart,
SRESoundEnd,
SREPhraseStart,
SRERecognition,
SREHypothesis,
SREBookmark,
SREPropertyNumChange,
SREPropertyStringChange,
SREFalseRecognition,
SREInterference,
SRERequestUI,
SREStateChange,
SREAdaptation,
SREStreamStart,
SRERecoOtherContext,
SREAudioLevel,
SREPrivate,
SREAllEvents,
SpeechRecoContextState,
SRCS_Disabled,
SRCS_Enabled,
DISPIDSPRG,
DISPID_SRGId,
DISPID_SRGRecoContext,
DISPID_SRGState,
DISPID_SRGRules,
DISPID_SRGReset,
DISPID_SRGCommit,
DISPID_SRGCmdLoadFromFile,
DISPID_SRGCmdLoadFromObject,
DISPID_SRGCmdLoadFromResource,
DISPID_SRGCmdLoadFromMemory,
DISPID_SRGCmdLoadFromProprietaryGrammar,
DISPID_SRGCmdSetRuleState,
DISPID_SRGCmdSetRuleIdState,
DISPID_SRGDictationLoad,
DISPID_SRGDictationUnload,
DISPID_SRGDictationSetState,
DISPID_SRGSetWordSequenceData,
DISPID_SRGSetTextSelection,
DISPID_SRGIsPronounceable,
SpeechLoadOption,
SLOStatic,
SLODynamic,
SpeechWordPronounceable,
SWPUnknownWordUnpronounceable,
SWPUnknownWordPronounceable,
SWPKnownWordPronounceable,
SpeechGrammarState,
SGSEnabled,
SGSDisabled,
SGSExclusive,
SpeechRuleState,
SGDSInactive,
SGDSActive,
SGDSActiveWithAutoPause,
SGDSActiveUserDelimited,
SpeechRuleAttributes,
SRATopLevel,
SRADefaultToActive,
SRAExport,
SRAImport,
SRAInterpreter,
SRADynamic,
SRARoot,
SpeechGrammarWordType,
SGDisplay,
SGLexical,
SGPronounciation,
SGLexicalNoSpecialChars,
DISPID_SpeechRecoContextEvents,
DISPID_SRCEStartStream,
DISPID_SRCEEndStream,
DISPID_SRCEBookmark,
DISPID_SRCESoundStart,
DISPID_SRCESoundEnd,
DISPID_SRCEPhraseStart,
DISPID_SRCERecognition,
DISPID_SRCEHypothesis,
DISPID_SRCEPropertyNumberChange,
DISPID_SRCEPropertyStringChange,
DISPID_SRCEFalseRecognition,
DISPID_SRCEInterference,
DISPID_SRCERequestUI,
DISPID_SRCERecognizerStateChange,
DISPID_SRCEAdaptation,
DISPID_SRCERecognitionForOtherContext,
DISPID_SRCEAudioLevel,
DISPID_SRCEEnginePrivate,
SpeechRecognitionType,
SRTStandard,
SRTAutopause,
SRTEmulated,
SRTSMLTimeout,
SRTExtendableParse,
SRTReSent,
DISPID_SpeechGrammarRule,
DISPID_SGRAttributes,
DISPID_SGRInitialState,
DISPID_SGRName,
DISPID_SGRId,
DISPID_SGRClear,
DISPID_SGRAddResource,
DISPID_SGRAddState,
DISPID_SpeechGrammarRules,
DISPID_SGRsCount,
DISPID_SGRsDynamic,
DISPID_SGRsAdd,
DISPID_SGRsCommit,
DISPID_SGRsCommitAndSave,
DISPID_SGRsFindRule,
DISPID_SGRsItem,
DISPID_SGRs_NewEnum,
DISPID_SpeechGrammarRuleState,
DISPID_SGRSRule,
DISPID_SGRSTransitions,
DISPID_SGRSAddWordTransition,
DISPID_SGRSAddRuleTransition,
DISPID_SGRSAddSpecialTransition,
SpeechSpecialTransitionType,
SSTTWildcard,
SSTTDictation,
SSTTTextBuffer,
DISPID_SpeechGrammarRuleStateTransitions,
DISPID_SGRSTsCount,
DISPID_SGRSTsItem,
DISPID_SGRSTs_NewEnum,
DISPID_SpeechGrammarRuleStateTransition,
DISPID_SGRSTType,
DISPID_SGRSTText,
DISPID_SGRSTRule,
DISPID_SGRSTWeight,
DISPID_SGRSTPropertyName,
DISPID_SGRSTPropertyId,
DISPID_SGRSTPropertyValue,
DISPID_SGRSTNextState,
SpeechGrammarRuleStateTransitionType,
SGRSTTEpsilon,
SGRSTTWord,
SGRSTTRule,
SGRSTTDictation,
SGRSTTWildcard,
SGRSTTTextBuffer,
DISPIDSPTSI,
DISPIDSPTSI_ActiveOffset,
DISPIDSPTSI_ActiveLength,
DISPIDSPTSI_SelectionOffset,
DISPIDSPTSI_SelectionLength,
DISPID_SpeechRecoResult,
DISPID_SRRRecoContext,
DISPID_SRRTimes,
DISPID_SRRAudioFormat,
DISPID_SRRPhraseInfo,
DISPID_SRRAlternates,
DISPID_SRRAudio,
DISPID_SRRSpeakAudio,
DISPID_SRRSaveToMemory,
DISPID_SRRDiscardResultInfo,
SpeechDiscardType,
SDTProperty,
SDTReplacement,
SDTRule,
SDTDisplayText,
SDTLexicalForm,
SDTPronunciation,
SDTAudio,
SDTAlternates,
SDTAll,
DISPID_SpeechXMLRecoResult,
DISPID_SRRGetXMLResult,
DISPID_SRRGetXMLErrorInfo,
DISPID_SpeechRecoResult2,
DISPID_SRRSetTextFeedback,
DISPID_SpeechPhraseBuilder,
DISPID_SPPBRestorePhraseFromMemory,
DISPID_SpeechRecoResultTimes,
DISPID_SRRTStreamTime,
DISPID_SRRTLength,
DISPID_SRRTTickCount,
DISPID_SRRTOffsetFromStart,
DISPID_SpeechPhraseAlternate,
DISPID_SPARecoResult,
DISPID_SPAStartElementInResult,
DISPID_SPANumberOfElementsInResult,
DISPID_SPAPhraseInfo,
DISPID_SPACommit,
DISPID_SpeechPhraseAlternates,
DISPID_SPAsCount,
DISPID_SPAsItem,
DISPID_SPAs_NewEnum,
DISPID_SpeechPhraseInfo,
DISPID_SPILanguageId,
DISPID_SPIGrammarId,
DISPID_SPIStartTime,
DISPID_SPIAudioStreamPosition,
DISPID_SPIAudioSizeBytes,
DISPID_SPIRetainedSizeBytes,
DISPID_SPIAudioSizeTime,
DISPID_SPIRule,
DISPID_SPIProperties,
DISPID_SPIElements,
DISPID_SPIReplacements,
DISPID_SPIEngineId,
DISPID_SPIEnginePrivateData,
DISPID_SPISaveToMemory,
DISPID_SPIGetText,
DISPID_SPIGetDisplayAttributes,
DISPID_SpeechPhraseElement,
DISPID_SPEAudioTimeOffset,
DISPID_SPEAudioSizeTime,
DISPID_SPEAudioStreamOffset,
DISPID_SPEAudioSizeBytes,
DISPID_SPERetainedStreamOffset,
DISPID_SPERetainedSizeBytes,
DISPID_SPEDisplayText,
DISPID_SPELexicalForm,
DISPID_SPEPronunciation,
DISPID_SPEDisplayAttributes,
DISPID_SPERequiredConfidence,
DISPID_SPEActualConfidence,
DISPID_SPEEngineConfidence,
SpeechEngineConfidence,
SECLowConfidence,
SECNormalConfidence,
SECHighConfidence,
DISPID_SpeechPhraseElements,
DISPID_SPEsCount,
DISPID_SPEsItem,
DISPID_SPEs_NewEnum,
DISPID_SpeechPhraseReplacement,
DISPID_SPRDisplayAttributes,
DISPID_SPRText,
DISPID_SPRFirstElement,
DISPID_SPRNumberOfElements,
DISPID_SpeechPhraseReplacements,
DISPID_SPRsCount,
DISPID_SPRsItem,
DISPID_SPRs_NewEnum,
DISPID_SpeechPhraseProperty,
DISPID_SPPName,
DISPID_SPPId,
DISPID_SPPValue,
DISPID_SPPFirstElement,
DISPID_SPPNumberOfElements,
DISPID_SPPEngineConfidence,
DISPID_SPPConfidence,
DISPID_SPPParent,
DISPID_SPPChildren,
DISPID_SpeechPhraseProperties,
DISPID_SPPsCount,
DISPID_SPPsItem,
DISPID_SPPs_NewEnum,
DISPID_SpeechPhraseRule,
DISPID_SPRuleName,
DISPID_SPRuleId,
DISPID_SPRuleFirstElement,
DISPID_SPRuleNumberOfElements,
DISPID_SPRuleParent,
DISPID_SPRuleChildren,
DISPID_SPRuleConfidence,
DISPID_SPRuleEngineConfidence,
DISPID_SpeechPhraseRules,
DISPID_SPRulesCount,
DISPID_SPRulesItem,
DISPID_SPRules_NewEnum,
DISPID_SpeechLexicon,
DISPID_SLGenerationId,
DISPID_SLGetWords,
DISPID_SLAddPronunciation,
DISPID_SLAddPronunciationByPhoneIds,
DISPID_SLRemovePronunciation,
DISPID_SLRemovePronunciationByPhoneIds,
DISPID_SLGetPronunciations,
DISPID_SLGetGenerationChange,
SpeechLexiconType,
SLTUser,
SLTApp,
SpeechPartOfSpeech,
SPSNotOverriden,
SPSUnknown,
SPSNoun,
SPSVerb,
SPSModifier,
SPSFunction,
SPSInterjection,
SPSLMA,
SPSSuppressWord,
DISPID_SpeechLexiconWords,
DISPID_SLWsCount,
DISPID_SLWsItem,
DISPID_SLWs_NewEnum,
SpeechWordType,
SWTAdded,
SWTDeleted,
DISPID_SpeechLexiconWord,
DISPID_SLWLangId,
DISPID_SLWType,
DISPID_SLWWord,
DISPID_SLWPronunciations,
DISPID_SpeechLexiconProns,
DISPID_SLPsCount,
DISPID_SLPsItem,
DISPID_SLPs_NewEnum,
DISPID_SpeechLexiconPronunciation,
DISPID_SLPType,
DISPID_SLPLangId,
DISPID_SLPPartOfSpeech,
DISPID_SLPPhoneIds,
DISPID_SLPSymbolic,
DISPID_SpeechPhoneConverter,
DISPID_SPCLangId,
DISPID_SPCPhoneToId,
DISPID_SPCIdToPhone,
LIBID_SpeechLib,
ISpeechDataKey, ISpeechDataKeyVtbl,
ISpeechObjectToken, ISpeechObjectTokenVtbl,
ISpeechObjectTokens, ISpeechObjectTokensVtbl,
ISpeechObjectTokenCategory, ISpeechObjectTokenCategoryVtbl,
ISpeechAudioBufferInfo, ISpeechAudioBufferInfoVtbl,
ISpeechAudioStatus, ISpeechAudioStatusVtbl,
ISpeechAudioFormat, ISpeechAudioFormatVtbl,
ISpeechWaveFormatEx, ISpeechWaveFormatExVtbl,
ISpeechBaseStream, ISpeechBaseStreamVtbl,
ISpeechFileStream, ISpeechFileStreamVtbl,
ISpeechMemoryStream, ISpeechMemoryStreamVtbl,
ISpeechCustomStream, ISpeechCustomStreamVtbl,
ISpeechAudio, ISpeechAudioVtbl,
ISpeechMMSysAudio, ISpeechMMSysAudioVtbl,
ISpeechVoice, ISpeechVoiceVtbl,
ISpeechVoiceStatus, ISpeechVoiceStatusVtbl,
_ISpeechVoiceEvents, _ISpeechVoiceEventsVtbl,
ISpeechRecognizer, ISpeechRecognizerVtbl,
ISpeechRecognizerStatus, ISpeechRecognizerStatusVtbl,
ISpeechRecoContext, ISpeechRecoContextVtbl,
ISpeechRecoGrammar, ISpeechRecoGrammarVtbl,
_ISpeechRecoContextEvents, _ISpeechRecoContextEventsVtbl,
ISpeechGrammarRule, ISpeechGrammarRuleVtbl,
ISpeechGrammarRules, ISpeechGrammarRulesVtbl,
ISpeechGrammarRuleState, ISpeechGrammarRuleStateVtbl,
ISpeechGrammarRuleStateTransition, ISpeechGrammarRuleStateTransitionVtbl,
ISpeechGrammarRuleStateTransitions, ISpeechGrammarRuleStateTransitionsVtbl,
ISpeechTextSelectionInformation, ISpeechTextSelectionInformationVtbl,
ISpeechRecoResult, ISpeechRecoResultVtbl,
ISpeechRecoResult2, ISpeechRecoResult2Vtbl,
ISpeechRecoResultTimes, ISpeechRecoResultTimesVtbl,
ISpeechPhraseAlternate, ISpeechPhraseAlternateVtbl,
ISpeechPhraseAlternates, ISpeechPhraseAlternatesVtbl,
ISpeechPhraseInfo, ISpeechPhraseInfoVtbl,
ISpeechPhraseElement, ISpeechPhraseElementVtbl,
ISpeechPhraseElements, ISpeechPhraseElementsVtbl,
ISpeechPhraseReplacement, ISpeechPhraseReplacementVtbl,
ISpeechPhraseReplacements, ISpeechPhraseReplacementsVtbl,
ISpeechPhraseProperty, ISpeechPhrasePropertyVtbl,
ISpeechPhraseProperties, ISpeechPhrasePropertiesVtbl,
ISpeechPhraseRule, ISpeechPhraseRuleVtbl,
ISpeechPhraseRules, ISpeechPhraseRulesVtbl,
ISpeechLexicon, ISpeechLexiconVtbl,
ISpeechLexiconWords, ISpeechLexiconWordsVtbl,
ISpeechLexiconWord, ISpeechLexiconWordVtbl,
ISpeechLexiconPronunciations, ISpeechLexiconPronunciationsVtbl,
ISpeechLexiconPronunciation, ISpeechLexiconPronunciationVtbl,
Speech_Default_Weight,
Speech_Max_Word_Length,
Speech_Max_Pron_Length,
Speech_StreamPos_Asap,
Speech_StreamPos_RealTime,
SpeechAllElements,
ISpeechXMLRecoResult, ISpeechXMLRecoResultVtbl,
ISpeechRecoResultDispatch, ISpeechRecoResultDispatchVtbl,
ISpeechPhraseInfoBuilder, ISpeechPhraseInfoBuilderVtbl,
ISpeechPhoneConverter, ISpeechPhoneConverterVtbl,
CLSID_SpNotifyTranslator,
CLSID_SpObjectTokenCategory,
CLSID_SpObjectToken,
CLSID_SpResourceManager,
CLSID_SpStreamFormatConverter,
CLSID_SpMMAudioEnum,
CLSID_SpMMAudioIn,
CLSID_SpMMAudioOut,
CLSID_SpStream,
CLSID_SpVoice,
CLSID_SpSharedRecoContext,
CLSID_SpInprocRecognizer,
CLSID_SpSharedRecognizer,
CLSID_SpLexicon,
CLSID_SpUnCompressedLexicon,
CLSID_SpCompressedLexicon,
CLSID_SpShortcut,
CLSID_SpPhoneConverter,
CLSID_SpPhoneticAlphabetConverter,
CLSID_SpNullPhoneConverter,
CLSID_SpTextSelectionInformation,
CLSID_SpPhraseInfoBuilder,
CLSID_SpAudioFormat,
CLSID_SpWaveFormatEx,
CLSID_SpInProcRecoContext,
CLSID_SpCustomStream,
CLSID_SpFileStream,
CLSID_SpMemoryStream,
};<|fim▁end|> | DISPID_SOTCreateInstance,
DISPID_SOTRemove,
DISPID_SOTGetStorageFileName, |
<|file_name|>HelloControllerTest.java<|end_file_name|><|fim▁begin|>package hello;
import static org.hamcrest.Matchers.equalTo;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;<|fim▁hole|>import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
/**
 * Verifies via MockMvc that a GET request to the root path returns the expected greeting.
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
public class HelloControllerTest {
@Autowired
private MockMvc mvc;
@Test
public void testGetHello() throws Exception{
mvc.perform(MockMvcRequestBuilders.get("/").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().string(equalTo("Greetings from Spring Boot!")));
}
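// Illustrative sketch only (not part of the original test): the same call chain
// could also assert on the response content type, e.g.
//   .andExpect(content().contentTypeCompatibleWith(MediaType.TEXT_PLAIN))
// assuming the controller returns a plain-text greeting.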
}<|fim▁end|> | |
<|file_name|>hello.rs<|end_file_name|><|fim▁begin|>// compile with `rustc hello.rs`
// run with `./hello`
fn greeter(greeting_text: &str) {
println!("Hello {}!", greeting_text);
}
<|fim▁hole|> greeter("World");
}<|fim▁end|> | fn main() { |
<|file_name|>bundles.js<|end_file_name|><|fim▁begin|>$(document).ready(function() {
SVGUpInstance.init('inforamaui',
{"icons": {
"logo":{"url":"images/inforama-icon.svg"},
"downarrow":{"url":"images/down-arrow.svg"},
"usericon":{"url":"images/user-icon.svg"}
},
"classes":{
"mainstyle":{
"svgdefault":{"fillcolor":"#AA8833"},
"svghover":{"fillcolor":"#8CC63E"},
"cssdefault":{"opacity":"0.3", "width":"40px", "height":"40px", "transition":"all 0.5s"},<|fim▁hole|> );
});<|fim▁end|> | "csshover":{"opacity":"1", "width":"50px", "height":"50px"}
}
}}
|
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package auth
import (
"fmt"
"io/ioutil"
"net/http"
"strings"
"golang.org/x/oauth2"
"golang.org/x/oauth2/github"
"github.com/gorilla/mux"
"github.com/gorilla/schema"
"github.com/gorilla/sessions"
"github.com/wolfeidau/proxy-auth/assets"
)
const (
// PathPrefix the path used by this authentication module
PathPrefix = "/auth"
// LoginURL is the URL which users are redirected to if they aren't logged in
LoginURL = "/auth/login"
)
// Server the authentication server
type Server struct {
mux *mux.Router
sessionStore sessions.Store
oauthConfig *oauth2.Config
}
// NewServer creates a server with the standard endpionts registered
func NewServer(sessionStore sessions.Store) *Server {
conf := &oauth2.Config{
ClientID: defaultGitHubConfig.ClientID,
ClientSecret: defaultGitHubConfig.ClientSecret,
Scopes: []string{"user:email"},
Endpoint: github.Endpoint,
}
r := mux.NewRouter().PathPrefix("/auth").Subrouter()
s := &Server{mux: r, sessionStore: sessionStore, oauthConfig: conf}
r.HandleFunc("/github/authorize", s.loginGitHubAuthorise).Methods("GET")
r.HandleFunc("/login", s.loginHandler).Methods("GET")
r.HandleFunc("/logout", s.logoutHandler).Methods("GET")
r.HandleFunc("/github/redirect", s.redirectGitHubHandler).Methods("GET")
r.HandleFunc("/{asset}", s.assetHandler).Methods("GET")
return s
}
// GetMux returns the mux with the standard http handlers already registered
func (s *Server) GetMux() *mux.Router {
return s.mux
}
// CheckSession middleware function to validate the session cookie is set
func CheckSession(handler http.Handler, store sessions.Store) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
session, _ := store.Get(r, "auth")
<|fim▁hole|> if !strings.HasPrefix(r.URL.String(), "/auth/") && session.Values["email"] == nil {
http.Redirect(w, r, LoginURL, http.StatusFound)
return
}
handler.ServeHTTP(w, r)
})
}
func (s *Server) loginGitHubAuthorise(w http.ResponseWriter, r *http.Request) {
fmt.Println("loginGitHubAuthorise")
session, _ := s.sessionStore.Get(r, "auth")
state, _ := generateState()
// assign the state variable in the session
session.Values["state"] = state
session.Save(r, w)
url := s.oauthConfig.AuthCodeURL(state, oauth2.AccessTypeOnline)
http.Redirect(w, r, url, http.StatusFound)
}
func (s *Server) loginHandler(w http.ResponseWriter, r *http.Request) {
fmt.Println("loginHandler")
//session, _ := s.sessionStore.Get(r, "auth")
buf, err := assets.Asset("index.html")
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(buf)
}
func (s *Server) logoutHandler(w http.ResponseWriter, r *http.Request) {
fmt.Println("logoutHandler")
session, _ := s.sessionStore.Get(r, "auth")
delete(session.Values, "name")
delete(session.Values, "email")
session.Save(r, w)
http.Redirect(w, r, LoginURL, http.StatusFound)
}
type redirectOptions struct {
Code string `schema:"code"`
State string `schema:"state"`
}
func (s *Server) redirectGitHubHandler(w http.ResponseWriter, r *http.Request) {
fmt.Println("redirectGitHubHandler")
session, _ := s.sessionStore.Get(r, "auth")
state := session.Values["state"]
decoder := schema.NewDecoder()
reqOpts := new(redirectOptions)
decoder.Decode(reqOpts, r.URL.Query())
// check state
if state != reqOpts.State {
//fmt.Printf("woops got %s expected %s\n", state, reqOpts.State)
err := fmt.Errorf("state missing")
http.Error(w, err.Error(), http.StatusInternalServerError)
return
} else {
delete(session.Values, "state")
}
tok, err := s.oauthConfig.Exchange(oauth2.NoContext, reqOpts.Code)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
client := s.oauthConfig.Client(oauth2.NoContext, tok)
resp, err := client.Get("https://api.github.com/user")
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
user, err := DecodeGitHubUser(body)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
session.Values["name"] = user.Name
session.Values["email"] = user.Email
session.Save(r, w)
http.Redirect(w, r, "/", http.StatusFound)
}
func (s *Server) assetHandler(w http.ResponseWriter, r *http.Request) {
assetName := mux.Vars(r)["asset"]
fmt.Printf("url=%s asset=%s\n", r.URL.String(), assetName)
buf, err := assets.Asset(assetName)
if err != nil {
http.Error(w, err.Error(), http.StatusNotFound)
return
}
w.Write(buf)
}<|fim▁end|> | fmt.Printf("CheckSession email=%v url=%s\n", session.Values["email"], r.URL.String())
|
<|file_name|>config_dump.rs<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::cluster::cluster_manager::SharedClusterManager;
use crate::filters::manager::SharedFilterManager;
use crate::endpoint::Endpoint;
use hyper::http::HeaderValue;
use hyper::{Body, Response, StatusCode};
use serde::Serialize;
use std::sync::Arc;
#[derive(Debug, Serialize)]
struct ClusterDump {
name: &'static str,
endpoints: Vec<Endpoint>,
}
#[derive(Debug, Serialize)]
struct ConfigDump {
clusters: Vec<ClusterDump>,
filterchain: FilterChainDump,
}
#[derive(Debug, Serialize)]
struct FilterConfigDump {
name: String,
config: Arc<serde_json::Value>,
}
#[derive(Debug, Serialize)]
struct FilterChainDump {
filters: Vec<FilterConfigDump>,
}
pub(crate) fn handle_request(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Response<Body> {
let mut response = Response::new(Body::empty());
match create_config_dump_json(cluster_manager, filter_manager) {
Ok(body) => {
*response.status_mut() = StatusCode::OK;
response
.headers_mut()
.insert("Content-Type", HeaderValue::from_static("application/json"));
*response.body_mut() = Body::from(body);
}
Err(err) => {
*response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
*response.body_mut() = Body::from(format!("failed to create config dump: {err}"));
}
}
response
}
fn create_config_dump_json(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Result<String, serde_json::Error> {
let endpoints = {
let cluster_manager = cluster_manager.read();
// Clone the list of endpoints immediately so that we don't hold on
// to the cluster manager's lock while serializing.
cluster_manager
.get_all_endpoints()
.map(|upstream_endpoints| upstream_endpoints.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default()
};
let filters = {
let filter_manager = filter_manager.read();
// Clone the list of filter configs immediately so that we don't hold on
// to the filter manager's lock while serializing.
filter_manager
.get_filter_chain()
.get_configs()
.map(|(name, config)| FilterConfigDump {
name: name.into(),
config,
})
.collect::<Vec<_>>()
};
let dump = ConfigDump {
clusters: vec![ClusterDump {
name: "default-quilkin-cluster",
endpoints,
}],
filterchain: FilterChainDump { filters },
};
serde_json::to_string_pretty(&dump)
}
#[cfg(test)]
mod tests {
use super::handle_request;
use crate::cluster::cluster_manager::ClusterManager;
use crate::endpoint::{Endpoint, Endpoints};
use crate::filters::{manager::FilterManager, CreateFilterArgs, FilterChain};
use std::sync::Arc;
#[tokio::test]
async fn test_handle_request() {
let cluster_manager = ClusterManager::fixed(
Endpoints::new(vec![Endpoint::new(([127, 0, 0, 1], 8080).into())]).unwrap(),
)
.unwrap();
let debug_config = serde_yaml::from_str("id: hello").unwrap();
let debug_factory = crate::filters::debug::factory();
let debug_filter = debug_factory
.create_filter(CreateFilterArgs::fixed(Some(debug_config)))
.unwrap();
let filter_manager = FilterManager::fixed(Arc::new(
FilterChain::new(vec![(debug_factory.name().into(), debug_filter)]).unwrap(),
));
let mut response = handle_request(cluster_manager, filter_manager);
assert_eq!(response.status(), hyper::StatusCode::OK);
assert_eq!(
response.headers().get("Content-Type").unwrap(),
"application/json"
);
let body = hyper::body::to_bytes(response.body_mut()).await.unwrap();
let body = String::from_utf8(body.into_iter().collect()).unwrap();
let expected = serde_json::json!({
"clusters": [{
"name": "default-quilkin-cluster",
"endpoints": [{
"address": {
"host": "127.0.0.1",
"port": 8080,
},
"metadata": {<|fim▁hole|> "quilkin.dev": {
"tokens": []
}
}
}]
}],
"filterchain": {
"filters": [{
"name": "quilkin.filters.debug.v1alpha1.Debug",
"config":{
"id": "hello"
}
}]
}
});
assert_eq!(
expected,
serde_json::from_str::<serde_json::Value>(body.as_str()).unwrap()
);
}
}<|fim▁end|> | |
<|file_name|>adder.spec.ts<|end_file_name|><|fim▁begin|>import Adder from "./adder";
describe("test adder", () => {
it("should add", () => {<|fim▁hole|>});<|fim▁end|> | let adder = new Adder(1);
expect(adder.add(1)).toBe(2);
}); |
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from Plugins.Plugin import PluginDescriptor
from Components.PluginComponent import plugins
from enigma import eDBoxLCD
from .qpip import QuadPipScreen, setDecoderMode
def main(session, **kwargs):
session.open(QuadPipScreen)
def autoStart(reason, **kwargs):
if reason == 0:
setDecoderMode("normal")
elif reason == 1:
pass
def Plugins(**kwargs):
list = []
list.append(
PluginDescriptor(name=_("Enable Quad PIP"),
description="Quad Picture in Picture",
where=[PluginDescriptor.WHERE_EXTENSIONSMENU],
fnc=main))
list.append(<|fim▁hole|>
return list<|fim▁end|> | PluginDescriptor(
where=[PluginDescriptor.WHERE_AUTOSTART],
fnc=autoStart)) |
<|file_name|>celery.py<|end_file_name|><|fim▁begin|>{% if cookiecutter.use_celery == 'y' %}
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('{{cookiecutter.project_slug}}')
class CeleryConfig(AppConfig):
name = '{{cookiecutter.project_slug}}.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
{% if cookiecutter.use_sentry_for_error_reporting == 'y' -%}
if hasattr(settings, 'RAVEN_CONFIG'):
# Celery signal registration
from raven import Client as RavenClient
from raven.contrib.celery import register_signal as raven_register_signal
from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
raven_register_logger_signal(raven_client)
raven_register_signal(raven_client)
{%- endif %}
<|fim▁hole|> print('Request: {0!r}'.format(self.request)) # pragma: no cover
{% else %}
# Use this as a starting point for your project with celery.
# If you are not using celery, you can remove this app
{% endif -%}<|fim▁end|> | @app.task(bind=True)
def debug_task(self): |
<|file_name|>rotationTest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2009 Prof. William H. Green ([email protected]) and the
# RMG Team ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This script contains unit tests of the :mod:`rmgpy.statmech.rotation` module.
"""
import unittest
import math
import numpy
from rmgpy.statmech.rotation import LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor
import rmgpy.constants as constants
################################################################################
class TestLinearRotor(unittest.TestCase):
"""
Contains unit tests of the LinearRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = 11.75
self.symmetry = 2
self.quantum = False
self.mode = LinearRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the LinearRotor.rotationalConstant property.
"""
Bexp = 1.434692
Bact = self.mode.rotationalConstant.value_si
self.assertAlmostEqual(Bexp, Bact, 4)
def test_setRotationalConstant(self):
"""
Test setting the LinearRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
self.assertAlmostEqual(Iexp, Iact, 4)
def test_getLevelEnergy(self):
"""
Test the LinearRotor.getLevelEnergy() method.
"""
B = self.mode.rotationalConstant.value_si * constants.h * constants.c * 100.
B *= constants.Na
for J in range(0, 100):
Eexp = B * J * (J + 1)
Eact = self.mode.getLevelEnergy(J)
if J == 0:
self.assertEqual(Eact, 0)
else:
self.assertAlmostEqual(Eexp, Eact, delta=1e-4*Eexp)
def test_getLevelDegeneracy(self):
"""
Test the LinearRotor.getLevelDegeneracy() method.
"""
for J in range(0, 100):
gexp = 2 * J + 1
gact = self.mode.getLevelDegeneracy(J)
self.assertEqual(gexp, gact)
def test_getPartitionFunction_classical(self):
"""
Test the LinearRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([72.6691, 121.115, 242.230, 363.346, 484.461])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getPartitionFunction_quantum(self):
"""
Test the LinearRotor.getPartitionFunction() method for a quantum
rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([72.8360, 121.282, 242.391, 363.512, 484.627])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the LinearRotor.getHeatCapacity() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1, 1, 1, 1, 1]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getHeatCapacity_quantum(self):
"""
Test the LinearRotor.getHeatCapacity() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1, 1, 1, 1, 1]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the LinearRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1, 1, 1, 1, 1]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEnthalpy_quantum(self):
"""
Test the LinearRotor.getEnthalpy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([0.997705, 0.998624, 0.999312, 0.999541, 0.999656]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the LinearRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([5.28592, 5.79674, 6.48989, 6.89535, 7.18304]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getEntropy_quantum(self):
"""
Test the LinearRotor.getEntropy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([5.28592, 5.79674, 6.48989, 6.89535, 7.18304]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the LinearRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(1, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n]) / sumStates[n], 1.0, 3)
def test_getSumOfStates_quantum(self):
"""
Test the LinearRotor.getSumOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 4000.*11.96, 2.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(1, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n+1]) / sumStates[n], 1.0, 3)
def test_getDensityOfStates_classical(self):
"""
Test the LinearRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 4000.*11.96, 1.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_getDensityOfStates_quantum(self):
"""
Test the LinearRotor.getDensityOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 4000.*11.96, 2.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a LinearRotor object can be reconstructed from its repr()
output with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a LinearRotor object can be pickled and unpickled with no
loss of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
################################################################################
class TestNonlinearRotor(unittest.TestCase):
"""
Contains unit tests of the NonlinearRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = numpy.array([3.415, 16.65, 20.07])
self.symmetry = 4
self.quantum = False
self.mode = NonlinearRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the NonlinearRotor.rotationalConstant property.
"""
Bexp = numpy.array([4.93635, 1.0125, 0.839942])
Bact = self.mode.rotationalConstant.value_si
for B0, B in zip(Bexp, Bact):
self.assertAlmostEqual(B0, B, 4)
def test_setRotationalConstant(self):
"""
Test setting the NonlinearRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
for I0, I in zip(Iexp, Iact):
self.assertAlmostEqual(I0, I, 4)
def test_getPartitionFunction_classical(self):
"""
Test the NonlinearRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([651.162, 1401.08, 3962.84, 7280.21, 11208.6])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the NonlinearRotor.getHeatCapacity() method using a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the NonlinearRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the NonlinearRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([7.97876, 8.74500, 9.78472, 10.3929, 10.8244]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the NonlinearRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
sumStates = self.mode.getSumOfStates(Elist)
densStates = self.mode.getDensityOfStates(Elist)
for n in range(10, len(Elist)):
self.assertTrue(0.8 < numpy.sum(densStates[0:n]) / sumStates[n] < 1.25, '{0} != {1}'.format(numpy.sum(densStates[0:n]), sumStates[n]))
def test_getDensityOfStates_classical(self):
"""
Test the NonlinearRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
densStates = self.mode.getDensityOfStates(Elist)
T = 100
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a NonlinearRotor object can be reconstructed from its
repr() output with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertEqual(self.mode.inertia.value.shape, mode.inertia.value.shape)
for I0, I in zip(self.mode.inertia.value, mode.inertia.value):
self.assertAlmostEqual(I0, I, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a NonlinearRotor object can be pickled and unpickled with
no loss of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertEqual(self.mode.inertia.value.shape, mode.inertia.value.shape)
for I0, I in zip(self.mode.inertia.value, mode.inertia.value):
self.assertAlmostEqual(I0, I, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
################################################################################
class TestKRotor(unittest.TestCase):
"""
Contains unit tests of the KRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = 11.75
self.symmetry = 2
self.quantum = False
self.mode = KRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the KRotor.rotationalConstant property.
"""
Bexp = 1.434692
Bact = self.mode.rotationalConstant.value_si
self.assertAlmostEqual(Bexp, Bact, 4)
def test_setRotationalConstant(self):
"""
Test setting the KRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
self.assertAlmostEqual(Iexp, Iact, 4)
def test_getLevelEnergy(self):
"""
Test the KRotor.getLevelEnergy() method.
"""
B = self.mode.rotationalConstant.value_si * constants.h * constants.c * 100.
B *= constants.Na
for J in range(0, 100):
Eexp = float(B * J * J)
Eact = float(self.mode.getLevelEnergy(J))
if J == 0:
self.assertEqual(Eact, 0)
else:
self.assertAlmostEqual(Eexp, Eact, delta=1e-4*Eexp)
def test_getLevelDegeneracy(self):
"""
Test the KRotor.getLevelDegeneracy() method.
"""
for J in range(0, 100):
gexp = 1 if J == 0 else 2
gact = self.mode.getLevelDegeneracy(J)
self.assertEqual(gexp, gact, '{0} != {1}'.format(gact, gexp))
def test_getPartitionFunction_classical(self):
"""
Test the KRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([10.6839, 13.7929, 19.5060, 23.8899, 27.5857])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getPartitionFunction_quantum(self):
"""
Test the KRotor.getPartitionFunction() method for a quantum
rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([10.6839, 13.7929, 19.5060, 23.8899, 27.5857])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the KRotor.getHeatCapacity() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getHeatCapacity_quantum(self):
"""
Test the KRotor.getHeatCapacity() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the KRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEnthalpy_quantum(self):
"""
Test the KRotor.getEnthalpy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)<|fim▁hole|> self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the KRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([2.86874, 3.12415, 3.47072, 3.67346, 3.81730]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getEntropy_quantum(self):
"""
Test the KRotor.getEntropy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([2.86874, 3.12415, 3.47072, 3.67346, 3.81730]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the KRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
sumStates = self.mode.getSumOfStates(Elist)
densStates = self.mode.getDensityOfStates(Elist)
for n in range(10, len(Elist)):
self.assertTrue(0.75 < numpy.sum(densStates[0:n+1]) / sumStates[n] < 1.3333, '{0} != {1}'.format(numpy.sum(densStates[0:n+1]), sumStates[n]))
def test_getSumOfStates_quantum(self):
"""
Test the KRotor.getSumOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
sumStates = self.mode.getSumOfStates(Elist)
densStates = self.mode.getDensityOfStates(Elist)
for n in range(10, len(Elist)):
self.assertTrue(0.8 < numpy.sum(densStates[0:n+1]) / sumStates[n] < 1.25, '{0} != {1}'.format(numpy.sum(densStates[0:n+1]), sumStates[n]))
def test_getDensityOfStates_classical(self):
"""
Test the KRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 3000*11.96, 0.05*11.96)
densStates = self.mode.getDensityOfStates(Elist)
T = 500
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_getDensityOfStates_quantum(self):
"""
Test the KRotor.getDensityOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 4000*11.96, 2*11.96)
densStates = self.mode.getDensityOfStates(Elist)
T = 500
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a KRotor object can be reconstructed from its repr() output
with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a KRotor object can be pickled and unpickled with no loss
of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
################################################################################
class TestSphericalTopRotor(unittest.TestCase):
"""
Contains unit tests of the SphericalTopRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = 11.75
self.symmetry = 2
self.quantum = False
self.mode = SphericalTopRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the SphericalTopRotor.rotationalConstant property.
"""
Bexp = 1.434692
Bact = self.mode.rotationalConstant.value_si
self.assertAlmostEqual(Bexp, Bact, 4)
def test_setRotationalConstant(self):
"""
Test setting the SphericalTopRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
self.assertAlmostEqual(Iexp, Iact, 4)
def test_getLevelEnergy(self):
"""
Test the SphericalTopRotor.getLevelEnergy() method.
"""
B = self.mode.rotationalConstant.value_si * constants.h * constants.c * 100.
B *= constants.Na
for J in range(0, 100):
Eexp = B * J * (J + 1)
Eact = self.mode.getLevelEnergy(J)
if J == 0:
self.assertEqual(Eact, 0)
else:
self.assertAlmostEqual(Eexp, Eact, delta=1e-4*Eexp)
def test_getLevelDegeneracy(self):
"""
Test the SphericalTopRotor.getLevelDegeneracy() method.
"""
for J in range(0, 100):
gexp = (2 * J + 1)**2
gact = self.mode.getLevelDegeneracy(J)
self.assertEqual(gexp, gact, '{0} != {1}'.format(gact, gexp))
def test_getPartitionFunction_classical(self):
"""
Test the SphericalTopRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([1552.74, 3340.97, 9449.69, 17360.2, 26727.8])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getPartitionFunction_quantum(self):
"""
Test the SphericalTopRotor.getPartitionFunction() method for a quantum
rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([1555.42, 3344.42, 9454.57, 17366.2, 26734.7])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the SphericalTopRotor.getHeatCapacity() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getHeatCapacity_quantum(self):
"""
Test the SphericalTopRotor.getHeatCapacity() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the SphericalTopRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEnthalpy_quantum(self):
"""
Test the SphericalTopRotor.getEnthalpy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1.49828, 1.49897, 1.49948, 1.49966, 1.49974]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the SphericalTopRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([8.84778, 9.61402, 10.6537, 11.2619, 11.6935]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getEntropy_quantum(self):
"""
Test the SphericalTopRotor.getEntropy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([8.84778, 9.61402, 10.6537, 11.2619, 11.6935]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the SphericalTopRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(20, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n+1]) / sumStates[n], 1.0, 1)
def test_getSumOfStates_quantum(self):
"""
Test the SphericalTopRotor.getSumOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(1, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n+1]) / sumStates[n], 1.0, 3)
def test_getDensityOfStates_classical(self):
"""
Test the SphericalTopRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_getDensityOfStates_quantum(self):
"""
Test the SphericalTopRotor.getDensityOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 4000*11.96, 2.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a SphericalTopRotor object can be reconstructed from its
repr() output with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a SphericalTopRotor object can be pickled and unpickled
with no loss of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)<|fim▁end|> | |
<|file_name|>const-enum-structlike.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(struct_variant)]
enum E {
S0 { s: String },
S1 { u: uint }
}
<|fim▁hole|>
pub fn main() {
match C {
S0 { .. } => panic!(),
S1 { u } => assert!(u == 23)
}
}<|fim▁end|> | static C: E = S1 { u: 23 }; |
<|file_name|>Init.js<|end_file_name|><|fim▁begin|>// urlParams is null when used for embedding
window.urlParams = window.urlParams || {};
// isLocalStorage controls access to local storage
window.isLocalStorage = window.isLocalStorage || false;
// Checks for SVG support
window.isSvgBrowser = window.isSvgBrowser || (navigator.userAgent.indexOf('MSIE') < 0 || document.documentMode >= 9);
// CUSTOM_PARAMETERS - URLs for save and export
window.EXPORT_URL = window.EXPORT_URL || 'https://exp.draw.io/ImageExport4/export';
window.SAVE_URL = window.SAVE_URL || 'save';
window.OPEN_URL = window.OPEN_URL || 'open';
window.PROXY_URL = window.PROXY_URL || 'proxy';
// Paths and files
window.SHAPES_PATH = window.SHAPES_PATH || 'shapes';
// Path for images inside the diagram
window.GRAPH_IMAGE_PATH = window.GRAPH_IMAGE_PATH || 'img';
window.ICONSEARCH_PATH = window.ICONSEARCH_PATH || (navigator.userAgent.indexOf('MSIE') >= 0 ||
urlParams['dev']) ? 'iconSearch' : 'https://www.draw.io/iconSearch';
window.TEMPLATE_PATH = window.TEMPLATE_PATH || '/templates';
// Directory for i18 files and basename for main i18n file
window.RESOURCES_PATH = window.RESOURCES_PATH || 'resources';
window.RESOURCE_BASE = window.RESOURCE_BASE || RESOURCES_PATH + '/dia';
// URL for logging
window.DRAWIO_LOG_URL = window.DRAWIO_LOG_URL || '';
// Sets the base path, the UI language via URL param and configures the
// supported languages to avoid 404s. The loading of all core language
// resources is disabled as all required resources are in grapheditor.
// properties. Note that in this example the loading of two resource
// files (the special bundle and the default bundle) is disabled to
// save a GET request. This requires that all resources be present in
// the special bundle.
window.mxLoadResources = window.mxLoadResources || false;
window.mxLanguage = window.mxLanguage || (function()
{
var lang = (urlParams['offline'] == '1') ? 'en' : urlParams['lang'];
// Known issue: No JSON object at this point in quirks in IE8
if (lang == null && typeof(JSON) != 'undefined')
{
// Cannot use mxSettings here
if (isLocalStorage)
{
try
{
var value = localStorage.getItem('.drawio-config');
if (value != null)
{
lang = JSON.parse(value).language || null;
}
}
catch (e)
{
// cookies are disabled, attempts to use local storage will cause
// a DOM error at a minimum on Chrome
isLocalStorage = false;
}
}
}
return lang;
})();
// Add new languages here. First entry is translated to [Automatic]
// in the menu definition in Diagramly.js.
window.mxLanguageMap = window.mxLanguageMap ||
{
'i18n': '',
'id' : 'Bahasa Indonesia',
'ms' : 'Bahasa Melayu',
'bs' : 'Bosanski',
'ca' : 'Català',
'cs' : 'Čeština',
'da' : 'Dansk',
'de' : 'Deutsch',
'et' : 'Eesti',
'en' : 'English',
'es' : 'Español',
'eo' : 'Esperanto',
'fil' : 'Filipino',
'fr' : 'Français',
'it' : 'Italiano',
'hu' : 'Magyar',
'nl' : 'Nederlands',
'no' : 'Norsk',
'pl' : 'Polski',
'pt-br' : 'Português (Brasil)',
'pt' : 'Português (Portugal)',
'ro' : 'Română',
'fi' : 'Suomi',
'sv' : 'Svenska',
'vi' : 'Tiếng Việt',
'tr' : 'Türkçe',
'el' : 'Ελληνικά',
'ru' : 'Русский',
'sr' : 'Српски',
'uk' : 'Українська',
'he' : 'עברית',
'ar' : 'العربية',
'th' : 'ไทย',
'ko' : '한국어',
'ja' : '日本語',
'zh' : '中文(中国)',
'zh-tw' : '中文(台灣)'
};
if (typeof window.mxBasePath === 'undefined')
{
window.mxBasePath = 'mxgraph';
}
if (window.mxLanguages == null)
{
window.mxLanguages = [];
// Populates the list of supported special language bundles
for (var lang in mxLanguageMap)
{
// Empty means default (ie. browser language), "en" means English (default for unsupported languages)
// Since "en" uses no extension this must not be added to the array of supported language bundles.
if (lang != 'en')
{
window.mxLanguages.push(lang);
}
}
}
/**
* Returns the global UI setting before running static draw.io code
*/
window.uiTheme = window.uiTheme || (function()
{
var ui = urlParams['ui'];
// Known issue: No JSON object at this point in quirks in IE8
if (ui == null && typeof JSON !== 'undefined')
{
// Cannot use mxSettings here
if (isLocalStorage)
{
try
{
var value = localStorage.getItem('.drawio-config');
if (value != null)
{
ui = JSON.parse(value).ui || null;
}
}
catch (e)
{
// cookies are disabled, attempts to use local storage will cause
// a DOM error at a minimum on Chrome
isLocalStorage = false;
}
}
}
return ui;
})();
/**
* Global function for loading local files via servlet
*/
function setCurrentXml(data, filename)
{
if (window.parent != null && window.parent.openFile != null)
{
window.parent.openFile.setData(data, filename);
}
};
/**
* Overrides splash URL parameter via local storage
*/
(function()
{
// Known issue: No JSON object at this point in quirks in IE8
if (typeof JSON !== 'undefined')
{
// Cannot use mxSettings here
if (isLocalStorage)
{
try
{
var value = localStorage.getItem('.drawio-config');
var showSplash = true;
if (value != null)
{
showSplash = JSON.parse(value).showStartScreen;
}
// Undefined means true
if (showSplash == false)
{
urlParams['splash'] = '0';
}
}
catch (e)
{
// ignore
}
}
}
})();
// Customizes export URL
var ex = urlParams['export'];
if (ex != null)
{
if (ex.substring(0, 7) != 'http://' && ex.substring(0, 8) != 'https://')
{
ex = 'http://' + ex;
}
EXPORT_URL = ex;
}
// Enables offline mode
if (urlParams['offline'] == '1' || urlParams['demo'] == '1' || urlParams['stealth'] == '1' || urlParams['local'] == '1')
{
urlParams['analytics'] = '0';
urlParams['picker'] = '0';
urlParams['gapi'] = '0';
urlParams['db'] = '0';
urlParams['od'] = '0';
urlParams['gh'] = '0';
}
// Disables math in offline mode
if (urlParams['offline'] == '1' || urlParams['local'] == '1')
{
urlParams['math'] = '0';
}
// Lightbox enabled chromeless mode
if (urlParams['lightbox'] == '1')
{
urlParams['chrome'] = '0';
}
// Adds hard-coded logging domain for draw.io domains
var host = window.location.host;
var searchString = 'draw.io';
var position = host.length - searchString.length;
var lastIndex = host.lastIndexOf(searchString, position);
if (lastIndex !== -1 && lastIndex === position && host != 'test.draw.io')
{
// endsWith polyfill<|fim▁hole|><|fim▁end|> | window.DRAWIO_LOG_URL = 'https://log.draw.io';
} |
<|file_name|>map.js<|end_file_name|><|fim▁begin|>'use strict';
var React = require('react'),
coreViews = require('../../core/views'),
mixins = require('../mixins');
var Map = React.createBackboneClass({
mixins: [
mixins.LayersMixin()
],
componentDidMount: function() {
this.mapView = new coreViews.MapView({
el: '#map'
});
},
componentWillUnmount: function() {
this.mapView.destroy();
},
componentDidUpdate: function() {
var model = this.getActiveLayer();<|fim▁hole|> this.mapView.clearLayers();
if (model) {
this.mapView.addLayer(model.getLeafletLayer(), model.getBounds());
this.mapView.fitBounds(model.getBounds());
}
},
render: function() {
return (
<div id="map-container">
<div id="map"></div>
</div>
);
}
});
module.exports = Map;<|fim▁end|> | |
<|file_name|>encoder.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Reborndb Org. All Rights Reserved.
// Licensed under the MIT (MIT-LICENSE.txt) license.<|fim▁hole|>package resp
import (
"bufio"
"bytes"
"strconv"
"github.com/juju/errors"
"github.com/ngaut/log"
)
type encoder struct {
w *bufio.Writer
}
var (
imap []string
)
func init() {
imap = make([]string, 1024*512+1024)
for i := 0; i < len(imap); i++ {
imap[i] = strconv.Itoa(i - 1024)
}
}
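// itos returns the decimal string for i, using the precomputed imap table
// for the range [-1024, 524287] and falling back to strconv elsewhere.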
func itos(i int64) string {
if n := i + 1024; n >= 0 && n < int64(len(imap)) {
return imap[n]
} else {
return strconv.FormatInt(i, 10)
}
}
func Encode(w *bufio.Writer, r Resp) error {
return encode(w, r, false)
}
func encode(w *bufio.Writer, r Resp, needFlush bool) error {
e := &encoder{w}
if err := e.encodeResp(r); err != nil {
return err
}
if needFlush {
return w.Flush()
} else {
return nil
}
}
func MustEncode(w *bufio.Writer, r Resp) {
if err := Encode(w, r); err != nil {
log.Fatalf("encode redis resp failed - %s", err)
}
}
const defaultEncodeBufSize = 16
func EncodeToBytes(r Resp) ([]byte, error) {
var b bytes.Buffer
err := encode(bufio.NewWriterSize(&b, defaultEncodeBufSize), r, true)
return b.Bytes(), err
}
func EncodeToString(r Resp) (string, error) {
var b bytes.Buffer
err := encode(bufio.NewWriterSize(&b, defaultEncodeBufSize), r, true)
return b.String(), err
}
func MustEncodeToBytes(r Resp) []byte {
b, err := EncodeToBytes(r)
if err != nil {
log.Fatalf("encode redis resp to bytes failed - %s", err)
}
return b
}
func (e *encoder) encodeResp(r Resp) error {
switch x := r.(type) {
default:
return errors.Trace(ErrBadRespType)
case *String:
if err := e.encodeType(TypeString); err != nil {
return err
}
return e.encodeText(x.Value)
case *Error:
if err := e.encodeType(TypeError); err != nil {
return err
}
return e.encodeText(x.Value)
case *Int:
if err := e.encodeType(TypeInt); err != nil {
return err
}
return e.encodeInt(x.Value)
case *BulkBytes:
if err := e.encodeType(TypeBulkBytes); err != nil {
return err
}
return e.encodeBulkBytes(x.Value)
case *Array:
if err := e.encodeType(TypeArray); err != nil {
return err
}
return e.encodeArray(x.Value)
case Ping:
return errors.Trace(e.w.WriteByte('\n'))
}
}
func (e *encoder) encodeType(t RespType) error {
return errors.Trace(e.w.WriteByte(byte(t)))
}
func (e *encoder) encodeText(s string) error {
if _, err := e.w.WriteString(s); err != nil {
return errors.Trace(err)
}
if _, err := e.w.WriteString("\r\n"); err != nil {
return errors.Trace(err)
}
return nil
}
func (e *encoder) encodeInt(v int64) error {
return e.encodeText(itos(v))
}
func (e *encoder) encodeBulkBytes(b []byte) error {
if b == nil {
return e.encodeInt(-1)
} else {
if err := e.encodeInt(int64(len(b))); err != nil {
return err
}
if _, err := e.w.Write(b); err != nil {
return errors.Trace(err)
}
if _, err := e.w.WriteString("\r\n"); err != nil {
return errors.Trace(err)
}
return nil
}
}
func (e *encoder) encodeArray(a []Resp) error {
if a == nil {
return e.encodeInt(-1)
} else {
if err := e.encodeInt(int64(len(a))); err != nil {
return err
}
for i := 0; i < len(a); i++ {
if err := e.encodeResp(a[i]); err != nil {
return err
}
}
return nil
}
}<|fim▁end|> | |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package com.sunnyface.popularmovies;
import android.content.Intent;
import android.databinding.DataBindingUtil;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.util.Pair;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.sunnyface.popularmovies.databinding.ActivityMainBinding;
import com.sunnyface.popularmovies.libs.Constants;
import com.sunnyface.popularmovies.models.Movie;
/**
* Root Activity.
* Loads
*/
public class MainActivity extends AppCompatActivity implements MainActivityFragment.Callback {
private boolean isTableLayout;
private FragmentManager fragmentManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ActivityMainBinding binding = DataBindingUtil.setContentView(this, R.layout.activity_main);
Toolbar toolbar = binding.actionBar.toolbar;
setSupportActionBar(toolbar);<|fim▁hole|> isTableLayout = binding.movieDetailContainer != null;
}
@Override
public void onItemSelected(Movie movie, View view) {
if (isTableLayout) {
Bundle args = new Bundle();
args.putBoolean("isTabletLayout", true);
args.putParcelable("movie", movie);
DetailFragment fragment = new DetailFragment();
fragment.setArguments(args);
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
fragmentTransaction.replace(R.id.movie_detail_container, fragment, Constants.MOVIE_DETAIL_FRAGMENT_TAG); //Replace its key.
fragmentTransaction.commit();
} else {
Intent intent = new Intent(this, DetailActivity.class);
intent.putExtra("movie", movie);
String movieID = "" + movie.getId();
Log.i("movieID: ", movieID);
// Doing some view transitions with style,
// But, we must check if we're running on Android 5.0 or higher to work.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
//KK Bind this to remove findViewByID??????
ImageView thumbnailImageView = (ImageView) view.findViewById(R.id.thumbnail);
TextView titleTextView = (TextView) view.findViewById(R.id.title);
Pair<View, String> transition_a = Pair.create((View) thumbnailImageView, "movie_cover");
Pair<View, String> transition_b = Pair.create((View) titleTextView, "movie_title");
ActivityOptionsCompat options = ActivityOptionsCompat.makeSceneTransitionAnimation(this, transition_a, transition_b);
startActivity(intent, options.toBundle());
} else {
startActivity(intent);
}
}
}
}<|fim▁end|> |
fragmentManager = getSupportFragmentManager(); |
<|file_name|>application.js<|end_file_name|><|fim▁begin|>import Route from '@ember/routing/route';
import { A } from '@ember/array';
import { hash } from 'rsvp';
import EmberObject from '@ember/object'
export default Route.extend({
model: function() {
return hash({
exampleModel: EmberObject.create(),
disableSubmit: false,
selectedLanguage: null,
selectOptions: A([
{label: 'French', value: 'fr'},
{label: 'English', value: 'en'},
{label: 'German', value: 'gr'}
]),
radioOptions: A([
{label: 'Ruby', value: 'ruby'},
{label: 'Javascript', value: 'js'},
{label: 'Cold Fusion', value: 'cf'}
])
});
},
actions: {
submit: function() {
window.alert('You triggered a form submit!');
},
toggleErrors: function() {
var model = this.get('currentModel').exampleModel;
if(model.get('errors')) {
model.set('errors', null);
}else{
var errors = {
first_name: A(['That first name is wrong']),
last_name: A(['That last name is silly']),
language: A(['Please choose a better language']),
isAwesome: A(['You must be awesome to submit this form']),
bestLanguage: A(['Wrong, Cold Fusion is the best language']),
essay: A(['This essay is not very good'])
};
model.set('errors', errors);
}
},
toggleSelectValue: function() {
if(this.get('currentModel.exampleModel.language')) {
this.set('currentModel.exampleModel.language', null);
}else{
this.set('currentModel.exampleModel.language', 'fr');
}
},
toggleSubmit: function() {
if(this.get('currentModel.disableSubmit')) {
this.set('currentModel.disableSubmit', false);
}else{
this.set('currentModel.disableSubmit', true);
}<|fim▁hole|> this.set('currentModel.exampleModel.isAwesome', false);
} else {
this.set('currentModel.exampleModel.isAwesome', true);
}
},
toggleRadio: function() {
if(this.get('currentModel.exampleModel.bestLanguage')) {
this.set('currentModel.exampleModel.bestLanguage', null);
}else{
this.set('currentModel.exampleModel.bestLanguage', 'js');
}
}
}
});<|fim▁end|> | },
toggleCheckbox: function() {
if(this.get('currentModel.exampleModel.isAwesome')) { |
<|file_name|>const-tuple-struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Bar(int, int);
static X: Bar = Bar(1, 2);
pub fn main() {
match X {
Bar(x, y) => {
assert!(x == 1);
assert!(y == 2);
}
}
}<|fim▁end|> | // |
<|file_name|>jquery.captcha.js<|end_file_name|><|fim▁begin|>/**
* @name ElkArte Forum
* @copyright ElkArte Forum contributors
* @license BSD http://opensource.org/licenses/BSD-3-Clause
*
* This software is a derived product, based on:
*
* Simple Machines Forum (SMF)
* copyright: 2011 Simple Machines (http://www.simplemachines.org)
* license: BSD, See included LICENSE.TXT for terms and conditions.
*
* @version 1.1 dev
*/
/**
* This file contains javascript associated with the captcha visual verification stuffs.
*/
<|fim▁hole|> var settings = {
// By default the letter count is five.
'letterCount' : 5,
'uniqueID' : '',
'imageURL' : '',
'useLibrary' : false,
'refreshevent': 'click',
'playevent': 'click',
'admin': false
};
$.extend(settings, options);
return this.each(function() {
$this = $(this);
if ($this.data('type') == 'sound')
{
// Maybe a voice is here to spread light?
$this.on(settings.playevent, function(e) {
e.preventDefault();
// Don't follow the link if the popup worked, which it would have done!
popupFailed = reqWin(settings.imageURL + ";sound", 400, 300);
if (!popupFailed)
{
if (is_ie && e.cancelBubble)
e.cancelBubble = true;
else if (e.stopPropagation)
{
e.stopPropagation();
e.preventDefault();
}
}
return popupFailed;
});
}
else
{
$this.on(settings.refreshevent, function(e) {
e.preventDefault();
var uniqueID = settings.uniqueID ? '_' + settings.uniqueID : '',
new_url = '',
i = 0;
// The Admin area is a bit different unfortunately
if (settings.admin)
{
settings.imageURL = $('#verification_image' + uniqueID).attr('src').replace(/.$/, '') + $this.val();
new_url = String(settings.imageURL);
}
else
{
// Make sure we are using a new rand code.
new_url = String(settings.imageURL);
new_url = new_url.substr(0, new_url.indexOf("rand=") + 5);
// Quick and dirty way of converting decimal to hex
var hexstr = "0123456789abcdef";
for (i = 0; i < 32; i++)
new_url = new_url + hexstr.substr(Math.floor(Math.random() * 16), 1);
}
if (settings.useLibrary)
{
$('#verification_image' + uniqueID).attr('src', new_url);
}
else if (document.getElementById("verification_image" + uniqueID))
{
for (i = 1; i <= settings.letterCount; i++)
if (document.getElementById("verification_image" + uniqueID + "_" + i))
document.getElementById("verification_image" + uniqueID + "_" + i).src = new_url + ";letter=" + i;
}
});
}
});
};
})( jQuery );<|fim▁end|> | (function($) {
$.fn.Elk_Captcha = function(options) { |
<|file_name|>tendermint.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Tendermint params deserialization.
use uint::Uint;
use hash::Address;
use super::ValidatorSet;
/// Tendermint params deserialization.
#[derive(Debug, PartialEq, Deserialize)]
pub struct TendermintParams {
/// Gas limit divisor.
#[serde(rename="gasLimitBoundDivisor")]
pub gas_limit_bound_divisor: Uint,
/// Valid validators.
pub validators: ValidatorSet,
/// Propose step timeout in milliseconds.
#[serde(rename="timeoutPropose")]
pub timeout_propose: Option<Uint>,
/// Prevote step timeout in milliseconds.
#[serde(rename="timeoutPrevote")]
pub timeout_prevote: Option<Uint>,
/// Precommit step timeout in milliseconds.
#[serde(rename="timeoutPrecommit")]
pub timeout_precommit: Option<Uint>,
/// Commit step timeout in milliseconds.
#[serde(rename="timeoutCommit")]
pub timeout_commit: Option<Uint>,
/// Block reward.
#[serde(rename="blockReward")]
pub block_reward: Option<Uint>,
/// Address of the registrar contract.
pub registrar: Option<Address>,
}
/// Tendermint engine deserialization.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Tendermint {
/// Ethash params.<|fim▁hole|>mod tests {
use serde_json;
use uint::Uint;
use util::U256;
use hash::Address;
use util::hash::H160;
use spec::tendermint::Tendermint;
use spec::validator_set::ValidatorSet;
#[test]
fn tendermint_deserialization() {
let s = r#"{
"params": {
"gasLimitBoundDivisor": "0x0400",
"validators": {
"list": ["0xc6d9d2cd449a754c494264e1809c50e34d64562b"]
},
"blockReward": "0x50"
}
}"#;
let deserialized: Tendermint = serde_json::from_str(s).unwrap();
assert_eq!(deserialized.params.gas_limit_bound_divisor, Uint(U256::from(0x0400)));
let vs = ValidatorSet::List(vec![Address(H160::from("0xc6d9d2cd449a754c494264e1809c50e34d64562b"))]);
assert_eq!(deserialized.params.validators, vs);
assert_eq!(deserialized.params.block_reward, Some(Uint(U256::from(0x50))));
}
}<|fim▁end|> | pub params: TendermintParams,
}
#[cfg(test)] |
<|file_name|>product_search.delete_reference_image.js<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//<|fim▁hole|>// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ** This file is automatically generated by gapic-generator-typescript. **
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
'use strict';
function main(name) {
// [START vision_v1p3beta1_generated_ProductSearch_DeleteReferenceImage_async]
/**
* TODO(developer): Uncomment these variables before running the sample.
*/
/**
* Required. The resource name of the reference image to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`
*/
// const name = 'abc123'
// Imports the Vision library
const {ProductSearchClient} = require('@google-cloud/vision').v1p3beta1;
// Instantiates a client
const visionClient = new ProductSearchClient();
async function callDeleteReferenceImage() {
// Construct request
const request = {
name,
};
// Run request
const response = await visionClient.deleteReferenceImage(request);
console.log(response);
}
callDeleteReferenceImage();
// [END vision_v1p3beta1_generated_ProductSearch_DeleteReferenceImage_async]
}
process.on('unhandledRejection', err => {
console.error(err.message);
process.exitCode = 1;
});
main(...process.argv.slice(2));<|fim▁end|> | // https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, |
<|file_name|>Mask.java<|end_file_name|><|fim▁begin|><|fim▁hole|> *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
**************************************************************************/
package com.emitrom.touch4j.client.ui;
import com.emitrom.touch4j.client.core.Component;
import com.emitrom.touch4j.client.core.config.Attribute;
import com.emitrom.touch4j.client.core.config.Event;
import com.emitrom.touch4j.client.core.config.XType;
import com.emitrom.touch4j.client.core.handlers.CallbackRegistration;
import com.emitrom.touch4j.client.core.handlers.mask.MaskTapHandler;
import com.google.gwt.core.client.JavaScriptObject;
/**
* A simple class used to mask any Container. This should rarely be used
* directly, instead look at the Container.mask configuration.
*
* @see <a href=http://docs.sencha.com/touch/2-0/#!/api/Ext.Mask>Ext.Mask</a>
*/
public class Mask extends Component {
@Override
protected native void init()/*-{
var c = new $wnd.Ext.Mask();
[email protected]::configPrototype = c.initialConfig;
}-*/;
@Override
public String getXType() {
return XType.MASK.getValue();
}
@Override
protected native JavaScriptObject create(JavaScriptObject config) /*-{
return new $wnd.Ext.Mask(config);
}-*/;
public Mask() {
}
protected Mask(JavaScriptObject jso) {
super(jso);
}
/**
* True to make this mask transparent.
*
* Defaults to: false
*
* @param value
*/
public void setTransparent(String value) {
setAttribute(Attribute.TRANSPARENT.getValue(), value, true);
}
/**
* A tap event fired when a user taps on this mask
*
* @param handler
*/
public CallbackRegistration addTapHandler(MaskTapHandler handler) {
return this.addWidgetListener(Event.TAP.getValue(), handler.getJsoPeer());
}
}<|fim▁end|> | /**************************************************************************
* Mask.java is part of Touch4j 4.0. Copyright 2012 Emitrom LLC |
<|file_name|>MinMaxProfileRanking.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import com.google.common.collect.MinMaxPriorityQueue;
import uk.ac.ebi.atlas.commons.streams.ObjectInputStream;
import uk.ac.ebi.atlas.model.GeneProfilesList;
import uk.ac.ebi.atlas.model.Profile;
import java.util.Comparator;
import java.util.function.Supplier;
public class MinMaxProfileRanking<T extends Profile, L extends GeneProfilesList<T>> implements SelectProfiles<T, L> {
private final Comparator<T> comparator;
private final Supplier<L> newList;
public MinMaxProfileRanking(Comparator<T> comparator, Supplier<L> newList) {
this.comparator = comparator;
this.newList = newList;
}
@Override
public L select(ObjectInputStream<T> profiles, int maxSize) {
MinMaxPriorityQueue<T> rankingQueue =
maxSize > 0 ?
MinMaxPriorityQueue.orderedBy(comparator).maximumSize(maxSize).create() :
MinMaxPriorityQueue.orderedBy(comparator).create();
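// With maximumSize set, the queue evicts its greatest element (per comparator)
// once full, so only the top maxSize profiles are retained while streaming.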
int count = 0;
for (T profile : new IterableObjectInputStream<>(profiles)) {
rankingQueue.add(profile);
count++;
}
L list = newList.get();
T profile;
while ((profile = rankingQueue.poll()) != null) {
list.add(profile);
}
list.setTotalResultCount(count);
return list;
}
}<|fim▁end|> | package uk.ac.ebi.atlas.profiles; |
<|file_name|>controller.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="node" />
import * as cp from 'child_process';
import * as data from './data';
import { TSEventEmitter } from './events';
import * as GameJolt from './gamejolt';
export declare function getExecutable(): string;
export declare type Events = {
'fatal': (err: Error) => void;
'err': (err: Error) => void;
'close': () => void;
'gameLaunchBegin': (dir: string, ...args: string[]) => void;
'gameLaunchFinished': () => void;
'gameLaunchFailed': (reason: string) => void;
'gameCrashed': (reason: string) => void;
'gameClosed': () => void;<|fim▁hole|> 'gameKilled': () => void;
'gameRelaunchBegin': (dir: string, ...args: string[]) => void;
'gameRelaunchFailed': (reason: string) => void;
'noUpdateAvailable': () => void;
'updateAvailable': (metadata: data.UpdateMetadata) => void;
'updateBegin': (dir: string, metadata: data.UpdateMetadata) => void;
'updateFinished': () => void;
'updateReady': () => void;
'updateApply': (...args: string[]) => void;
'updateFailed': (reason: string) => void;
'paused': (queue: boolean) => void;
'resumed': (unqueue: boolean) => void;
'canceled': () => void;
'openRequested': () => void;
'uninstallBegin': (dir: string) => void;
'uninstallFailed': (reason: string) => void;
'uninstallFinished': () => void;
'rollbackBegin': (dir: string) => void;
'rollbackFailed': (reason: string) => void;
'rollbackFinished': () => void;
'patcherState': (state: data.PatcherState) => void;
'progress': (progress: data.MsgProgress) => void;
};
export declare type Options = {
process?: cp.ChildProcess | number;
keepConnected?: boolean;
sequentialMessageId?: boolean;
};
export declare type LaunchOptions = cp.SpawnOptions & {
keepConnected?: boolean;
};
export declare class Controller extends TSEventEmitter<Events> {
readonly port: number;
private process;
private reconnector;
private connectionLock;
private conn;
private _nextMessageId;
private sequentialMessageId;
private sendQueue;
private sentMessage;
private consumingQueue;
private expectingQueuePauseIds;
private expectingQueueResumeIds;
private expectingQueuePause;
private expectingQueueResume;
constructor(port: number, options?: Options);
private nextMessageId;
private newJsonStream;
static ensureMigrationFile(localPackage: GameJolt.IGamePackage): Promise<void>;
static launchNew(args: string[], options?: LaunchOptions): Promise<Controller>;
readonly connected: boolean;
connect(): Promise<void>;
disconnect(): Promise<void>;
dispose(): Promise<void>;
private consumeSendQueue;
private send;
private sendControl;
sendKillGame(timeout?: number): Promise<data.MsgResultResponse>;
sendPause(options?: {
queue?: boolean;
timeout?: number;
}): Promise<data.MsgResultResponse>;
sendResume(options?: {
queue?: boolean;
authToken?: string;
extraMetadata?: string;
timeout?: number;
}): Promise<data.MsgResultResponse>;
sendCancel(timeout?: number, waitOnlyForSend?: boolean): Promise<void> | Promise<data.MsgResultResponse>;
sendGetState(includePatchInfo: boolean, timeout?: number): Promise<data.MsgStateResponse>;
sendCheckForUpdates(gameUID: string, platformURL: string, authToken?: string, metadata?: string, timeout?: number): Promise<data.MsgResultResponse>;
sendUpdateAvailable(updateMetadata: data.UpdateMetadata, timeout?: number): Promise<unknown>;
sendUpdateBegin(timeout?: number): Promise<data.MsgResultResponse>;
sendUpdateApply(env: Object, args: string[], timeout?: number): Promise<data.MsgResultResponse>;
kill(): Promise<void>;
}<|fim▁end|> | |
<|file_name|>AddressApi_test.go<|end_file_name|><|fim▁begin|>package cloudstack
import (
"fmt"
"net/http"
"net/http/httptest"
"net/url"
"testing"
)
func TestAssociateIpAddress(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
responses := map[string]string{
"associateIpAddress": `
{
"associateipaddressresponse": {
"id": "2c7fb564-38e0-44db-821f-6486a544e423",
"jobid": "7bc6769c-bbeb-4de2-ace2-e57f702af44a"
}
}
`,
"queryAsyncJobResult": `<|fim▁hole|> {
"queryasyncjobresultresponse": {
"accountid": "6c9ce4be-ed0b-477b-aa76-f0cb1b7a200b",
"cmd": "org.apache.cloudstack.api.command.user.address.AssociateIPAddrCmd",
"created": "2014-11-23T15:42:26+0900",
"jobid": "7bc6769c-bbeb-4de2-ace2-e57f702af44a",
"jobprocstatus": 0,
"jobresult": {
"ipaddress": {
"account": "account1",
"allocated": "2014-11-23T15:42:26+0900",
"associatednetworkid": "7eef65ed-b952-46c5-bea4-25d106c37a3b",
"associatednetworkname": "network1",
"domain": "domain1",
"domainid": "9d9dddc3-ce38-494b-973e-b80519d76b22",
"forvirtualnetwork": true,
"id": "2c7fb564-38e0-44db-821f-6486a544e423",
"ipaddress": "1.1.1.1",
"isportable": false,
"issourcenat": false,
"isstaticnat": false,
"issystem": false,
"networkid": "79132c74-fe77-4bd5-9915-ce7c577fb95f",
"physicalnetworkid": "4a00ce42-6a30-4494-afdd-3531d883237b",
"state": "Allocating",
"tags": [],
"zoneid": "a117e75f-d02e-4074-806d-889c61261394",
"zonename": "tesla"
}
},
"jobresultcode": 0,
"jobresulttype": "object",
"jobstatus": 1,
"userid": "c8be3f37-1175-475b-8f67-08bb33d6f6ea"
}
}
`,
}
fmt.Fprintln(w, responses[r.FormValue("command")])
}))
defer server.Close()
endpoint, _ := url.Parse(server.URL)
client, _ := NewClient(endpoint, "APIKEY", "SECRETKEY", "", "")
p := NewAssociateIpAddressParameter()
p.ZoneId.Set("a117e75f-d02e-4074-806d-889c61261394")
ip, err := client.AssociateIpAddress(p)
if err != nil {
t.Errorf(err.Error())
}
if ip.IpAddress.String() != "1.1.1.1" {
t.Errorf("ipaddress: actual %s, expected 1.1.1.1", ip.IpAddress.String())
}
if ip.client.EndPoint != endpoint {
t.Errorf("endpoint: actual %v, expected %v", ip.client.EndPoint, endpoint)
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
YANK, a package for alchemical free energy calculations using OpenMM
"""
__author__ = "John D. Chodera"
__license__ = "GPL"
__maintainer__ = "John D. Chodera"
__email__ = "[email protected]"<|fim▁hole|>from yank import Yank
import analysis
__all__ = ['Yank', 'analysis']<|fim▁end|> | __version__ = "1.0beta"
|
<|file_name|>server_internals_test.go<|end_file_name|><|fim▁begin|>package raft
import (
"bytes"
"encoding/gob"
"fmt"
"testing"
"time"
)
func TestFollowerAllegiance(t *testing.T) {
// a follower with allegiance to leader=2
s := Server{
id: 1,
term: 5,
state: &protectedString{value: follower},
leader: 2,
log: newRaftLog(&bytes.Buffer{}, noop),
}
// receives an appendEntries from a future term and different leader
_, stepDown := s.handleAppendEntries(appendEntries{
Term: 6,
LeaderID: 3,
})
<|fim▁hole|> t.Errorf("wasn't told to step down (i.e. abandon leader)")
}
if s.term != 6 {
t.Errorf("no term change")
}
}
func TestStrongLeader(t *testing.T) {
// a leader in term=2
s := Server{
id: 1,
term: 2,
state: &protectedString{value: leader},
leader: 1,
log: newRaftLog(&bytes.Buffer{}, noop),
}
// receives a requestVote from someone also in term=2
resp, stepDown := s.handleRequestVote(requestVote{
Term: 2,
CandidateID: 3,
LastLogIndex: 0,
LastLogTerm: 0,
})
// and should retain his leadership
if resp.VoteGranted {
t.Errorf("shouldn't have granted vote")
}
if stepDown {
t.Errorf("shouldn't have stepped down")
}
}
func TestLimitedClientPatience(t *testing.T) {
// a client issues a command
// it's written to a leader log
// but the leader is deposed before he can replicate it
// the new leader truncates the command away
// the client should not be stuck forever
}
func TestLenientCommit(t *testing.T) {
// a log that's fully committed
log := &raftLog{
entries: []logEntry{
logEntry{Index: 1, Term: 1},
logEntry{Index: 2, Term: 1},
logEntry{Index: 3, Term: 2},
logEntry{Index: 4, Term: 2},
logEntry{Index: 5, Term: 2},
},
commitPos: 4,
}
// belongs to a follower
s := Server{
id: 100,
term: 2,
leader: 101,
log: log,
state: &protectedString{value: follower},
}
// an appendEntries comes with correct PrevLogIndex but older CommitIndex
resp, stepDown := s.handleAppendEntries(appendEntries{
Term: 2,
LeaderID: 101,
PrevLogIndex: 5,
PrevLogTerm: 2,
CommitIndex: 4, // i.e. commitPos=3
})
// this should not fail
if !resp.Success {
t.Errorf("failed (%s)", resp.reason)
}
if stepDown {
t.Errorf("shouldn't step down")
}
}
func TestConfigurationReceipt(t *testing.T) {
// a follower
s := Server{
id: 2,
term: 1,
leader: 1,
log: &raftLog{
entries: []logEntry{logEntry{Index: 1, Term: 1}},
commitPos: 0,
},
state: &protectedString{value: follower},
config: newConfiguration(peerMap{}),
}
// receives a configuration change
pm := makePeerMap(
serializablePeer{1, "foo"},
serializablePeer{2, "bar"},
serializablePeer{3, "baz"},
)
configurationBuf := &bytes.Buffer{}
gob.Register(&serializablePeer{})
if err := gob.NewEncoder(configurationBuf).Encode(pm); err != nil {
t.Fatal(err)
}
// via an appendEntries
aer, _ := s.handleAppendEntries(appendEntries{
Term: 1,
LeaderID: 1,
PrevLogIndex: 1,
PrevLogTerm: 1,
Entries: []logEntry{
logEntry{
Index: 2,
Term: 1,
Command: configurationBuf.Bytes(),
isConfiguration: true,
},
},
CommitIndex: 1,
})
// it should succeed
if !aer.Success {
t.Fatalf("appendEntriesResponse: no success: %s", aer.reason)
}
// and the follower's configuration should be immediately updated
if expected, got := 3, s.config.allPeers().count(); expected != got {
t.Fatalf("follower peer count: expected %d, got %d", expected, got)
}
peer, ok := s.config.get(3)
if !ok {
t.Fatal("follower didn't get peer 3")
}
if peer.id() != 3 {
t.Fatal("follower got bad peer 3")
}
}
func TestNonLeaderExpulsion(t *testing.T) {
// a follower
s := Server{
id: 2,
term: 1,
leader: 1,
log: &raftLog{
store: &bytes.Buffer{},
entries: []logEntry{logEntry{Index: 1, Term: 1}},
commitPos: 0,
},
state: &protectedString{value: follower},
config: newConfiguration(peerMap{}),
quit: make(chan chan struct{}),
}
// receives a configuration change that doesn't include itself
pm := makePeerMap(
serializablePeer{1, "foo"},
serializablePeer{3, "baz"},
serializablePeer{5, "bat"},
)
configurationBuf := &bytes.Buffer{}
gob.Register(&serializablePeer{})
if err := gob.NewEncoder(configurationBuf).Encode(pm); err != nil {
t.Fatal(err)
}
// via an appendEntries
s.handleAppendEntries(appendEntries{
Term: 1,
LeaderID: 1,
PrevLogIndex: 1,
PrevLogTerm: 1,
Entries: []logEntry{
logEntry{
Index: 2,
Term: 1,
Command: configurationBuf.Bytes(),
isConfiguration: true,
},
},
CommitIndex: 1,
})
// and once committed
s.handleAppendEntries(appendEntries{
Term: 1,
LeaderID: 1,
PrevLogIndex: 2,
PrevLogTerm: 1,
CommitIndex: 2,
})
// the follower should shut down
select {
case q := <-s.quit:
q <- struct{}{}
case <-time.After(maximumElectionTimeout()):
t.Fatal("didn't shut down")
}
}
type serializablePeer struct {
MyID uint64
Err string
}
func (p serializablePeer) id() uint64 { return p.MyID }
func (p serializablePeer) callAppendEntries(appendEntries) appendEntriesResponse {
return appendEntriesResponse{}
}
func (p serializablePeer) callRequestVote(requestVote) requestVoteResponse {
return requestVoteResponse{}
}
func (p serializablePeer) callCommand([]byte, chan<- []byte) error {
return fmt.Errorf("%s", p.Err)
}
func (p serializablePeer) callSetConfiguration(...Peer) error {
return fmt.Errorf("%s", p.Err)
}<|fim▁end|> | // should now step down and have a new term
if !stepDown { |
<|file_name|>protein.cpp<|end_file_name|><|fim▁begin|>/**********************************************************************
Protein - Protein class
Copyright (C) 2009 Tim Vandermeersch
This file is part of the Avogadro molecular editor project.
For more information, see <http://avogadro.cc/>
Avogadro is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Avogadro is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
**********************************************************************/
#include "protein.h"
#include <avogadro/molecule.h>
#include <avogadro/residue.h>
#include <avogadro/atom.h>
#include <avogadro/neighborlist.h>
#include <QVector>
#include <QVariant>
#include <QStringList>
#include <QDebug>
namespace Avogadro {
class ProteinPrivate
{
public:
Molecule *molecule;
QVector<QVector<Residue*> > chains;
QVector<QVector<Residue*> > hbondPairs;
QByteArray structure;
mutable int num3turnHelixes;
mutable int num4turnHelixes;
mutable int num5turnHelixes;
};
Protein::Protein(Molecule *molecule) : d(new ProteinPrivate)
{
d->molecule = molecule;
sortResiduesByChain();
if (!extractFromPDB()) {
detectHBonds();
detectStructure();
}
/*
foreach (const QVector<Residue*> &residues, d->chains) { // for each chain
qDebug() << "chain: " << d->chains.indexOf(residues);
QByteArray chain;
foreach (Residue *residue, residues) { // for each residue in the chain
chain.append( d->structure.at(residue->index()) );
}
qDebug() << chain;
}
*/
}
Protein::~Protein()
{
delete d;
}
QByteArray Protein::secondaryStructure() const
{
return d->structure;
}
const QVector<QVector<Residue*> >& Protein::chains() const
{
return d->chains;
}
bool Protein::isHelix(Residue *residue) const
{
char key = d->structure.at(residue->index());
switch(key) {
case 'G':
case 'H':
case 'I':
return true;
default:
return false;
}
}
bool Protein::isSheet(Residue *residue) const
{
char key = d->structure.at(residue->index());
switch(key) {
case 'E':
case 'B':
return true;
default:
return false;
}
}
bool Protein::extractFromPDB()
{
bool found = false;
/*
COLUMNS DATA TYPE FIELD DEFINITION
-----------------------------------------------------------------------------------
1 - 6 Record name "HELIX "
8 - 10 Integer serNum Serial number of the helix. This starts
at 1 and increases incrementally.
12 - 14 LString(3) helixID Helix identifier. In addition to a serial
number, each helix is given an
alphanumeric character helix identifier.
16 - 18 Residue name initResName Name of the initial residue.
20 Character initChainID Chain identifier for the chain containing
this helix.
22 - 25 Integer initSeqNum Sequence number of the initial residue.
26 AChar initICode Insertion code of the initial residue.
28 - 30 Residue name endResName Name of the terminal residue of the helix.
32 Character endChainID Chain identifier for the chain containing
this helix.
34 - 37 Integer endSeqNum Sequence number of the terminal residue.
38 AChar endICode Insertion code of the terminal residue.
39 - 40 Integer helixClass Helix class (see below).
41 - 70 String comment Comment about this helix.
72 - 76 Integer length Length of this helix.
*/
QVariant helix = d->molecule->property("HELIX");
if (helix.isValid()) {
found = true;
QStringList lines = helix.toString().split('\n');
foreach (const QString &line, lines) {
//qDebug() << "line:" << line;
bool ok;
QString helixID = line.mid(5, 3);
//
// initial residue
//
QString initResName = line.mid(9, 3);
QString initChainID = line.mid(13, 1);
int initChainNum = -1;
foreach (Residue *residue, d->molecule->residues()) {
if (QString(residue->chainID()) != initChainID)
continue;
initChainNum = residue->chainNumber();
break;
}
if (initChainNum < 0) {
qDebug() << "Protein: Error, invalid initChainID for helix" << helixID;
return false;
}
int initSeqNum = line.mid(15, 4).toInt(&ok);
if (!ok) {
qDebug() << "Protein: Error, can't read interger from initSeqNum for helix" << helixID;
return false;
}
Residue *initResidue = 0;
foreach (Residue *residue, d->chains.at(initChainNum)) {
if (residue->number().toInt() == initSeqNum) {
initResidue = residue;
break;
}
}
if (!initResidue) {
qDebug() << "Protein: Error, could not find initResidue in the chain for helix" << helixID;
return false;
}
if (initResidue->name() != initResName) {
qDebug() << "Protein: Error, initResName does not match the residue "
"at the specified position for helix" << helixID;
qDebug() << initResName << "!=" << initResidue->name();
return false;
}
//
// end residue
//
QString endResName = line.mid(21, 3);
QString endChainID = line.mid(25, 1);
int endChainNum = -1;
foreach (Residue *residue, d->molecule->residues()) {
if (QString(residue->chainID()) != endChainID)
continue;
endChainNum = residue->chainNumber();
}
if (endChainNum < 0) {
qDebug() << "Protein: Error, invalid endChainID for helix" << helixID;
return false;
}
int endSeqNum = line.mid(27, 4).toInt(&ok);
if (!ok) {
qDebug() << "Protein: Error, can't read interger from endSeqNum for helix" << helixID;
return false;
}
Residue *endResidue = 0;
foreach (Residue *residue, d->chains.at(endChainNum)) {
if (residue->number().toInt() == endSeqNum) {
endResidue = residue;
break;
}
}
if (!endResidue) {
qDebug() << "Protein: Error, could not find endResidue in the chain for helix" << helixID;
return false;
}
if (endResidue->name() != endResName) {
qDebug() << "Protein: Error, endResName does not match the residue "
"at the specified position for helix" << helixID;
qDebug() << endResName << "!=" << endResidue->name();
return false;
}
int helixClass = line.mid(32, 2).toInt(&ok);
if (!ok) {
qDebug() << "Protein: Error, can't read helix class for helix" << helixID;
return false;
}
/* CLASS NUMBER
TYPE OF HELIX (COLUMNS 39 - 40)
--------------------------------------------------------------
Right-handed alpha (default) 1
Right-handed omega 2
Right-handed pi 3
Right-handed gamma 4
Right-handed 3 - 10 5
Left-handed alpha 6
Left-handed omega 7
Left-handed gamma 8
2 - 7 ribbon/helix 9
Polyproline 10
*/
char key;
switch (helixClass) {
case 1:
key = 'H';
break;
case 3:
key = 'I';
break;
case 5:
key = 'G';
break;
default:
key = '-';
break;
}
int initIndex = d->chains.at(initChainNum).indexOf(initResidue);
int endIndex = d->chains.at(initChainNum).indexOf(endResidue);
for (int i = initIndex; i < endIndex; ++i) {
Residue *residue = d->chains.at(initChainNum).at(i);
d->structure.data()[residue->index()] = key;
}
}
}
/*
COLUMNS DATA TYPE FIELD DEFINITION
-------------------------------------------------------------------------------------
1 - 6 Record name "SHEET "
8 - 10 Integer strand Strand number which starts at 1 for each
strand within a sheet and increases by one.
12 - 14 LString(3) sheetID Sheet identifier.
15 - 16 Integer numStrands Number of strands in sheet.
18 - 20 Residue name initResName Residue name of initial residue.
22 Character initChainID Chain identifier of initial residue
in strand.
23 - 26 Integer initSeqNum Sequence number of initial residue
in strand.
27 AChar initICode Insertion code of initial residue
in strand.
29 - 31 Residue name endResName Residue name of terminal residue.
33 Character endChainID Chain identifier of terminal residue.
34 - 37 Integer endSeqNum Sequence number of terminal residue.
38 AChar endICode Insertion code of terminal residue.
39 - 40 Integer sense Sense of strand with respect to previous
strand in the sheet. 0 if first strand,
1 if parallel,and -1 if anti-parallel.
42 - 45 Atom curAtom Registration. Atom name in current strand.
46 - 48 Residue name curResName Registration. Residue name in current strand
50 Character curChainId Registration. Chain identifier in
current strand.
51 - 54 Integer curResSeq Registration. Residue sequence number
in current strand.
55 AChar curICode Registration. Insertion code in
current strand.
57 - 60 Atom prevAtom Registration. Atom name in previous strand.
61 - 63 Residue name prevResName Registration. Residue name in
previous strand.
65 Character prevChainId Registration. Chain identifier in
previous strand.
66 - 69 Integer prevResSeq Registration. Residue sequence number
in previous strand.
70 AChar prevICode Registration. Insertion code in
previous strand.
*/
QVariant sheet = d->molecule->property("SHEET");
if (sheet.isValid()) {
found = true;
QStringList lines = sheet.toString().split('\n');
foreach (const QString &line, lines) {
//qDebug() << "line:" << line;
bool ok;
QString sheetID = line.mid(5, 3);
/*
int numStrands = line.mid(8, 2).toInt(&ok);
if (!ok) {
qDebug() << "Protein: Error, can't read interger from numStrands for sheet" << sheetID;
return false;
}
*/
//
// initial residue
//
QString initResName = line.mid(11, 3);
QString initChainID = line.mid(15, 1);
int initChainNum = -1;
foreach (Residue *residue, d->molecule->residues()) {
if (QString(residue->chainID()) != initChainID)
continue;
initChainNum = residue->chainNumber();
}
if (initChainNum < 0) {
qDebug() << "Protein: Error, invalid initChainID for sheet" << sheetID;
return false;
}
int initSeqNum = line.mid(16, 4).toInt(&ok);
if (!ok || !initSeqNum) {
qDebug() << "Protein: Error, can't read interger from initSeqNum for sheet" << sheetID;
return false;
}
Residue *initResidue = 0;
foreach (Residue *residue, d->chains.at(initChainNum)) {
if (residue->number().toInt() == initSeqNum) {
initResidue = residue;
break;
}
}
if (!initResidue) {
qDebug() << "Protein: Error, could not find initResidue in the chain for sheet" << sheetID;
return false;
}
if (initResidue->name() != initResName) {
qDebug() << "Protein: Error, initResName does not match the residue "
"at the specified position for sheet" << sheetID;
qDebug() << initResName << "!=" << initResidue->name();
return false;
}
//
// end residue
//
QString endResName = line.mid(22, 3);
QString endChainID = line.mid(26, 1);
int endChainNum = -1;
foreach (Residue *residue, d->molecule->residues()) {
if (QString(residue->chainID()) != endChainID)
continue;
endChainNum = residue->chainNumber();
}
if (endChainNum < 0) {
qDebug() << "Protein: Error, invalid endChainID for sheet" << sheetID;
return false;
}
int endSeqNum = line.mid(27, 4).toInt(&ok);
if (!ok || !endSeqNum) {
qDebug() << "Protein: Error, can't read interger from endSeqNum for sheet" << sheetID;
return false;
}
Residue *endResidue = 0;
foreach (Residue *residue, d->chains.at(endChainNum)) {
if (residue->number().toInt() == endSeqNum) {
endResidue = residue;
break;
}
}
if (!endResidue) {
qDebug() << "Protein: Error, could not find endResidue in the chain for sheet" << sheetID;
return false;
}
if (endResidue->name() != endResName) {
qDebug() << "Protein: Error, endResName does not match the residue "
"at the specified position for sheet" << sheetID;
qDebug() << endResName << "!=" << endResidue->name();
return false;
}
char key = '-';
int length = endSeqNum - initSeqNum;
if (length == 1)
key = 'B';
if (length > 1)
key = 'E';
int initIndex = d->chains.at(initChainNum).indexOf(initResidue);
int endIndex = d->chains.at(initChainNum).indexOf(endResidue);
for (int i = initIndex; i < endIndex; ++i) {
Residue *residue = d->chains.at(initChainNum).at(i);
d->structure.data()[residue->index()] = key;
}
}
}
d->num3turnHelixes = -1;
d->num4turnHelixes = -1;
d->num5turnHelixes = -1;
return found;
}
int Protein::numChains() const
{
return d->chains.size();
}
QList<unsigned long> Protein::chainAtoms(int index) const
{
QList<unsigned long> ids;
if (index >= d->chains.size())
return ids;
foreach (Residue *res, d->chains.at(index))
foreach (unsigned long id, res->atoms())
ids.append(id);
return ids;
}
QList<unsigned long> Protein::chainResidues(int index) const
{
QList<unsigned long> ids;
if (index >= d->chains.size())
return ids;
foreach (Residue *res, d->chains.at(index))
ids.append(res->id());
return ids;
}
/*
int Protein::numHelixes(char c) const
{
int count = 0;
foreach (const QVector<Residue*> &residues, d->chains) { // for each chain
for (int i = 0 ; i < residues.size(); ++i) {
if (d->structure.at(residues.at(i)->index()) == c) {
count++;
while (d->structure.at(residues.at(i)->index()) == c)
++i;
}
}
}
return count;
}
int Protein::num3turnHelixes() const
{
if (d->num3turnHelixes >= 0)
return d->num3turnHelixes;
d->num3turnHelixes = numHelixes('G');
return d->num3turnHelixes;
}
int Protein::num4turnHelixes() const
{
if (d->num4turnHelixes >= 0)
return d->num4turnHelixes;
d->num4turnHelixes = numHelixes('H');
return d->num4turnHelixes;
}
int Protein::num5turnHelixes() const
{
if (d->num5turnHelixes >= 0)
return d->num5turnHelixes;
d->num5turnHelixes = numHelixes('I');
return d->num5turnHelixes;
}
*/
QList<unsigned long> Protein::helixBackboneAtoms(char c, int index)
{
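// Returns the backbone N, CA, C, O atom ids for the index-th contiguous run of
// secondary-structure code c (e.g. 'G', 'H', 'I') in d->structure.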
QList <unsigned long> ids;
int count = 0;
for (int i = 0 ; i < d->structure.size(); ++i) {
if (d->structure.at(i) == c) {
if (count == index) {
while (d->structure.at(i) == c) {
Residue *residue = d->molecule->residue(i);
unsigned long O, N, C, CA;
foreach (unsigned long id, residue->atoms()) {
QString atomId = residue->atomId(id).trimmed();
if (atomId == "N" ) N = id;
if (atomId == "CA") CA = id;
if (atomId == "C" ) C = id;
if (atomId == "O" ) O = id;
}
ids.append(N);
ids.append(CA);
ids.append(C);
ids.append(O);
++i;
}
return ids;
}
count++;
// skip to next non 'H' char
while (d->structure.at(i) == c)
++i;
}
}
return ids;
}
/*
QList<unsigned long> Protein::helix3BackboneAtoms(int index)
{
return helixBackboneAtoms('G', index);
}
QList<unsigned long> Protein::helix4BackboneAtoms(int index)
{
return helixBackboneAtoms('H', index);
}
QList<unsigned long> Protein::helix5BackboneAtoms(int index)
{
return helixBackboneAtoms('I', index);
}
*/
int Protein::residueIndex(Residue *residue) const
{
return d->chains.at(residue->chainNumber()).indexOf(residue);
}
bool isAminoAcid(Residue *residue)
{
QString resname = residue->name();
if (resname == "ALA")
return true;
if (resname == "ARG")
return true;
if (resname == "ASN")
return true;
if (resname == "ASP")
return true;
if (resname == "CYS")
return true;
if (resname == "GLU")
return true;
if (resname == "GLN")
return true;
if (resname == "GLY")
return true;
if (resname == "HIS")
return true;
if (resname == "ILE")
return true;
if (resname == "LEU")
return true;
if (resname == "LYS")
return true;
if (resname == "MET")
return true;
if (resname == "PHE")
return true;
if (resname == "PRO")
return true;
if (resname == "SER")
return true;
if (resname == "THR")
return true;
if (resname == "TRP")
return true;
if (resname == "TYR")
return true;
if (resname == "VAL")
return true;
return false;
}
void Protein::iterateBackward(Atom *prevN, Atom *currC, QVector<bool> &visited)
{
Residue *residue = currC->residue();
visited[residue->index()] = true;
if (!isAminoAcid(residue))
return;
d->chains[residue->chainNumber()].prepend(residue);
foreach (unsigned long id1, currC->neighbors()) {
Atom *nbr1 = d->molecule->atomById(id1);
if (nbr1 == prevN)
continue;
QString nbr1Id = nbr1->residue()->atomId(nbr1->id()).trimmed();
if (nbr1Id == "CA") {
foreach (unsigned long id2, nbr1->neighbors()) {
Atom *nbr2 = d->molecule->atomById(id2);
if (nbr2 == currC)
continue;
QString nbr2Id = nbr2->residue()->atomId(nbr2->id()).trimmed();
if (nbr2Id == "N") {
foreach (unsigned long id3, nbr2->neighbors()) {
Atom *nbr3 = d->molecule->atomById(id3);
if (nbr3 == nbr1)
continue;
QString nbr3Id = nbr3->residue()->atomId(nbr3->id()).trimmed();
if (nbr3Id == "C") {
if (!visited.at(nbr3->residue()->index()))
iterateBackward(nbr2, nbr3, visited);
}
}
}
}
} else if (nbr1Id == "N") {
if (!visited.at(nbr1->residue()->index()))
iterateForward(currC, nbr1, visited);
}
}
}
void Protein::iterateForward(Atom *prevC, Atom *currN, QVector<bool> &visited)
{
Residue *residue = currN->residue();
visited[residue->index()] = true;
if (!isAminoAcid(residue))
return;
d->chains[residue->chainNumber()].append(residue);
foreach (unsigned long id1, currN->neighbors()) {
Atom *nbr1 = d->molecule->atomById(id1);
if (nbr1 == prevC)
continue;
QString nbr1Id = nbr1->residue()->atomId(nbr1->id()).trimmed();
if (nbr1Id == "CA") {
foreach (unsigned long id2, nbr1->neighbors()) {
Atom *nbr2 = d->molecule->atomById(id2);
if (nbr2 == currN)
continue;
QString nbr2Id = nbr2->residue()->atomId(nbr2->id()).trimmed();
if (nbr2Id == "C") {
foreach (unsigned long id3, nbr2->neighbors()) {
Atom *nbr3 = d->molecule->atomById(id3);
if (nbr3 == nbr1)
continue;
QString nbr3Id = nbr3->residue()->atomId(nbr3->id()).trimmed();
if (nbr3Id == "N") {
if (!visited.at(nbr3->residue()->index()))
iterateForward(nbr2, nbr3, visited);
}
}
}
}
} else if (nbr1Id == "C") {
if (!visited.at(nbr1->residue()->index()))
iterateBackward(currN, nbr1, visited);
}
}
}
void Protein::sortResiduesByChain()
{
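// Groups residues into d->chains, ordered along the peptide backbone by
// following N-CA-C connectivity from each unvisited seed atom.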
d->structure.resize(d->molecule->numResidues());
for (int i = 0 ; i < d->structure.size(); ++i)
d->structure[i] = '-';
// determine the number of chains
unsigned int numChains = 0;
foreach (Residue *residue, d->molecule->residues()) {
if (!isAminoAcid(residue))
continue;
if (residue->chainNumber() > numChains)
numChains = residue->chainNumber();
}
d->chains.resize(numChains+1);
QVector<bool> visited(d->molecule->numResidues());
foreach (Residue *residue, d->molecule->residues()) {
if (residue->atoms().size() < 4)
continue;
foreach (unsigned long id, residue->atoms()) {
Atom *atom = d->molecule->atomById(id);
QString atomId = residue->atomId(id).trimmed();
if (visited.at(atom->residue()->index()))
continue;
if (atomId == "N")
iterateForward(0, atom, visited);
else if (atomId == "CA")
iterateBackward(0, atom, visited);
} // end atoms in residue
}
}
void Protein::detectHBonds()
{
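  // Detect backbone N-H...O=C hydrogen bonds: candidate pairs come from a
  // 4.0 Angstrom neighbor list, pairs within the same residue or fewer than
  // three residues apart in sequence are skipped, and the remaining pairs are
  // scored with the electrostatic energy computed below.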
d->hbondPairs.resize(d->molecule->numResidues());
NeighborList neighborList(d->molecule, 4.0);
for (unsigned int i = 0; i < d->molecule->numAtoms(); ++i) {
Atom *atom = d->molecule->atom(i);
QList<Atom*> nbrs = neighborList.nbrs(atom);
foreach(Atom *nbr, nbrs) {
Residue *residue1 = atom->residue();
if (!residue1)
continue;
Residue *residue2 = nbr->residue();
if (!residue2)
continue;
if (residue1 == residue2)
continue;
if (d->hbondPairs.at(residue1->index()).contains(residue2))
continue;
int res1 = residueIndex(residue1);
int res2 = residueIndex(residue2);
int delta = abs(res1 - res2);
if (delta <= 2)
continue;
// residue 1 has the N-H
// residue 2 has the C=O
if (residue1->atomId(atom->id()).trimmed() != "O") {
if (residue2->atomId(nbr->id()).trimmed() != "O")
continue;
} else {
Residue *swap = residue1;
residue1 = residue2;
residue2 = swap;
}
Eigen::Vector3d H_pos(Eigen::Vector3d::Zero());
Atom *H = 0, *N = 0, *C = 0, *O = 0;
// find N in first residue
foreach (unsigned long id, residue1->atoms()) {
if (residue1->atomId(id).trimmed() == "N")
N = d->molecule->atomById(id);
}
if (!N)
continue;
      // find neighboring H, or compute its position if there are no hydrogens
foreach (unsigned long nbrId, N->neighbors()) {
Atom *neighbor = d->molecule->atomById(nbrId);
if (neighbor->isHydrogen()) {
H = d->molecule->atomById(nbrId);
H_pos = *H->pos();
break;
} else {
H_pos += *N->pos() - *neighbor->pos();
}
}
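      // No explicit hydrogen was found: H will be placed 1.1 Angstroms from N
      // along the sum of the N->neighbor vectors, i.e. pointing away from N's
      // bonded heavy atoms.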
if (!H) {
H_pos = *N->pos() + 1.1 * H_pos.normalized();
}
// find C & O in residue 2
foreach (unsigned long id, residue2->atoms()) {
if (residue2->atomId(id).trimmed() == "C") C = d->molecule->atomById(id);
if (residue2->atomId(id).trimmed() == "O") O = d->molecule->atomById(id);
}
if (!C || !O)
continue;
// C=O ~ H-N
//
// C +0.42e O -0.42e
// H +0.20e N -0.20e
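      // DSSP-style electrostatic H-bond energy (Kabsch & Sander):
      // E = 332 * 0.42 * 0.20 * (1/rON + 1/rCH - 1/rOH - 1/rCN) kcal/mol,
      // assembled term by term below; E < -0.5 kcal/mol counts as an H-bond.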
double rON = (*O->pos() - *N->pos()).norm();
double rCH = (*C->pos() - H_pos).norm();
double rOH = (*O->pos() - H_pos).norm();
double rCN = (*C->pos() - *N->pos()).norm();
double eON = 332 * (-0.42 * -0.20) / rON;
double eCH = 332 * ( 0.42 * 0.20) / rCH;
double eOH = 332 * (-0.42 * 0.20) / rOH;
double eCN = 332 * ( 0.42 * -0.20) / rCN;
double E = eON + eCH + eOH + eCN;
if (E >= -0.5)
continue;
d->hbondPairs[residue1->index()].append(residue2);
d->hbondPairs[residue2->index()].append(residue1);
//qDebug() << atom->residue()->index() << "-" << nbr->residue()->index() << "=" << delta;
}
}
}
void Protein::detectStructure()
{
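  // Assign per-residue helix codes from the H-bond pattern: 'G' (3-turn),
  // 'H' (4-turn) and 'I' (5-turn), clearing runs shorter than the minimum
  // length after each pass. Sheet detection is currently disabled below.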
foreach (const QVector<Residue*> &residues, d->chains) { // for each chain
foreach (Residue *residue, residues) { // for each residue in the chain
      //qDebug() << "extending 3-turn helix...";
extendHelix('G', 3, residue, residues);
//qDebug() << "3 turn helix:" << d->structure;
clearShortPatterns('G', 3);
//qDebug() << " cleaned:" << d->structure;
      //qDebug() << "extending 4-turn helix...";
extendHelix('H', 4, residue, residues);
      //qDebug() << "4 turn helix:" << d->structure;
clearShortPatterns('H', 4);
//qDebug() << " cleaned:" << d->structure;
      //qDebug() << "extending 5-turn helix";
extendHelix('I', 5, residue, residues);
      //qDebug() << "5 turn helix:" << d->structure;
clearShortPatterns('I', 5);
//qDebug() << " cleaned:" << d->structure;
if (d->structure.at(residue->index()) != '-')
continue;
//extendSheet(0, residue, residues);
}
}
d->num3turnHelixes = -1;
d->num4turnHelixes = -1;
d->num5turnHelixes = -1;
}
void Protein::extendHelix(char c, int turn, Residue *residue, const QVector<Residue*> &residues)
{
if (d->structure.at(residue->index()) != '-')
return;
  // Mark this residue with code `c` when it has an H-bond partner exactly
  // `turn` residues away in the same chain, then try to extend to the next residue.
foreach (Residue *partner, d->hbondPairs.at(residue->index())) { // for each H-bond partner
if (residue->chainNumber() != partner->chainNumber())
continue;
int res1 = residues.indexOf(residue);
int res2 = residues.indexOf(partner);
int delta = abs(res1 - res2);
if (delta == turn) {
d->structure.data()[residue->index()] = c;
int next = res1 + 1;
if (next >= residues.size())
return;
Residue *nextResidue = residues.at(next);
extendHelix(c, turn, nextResidue, residues);
}
}
}
void Protein::extendSheet(int delta, Residue *residue, const QVector<Residue*> &residues)
{
  // Extend a beta-strand assignment ('B'): follow H-bond partners at a fixed
  // sequence offset (`delta`; 0 accepts any offset) through consecutive residues.
foreach (Residue *partner, d->hbondPairs.at(residue->index())) { // for each H-bond partner
int res1 = residues.indexOf(residue);
int res2 = residues.indexOf(partner);
int del = abs(res1 - res2);
if ((del == delta) || !delta) {
int next = res1 + 1;
if (next == residues.size())
continue;
Residue *nextResidue = residues.at(next);
d->structure.data()[residue->index()] = 'B';
extendSheet(del, nextResidue, residues);
}
}
}
void Protein::clearShortPatterns()
{
clearShortPatterns('G', 3);
clearShortPatterns('H', 4);
}
void Protein::clearShortPatterns(char c, int min)
{
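  // Overwrite any run of `c` shorter than `min` residues with '-', so a helix
  // is only kept when it spans at least `min` consecutive residues.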
for (int i = 0 ; i < d->structure.size(); ++i) {
if (d->structure.at(i) == c) {
QByteArray array;
for (int j = i ; j < d->structure.size(); ++j) {
if (d->structure.at(j) == c)
array.append('-');
else
break;
}
if (array.size() < min)
d->structure.replace(i, array.size(), array);
i += array.size();<|fim▁hole|>} // End namespace<|fim▁end|> | }
}
}
|
<|file_name|>connected-accounts.spec.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { TooltipModule } from 'ngx-bootstrap';
import { Contexts } from 'ngx-fabric8-wit';
import { AuthenticationService, UserService } from 'ngx-login-client';
import { empty, of, throwError } from 'rxjs';
import { initContext } from 'testing/test-context';
import { ProviderService } from '../../../shared/account/provider.service';
import { TenantService } from '../../services/tenant.service';
import { ConnectedAccountsComponent } from './connected-accounts.component';
@Component({
template: `<|fim▁hole|> <alm-connected-accounts></alm-connected-accounts>
`,
})
class SampleTestComponent {}
describe('Connected Accounts Component', () => {
const expectedOsoUser: string = 'oso-test-user';
const ctx: any = {
user: {
attributes: {
username: expectedOsoUser,
},
},
};
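  // Tenant stub: the component under test is expected to derive clusterName
  // ('cluster-name') from the host of the cluster-console-url below.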
const mockTenantData: any = {
attributes: {
namespaces: [
{
'cluster-console-url': 'http://example.cluster-name.something.com',
},
],
},
};
const contextsMock: any = jasmine.createSpy('Contexts');
const authMock: any = jasmine.createSpyObj('AuthenticationService', ['isOpenShiftConnected']);
const providersMock: any = jasmine.createSpyObj('ProviderService', [
'getGitHubStatus',
'getOpenShiftStatus',
]);
const userServiceMock: any = jasmine.createSpy('UserService');
  const tenantServiceMock: any = jasmine.createSpyObj('TenantService', ['getTenant']);
describe('User has only OpenShift account connected', (): void => {
beforeAll(
(): void => {
authMock.gitHubToken = empty();
// authMock.openShiftToken = of('oso-token');
authMock.isOpenShiftConnected.and.returnValue(of(true));
contextsMock.current = of(ctx);
userServiceMock.loggedInUser = empty();
userServiceMock.currentLoggedInUser = ctx.user;
providersMock.getGitHubStatus.and.returnValue(throwError('failure'));
providersMock.getOpenShiftStatus.and.returnValue(of({ username: expectedOsoUser }));
        tenantServiceMock.getTenant.and.returnValue(of(mockTenantData));
},
);
const testContext = initContext(ConnectedAccountsComponent, SampleTestComponent, {
imports: [TooltipModule.forRoot()],
providers: [
{ provide: AuthenticationService, useValue: authMock },
{ provide: Contexts, useValue: contextsMock },
{ provide: UserService, useValue: userServiceMock },
{ provide: ProviderService, useValue: providersMock },
        { provide: TenantService, useValue: tenantServiceMock },
],
});
it('should have absence of GitHub connection indicated', (): void => {
const actualText: string = testContext.testedElement.textContent;
expect(actualText).toMatch(new RegExp('GitHub\\s+Disconnected'));
});
it('should have OpenShift connection indicated', (): void => {
const actualText: string = testContext.testedElement.textContent;
expect(actualText).toMatch(new RegExp(expectedOsoUser));
});
it('should set cluster name and cluster url by calling tenant service', (): void => {
expect(testContext.testedDirective.consoleUrl).toBe(
'http://example.cluster-name.something.com',
);
expect(testContext.testedDirective.clusterName).toBe('cluster-name');
});
});
  describe('User has both GitHub and OpenShift accounts connected', (): void => {
beforeAll(
(): void => {
authMock.gitHubToken = of('gh-test-user');
// authMock.openShiftToken = of('oso-token');
authMock.isOpenShiftConnected.and.returnValue(of(true));
contextsMock.current = of(ctx);
userServiceMock.loggedInUser = empty();
userServiceMock.currentLoggedInUser = ctx.user;
providersMock.getGitHubStatus.and.returnValue(of({ username: 'username' }));
providersMock.getOpenShiftStatus.and.returnValue(of({ username: expectedOsoUser }));
},
);
const testContext = initContext(ConnectedAccountsComponent, SampleTestComponent, {
imports: [TooltipModule.forRoot()],
providers: [
{ provide: AuthenticationService, useValue: authMock },
{ provide: Contexts, useValue: contextsMock },
{ provide: UserService, useValue: userServiceMock },
{ provide: ProviderService, useValue: providersMock },
        { provide: TenantService, useValue: tenantServiceMock },
],
});
it('should have GitHub connection indicated', (): void => {
const actualText: string = testContext.testedElement.textContent;
expect(actualText).toContain('username');
});
it('should have OpenShift connection indicated', (): void => {
const actualText: string = testContext.testedElement.textContent;
expect(actualText).toMatch(new RegExp(expectedOsoUser));
});
});
});<|fim▁end|> | |
<|file_name|>thread8.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use std::sync::Arc;
use std::sync::Mutex;
fn main() {
let answer = Arc::new(Mutex::new(42));
let answer_ref = answer.clone();
let t = thread::spawn(move || {
let mut answer = answer_ref.lock().unwrap();
*answer = 55;
});
t.join().unwrap();
let ar = answer.lock().unwrap();
assert_eq!(*ar, 55);
}<|fim▁end|> | // thread9.rs
use std::thread; |
<|file_name|>parsers_cache.py<|end_file_name|><|fim▁begin|>import os
import pickle
from parsers_backend import get_tree
from directories import dirs
def get_cached_sentence_image(argv, output_path, img_path):
    """Returns whether the sentence image has already been generated, so it can be skipped instead of regenerated.
Args:
argv: The command line arguments.
output_path: The path for the output folder.
img_path: The path to the image file to be checked.
Returns:
A boolean flagging if image is already generated or not.
"""
cache_file_final = output_path + img_path
if argv.tetre_force_clean:
return False
else:
return os.path.isfile(cache_file_final)
def get_cached_tokens(argv):
    """Returns the already parsed sentences containing the word being searched, if the folder was not modified.
Args:
argv: The command line arguments.
Returns:
A list of tree.FullSentence objects, the sentences parsed from the raw text.
"""
updated_at_date = os.path.getmtime(dirs['raw_input']['path'])
cache_key = argv.tetre_word.lower() + str(int(updated_at_date))
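    # The cache key embeds the raw-input folder's mtime, so any change to the
    # input files automatically invalidates the cached parse for this word.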
cache_file = dirs['output_cache']['path'] + cache_key + ".spacy"
if os.path.isfile(cache_file) and not argv.tetre_force_clean:
# is cached<|fim▁hole|> # is not cached, so generates it again
sentences = get_tree(argv)
# saves to disk
with open(cache_file, "wb") as f:
pickle.dump(sentences, f, protocol=pickle.HIGHEST_PROTOCOL)
return sentences<|fim▁end|> | with open(cache_file, 'rb') as f:
sentences = pickle.load(f)
else: |
<|file_name|>unitTest.cpp<|end_file_name|><|fim▁begin|>#include "Distance.h"
#include "FileName.h"
#include "IException.h"
#include "ShapeModelFactory.h"
#include "ShapeModel.h"
#include "Camera.h"
#include "Preference.h"
#include "CameraFactory.h"
#include "Target.h"
/**
* This application tests the ShapeModelFactory class.
*
* @author 2010-10-11 Debbie A. Cook
*
* @internal
* @history
*/
using namespace std;
using namespace Isis;
int main() {
Isis::Preference::Preferences(true);
cout << "Unit test for Isis::ShapeModel" << endl;
// Test sky target
// Build label for sky target test
PvlGroup inst1("Instrument");
inst1 += PvlKeyword("TargetName", "Sky");
PvlGroup inst2("Instrument");
inst2 += PvlKeyword("TargetName", "Mars");
PvlGroup kern1("Kernels");
FileName f("$base/testData/kernels");
FileName f2("$base/dems");
FileName f3("$mgs/testData");
QString dir = f.expanded() + "/";
QString dir2 = f2.expanded() + "/";
QString dir3 = f3.expanded() + "/";
kern1 += PvlKeyword("NaifFrameCode", toString(-94031));
kern1 += PvlKeyword("LeapSecond", dir + "naif0007.tls");
kern1 += PvlKeyword("SpacecraftClock", dir + "MGS_SCLKSCET.00045.tsc");
kern1 += PvlKeyword("TargetPosition", dir + "de405.bsp");
kern1 += PvlKeyword("TargetAttitudeShape", dir + "pck00006.tpc");
kern1 += PvlKeyword("Instrument", dir + "mocSpiceUnitTest.ti");
kern1 += PvlKeyword("InstrumentAddendum", dir + "mocAddendum.ti");
kern1 += PvlKeyword("InstrumentPosition", dir + "moc.bsp");
kern1 += PvlKeyword("InstrumentPointing", dir + "moc.bc");
kern1 += PvlKeyword("Frame", "");
kern1 += PvlKeyword("NaifBodyCode", toString(499));
// Time Setup
double startTime = -69382819.0;
double endTime = -69382512.0;
double slope = (endTime - startTime) / (10 - 1);
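  // One sample interval (10 evenly spaced points across the time range),
  // used below as the StartPadding/EndPadding values.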
kern1 += PvlKeyword("StartPadding", toString(slope));
kern1 += PvlKeyword("EndPadding", toString(slope));
Pvl lab1;
lab1.addGroup(inst1);
lab1.addGroup(kern1);
// Test ShapeModel keyword
cout << endl << " Testing ShapeModel keyword..." << endl;
PvlGroup kern2 = kern1;
kern2 += PvlKeyword("ShapeModel", dir2 + "molaMarsPlanetaryRadius0005.cub");
Pvl lab2;
lab2.addGroup(inst2);
lab2.addGroup(kern2);
Spice spiSh(lab2);
Target targSh(&spiSh, lab2);
ShapeModel *smSh = ShapeModelFactory::create(&targSh, lab2);
cout << " Successfully created shape " << smSh->name() << endl;
delete smSh;
// Test ElevationModel keyword with value
cout << endl << " Testing ElevationModel keyword..." << endl;
PvlGroup kern3 = kern1;
kern3 += PvlKeyword("ElevationModel", dir2 + "molaMarsPlanetaryRadius0005.cub");
Pvl lab3;
lab3.addGroup(inst2);
lab3.addGroup(kern3);
Spice spiEl(lab3);
Target targEl(&spiEl, lab3);
ShapeModel *smEl = ShapeModelFactory::create(&targEl, lab3);
cout << " Successfully created shape " << smEl->name() << endl;
delete smEl;
// Test ElevationModel keyword with Null value
cout << endl << " Testing ElevationModel keyword Null..." << endl;
  PvlGroup kern4 = kern1;
kern4 += PvlKeyword("ShapeModel", "Null");
Pvl lab4;
lab4.addGroup(inst2);
lab4.addGroup(kern4);
Spice spiElNull(lab4);
Target targElNull(&spiElNull, lab4);
ShapeModel *smElNull = ShapeModelFactory::create(&targElNull, lab4);
cout << " Successfully created shape " << smElNull->name() << endl;
delete smElNull;
// Create Spice and Target objects for sky test
Spice skySpi(lab1);
Target skyTarget(&skySpi, lab1);
ShapeModel *skyShape = ShapeModelFactory::create(&skyTarget, lab1);
cout << endl << " Testing Sky target..." << endl << " Shape model is " << skyShape->name() << endl;
try {
// Test ShapeModel file that does not exist
cout << endl << " Testing nonexistent file for shape model dem" << endl;
PvlGroup kern5 = kern1;
kern5 += PvlKeyword("ShapeModel", "NotAFile");
Pvl lab5;
lab5.addGroup(inst2);
lab5.addGroup(kern5);
Spice spiBadFile(lab5);
Target targBadFile(&spiBadFile, lab5);
ShapeModel *smBadFile = ShapeModelFactory::create(&targBadFile, lab4);
cout << " Successfully created shape " << smBadFile->name() << endl;
delete smBadFile;
}
catch(Isis::IException &e) {
e.print();
}
try {
// Test ShapeModel that's not a valid Isis map projection
cout << endl << " Testing Isis cube file for dem that is not map projected" << endl;
PvlGroup kern5 = kern1;
kern5 += PvlKeyword("ShapeModel", dir3 + "ab102401.cub");
Pvl lab5;
lab5.addGroup(inst2);
lab5.addGroup(kern5);
Spice spiBadFile(lab5);
Target targBadFile(&spiBadFile, lab5);
ShapeModel *smBadFile = ShapeModelFactory::create(&targBadFile, lab4);
cout << " Successfully created shape " << smBadFile->name() << endl;
delete smBadFile;
}
catch(Isis::IException &e) {
e.print();
}
try {
// Test ShapeModel dem that's not Equatorial Cylindrical
cout << endl << " Testing a dem that's not equatorial cylindrical" << endl;
PvlGroup kern5 = kern1;
kern5 += PvlKeyword("ShapeModel", dir3 + "ab102402.lev2.cub");
Pvl lab5;
lab5.addGroup(inst2);
lab5.addGroup(kern5);
Spice spiDem(lab5);
Target targDem(&spiDem, lab5);
ShapeModel *smDem = ShapeModelFactory::create(&targDem, lab5);
cout << " Successfully created shape " << smDem->name() << endl;
delete smDem;
}
catch(Isis::IException &e) {
e.print();
}
// Test demshape with ShapeModel keyword
cout << endl << " Testing dem shape..." << endl;
QString inputFile = "$ISIS3DATA/mgs/testData/ab102401.cub";
Cube cube;
cube.open(inputFile);
Camera *c = cube.camera();
vector<Distance> radii(3,Distance());
radii = c->target()->radii();
Pvl pvl = *cube.label();
Spice spi(pvl);<|fim▁hole|> Target targ(&spi, pvl);
targ.setRadii(radii);
ShapeModel *sm = ShapeModelFactory::create(&targ, pvl);
cout << " Successfully created shape " << sm->name() << endl;
delete sm;
cube.close();
// Test ellipsoid shape (ShapeModel = Null)
cout << endl << " Testing ellipsoid shape..." << endl;
inputFile = "$ISIS3DATA/galileo/testData/1213r.cub";
cube.open(inputFile);
c = cube.camera();
radii = c->target()->radii();
pvl = *cube.label();
Spice spi2(pvl);
Target targ2(&spi2, pvl);
targ2.setRadii(radii);
sm = ShapeModelFactory::create(&targ2, pvl);
cout << " Successfully created shape " << sm->name() << endl;
delete sm;
cube.close();
// Test plane shape TBD
// inputFile = "$ISIS3DATA/;
// cube.open(inputFile);
// c = cube.camera();
// radii = c->target()->radii();
// pvl = *cube.label();
// Target targ2(pvl);
// targ3.setRadii(radii);
// sm = ShapeModelFactory::Create(&targ3, pvl);
// cout << "Successfully created shape " << sm->name() << endl;
// delete sm;
// cube.close();
}<|fim▁end|> | |
<|file_name|>ProgressBar.cpp<|end_file_name|><|fim▁begin|>/*
DC++ Widget Toolkit
Copyright (c) 2007-2013, Jacek Sieka
<|fim▁hole|> Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the DWT nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <dwt/widgets/ProgressBar.h>
namespace dwt {
const TCHAR ProgressBar::windowClass[] = PROGRESS_CLASS;
ProgressBar::Seed::Seed() :
BaseType::Seed(WS_CHILD | PBS_SMOOTH)
{
}
}<|fim▁end|> | All rights reserved.
|
<|file_name|>CipherAlgorithm.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.syncope.types;
public enum CipherAlgorithm {
MD5("MD5"),
SHA1("SHA-1"),
SHA256("SHA-256"),
AES("AES");
final private String algorithm;
CipherAlgorithm(String algorithm) {
this.algorithm = algorithm;
}
public final String getAlgorithm() {
return algorithm;
}
}<|fim▁end|> | * with the License. You may obtain a copy of the License at
* |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Conversion from AST representation of types to the `ty.rs` representation.
//! The main routine here is `ast_ty_to_ty()`; each use is parameterized by an
//! instance of `AstConv`.
mod errors;
mod generics;
use crate::bounds::Bounds;
use crate::collect::PlaceholderHirTyCollector;
use crate::errors::{
AmbiguousLifetimeBound, MultipleRelaxedDefaultBounds, TraitObjectDeclaredWithNoTraits,
TypeofReservedKeywordUsed, ValueOfAssociatedStructAlreadySpecified,
};
use crate::middle::resolve_lifetime as rl;
use crate::require_c_abi_if_c_variadic;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{struct_span_err, Applicability, ErrorReported, FatalError};
use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Namespace, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{walk_generics, Visitor as _};
use rustc_hir::lang_items::LangItem;
use rustc_hir::{GenericArg, GenericArgs};
use rustc_middle::ty::subst::{self, GenericArgKind, InternalSubsts, Subst, SubstsRef};
use rustc_middle::ty::GenericParamDefKind;
use rustc_middle::ty::{self, Const, DefIdTree, Ty, TyCtxt, TypeFoldable};
use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS;
use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::spec::abi;
use rustc_trait_selection::traits;
use rustc_trait_selection::traits::astconv_object_safety_violations;
use rustc_trait_selection::traits::error_reporting::report_object_safety_error;
use rustc_trait_selection::traits::wf::object_region_bounds;
use smallvec::SmallVec;
use std::array;
use std::collections::BTreeSet;
use std::slice;
#[derive(Debug)]
pub struct PathSeg(pub DefId, pub usize);
pub trait AstConv<'tcx> {
fn tcx<'a>(&'a self) -> TyCtxt<'tcx>;
fn item_def_id(&self) -> Option<DefId>;
/// Returns predicates in scope of the form `X: Foo<T>`, where `X`
/// is a type parameter `X` with the given id `def_id` and T
/// matches `assoc_name`. This is a subset of the full set of
/// predicates.
///
/// This is used for one specific purpose: resolving "short-hand"
/// associated type references like `T::Item`. In principle, we
/// would do that by first getting the full set of predicates in
/// scope and then filtering down to find those that apply to `T`,
/// but this can lead to cycle errors. The problem is that we have
/// to do this resolution *in order to create the predicates in
/// the first place*. Hence, we have this "special pass".
fn get_type_parameter_bounds(
&self,
span: Span,
def_id: DefId,
assoc_name: Ident,
) -> ty::GenericPredicates<'tcx>;
/// Returns the lifetime to use when a lifetime is omitted (and not elided).
fn re_infer(&self, param: Option<&ty::GenericParamDef>, span: Span)
-> Option<ty::Region<'tcx>>;
/// Returns the type to use when a type is omitted.
fn ty_infer(&self, param: Option<&ty::GenericParamDef>, span: Span) -> Ty<'tcx>;
/// Returns `true` if `_` is allowed in type signatures in the current context.
fn allow_ty_infer(&self) -> bool;
/// Returns the const to use when a const is omitted.
fn ct_infer(
&self,
ty: Ty<'tcx>,
param: Option<&ty::GenericParamDef>,
span: Span,
) -> &'tcx Const<'tcx>;
/// Projecting an associated type from a (potentially)
/// higher-ranked trait reference is more complicated, because of
/// the possibility of late-bound regions appearing in the
/// associated type binding. This is not legal in function
/// signatures for that reason. In a function body, we can always
/// handle it because we can use inference variables to remove the
/// late-bound regions.
fn projected_ty_from_poly_trait_ref(
&self,
span: Span,
item_def_id: DefId,
item_segment: &hir::PathSegment<'_>,
poly_trait_ref: ty::PolyTraitRef<'tcx>,
) -> Ty<'tcx>;
/// Normalize an associated type coming from the user.
fn normalize_ty(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx>;
/// Invoked when we encounter an error from some prior pass
/// (e.g., resolve) that is translated into a ty-error. This is
/// used to help suppress derived errors typeck might otherwise
/// report.
fn set_tainted_by_errors(&self);
fn record_ty(&self, hir_id: hir::HirId, ty: Ty<'tcx>, span: Span);
}
#[derive(Debug)]
struct ConvertedBinding<'a, 'tcx> {
hir_id: hir::HirId,
item_name: Ident,
kind: ConvertedBindingKind<'a, 'tcx>,
gen_args: &'a GenericArgs<'a>,
span: Span,
}
#[derive(Debug)]
enum ConvertedBindingKind<'a, 'tcx> {
Equality(Ty<'tcx>),
Constraint(&'a [hir::GenericBound<'a>]),
}
/// New-typed boolean indicating whether explicit late-bound lifetimes
/// are present in a set of generic arguments.
///
/// For example if we have some method `fn f<'a>(&'a self)` implemented
/// for some type `T`, although `f` is generic in the lifetime `'a`, `'a`
/// is late-bound so should not be provided explicitly. Thus, if `f` is
/// instantiated with some generic arguments providing `'a` explicitly,
/// we taint those arguments with `ExplicitLateBound::Yes` so that we
/// can provide an appropriate diagnostic later.
#[derive(Copy, Clone, PartialEq)]
pub enum ExplicitLateBound {
Yes,
No,
}
#[derive(Copy, Clone, PartialEq)]
pub enum IsMethodCall {
Yes,
No,
}
/// Denotes the "position" of a generic argument, indicating if it is a generic type,
/// generic function or generic method call.
#[derive(Copy, Clone, PartialEq)]
pub(crate) enum GenericArgPosition {
Type,
Value, // e.g., functions
MethodCall,
}
/// A marker denoting that the generic arguments that were
/// provided did not match the respective generic parameters.
#[derive(Clone, Default)]
pub struct GenericArgCountMismatch {
/// Indicates whether a fatal error was reported (`Some`), or just a lint (`None`).
pub reported: Option<ErrorReported>,
/// A list of spans of arguments provided that were not valid.
pub invalid_args: Vec<Span>,
}
/// Decorates the result of a generic argument count mismatch
/// check with whether explicit late bounds were provided.
#[derive(Clone)]
pub struct GenericArgCountResult {
pub explicit_late_bound: ExplicitLateBound,
pub correct: Result<(), GenericArgCountMismatch>,
}<|fim▁hole|> fn args_for_def_id(&mut self, def_id: DefId) -> (Option<&'a GenericArgs<'a>>, bool);
fn provided_kind(
&mut self,
param: &ty::GenericParamDef,
arg: &GenericArg<'_>,
) -> subst::GenericArg<'tcx>;
fn inferred_kind(
&mut self,
substs: Option<&[subst::GenericArg<'tcx>]>,
param: &ty::GenericParamDef,
infer_args: bool,
) -> subst::GenericArg<'tcx>;
}
impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
#[tracing::instrument(level = "debug", skip(self))]
pub fn ast_region_to_region(
&self,
lifetime: &hir::Lifetime,
def: Option<&ty::GenericParamDef>,
) -> ty::Region<'tcx> {
let tcx = self.tcx();
let lifetime_name = |def_id| tcx.hir().name(tcx.hir().local_def_id_to_hir_id(def_id));
let r = match tcx.named_region(lifetime.hir_id) {
Some(rl::Region::Static) => tcx.lifetimes.re_static,
Some(rl::Region::LateBound(debruijn, index, def_id, _)) => {
let name = lifetime_name(def_id.expect_local());
let br = ty::BoundRegion {
var: ty::BoundVar::from_u32(index),
kind: ty::BrNamed(def_id, name),
};
tcx.mk_region(ty::ReLateBound(debruijn, br))
}
Some(rl::Region::LateBoundAnon(debruijn, index, anon_index)) => {
let br = ty::BoundRegion {
var: ty::BoundVar::from_u32(index),
kind: ty::BrAnon(anon_index),
};
tcx.mk_region(ty::ReLateBound(debruijn, br))
}
Some(rl::Region::EarlyBound(index, id, _)) => {
let name = lifetime_name(id.expect_local());
tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { def_id: id, index, name }))
}
Some(rl::Region::Free(scope, id)) => {
let name = lifetime_name(id.expect_local());
tcx.mk_region(ty::ReFree(ty::FreeRegion {
scope,
bound_region: ty::BrNamed(id, name),
}))
// (*) -- not late-bound, won't change
}
None => {
self.re_infer(def, lifetime.span).unwrap_or_else(|| {
debug!(?lifetime, "unelided lifetime in signature");
// This indicates an illegal lifetime
// elision. `resolve_lifetime` should have
// reported an error in this case -- but if
// not, let's error out.
tcx.sess.delay_span_bug(lifetime.span, "unelided lifetime in signature");
// Supply some dummy value. We don't have an
// `re_error`, annoyingly, so use `'static`.
tcx.lifetimes.re_static
})
}
};
debug!("ast_region_to_region(lifetime={:?}) yields {:?}", lifetime, r);
r
}
/// Given a path `path` that refers to an item `I` with the declared generics `decl_generics`,
/// returns an appropriate set of substitutions for this particular reference to `I`.
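    ///
    /// For example, given `struct Foo<T>(T);`, a reference written as `Foo<u8>`
    /// yields the substitutions `[u8]`.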
pub fn ast_path_substs_for_ty(
&self,
span: Span,
def_id: DefId,
item_segment: &hir::PathSegment<'_>,
) -> SubstsRef<'tcx> {
let (substs, _) = self.create_substs_for_ast_path(
span,
def_id,
&[],
item_segment,
item_segment.args(),
item_segment.infer_args,
None,
);
let assoc_bindings = self.create_assoc_bindings_for_generic_args(item_segment.args());
if let Some(b) = assoc_bindings.first() {
Self::prohibit_assoc_ty_binding(self.tcx(), b.span);
}
substs
}
/// Given the type/lifetime/const arguments provided to some path (along with
/// an implicit `Self`, if this is a trait reference), returns the complete
/// set of substitutions. This may involve applying defaulted type parameters.
/// Also returns back constraints on associated types.
///
/// Example:
///
/// ```
/// T: std::ops::Index<usize, Output = u32>
/// ^1 ^^^^^^^^^^^^^^2 ^^^^3 ^^^^^^^^^^^4
/// ```
///
/// 1. The `self_ty` here would refer to the type `T`.
/// 2. The path in question is the path to the trait `std::ops::Index`,
/// which will have been resolved to a `def_id`
/// 3. The `generic_args` contains info on the `<...>` contents. The `usize` type
/// parameters are returned in the `SubstsRef`, the associated type bindings like
/// `Output = u32` are returned in the `Vec<ConvertedBinding...>` result.
///
/// Note that the type listing given here is *exactly* what the user provided.
///
/// For (generic) associated types
///
/// ```
/// <Vec<u8> as Iterable<u8>>::Iter::<'a>
/// ```
///
    /// Here, the parent substs are the substs for the parent trait:
/// `[Vec<u8>, u8]` and `generic_args` are the arguments for the associated
/// type itself: `['a]`. The returned `SubstsRef` concatenates these two
/// lists: `[Vec<u8>, u8, 'a]`.
#[tracing::instrument(level = "debug", skip(self, span))]
fn create_substs_for_ast_path<'a>(
&self,
span: Span,
def_id: DefId,
parent_substs: &[subst::GenericArg<'tcx>],
seg: &hir::PathSegment<'_>,
generic_args: &'a hir::GenericArgs<'_>,
infer_args: bool,
self_ty: Option<Ty<'tcx>>,
) -> (SubstsRef<'tcx>, GenericArgCountResult) {
// If the type is parameterized by this region, then replace this
// region with the current anon region binding (in other words,
// whatever & would get replaced with).
let tcx = self.tcx();
let generics = tcx.generics_of(def_id);
debug!("generics: {:?}", generics);
if generics.has_self {
if generics.parent.is_some() {
// The parent is a trait so it should have at least one subst
// for the `Self` type.
assert!(!parent_substs.is_empty())
} else {
// This item (presumably a trait) needs a self-type.
assert!(self_ty.is_some());
}
} else {
assert!(self_ty.is_none() && parent_substs.is_empty());
}
let arg_count = Self::check_generic_arg_count(
tcx,
span,
def_id,
seg,
generics,
generic_args,
GenericArgPosition::Type,
self_ty.is_some(),
infer_args,
);
// Skip processing if type has no generic parameters.
// Traits always have `Self` as a generic parameter, which means they will not return early
// here and so associated type bindings will be handled regardless of whether there are any
// non-`Self` generic parameters.
if generics.params.is_empty() {
return (tcx.intern_substs(&[]), arg_count);
}
let is_object = self_ty.map_or(false, |ty| ty == self.tcx().types.trait_object_dummy_self);
struct SubstsForAstPathCtxt<'a, 'tcx> {
astconv: &'a (dyn AstConv<'tcx> + 'a),
def_id: DefId,
generic_args: &'a GenericArgs<'a>,
span: Span,
missing_type_params: Vec<String>,
inferred_params: Vec<Span>,
infer_args: bool,
is_object: bool,
}
impl<'tcx, 'a> SubstsForAstPathCtxt<'tcx, 'a> {
fn default_needs_object_self(&mut self, param: &ty::GenericParamDef) -> bool {
let tcx = self.astconv.tcx();
if let GenericParamDefKind::Type { has_default, .. } = param.kind {
if self.is_object && has_default {
let default_ty = tcx.at(self.span).type_of(param.def_id);
let self_param = tcx.types.self_param;
if default_ty.walk(tcx).any(|arg| arg == self_param.into()) {
// There is no suitable inference default for a type parameter
// that references self, in an object type.
return true;
}
}
}
false
}
}
impl<'a, 'tcx> CreateSubstsForGenericArgsCtxt<'a, 'tcx> for SubstsForAstPathCtxt<'a, 'tcx> {
fn args_for_def_id(&mut self, did: DefId) -> (Option<&'a GenericArgs<'a>>, bool) {
if did == self.def_id {
(Some(self.generic_args), self.infer_args)
} else {
// The last component of this tuple is unimportant.
(None, false)
}
}
fn provided_kind(
&mut self,
param: &ty::GenericParamDef,
arg: &GenericArg<'_>,
) -> subst::GenericArg<'tcx> {
let tcx = self.astconv.tcx();
match (¶m.kind, arg) {
(GenericParamDefKind::Lifetime, GenericArg::Lifetime(lt)) => {
self.astconv.ast_region_to_region(lt, Some(param)).into()
}
(&GenericParamDefKind::Type { has_default, .. }, GenericArg::Type(ty)) => {
if has_default {
tcx.check_optional_stability(
param.def_id,
Some(arg.id()),
arg.span(),
None,
|_, _| {
// Default generic parameters may not be marked
// with stability attributes, i.e. when the
// default parameter was defined at the same time
// as the rest of the type. As such, we ignore missing
// stability attributes.
},
)
}
if let (hir::TyKind::Infer, false) =
(&ty.kind, self.astconv.allow_ty_infer())
{
self.inferred_params.push(ty.span);
tcx.ty_error().into()
} else {
self.astconv.ast_ty_to_ty(ty).into()
}
}
(GenericParamDefKind::Const { .. }, GenericArg::Const(ct)) => {
ty::Const::from_opt_const_arg_anon_const(
tcx,
ty::WithOptConstParam {
did: tcx.hir().local_def_id(ct.value.hir_id),
const_param_did: Some(param.def_id),
},
)
.into()
}
(&GenericParamDefKind::Const { has_default }, hir::GenericArg::Infer(inf)) => {
if has_default {
tcx.const_param_default(param.def_id).into()
} else if self.astconv.allow_ty_infer() {
// FIXME(const_generics): Actually infer parameter here?
todo!()
} else {
self.inferred_params.push(inf.span);
tcx.ty_error().into()
}
}
(
&GenericParamDefKind::Type { has_default, .. },
hir::GenericArg::Infer(inf),
) => {
if has_default {
tcx.check_optional_stability(
param.def_id,
Some(arg.id()),
arg.span(),
None,
|_, _| {
// Default generic parameters may not be marked
// with stability attributes, i.e. when the
// default parameter was defined at the same time
// as the rest of the type. As such, we ignore missing
// stability attributes.
},
);
}
if self.astconv.allow_ty_infer() {
self.astconv.ast_ty_to_ty(&inf.to_ty()).into()
} else {
self.inferred_params.push(inf.span);
tcx.ty_error().into()
}
}
_ => unreachable!(),
}
}
fn inferred_kind(
&mut self,
substs: Option<&[subst::GenericArg<'tcx>]>,
param: &ty::GenericParamDef,
infer_args: bool,
) -> subst::GenericArg<'tcx> {
let tcx = self.astconv.tcx();
match param.kind {
GenericParamDefKind::Lifetime => tcx.lifetimes.re_static.into(),
GenericParamDefKind::Type { has_default, .. } => {
if !infer_args && has_default {
// No type parameter provided, but a default exists.
// If we are converting an object type, then the
// `Self` parameter is unknown. However, some of the
// other type parameters may reference `Self` in their
// defaults. This will lead to an ICE if we are not
// careful!
if self.default_needs_object_self(param) {
self.missing_type_params.push(param.name.to_string());
tcx.ty_error().into()
} else {
// This is a default type parameter.
let substs = substs.unwrap();
if substs.iter().any(|arg| match arg.unpack() {
GenericArgKind::Type(ty) => ty.references_error(),
_ => false,
}) {
// Avoid ICE #86756 when type error recovery goes awry.
return tcx.ty_error().into();
}
self.astconv
.normalize_ty(
self.span,
tcx.at(self.span).type_of(param.def_id).subst_spanned(
tcx,
substs,
Some(self.span),
),
)
.into()
}
} else if infer_args {
// No type parameters were provided, we can infer all.
let param = if !self.default_needs_object_self(param) {
Some(param)
} else {
None
};
self.astconv.ty_infer(param, self.span).into()
} else {
// We've already errored above about the mismatch.
tcx.ty_error().into()
}
}
GenericParamDefKind::Const { has_default } => {
let ty = tcx.at(self.span).type_of(param.def_id);
if !infer_args && has_default {
tcx.const_param_default(param.def_id)
.subst_spanned(tcx, substs.unwrap(), Some(self.span))
.into()
} else {
if infer_args {
self.astconv.ct_infer(ty, Some(param), self.span).into()
} else {
// We've already errored above about the mismatch.
tcx.const_error(ty).into()
}
}
}
}
}
}
let mut substs_ctx = SubstsForAstPathCtxt {
astconv: self,
def_id,
span,
generic_args,
missing_type_params: vec![],
inferred_params: vec![],
infer_args,
is_object,
};
let substs = Self::create_substs_for_generic_args(
tcx,
def_id,
parent_substs,
self_ty.is_some(),
self_ty,
&arg_count,
&mut substs_ctx,
);
self.complain_about_missing_type_params(
substs_ctx.missing_type_params,
def_id,
span,
generic_args.args.is_empty(),
);
debug!(
"create_substs_for_ast_path(generic_params={:?}, self_ty={:?}) -> {:?}",
generics, self_ty, substs
);
(substs, arg_count)
}
fn create_assoc_bindings_for_generic_args<'a>(
&self,
generic_args: &'a hir::GenericArgs<'_>,
) -> Vec<ConvertedBinding<'a, 'tcx>> {
// Convert associated-type bindings or constraints into a separate vector.
// Example: Given this:
//
// T: Iterator<Item = u32>
//
// The `T` is passed in as a self-type; the `Item = u32` is
// not a "type parameter" of the `Iterator` trait, but rather
// a restriction on `<T as Iterator>::Item`, so it is passed
// back separately.
let assoc_bindings = generic_args
.bindings
.iter()
.map(|binding| {
let kind = match binding.kind {
hir::TypeBindingKind::Equality { ty } => {
ConvertedBindingKind::Equality(self.ast_ty_to_ty(ty))
}
hir::TypeBindingKind::Constraint { bounds } => {
ConvertedBindingKind::Constraint(bounds)
}
};
ConvertedBinding {
hir_id: binding.hir_id,
item_name: binding.ident,
kind,
gen_args: binding.gen_args,
span: binding.span,
}
})
.collect();
assoc_bindings
}
crate fn create_substs_for_associated_item(
&self,
tcx: TyCtxt<'tcx>,
span: Span,
item_def_id: DefId,
item_segment: &hir::PathSegment<'_>,
parent_substs: SubstsRef<'tcx>,
) -> SubstsRef<'tcx> {
debug!(
"create_substs_for_associated_item(span: {:?}, item_def_id: {:?}, item_segment: {:?}",
span, item_def_id, item_segment
);
if tcx.generics_of(item_def_id).params.is_empty() {
self.prohibit_generics(slice::from_ref(item_segment));
parent_substs
} else {
self.create_substs_for_ast_path(
span,
item_def_id,
parent_substs,
item_segment,
item_segment.args(),
item_segment.infer_args,
None,
)
.0
}
}
/// Instantiates the path for the given trait reference, assuming that it's
/// bound to a valid trait type. Returns the `DefId` of the defining trait.
/// The type _cannot_ be a type other than a trait type.
///
/// If the `projections` argument is `None`, then assoc type bindings like `Foo<T = X>`
/// are disallowed. Otherwise, they are pushed onto the vector given.
pub fn instantiate_mono_trait_ref(
&self,
trait_ref: &hir::TraitRef<'_>,
self_ty: Ty<'tcx>,
) -> ty::TraitRef<'tcx> {
self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1);
self.ast_path_to_mono_trait_ref(
trait_ref.path.span,
trait_ref.trait_def_id().unwrap_or_else(|| FatalError.raise()),
self_ty,
trait_ref.path.segments.last().unwrap(),
)
}
fn instantiate_poly_trait_ref_inner(
&self,
hir_id: hir::HirId,
span: Span,
binding_span: Option<Span>,
constness: ty::BoundConstness,
bounds: &mut Bounds<'tcx>,
speculative: bool,
trait_ref_span: Span,
trait_def_id: DefId,
trait_segment: &hir::PathSegment<'_>,
args: &GenericArgs<'_>,
infer_args: bool,
self_ty: Ty<'tcx>,
) -> GenericArgCountResult {
let (substs, arg_count) = self.create_substs_for_ast_path(
trait_ref_span,
trait_def_id,
&[],
trait_segment,
args,
infer_args,
Some(self_ty),
);
let tcx = self.tcx();
let bound_vars = tcx.late_bound_vars(hir_id);
debug!(?bound_vars);
let assoc_bindings = self.create_assoc_bindings_for_generic_args(args);
let poly_trait_ref =
ty::Binder::bind_with_vars(ty::TraitRef::new(trait_def_id, substs), bound_vars);
debug!(?poly_trait_ref, ?assoc_bindings);
bounds.trait_bounds.push((poly_trait_ref, span, constness));
let mut dup_bindings = FxHashMap::default();
for binding in &assoc_bindings {
// Specify type to assert that error was already reported in `Err` case.
let _: Result<_, ErrorReported> = self.add_predicates_for_ast_type_binding(
hir_id,
poly_trait_ref,
binding,
bounds,
speculative,
&mut dup_bindings,
binding_span.unwrap_or(binding.span),
);
// Okay to ignore `Err` because of `ErrorReported` (see above).
}
arg_count
}
/// Given a trait bound like `Debug`, applies that trait bound the given self-type to construct
/// a full trait reference. The resulting trait reference is returned. This may also generate
/// auxiliary bounds, which are added to `bounds`.
///
/// Example:
///
/// ```
/// poly_trait_ref = Iterator<Item = u32>
/// self_ty = Foo
/// ```
///
/// this would return `Foo: Iterator` and add `<Foo as Iterator>::Item = u32` into `bounds`.
///
    /// **A note on binders:** against our usual convention, there is an implied binder around
/// the `self_ty` and `poly_trait_ref` parameters here. So they may reference bound regions.
/// If for example you had `for<'a> Foo<'a>: Bar<'a>`, then the `self_ty` would be `Foo<'a>`
/// where `'a` is a bound region at depth 0. Similarly, the `poly_trait_ref` would be
/// `Bar<'a>`. The returned poly-trait-ref will have this binder instantiated explicitly,
/// however.
#[tracing::instrument(level = "debug", skip(self, span, constness, bounds, speculative))]
pub(crate) fn instantiate_poly_trait_ref(
&self,
trait_ref: &hir::TraitRef<'_>,
span: Span,
constness: ty::BoundConstness,
self_ty: Ty<'tcx>,
bounds: &mut Bounds<'tcx>,
speculative: bool,
) -> GenericArgCountResult {
let hir_id = trait_ref.hir_ref_id;
let binding_span = None;
let trait_ref_span = trait_ref.path.span;
let trait_def_id = trait_ref.trait_def_id().unwrap_or_else(|| FatalError.raise());
let trait_segment = trait_ref.path.segments.last().unwrap();
let args = trait_segment.args();
let infer_args = trait_segment.infer_args;
self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1);
self.complain_about_internal_fn_trait(span, trait_def_id, trait_segment);
self.instantiate_poly_trait_ref_inner(
hir_id,
span,
binding_span,
constness,
bounds,
speculative,
trait_ref_span,
trait_def_id,
trait_segment,
args,
infer_args,
self_ty,
)
}
pub(crate) fn instantiate_lang_item_trait_ref(
&self,
lang_item: hir::LangItem,
span: Span,
hir_id: hir::HirId,
args: &GenericArgs<'_>,
self_ty: Ty<'tcx>,
bounds: &mut Bounds<'tcx>,
) {
let binding_span = Some(span);
let constness = ty::BoundConstness::NotConst;
let speculative = false;
let trait_ref_span = span;
let trait_def_id = self.tcx().require_lang_item(lang_item, Some(span));
let trait_segment = &hir::PathSegment::invalid();
let infer_args = false;
self.instantiate_poly_trait_ref_inner(
hir_id,
span,
binding_span,
constness,
bounds,
speculative,
trait_ref_span,
trait_def_id,
trait_segment,
args,
infer_args,
self_ty,
);
}
fn ast_path_to_mono_trait_ref(
&self,
span: Span,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
trait_segment: &hir::PathSegment<'_>,
) -> ty::TraitRef<'tcx> {
let (substs, _) =
self.create_substs_for_ast_trait_ref(span, trait_def_id, self_ty, trait_segment);
let assoc_bindings = self.create_assoc_bindings_for_generic_args(trait_segment.args());
if let Some(b) = assoc_bindings.first() {
Self::prohibit_assoc_ty_binding(self.tcx(), b.span);
}
ty::TraitRef::new(trait_def_id, substs)
}
#[tracing::instrument(level = "debug", skip(self, span))]
fn create_substs_for_ast_trait_ref<'a>(
&self,
span: Span,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
trait_segment: &'a hir::PathSegment<'a>,
) -> (SubstsRef<'tcx>, GenericArgCountResult) {
self.complain_about_internal_fn_trait(span, trait_def_id, trait_segment);
self.create_substs_for_ast_path(
span,
trait_def_id,
&[],
trait_segment,
trait_segment.args(),
trait_segment.infer_args,
Some(self_ty),
)
}
fn trait_defines_associated_type_named(&self, trait_def_id: DefId, assoc_name: Ident) -> bool {
self.tcx()
.associated_items(trait_def_id)
.find_by_name_and_kind(self.tcx(), assoc_name, ty::AssocKind::Type, trait_def_id)
.is_some()
}
// Sets `implicitly_sized` to true on `Bounds` if necessary
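    // A type parameter is `Sized` by default; the default is only relaxed when
    // an explicit `?Sized` bound is found among `ast_bounds` (or, for `Self`,
    // in the trait's own where-clauses). Other `?Trait` bounds are ignored
    // with a warning.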
pub(crate) fn add_implicitly_sized<'hir>(
&self,
bounds: &mut Bounds<'hir>,
ast_bounds: &'hir [hir::GenericBound<'hir>],
self_ty_where_predicates: Option<(hir::HirId, &'hir [hir::WherePredicate<'hir>])>,
span: Span,
) {
let tcx = self.tcx();
// Try to find an unbound in bounds.
let mut unbound = None;
let mut search_bounds = |ast_bounds: &'hir [hir::GenericBound<'hir>]| {
for ab in ast_bounds {
if let hir::GenericBound::Trait(ptr, hir::TraitBoundModifier::Maybe) = ab {
if unbound.is_none() {
unbound = Some(&ptr.trait_ref);
} else {
tcx.sess.emit_err(MultipleRelaxedDefaultBounds { span });
}
}
}
};
search_bounds(ast_bounds);
if let Some((self_ty, where_clause)) = self_ty_where_predicates {
let self_ty_def_id = tcx.hir().local_def_id(self_ty).to_def_id();
for clause in where_clause {
if let hir::WherePredicate::BoundPredicate(pred) = clause {
match pred.bounded_ty.kind {
hir::TyKind::Path(hir::QPath::Resolved(_, path)) => match path.res {
Res::Def(DefKind::TyParam, def_id) if def_id == self_ty_def_id => {}
_ => continue,
},
_ => continue,
}
search_bounds(pred.bounds);
}
}
}
let sized_def_id = tcx.lang_items().require(LangItem::Sized);
match (&sized_def_id, unbound) {
(Ok(sized_def_id), Some(tpb))
if tpb.path.res == Res::Def(DefKind::Trait, *sized_def_id) =>
{
// There was in fact a `?Sized` bound, return without doing anything
return;
}
(_, Some(_)) => {
// There was a `?Trait` bound, but it was not `?Sized`; warn.
tcx.sess.span_warn(
span,
"default bound relaxed for a type parameter, but \
this does nothing because the given bound is not \
a default; only `?Sized` is supported",
);
// Otherwise, add implicitly sized if `Sized` is available.
}
_ => {
// There was no `?Sized` bound; add implicitly sized if `Sized` is available.
}
}
if sized_def_id.is_err() {
// No lang item for `Sized`, so we can't add it as a bound.
return;
}
bounds.implicitly_sized = Some(span);
}
/// This helper takes a *converted* parameter type (`param_ty`)
/// and an *unconverted* list of bounds:
///
/// ```text
/// fn foo<T: Debug>
/// ^ ^^^^^ `ast_bounds` parameter, in HIR form
/// |
/// `param_ty`, in ty form
/// ```
///
/// It adds these `ast_bounds` into the `bounds` structure.
///
/// **A note on binders:** there is an implied binder around
/// `param_ty` and `ast_bounds`. See `instantiate_poly_trait_ref`
/// for more details.
#[tracing::instrument(level = "debug", skip(self, ast_bounds, bounds))]
pub(crate) fn add_bounds<'hir, I: Iterator<Item = &'hir hir::GenericBound<'hir>>>(
&self,
param_ty: Ty<'tcx>,
ast_bounds: I,
bounds: &mut Bounds<'tcx>,
bound_vars: &'tcx ty::List<ty::BoundVariableKind>,
) {
for ast_bound in ast_bounds {
match ast_bound {
hir::GenericBound::Trait(poly_trait_ref, modifier) => {
let constness = match modifier {
hir::TraitBoundModifier::MaybeConst => ty::BoundConstness::ConstIfConst,
hir::TraitBoundModifier::None => ty::BoundConstness::NotConst,
hir::TraitBoundModifier::Maybe => continue,
};
let _ = self.instantiate_poly_trait_ref(
&poly_trait_ref.trait_ref,
poly_trait_ref.span,
constness,
param_ty,
bounds,
false,
);
}
&hir::GenericBound::LangItemTrait(lang_item, span, hir_id, args) => {
self.instantiate_lang_item_trait_ref(
lang_item, span, hir_id, args, param_ty, bounds,
);
}
hir::GenericBound::Outlives(lifetime) => {
let region = self.ast_region_to_region(lifetime, None);
bounds
.region_bounds
.push((ty::Binder::bind_with_vars(region, bound_vars), lifetime.span));
}
}
}
}
/// Translates a list of bounds from the HIR into the `Bounds` data structure.
/// The self-type for the bounds is given by `param_ty`.
///
/// Example:
///
/// ```
/// fn foo<T: Bar + Baz>() { }
/// ^ ^^^^^^^^^ ast_bounds
/// param_ty
/// ```
///
/// The `sized_by_default` parameter indicates if, in this context, the `param_ty` should be
/// considered `Sized` unless there is an explicit `?Sized` bound. This would be true in the
/// example above, but is not true in supertrait listings like `trait Foo: Bar + Baz`.
///
    /// `span` should be the declaration span of the parameter.
pub(crate) fn compute_bounds(
&self,
param_ty: Ty<'tcx>,
ast_bounds: &[hir::GenericBound<'_>],
) -> Bounds<'tcx> {
self.compute_bounds_inner(param_ty, ast_bounds)
}
/// Convert the bounds in `ast_bounds` that refer to traits which define an associated type
/// named `assoc_name` into ty::Bounds. Ignore the rest.
pub(crate) fn compute_bounds_that_match_assoc_type(
&self,
param_ty: Ty<'tcx>,
ast_bounds: &[hir::GenericBound<'_>],
assoc_name: Ident,
) -> Bounds<'tcx> {
let mut result = Vec::new();
for ast_bound in ast_bounds {
if let Some(trait_ref) = ast_bound.trait_ref() {
if let Some(trait_did) = trait_ref.trait_def_id() {
if self.tcx().trait_may_define_assoc_type(trait_did, assoc_name) {
result.push(ast_bound.clone());
}
}
}
}
self.compute_bounds_inner(param_ty, &result)
}
fn compute_bounds_inner(
&self,
param_ty: Ty<'tcx>,
ast_bounds: &[hir::GenericBound<'_>],
) -> Bounds<'tcx> {
let mut bounds = Bounds::default();
self.add_bounds(param_ty, ast_bounds.iter(), &mut bounds, ty::List::empty());
bounds
}
/// Given an HIR binding like `Item = Foo` or `Item: Foo`, pushes the corresponding predicates
/// onto `bounds`.
///
/// **A note on binders:** given something like `T: for<'a> Iterator<Item = &'a u32>`, the
/// `trait_ref` here will be `for<'a> T: Iterator`. The `binding` data however is from *inside*
/// the binder (e.g., `&'a u32`) and hence may reference bound regions.
#[tracing::instrument(
level = "debug",
skip(self, bounds, speculative, dup_bindings, path_span)
)]
fn add_predicates_for_ast_type_binding(
&self,
hir_ref_id: hir::HirId,
trait_ref: ty::PolyTraitRef<'tcx>,
binding: &ConvertedBinding<'_, 'tcx>,
bounds: &mut Bounds<'tcx>,
speculative: bool,
dup_bindings: &mut FxHashMap<DefId, Span>,
path_span: Span,
) -> Result<(), ErrorReported> {
// Given something like `U: SomeTrait<T = X>`, we want to produce a
// predicate like `<U as SomeTrait>::T = X`. This is somewhat
// subtle in the event that `T` is defined in a supertrait of
// `SomeTrait`, because in that case we need to upcast.
//
// That is, consider this case:
//
// ```
// trait SubTrait: SuperTrait<i32> { }
// trait SuperTrait<A> { type T; }
//
// ... B: SubTrait<T = foo> ...
// ```
//
// We want to produce `<B as SuperTrait<i32>>::T == foo`.
let tcx = self.tcx();
let candidate =
if self.trait_defines_associated_type_named(trait_ref.def_id(), binding.item_name) {
// Simple case: X is defined in the current trait.
trait_ref
} else {
// Otherwise, we have to walk through the supertraits to find
// those that do.
self.one_bound_for_assoc_type(
|| traits::supertraits(tcx, trait_ref),
|| trait_ref.print_only_trait_path().to_string(),
binding.item_name,
path_span,
|| match binding.kind {
ConvertedBindingKind::Equality(ty) => Some(ty.to_string()),
_ => None,
},
)?
};
let (assoc_ident, def_scope) =
tcx.adjust_ident_and_get_scope(binding.item_name, candidate.def_id(), hir_ref_id);
// We have already adjusted the item name above, so compare with `ident.normalize_to_macros_2_0()` instead
// of calling `filter_by_name_and_kind`.
let assoc_ty = tcx
.associated_items(candidate.def_id())
.filter_by_name_unhygienic(assoc_ident.name)
.find(|i| {
i.kind == ty::AssocKind::Type && i.ident.normalize_to_macros_2_0() == assoc_ident
})
.expect("missing associated type");
if !assoc_ty.vis.is_accessible_from(def_scope, tcx) {
tcx.sess
.struct_span_err(
binding.span,
&format!("associated type `{}` is private", binding.item_name),
)
.span_label(binding.span, "private associated type")
.emit();
}
tcx.check_stability(assoc_ty.def_id, Some(hir_ref_id), binding.span, None);
if !speculative {
dup_bindings
.entry(assoc_ty.def_id)
.and_modify(|prev_span| {
self.tcx().sess.emit_err(ValueOfAssociatedStructAlreadySpecified {
span: binding.span,
prev_span: *prev_span,
item_name: binding.item_name,
def_path: tcx.def_path_str(assoc_ty.container.id()),
});
})
.or_insert(binding.span);
}
// Include substitutions for generic parameters of associated types
let projection_ty = candidate.map_bound(|trait_ref| {
let ident = Ident::new(assoc_ty.ident.name, binding.item_name.span);
let item_segment = hir::PathSegment {
ident,
hir_id: Some(binding.hir_id),
res: None,
args: Some(binding.gen_args),
infer_args: false,
};
let substs_trait_ref_and_assoc_item = self.create_substs_for_associated_item(
tcx,
path_span,
assoc_ty.def_id,
&item_segment,
trait_ref.substs,
);
debug!(
"add_predicates_for_ast_type_binding: substs for trait-ref and assoc_item: {:?}",
substs_trait_ref_and_assoc_item
);
ty::ProjectionTy {
item_def_id: assoc_ty.def_id,
substs: substs_trait_ref_and_assoc_item,
}
});
if !speculative {
// Find any late-bound regions declared in `ty` that are not
// declared in the trait-ref or assoc_ty. These are not well-formed.
//
// Example:
//
// for<'a> <T as Iterator>::Item = &'a str // <-- 'a is bad
// for<'a> <T as FnMut<(&'a u32,)>>::Output = &'a str // <-- 'a is ok
if let ConvertedBindingKind::Equality(ty) = binding.kind {
let late_bound_in_trait_ref =
tcx.collect_constrained_late_bound_regions(&projection_ty);
let late_bound_in_ty =
tcx.collect_referenced_late_bound_regions(&trait_ref.rebind(ty));
debug!("late_bound_in_trait_ref = {:?}", late_bound_in_trait_ref);
debug!("late_bound_in_ty = {:?}", late_bound_in_ty);
// FIXME: point at the type params that don't have appropriate lifetimes:
// struct S1<F: for<'a> Fn(&i32, &i32) -> &'a i32>(F);
// ---- ---- ^^^^^^^
self.validate_late_bound_regions(
late_bound_in_trait_ref,
late_bound_in_ty,
|br_name| {
struct_span_err!(
tcx.sess,
binding.span,
E0582,
"binding for associated type `{}` references {}, \
which does not appear in the trait input types",
binding.item_name,
br_name
)
},
);
}
}
match binding.kind {
ConvertedBindingKind::Equality(ty) => {
// "Desugar" a constraint like `T: Iterator<Item = u32>` this to
// the "projection predicate" for:
//
// `<T as Iterator>::Item = u32`
bounds.projection_bounds.push((
projection_ty.map_bound(|projection_ty| {
debug!(
"add_predicates_for_ast_type_binding: projection_ty {:?}, substs: {:?}",
projection_ty, projection_ty.substs
);
ty::ProjectionPredicate { projection_ty, ty }
}),
binding.span,
));
}
ConvertedBindingKind::Constraint(ast_bounds) => {
// "Desugar" a constraint like `T: Iterator<Item: Debug>` to
//
// `<T as Iterator>::Item: Debug`
//
// Calling `skip_binder` is okay, because `add_bounds` expects the `param_ty`
// parameter to have a skipped binder.
let param_ty = tcx.mk_ty(ty::Projection(projection_ty.skip_binder()));
self.add_bounds(param_ty, ast_bounds.iter(), bounds, candidate.bound_vars());
}
}
Ok(())
}
fn ast_path_to_ty(
&self,
span: Span,
did: DefId,
item_segment: &hir::PathSegment<'_>,
) -> Ty<'tcx> {
let substs = self.ast_path_substs_for_ty(span, did, item_segment);
self.normalize_ty(span, self.tcx().at(span).type_of(did).subst(self.tcx(), substs))
}
fn conv_object_ty_poly_trait_ref(
&self,
span: Span,
trait_bounds: &[hir::PolyTraitRef<'_>],
lifetime: &hir::Lifetime,
borrowed: bool,
) -> Ty<'tcx> {
let tcx = self.tcx();
let mut bounds = Bounds::default();
let mut potential_assoc_types = Vec::new();
let dummy_self = self.tcx().types.trait_object_dummy_self;
for trait_bound in trait_bounds.iter().rev() {
if let GenericArgCountResult {
correct:
Err(GenericArgCountMismatch { invalid_args: cur_potential_assoc_types, .. }),
..
} = self.instantiate_poly_trait_ref(
&trait_bound.trait_ref,
trait_bound.span,
ty::BoundConstness::NotConst,
dummy_self,
&mut bounds,
false,
) {
potential_assoc_types.extend(cur_potential_assoc_types);
}
}
// Expand trait aliases recursively and check that only one regular (non-auto) trait
// is used and no 'maybe' bounds are used.
let expanded_traits =
traits::expand_trait_aliases(tcx, bounds.trait_bounds.iter().map(|&(a, b, _)| (a, b)));
let (mut auto_traits, regular_traits): (Vec<_>, Vec<_>) =
expanded_traits.partition(|i| tcx.trait_is_auto(i.trait_ref().def_id()));
if regular_traits.len() > 1 {
            let first_trait = &regular_traits[0];
            let additional_trait = &regular_traits[1];
let mut err = struct_span_err!(
tcx.sess,
additional_trait.bottom().1,
E0225,
"only auto traits can be used as additional traits in a trait object"
);
additional_trait.label_with_exp_info(
&mut err,
"additional non-auto trait",
"additional use",
);
first_trait.label_with_exp_info(&mut err, "first non-auto trait", "first use");
err.help(&format!(
"consider creating a new trait with all of these as supertraits and using that \
trait here instead: `trait NewTrait: {} {{}}`",
regular_traits
.iter()
.map(|t| t.trait_ref().print_only_trait_path().to_string())
.collect::<Vec<_>>()
.join(" + "),
));
err.note(
"auto-traits like `Send` and `Sync` are traits that have special properties; \
for more information on them, visit \
<https://doc.rust-lang.org/reference/special-types-and-traits.html#auto-traits>",
);
err.emit();
}
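        // Illustrative sketch (not from this file): code that reaches the E0225 path
        // above, and the rewrite suggested by the `help` note; `NewTrait` is a
        // made-up name.
        //
        //     use std::fmt::Debug;
        //     use std::io::Write;
        //
        //     // fn bad(_: Box<dyn Write + Debug>) {}   // E0225: two non-auto traits
        //
        //     trait NewTrait: Write + Debug {}
        //     impl<T: Write + Debug> NewTrait for T {}
        //     fn good(_: Box<dyn NewTrait + Send>) {}   // auto traits may still be added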
if regular_traits.is_empty() && auto_traits.is_empty() {
tcx.sess.emit_err(TraitObjectDeclaredWithNoTraits { span });
return tcx.ty_error();
}
// Check that there are no gross object safety violations;
// most importantly, that the supertraits don't contain `Self`,
// to avoid ICEs.
        for item in &regular_traits {
let object_safety_violations =
astconv_object_safety_violations(tcx, item.trait_ref().def_id());
if !object_safety_violations.is_empty() {
report_object_safety_error(
tcx,
span,
item.trait_ref().def_id(),
&object_safety_violations[..],
)
.emit();
return tcx.ty_error();
}
}
// Use a `BTreeSet` to keep output in a more consistent order.
let mut associated_types: FxHashMap<Span, BTreeSet<DefId>> = FxHashMap::default();
let regular_traits_refs_spans = bounds
.trait_bounds
.into_iter()
.filter(|(trait_ref, _, _)| !tcx.trait_is_auto(trait_ref.def_id()));
for (base_trait_ref, span, constness) in regular_traits_refs_spans {
assert_eq!(constness, ty::BoundConstness::NotConst);
for obligation in traits::elaborate_trait_ref(tcx, base_trait_ref) {
debug!(
"conv_object_ty_poly_trait_ref: observing object predicate `{:?}`",
obligation.predicate
);
let bound_predicate = obligation.predicate.kind();
match bound_predicate.skip_binder() {
ty::PredicateKind::Trait(pred) => {
let pred = bound_predicate.rebind(pred);
associated_types.entry(span).or_default().extend(
tcx.associated_items(pred.def_id())
.in_definition_order()
.filter(|item| item.kind == ty::AssocKind::Type)
.map(|item| item.def_id),
);
}
ty::PredicateKind::Projection(pred) => {
let pred = bound_predicate.rebind(pred);
// A `Self` within the original bound will be substituted with a
// `trait_object_dummy_self`, so check for that.
let references_self =
pred.skip_binder().ty.walk(tcx).any(|arg| arg == dummy_self.into());
// If the projection output contains `Self`, force the user to
// elaborate it explicitly to avoid a lot of complexity.
//
// The "classicaly useful" case is the following:
// ```
// trait MyTrait: FnMut() -> <Self as MyTrait>::MyOutput {
// type MyOutput;
// }
// ```
//
// Here, the user could theoretically write `dyn MyTrait<Output = X>`,
// but actually supporting that would "expand" to an infinitely-long type
                        // `fix $ τ → dyn MyTrait<MyOutput = X, Output = <τ as MyTrait>::MyOutput>`.
//
// Instead, we force the user to write
// `dyn MyTrait<MyOutput = X, Output = X>`, which is uglier but works. See
// the discussion in #56288 for alternatives.
if !references_self {
// Include projections defined on supertraits.
bounds.projection_bounds.push((pred, span));
}
}
_ => (),
}
}
}
for (projection_bound, _) in &bounds.projection_bounds {
for def_ids in associated_types.values_mut() {
def_ids.remove(&projection_bound.projection_def_id());
}
}
self.complain_about_missing_associated_types(
associated_types,
potential_assoc_types,
trait_bounds,
);
// De-duplicate auto traits so that, e.g., `dyn Trait + Send + Send` is the same as
// `dyn Trait + Send`.
// We remove duplicates by inserting into a `FxHashSet` to avoid re-ordering
// the bounds
let mut duplicates = FxHashSet::default();
auto_traits.retain(|i| duplicates.insert(i.trait_ref().def_id()));
debug!("regular_traits: {:?}", regular_traits);
debug!("auto_traits: {:?}", auto_traits);
// Erase the `dummy_self` (`trait_object_dummy_self`) used above.
let existential_trait_refs = regular_traits.iter().map(|i| {
i.trait_ref().map_bound(|trait_ref: ty::TraitRef<'tcx>| {
if trait_ref.self_ty() != dummy_self {
// FIXME: There appears to be a missing filter on top of `expand_trait_aliases`,
// which picks up non-supertraits where clauses - but also, the object safety
// completely ignores trait aliases, which could be object safety hazards. We
// `delay_span_bug` here to avoid an ICE in stable even when the feature is
// disabled. (#66420)
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!(
"trait_ref_to_existential called on {:?} with non-dummy Self",
trait_ref,
),
);
}
ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref)
})
});
let existential_projections = bounds.projection_bounds.iter().map(|(bound, _)| {
bound.map_bound(|b| {
if b.projection_ty.self_ty() != dummy_self {
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!("trait_ref_to_existential called on {:?} with non-dummy Self", b),
);
}
ty::ExistentialProjection::erase_self_ty(tcx, b)
})
});
let regular_trait_predicates = existential_trait_refs
.map(|trait_ref| trait_ref.map_bound(ty::ExistentialPredicate::Trait));
let auto_trait_predicates = auto_traits.into_iter().map(|trait_ref| {
ty::Binder::dummy(ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id()))
});
// N.b. principal, projections, auto traits
// FIXME: This is actually wrong with multiple principals in regards to symbol mangling
let mut v = regular_trait_predicates
.chain(
existential_projections.map(|x| x.map_bound(ty::ExistentialPredicate::Projection)),
)
.chain(auto_trait_predicates)
.collect::<SmallVec<[_; 8]>>();
v.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder()));
v.dedup();
let existential_predicates = tcx.mk_poly_existential_predicates(v.into_iter());
// Use explicitly-specified region bound.
let region_bound = if !lifetime.is_elided() {
self.ast_region_to_region(lifetime, None)
} else {
self.compute_object_lifetime_bound(span, existential_predicates).unwrap_or_else(|| {
if tcx.named_region(lifetime.hir_id).is_some() {
self.ast_region_to_region(lifetime, None)
} else {
self.re_infer(None, span).unwrap_or_else(|| {
let mut err = struct_span_err!(
tcx.sess,
span,
E0228,
"the lifetime bound for this object type cannot be deduced \
from context; please supply an explicit bound"
);
if borrowed {
// We will have already emitted an error E0106 complaining about a
// missing named lifetime in `&dyn Trait`, so we elide this one.
err.delay_as_bug();
} else {
err.emit();
}
tcx.lifetimes.re_static
})
}
})
};
debug!("region_bound: {:?}", region_bound);
let ty = tcx.mk_dynamic(existential_predicates, region_bound);
debug!("trait_object_type: {:?}", ty);
ty
}
fn report_ambiguous_associated_type(
&self,
span: Span,
type_str: &str,
trait_str: &str,
name: Symbol,
) {
let mut err = struct_span_err!(self.tcx().sess, span, E0223, "ambiguous associated type");
if let (true, Ok(snippet)) = (
self.tcx()
.resolutions(())
.confused_type_with_std_module
.keys()
.any(|full_span| full_span.contains(span)),
self.tcx().sess.source_map().span_to_snippet(span),
) {
err.span_suggestion(
span,
"you are looking for the module in `std`, not the primitive type",
format!("std::{}", snippet),
Applicability::MachineApplicable,
);
} else {
err.span_suggestion(
span,
"use fully-qualified syntax",
format!("<{} as {}>::{}", type_str, trait_str, name),
Applicability::HasPlaceholders,
);
}
err.emit();
}
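    // Illustrative sketch (not from this file): the E0223 case reported above,
    // where an associated type is named through a concrete type and the trait
    // cannot be inferred; `Tr` and `St` are made-up names.
    //
    //     trait Tr { type X; }
    //     struct St;
    //     impl Tr for St { type X = u8; }
    //
    //     // type T = St::X;         // E0223: ambiguous associated type
    //     type T = <St as Tr>::X;    // the suggested fully-qualified form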
// Search for a bound on a type parameter which includes the associated item
    // given by `assoc_name`. `ty_param_def_id` is the `DefId` of the type parameter.
// This function will fail if there are no suitable bounds or there is
// any ambiguity.
fn find_bound_for_assoc_item(
&self,
ty_param_def_id: LocalDefId,
assoc_name: Ident,
span: Span,
) -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> {
let tcx = self.tcx();
debug!(
"find_bound_for_assoc_item(ty_param_def_id={:?}, assoc_name={:?}, span={:?})",
ty_param_def_id, assoc_name, span,
);
let predicates = &self
.get_type_parameter_bounds(span, ty_param_def_id.to_def_id(), assoc_name)
.predicates;
debug!("find_bound_for_assoc_item: predicates={:#?}", predicates);
let param_hir_id = tcx.hir().local_def_id_to_hir_id(ty_param_def_id);
let param_name = tcx.hir().ty_param_name(param_hir_id);
self.one_bound_for_assoc_type(
|| {
traits::transitive_bounds_that_define_assoc_type(
tcx,
predicates.iter().filter_map(|(p, _)| {
p.to_opt_poly_trait_ref().map(|trait_ref| trait_ref.value)
}),
assoc_name,
)
},
|| param_name.to_string(),
assoc_name,
span,
|| None,
)
}
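    // Illustrative sketch (not from this file): the search performed above.
    // Writing `T::Item` requires finding, among `T`'s bounds and their
    // supertraits, exactly one trait that defines an associated type `Item`:
    //
    //     fn first<T: Iterator>(mut it: T) -> Option<T::Item> { it.next() }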
// Checks that `bounds` contains exactly one element and reports appropriate
// errors otherwise.
fn one_bound_for_assoc_type<I>(
&self,
all_candidates: impl Fn() -> I,
ty_param_name: impl Fn() -> String,
assoc_name: Ident,
span: Span,
is_equality: impl Fn() -> Option<String>,
) -> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
where
I: Iterator<Item = ty::PolyTraitRef<'tcx>>,
{
let mut matching_candidates = all_candidates()
.filter(|r| self.trait_defines_associated_type_named(r.def_id(), assoc_name));
let bound = match matching_candidates.next() {
Some(bound) => bound,
None => {
self.complain_about_assoc_type_not_found(
all_candidates,
&ty_param_name(),
assoc_name,
span,
);
return Err(ErrorReported);
}
};
debug!("one_bound_for_assoc_type: bound = {:?}", bound);
if let Some(bound2) = matching_candidates.next() {
debug!("one_bound_for_assoc_type: bound2 = {:?}", bound2);
let is_equality = is_equality();
let bounds = array::IntoIter::new([bound, bound2]).chain(matching_candidates);
let mut err = if is_equality.is_some() {
// More specific Error Index entry.
struct_span_err!(
self.tcx().sess,
span,
E0222,
"ambiguous associated type `{}` in bounds of `{}`",
assoc_name,
ty_param_name()
)
} else {
struct_span_err!(
self.tcx().sess,
span,
E0221,
"ambiguous associated type `{}` in bounds of `{}`",
assoc_name,
ty_param_name()
)
};
err.span_label(span, format!("ambiguous associated type `{}`", assoc_name));
let mut where_bounds = vec![];
for bound in bounds {
let bound_id = bound.def_id();
let bound_span = self
.tcx()
.associated_items(bound_id)
.find_by_name_and_kind(self.tcx(), assoc_name, ty::AssocKind::Type, bound_id)
.and_then(|item| self.tcx().hir().span_if_local(item.def_id));
if let Some(bound_span) = bound_span {
err.span_label(
bound_span,
format!(
"ambiguous `{}` from `{}`",
assoc_name,
bound.print_only_trait_path(),
),
);
if let Some(constraint) = &is_equality {
where_bounds.push(format!(
" T: {trait}::{assoc} = {constraint}",
trait=bound.print_only_trait_path(),
assoc=assoc_name,
constraint=constraint,
));
} else {
err.span_suggestion_verbose(
span.with_hi(assoc_name.span.lo()),
"use fully qualified syntax to disambiguate",
format!(
"<{} as {}>::",
ty_param_name(),
bound.print_only_trait_path(),
),
Applicability::MaybeIncorrect,
);
}
} else {
err.note(&format!(
"associated type `{}` could derive from `{}`",
ty_param_name(),
bound.print_only_trait_path(),
));
}
}
if !where_bounds.is_empty() {
err.help(&format!(
"consider introducing a new type parameter `T` and adding `where` constraints:\
\n where\n T: {},\n{}",
ty_param_name(),
where_bounds.join(",\n"),
));
}
err.emit();
if !where_bounds.is_empty() {
return Err(ErrorReported);
}
}
Ok(bound)
}
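    // Illustrative sketch (not from this file): the ambiguity diagnosed above when
    // more than one matching bound defines the associated type, plus the fully
    // qualified syntax that the suggestion points to; `A` and `B` are made-up traits.
    //
    //     trait A { type Item; }
    //     trait B { type Item; }
    //
    //     // fn f<T: A + B>() -> T::Item { unimplemented!() }     // E0221: ambiguous `Item`
    //     fn f<T: A + B>() -> <T as A>::Item { unimplemented!() }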
// Create a type from a path to an associated type.
// For a path `A::B::C::D`, `qself_ty` and `qself_def` are the type and def for `A::B::C`
// and item_segment is the path segment for `D`. We return a type and a def for
// the whole path.
// Will fail except for `T::A` and `Self::A`; i.e., if `qself_ty`/`qself_def` are not a type
// parameter or `Self`.
// NOTE: When this function starts resolving `Trait::AssocTy` successfully
    // it should also start reporting the `BARE_TRAIT_OBJECTS` lint.
pub fn associated_path_to_ty(
&self,
hir_ref_id: hir::HirId,
span: Span,
qself_ty: Ty<'tcx>,
qself_res: Res,
assoc_segment: &hir::PathSegment<'_>,
permit_variants: bool,
) -> Result<(Ty<'tcx>, DefKind, DefId), ErrorReported> {
let tcx = self.tcx();
let assoc_ident = assoc_segment.ident;
debug!("associated_path_to_ty: {:?}::{}", qself_ty, assoc_ident);
// Check if we have an enum variant.
let mut variant_resolution = None;
if let ty::Adt(adt_def, _) = qself_ty.kind() {
if adt_def.is_enum() {
let variant_def = adt_def
.variants
.iter()
.find(|vd| tcx.hygienic_eq(assoc_ident, vd.ident, adt_def.did));
if let Some(variant_def) = variant_def {
if permit_variants {
tcx.check_stability(variant_def.def_id, Some(hir_ref_id), span, None);
self.prohibit_generics(slice::from_ref(assoc_segment));
return Ok((qself_ty, DefKind::Variant, variant_def.def_id));
} else {
variant_resolution = Some(variant_def.def_id);
}
}
}
}
// Find the type of the associated item, and the trait where the associated
// item is declared.
let bound = match (&qself_ty.kind(), qself_res) {
(_, Res::SelfTy(Some(_), Some((impl_def_id, _)))) => {
// `Self` in an impl of a trait -- we have a concrete self type and a
// trait reference.
let trait_ref = match tcx.impl_trait_ref(impl_def_id) {
Some(trait_ref) => trait_ref,
None => {
// A cycle error occurred, most likely.
return Err(ErrorReported);
}
};
self.one_bound_for_assoc_type(
|| traits::supertraits(tcx, ty::Binder::dummy(trait_ref)),
|| "Self".to_string(),
assoc_ident,
span,
|| None,
)?
}
(
&ty::Param(_),
Res::SelfTy(Some(param_did), None) | Res::Def(DefKind::TyParam, param_did),
) => self.find_bound_for_assoc_item(param_did.expect_local(), assoc_ident, span)?,
_ => {
if variant_resolution.is_some() {
// Variant in type position
let msg = format!("expected type, found variant `{}`", assoc_ident);
tcx.sess.span_err(span, &msg);
} else if qself_ty.is_enum() {
let mut err = struct_span_err!(
tcx.sess,
assoc_ident.span,
E0599,
"no variant named `{}` found for enum `{}`",
assoc_ident,
qself_ty,
);
let adt_def = qself_ty.ty_adt_def().expect("enum is not an ADT");
if let Some(suggested_name) = find_best_match_for_name(
&adt_def
.variants
.iter()
.map(|variant| variant.ident.name)
.collect::<Vec<Symbol>>(),
assoc_ident.name,
None,
) {
err.span_suggestion(
assoc_ident.span,
"there is a variant with a similar name",
suggested_name.to_string(),
Applicability::MaybeIncorrect,
);
} else {
err.span_label(
assoc_ident.span,
format!("variant not found in `{}`", qself_ty),
);
}
if let Some(sp) = tcx.hir().span_if_local(adt_def.did) {
let sp = tcx.sess.source_map().guess_head_span(sp);
err.span_label(sp, format!("variant `{}` not found here", assoc_ident));
}
err.emit();
} else if !qself_ty.references_error() {
// Don't print `TyErr` to the user.
self.report_ambiguous_associated_type(
span,
&qself_ty.to_string(),
"Trait",
assoc_ident.name,
);
}
return Err(ErrorReported);
}
};
let trait_did = bound.def_id();
let (assoc_ident, def_scope) =
tcx.adjust_ident_and_get_scope(assoc_ident, trait_did, hir_ref_id);
// We have already adjusted the item name above, so compare with `ident.normalize_to_macros_2_0()` instead
// of calling `filter_by_name_and_kind`.
let item = tcx
.associated_items(trait_did)
.in_definition_order()
.find(|i| {
i.kind.namespace() == Namespace::TypeNS
&& i.ident.normalize_to_macros_2_0() == assoc_ident
})
.expect("missing associated type");
let ty = self.projected_ty_from_poly_trait_ref(span, item.def_id, assoc_segment, bound);
let ty = self.normalize_ty(span, ty);
let kind = DefKind::AssocTy;
if !item.vis.is_accessible_from(def_scope, tcx) {
let kind = kind.descr(item.def_id);
let msg = format!("{} `{}` is private", kind, assoc_ident);
tcx.sess
.struct_span_err(span, &msg)
.span_label(span, &format!("private {}", kind))
.emit();
}
tcx.check_stability(item.def_id, Some(hir_ref_id), span, None);
if let Some(variant_def_id) = variant_resolution {
tcx.struct_span_lint_hir(AMBIGUOUS_ASSOCIATED_ITEMS, hir_ref_id, span, |lint| {
let mut err = lint.build("ambiguous associated item");
let mut could_refer_to = |kind: DefKind, def_id, also| {
let note_msg = format!(
"`{}` could{} refer to the {} defined here",
assoc_ident,
also,
kind.descr(def_id)
);
                    err.span_note(tcx.def_span(def_id), &note_msg);
};
could_refer_to(DefKind::Variant, variant_def_id, "");
could_refer_to(kind, item.def_id, " also");
err.span_suggestion(
span,
"use fully-qualified syntax",
format!("<{} as {}>::{}", qself_ty, tcx.item_name(trait_did), assoc_ident),
Applicability::MachineApplicable,
);
err.emit();
});
}
Ok((ty, kind, item.def_id))
}
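    // Illustrative sketch (not from this file): the `AMBIGUOUS_ASSOCIATED_ITEMS`
    // lint emitted above fires when an enum variant and a trait's associated type
    // share a name; `E`, `Tr` and `V` are made-up names.
    //
    //     enum E { V }
    //     trait Tr { type V; }
    //     impl Tr for E { type V = u8; }
    //
    //     // `<E>::V` would be ambiguous (variant or associated type?); the
    //     // suggested fully-qualified form resolves it:
    //     type X = <E as Tr>::V;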
fn qpath_to_ty(
&self,
span: Span,
opt_self_ty: Option<Ty<'tcx>>,
item_def_id: DefId,
trait_segment: &hir::PathSegment<'_>,
item_segment: &hir::PathSegment<'_>,
) -> Ty<'tcx> {
let tcx = self.tcx();
let trait_def_id = tcx.parent(item_def_id).unwrap();
debug!("qpath_to_ty: trait_def_id={:?}", trait_def_id);
let Some(self_ty) = opt_self_ty else {
let path_str = tcx.def_path_str(trait_def_id);
let def_id = self.item_def_id();
debug!("qpath_to_ty: self.item_def_id()={:?}", def_id);
let parent_def_id = def_id
.and_then(|def_id| {
def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
})
.map(|hir_id| tcx.hir().get_parent_did(hir_id).to_def_id());
debug!("qpath_to_ty: parent_def_id={:?}", parent_def_id);
// If the trait in segment is the same as the trait defining the item,
// use the `<Self as ..>` syntax in the error.
let is_part_of_self_trait_constraints = def_id == Some(trait_def_id);
let is_part_of_fn_in_self_trait = parent_def_id == Some(trait_def_id);
let type_name = if is_part_of_self_trait_constraints || is_part_of_fn_in_self_trait {
"Self"
} else {
"Type"
};
self.report_ambiguous_associated_type(
span,
type_name,
&path_str,
item_segment.ident.name,
);
return tcx.ty_error();
};
debug!("qpath_to_ty: self_type={:?}", self_ty);
let trait_ref = self.ast_path_to_mono_trait_ref(span, trait_def_id, self_ty, trait_segment);
let item_substs = self.create_substs_for_associated_item(
tcx,
span,
item_def_id,
item_segment,
trait_ref.substs,
);
debug!("qpath_to_ty: trait_ref={:?}", trait_ref);
self.normalize_ty(span, tcx.mk_projection(item_def_id, item_substs))
}
pub fn prohibit_generics<'a, T: IntoIterator<Item = &'a hir::PathSegment<'a>>>(
&self,
segments: T,
) -> bool {
let mut has_err = false;
for segment in segments {
let (mut err_for_lt, mut err_for_ty, mut err_for_ct) = (false, false, false);
for arg in segment.args().args {
let (span, kind) = match arg {
hir::GenericArg::Lifetime(lt) => {
if err_for_lt {
continue;
}
err_for_lt = true;
has_err = true;
(lt.span, "lifetime")
}
hir::GenericArg::Type(ty) => {
if err_for_ty {
continue;
}
err_for_ty = true;
has_err = true;
(ty.span, "type")
}
hir::GenericArg::Const(ct) => {
if err_for_ct {
continue;
}
err_for_ct = true;
has_err = true;
(ct.span, "const")
}
hir::GenericArg::Infer(inf) => {
if err_for_ty {
continue;
}
has_err = true;
err_for_ty = true;
(inf.span, "generic")
}
};
let mut err = struct_span_err!(
self.tcx().sess,
span,
E0109,
"{} arguments are not allowed for this type",
kind,
);
err.span_label(span, format!("{} argument not allowed", kind));
err.emit();
if err_for_lt && err_for_ty && err_for_ct {
break;
}
}
// Only emit the first error to avoid overloading the user with error messages.
if let [binding, ..] = segment.args().bindings {
has_err = true;
Self::prohibit_assoc_ty_binding(self.tcx(), binding.span);
}
}
has_err
}
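    // Illustrative sketch (not from this file): the kind of path segment rejected
    // above with E0109, i.e. generic arguments supplied where none are accepted,
    // for example on a primitive type:
    //
    //     // let x: bool<i32> = true;    // E0109: type arguments are not allowed
    //     // let y: u8<'static> = 0;     // E0109: lifetime arguments are not allowed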
// FIXME(eddyb, varkor) handle type paths here too, not just value ones.
pub fn def_ids_for_value_path_segments(
&self,
segments: &[hir::PathSegment<'_>],
self_ty: Option<Ty<'tcx>>,
kind: DefKind,
def_id: DefId,
) -> Vec<PathSeg> {
// We need to extract the type parameters supplied by the user in
// the path `path`. Due to the current setup, this is a bit of a
// tricky-process; the problem is that resolve only tells us the
// end-point of the path resolution, and not the intermediate steps.
// Luckily, we can (at least for now) deduce the intermediate steps
// just from the end-point.
//
        // There are basically four cases to consider:
//
// 1. Reference to a constructor of a struct:
//
// struct Foo<T>(...)
//
// In this case, the parameters are declared in the type space.
//
// 2. Reference to a constructor of an enum variant:
//
// enum E<T> { Foo(...) }
//
// In this case, the parameters are defined in the type space,
// but may be specified either on the type or the variant.
//
// 3. Reference to a fn item or a free constant:
//
// fn foo<T>() { }
//
// In this case, the path will again always have the form
// `a::b::foo::<T>` where only the final segment should have
// type parameters. However, in this case, those parameters are
// declared on a value, and hence are in the `FnSpace`.
//
// 4. Reference to a method or an associated constant:
//
// impl<A> SomeStruct<A> {
// fn foo<B>(...)
// }
//
// Here we can have a path like
// `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
// may appear in two places. The penultimate segment,
// `SomeStruct::<A>`, contains parameters in TypeSpace, and the
// final segment, `foo::<B>` contains parameters in fn space.
//
// The first step then is to categorize the segments appropriately.
let tcx = self.tcx();
assert!(!segments.is_empty());
let last = segments.len() - 1;
let mut path_segs = vec![];
match kind {
// Case 1. Reference to a struct constructor.
DefKind::Ctor(CtorOf::Struct, ..) => {
// Everything but the final segment should have no
// parameters at all.
let generics = tcx.generics_of(def_id);
// Variant and struct constructors use the
// generics of their parent type definition.
let generics_def_id = generics.parent.unwrap_or(def_id);
path_segs.push(PathSeg(generics_def_id, last));
}
// Case 2. Reference to a variant constructor.
DefKind::Ctor(CtorOf::Variant, ..) | DefKind::Variant => {
let adt_def = self_ty.map(|t| t.ty_adt_def().unwrap());
let (generics_def_id, index) = if let Some(adt_def) = adt_def {
debug_assert!(adt_def.is_enum());
(adt_def.did, last)
} else if last >= 1 && segments[last - 1].args.is_some() {
// Everything but the penultimate segment should have no
// parameters at all.
let mut def_id = def_id;
// `DefKind::Ctor` -> `DefKind::Variant`
if let DefKind::Ctor(..) = kind {
def_id = tcx.parent(def_id).unwrap()
}
// `DefKind::Variant` -> `DefKind::Enum`
let enum_def_id = tcx.parent(def_id).unwrap();
(enum_def_id, last - 1)
} else {
// FIXME: lint here recommending `Enum::<...>::Variant` form
// instead of `Enum::Variant::<...>` form.
// Everything but the final segment should have no
// parameters at all.
let generics = tcx.generics_of(def_id);
// Variant and struct constructors use the
// generics of their parent type definition.
(generics.parent.unwrap_or(def_id), last)
};
path_segs.push(PathSeg(generics_def_id, index));
}
// Case 3. Reference to a top-level value.
DefKind::Fn | DefKind::Const | DefKind::ConstParam | DefKind::Static => {
path_segs.push(PathSeg(def_id, last));
}
// Case 4. Reference to a method or associated const.
DefKind::AssocFn | DefKind::AssocConst => {
if segments.len() >= 2 {
let generics = tcx.generics_of(def_id);
path_segs.push(PathSeg(generics.parent.unwrap(), last - 1));
}
path_segs.push(PathSeg(def_id, last));
}
kind => bug!("unexpected definition kind {:?} for {:?}", kind, def_id),
}
debug!("path_segs = {:?}", path_segs);
path_segs
}
// Check a type `Path` and convert it to a `Ty`.
pub fn res_to_ty(
&self,
opt_self_ty: Option<Ty<'tcx>>,
path: &hir::Path<'_>,
permit_variants: bool,
) -> Ty<'tcx> {
let tcx = self.tcx();
debug!(
"res_to_ty(res={:?}, opt_self_ty={:?}, path_segments={:?})",
path.res, opt_self_ty, path.segments
);
let span = path.span;
match path.res {
Res::Def(DefKind::OpaqueTy, did) => {
// Check for desugared `impl Trait`.
assert!(ty::is_impl_trait_defn(tcx, did).is_none());
let item_segment = path.segments.split_last().unwrap();
self.prohibit_generics(item_segment.1);
let substs = self.ast_path_substs_for_ty(span, did, item_segment.0);
self.normalize_ty(span, tcx.mk_opaque(did, substs))
}
Res::Def(
DefKind::Enum
| DefKind::TyAlias
| DefKind::Struct
| DefKind::Union
| DefKind::ForeignTy,
did,
) => {
assert_eq!(opt_self_ty, None);
self.prohibit_generics(path.segments.split_last().unwrap().1);
self.ast_path_to_ty(span, did, path.segments.last().unwrap())
}
Res::Def(kind @ DefKind::Variant, def_id) if permit_variants => {
// Convert "variant type" as if it were a real type.
                // The resulting `Ty` is the type of the variant's enum for now.
assert_eq!(opt_self_ty, None);
let path_segs =
self.def_ids_for_value_path_segments(path.segments, None, kind, def_id);
let generic_segs: FxHashSet<_> =
path_segs.iter().map(|PathSeg(_, index)| index).collect();
self.prohibit_generics(path.segments.iter().enumerate().filter_map(
|(index, seg)| {
if !generic_segs.contains(&index) { Some(seg) } else { None }
},
));
let PathSeg(def_id, index) = path_segs.last().unwrap();
self.ast_path_to_ty(span, *def_id, &path.segments[*index])
}
Res::Def(DefKind::TyParam, def_id) => {
assert_eq!(opt_self_ty, None);
self.prohibit_generics(path.segments);
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
let item_id = tcx.hir().get_parent_node(hir_id);
let item_def_id = tcx.hir().local_def_id(item_id);
let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id];
tcx.mk_ty_param(index, tcx.hir().name(hir_id))
}
Res::SelfTy(Some(_), None) => {
// `Self` in trait or type alias.
assert_eq!(opt_self_ty, None);
self.prohibit_generics(path.segments);
tcx.types.self_param
}
Res::SelfTy(_, Some((def_id, forbid_generic))) => {
// `Self` in impl (we know the concrete type).
assert_eq!(opt_self_ty, None);
self.prohibit_generics(path.segments);
// Try to evaluate any array length constants.
let normalized_ty = self.normalize_ty(span, tcx.at(span).type_of(def_id));
if forbid_generic && normalized_ty.definitely_needs_subst(tcx) {
let mut err = tcx.sess.struct_span_err(
path.span,
"generic `Self` types are currently not permitted in anonymous constants",
);
if let Some(hir::Node::Item(&hir::Item {
kind: hir::ItemKind::Impl(ref impl_),
..
})) = tcx.hir().get_if_local(def_id)
{
err.span_note(impl_.self_ty.span, "not a concrete type");
}
err.emit();
tcx.ty_error()
} else {
normalized_ty
}
}
Res::Def(DefKind::AssocTy, def_id) => {
debug_assert!(path.segments.len() >= 2);
self.prohibit_generics(&path.segments[..path.segments.len() - 2]);
self.qpath_to_ty(
span,
opt_self_ty,
def_id,
&path.segments[path.segments.len() - 2],
path.segments.last().unwrap(),
)
}
Res::PrimTy(prim_ty) => {
assert_eq!(opt_self_ty, None);
self.prohibit_generics(path.segments);
match prim_ty {
hir::PrimTy::Bool => tcx.types.bool,
hir::PrimTy::Char => tcx.types.char,
hir::PrimTy::Int(it) => tcx.mk_mach_int(ty::int_ty(it)),
hir::PrimTy::Uint(uit) => tcx.mk_mach_uint(ty::uint_ty(uit)),
hir::PrimTy::Float(ft) => tcx.mk_mach_float(ty::float_ty(ft)),
hir::PrimTy::Str => tcx.types.str_,
}
}
Res::Err => {
self.set_tainted_by_errors();
self.tcx().ty_error()
}
_ => span_bug!(span, "unexpected resolution: {:?}", path.res),
}
}
/// Parses the programmer's textual representation of a type into our
/// internal notion of a type.
pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
self.ast_ty_to_ty_inner(ast_ty, false)
}
/// Turns a `hir::Ty` into a `Ty`. For diagnostics' purposes we keep track of whether trait
/// objects are borrowed like `&dyn Trait` to avoid emitting redundant errors.
#[tracing::instrument(level = "debug", skip(self))]
fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool) -> Ty<'tcx> {
let tcx = self.tcx();
let result_ty = match ast_ty.kind {
hir::TyKind::Slice(ref ty) => tcx.mk_slice(self.ast_ty_to_ty(ty)),
hir::TyKind::Ptr(ref mt) => {
tcx.mk_ptr(ty::TypeAndMut { ty: self.ast_ty_to_ty(mt.ty), mutbl: mt.mutbl })
}
hir::TyKind::Rptr(ref region, ref mt) => {
let r = self.ast_region_to_region(region, None);
debug!(?r);
let t = self.ast_ty_to_ty_inner(mt.ty, true);
tcx.mk_ref(r, ty::TypeAndMut { ty: t, mutbl: mt.mutbl })
}
hir::TyKind::Never => tcx.types.never,
hir::TyKind::Tup(fields) => tcx.mk_tup(fields.iter().map(|t| self.ast_ty_to_ty(t))),
hir::TyKind::BareFn(bf) => {
require_c_abi_if_c_variadic(tcx, bf.decl, bf.abi, ast_ty.span);
tcx.mk_fn_ptr(self.ty_of_fn(
ast_ty.hir_id,
bf.unsafety,
bf.abi,
bf.decl,
&hir::Generics::empty(),
None,
Some(ast_ty),
))
}
hir::TyKind::TraitObject(bounds, ref lifetime, _) => {
self.conv_object_ty_poly_trait_ref(ast_ty.span, bounds, lifetime, borrowed)
}
hir::TyKind::Path(hir::QPath::Resolved(ref maybe_qself, ref path)) => {
debug!(?maybe_qself, ?path);
let opt_self_ty = maybe_qself.as_ref().map(|qself| self.ast_ty_to_ty(qself));
self.res_to_ty(opt_self_ty, path, false)
}
hir::TyKind::OpaqueDef(item_id, lifetimes) => {
let opaque_ty = tcx.hir().item(item_id);
let def_id = item_id.def_id.to_def_id();
match opaque_ty.kind {
hir::ItemKind::OpaqueTy(hir::OpaqueTy { impl_trait_fn, .. }) => {
self.impl_trait_ty_to_ty(def_id, lifetimes, impl_trait_fn.is_some())
}
ref i => bug!("`impl Trait` pointed to non-opaque type?? {:#?}", i),
}
}
hir::TyKind::Path(hir::QPath::TypeRelative(ref qself, ref segment)) => {
debug!(?qself, ?segment);
let ty = self.ast_ty_to_ty(qself);
let res = if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = qself.kind {
path.res
} else {
Res::Err
};
self.associated_path_to_ty(ast_ty.hir_id, ast_ty.span, ty, res, segment, false)
.map(|(ty, _, _)| ty)
.unwrap_or_else(|_| tcx.ty_error())
}
hir::TyKind::Path(hir::QPath::LangItem(lang_item, span)) => {
let def_id = tcx.require_lang_item(lang_item, Some(span));
let (substs, _) = self.create_substs_for_ast_path(
span,
def_id,
&[],
&hir::PathSegment::invalid(),
&GenericArgs::none(),
true,
None,
);
self.normalize_ty(span, tcx.at(span).type_of(def_id).subst(tcx, substs))
}
hir::TyKind::Array(ref ty, ref length) => {
let length_def_id = tcx.hir().local_def_id(length.hir_id);
let length = ty::Const::from_anon_const(tcx, length_def_id);
let array_ty = tcx.mk_ty(ty::Array(self.ast_ty_to_ty(ty), length));
self.normalize_ty(ast_ty.span, array_ty)
}
hir::TyKind::Typeof(ref e) => {
tcx.sess.emit_err(TypeofReservedKeywordUsed { span: ast_ty.span });
tcx.type_of(tcx.hir().local_def_id(e.hir_id))
}
hir::TyKind::Infer => {
// Infer also appears as the type of arguments or return
// values in an ExprKind::Closure, or as
// the type of local variables. Both of these cases are
// handled specially and will not descend into this routine.
self.ty_infer(None, ast_ty.span)
}
hir::TyKind::Err => tcx.ty_error(),
};
debug!(?result_ty);
self.record_ty(ast_ty.hir_id, result_ty, ast_ty.span);
result_ty
}
fn impl_trait_ty_to_ty(
&self,
def_id: DefId,
lifetimes: &[hir::GenericArg<'_>],
replace_parent_lifetimes: bool,
) -> Ty<'tcx> {
debug!("impl_trait_ty_to_ty(def_id={:?}, lifetimes={:?})", def_id, lifetimes);
let tcx = self.tcx();
let generics = tcx.generics_of(def_id);
debug!("impl_trait_ty_to_ty: generics={:?}", generics);
let substs = InternalSubsts::for_item(tcx, def_id, |param, _| {
if let Some(i) = (param.index as usize).checked_sub(generics.parent_count) {
// Our own parameters are the resolved lifetimes.
if let GenericParamDefKind::Lifetime = param.kind {
if let hir::GenericArg::Lifetime(lifetime) = &lifetimes[i] {
self.ast_region_to_region(lifetime, None).into()
} else {
bug!()
}
} else {
bug!()
}
} else {
match param.kind {
// For RPIT (return position impl trait), only lifetimes
// mentioned in the impl Trait predicate are captured by
// the opaque type, so the lifetime parameters from the
// parent item need to be replaced with `'static`.
//
// For `impl Trait` in the types of statics, constants,
// locals and type aliases. These capture all parent
// lifetimes, so they can use their identity subst.
GenericParamDefKind::Lifetime if replace_parent_lifetimes => {
tcx.lifetimes.re_static.into()
}
_ => tcx.mk_param_from_def(param),
}
}
});
debug!("impl_trait_ty_to_ty: substs={:?}", substs);
let ty = tcx.mk_opaque(def_id, substs);
debug!("impl_trait_ty_to_ty: {}", ty);
ty
}
pub fn ty_of_arg(&self, ty: &hir::Ty<'_>, expected_ty: Option<Ty<'tcx>>) -> Ty<'tcx> {
match ty.kind {
hir::TyKind::Infer if expected_ty.is_some() => {
self.record_ty(ty.hir_id, expected_ty.unwrap(), ty.span);
expected_ty.unwrap()
}
_ => self.ast_ty_to_ty(ty),
}
}
pub fn ty_of_fn(
&self,
hir_id: hir::HirId,
unsafety: hir::Unsafety,
abi: abi::Abi,
decl: &hir::FnDecl<'_>,
generics: &hir::Generics<'_>,
ident_span: Option<Span>,
hir_ty: Option<&hir::Ty<'_>>,
) -> ty::PolyFnSig<'tcx> {
debug!("ty_of_fn");
let tcx = self.tcx();
let bound_vars = tcx.late_bound_vars(hir_id);
debug!(?bound_vars);
// We proactively collect all the inferred type params to emit a single error per fn def.
let mut visitor = PlaceholderHirTyCollector::default();
for ty in decl.inputs {
visitor.visit_ty(ty);
}
walk_generics(&mut visitor, generics);
let input_tys = decl.inputs.iter().map(|a| self.ty_of_arg(a, None));
let output_ty = match decl.output {
hir::FnRetTy::Return(output) => {
visitor.visit_ty(output);
self.ast_ty_to_ty(output)
}
hir::FnRetTy::DefaultReturn(..) => tcx.mk_unit(),
};
debug!("ty_of_fn: output_ty={:?}", output_ty);
let fn_ty = tcx.mk_fn_sig(input_tys, output_ty, decl.c_variadic, unsafety, abi);
let bare_fn_ty = ty::Binder::bind_with_vars(fn_ty, bound_vars);
if !self.allow_ty_infer() {
// We always collect the spans for placeholder types when evaluating `fn`s, but we
// only want to emit an error complaining about them if infer types (`_`) are not
// allowed. `allow_ty_infer` gates this behavior. We check for the presence of
// `ident_span` to not emit an error twice when we have `fn foo(_: fn() -> _)`.
crate::collect::placeholder_type_error(
tcx,
ident_span.map(|sp| sp.shrink_to_hi()),
generics.params,
visitor.0,
true,
hir_ty,
"function",
);
}
// Find any late-bound regions declared in return type that do
// not appear in the arguments. These are not well-formed.
//
// Example:
// for<'a> fn() -> &'a str <-- 'a is bad
// for<'a> fn(&'a String) -> &'a str <-- 'a is ok
let inputs = bare_fn_ty.inputs();
let late_bound_in_args =
tcx.collect_constrained_late_bound_regions(&inputs.map_bound(|i| i.to_owned()));
let output = bare_fn_ty.output();
let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output);
self.validate_late_bound_regions(late_bound_in_args, late_bound_in_ret, |br_name| {
struct_span_err!(
tcx.sess,
decl.output.span(),
E0581,
"return type references {}, which is not constrained by the fn input types",
br_name
)
});
bare_fn_ty
}
fn validate_late_bound_regions(
&self,
constrained_regions: FxHashSet<ty::BoundRegionKind>,
referenced_regions: FxHashSet<ty::BoundRegionKind>,
generate_err: impl Fn(&str) -> rustc_errors::DiagnosticBuilder<'tcx>,
) {
for br in referenced_regions.difference(&constrained_regions) {
let br_name = match *br {
ty::BrNamed(_, name) => format!("lifetime `{}`", name),
ty::BrAnon(_) | ty::BrEnv => "an anonymous lifetime".to_string(),
};
let mut err = generate_err(&br_name);
if let ty::BrAnon(_) = *br {
// The only way for an anonymous lifetime to wind up
// in the return type but **also** be unconstrained is
// if it only appears in "associated types" in the
// input. See #47511 and #62200 for examples. In this case,
// though we can easily give a hint that ought to be
// relevant.
err.note(
"lifetimes appearing in an associated type are not considered constrained",
);
}
err.emit();
}
}
/// Given the bounds on an object, determines what single region bound (if any) we can
/// use to summarize this type. The basic idea is that we will use the bound the user
/// provided, if they provided one, and otherwise search the supertypes of trait bounds
/// for region bounds. It may be that we can derive no bound at all, in which case
/// we return `None`.
fn compute_object_lifetime_bound(
&self,
span: Span,
existential_predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
) -> Option<ty::Region<'tcx>> // if None, use the default
{
let tcx = self.tcx();
debug!("compute_opt_region_bound(existential_predicates={:?})", existential_predicates);
// No explicit region bound specified. Therefore, examine trait
// bounds and see if we can derive region bounds from those.
let derived_region_bounds = object_region_bounds(tcx, existential_predicates);
// If there are no derived region bounds, then report back that we
// can find no region bound. The caller will use the default.
if derived_region_bounds.is_empty() {
return None;
}
// If any of the derived region bounds are 'static, that is always
// the best choice.
if derived_region_bounds.iter().any(|&r| ty::ReStatic == *r) {
return Some(tcx.lifetimes.re_static);
}
// Determine whether there is exactly one unique region in the set
// of derived region bounds. If so, use that. Otherwise, report an
// error.
let r = derived_region_bounds[0];
if derived_region_bounds[1..].iter().any(|r1| r != *r1) {
tcx.sess.emit_err(AmbiguousLifetimeBound { span });
}
Some(r)
}
}<|fim▁end|> |
pub trait CreateSubstsForGenericArgsCtxt<'a, 'tcx> { |
<|file_name|>qr.js<|end_file_name|><|fim▁begin|>$(function () {
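    // jQuery UI widget: builds two QR codes for a FeatureBee feature toggle from
    // the entered base URL and feature name, one link that enables the feature
    // ("=true") and one that disables it ("=false").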
$.widget("as24.qr", {
_create: function () {
var self = this;
$(self.element).find('[data-description="QR-Code Generation"]').find("p").hide();
<|fim▁hole|> $(self.element).find('[data-generate="qr-code"]').click(function () {
$(self.element).find('[data-description="QR-Code Generation"]').find("p").show();
var url = $(self.element).find('[name="urlQRCode"]').val();
var featureName = $(self.element).find('[name="name"]').val();
var featureEnabled = url + "?" + $.param({ "FeatureBee": '#' + featureName + "=true#" });
var featureDisabled = url + "?" + $.param({ "FeatureBee": '#' + featureName + "=false#" });
$(self.element).find('#qrcode_enable').empty();
$(self.element).find('#qrcode_enable').qrcode(featureEnabled);
$(self.element).find('#qrcode_disable').empty();
$(self.element).find('#qrcode_disable').qrcode(featureDisabled);
});
}
});
})<|fim▁end|> | |
<|file_name|>PlayGameScreenManager.java<|end_file_name|><|fim▁begin|>package h1z1.screens;
import h1z1.MainFrame;
import h1z1.game.GameState;
import h1z1.input.ButtonHandler;
import h1z1.input.InputButton;
import h1z1.input.InputProvider;
import h1z1.io.ResourceManager;
import javax.imageio.ImageIO;
import java.awt.*;
import java.util.List;
import h1z1.game.Maze;
public class PlayGameScreenManager extends ScreenManager {
private Maze maze = new Maze();
public PlayGameScreenManager(MainFrame mainFrame) throws Exception {
super(mainFrame);
}
@Override
public void update(List<InputProvider> inputs) {
}
@Override
public void paint(Frame frame, Graphics2D graphics) {
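        // Renders the maze as a grid of Maze.SIZE squares: cells whose value is
        // true are drawn red, all other cells white.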
for(int x = 0; x < Maze.WIDTH; x++){
for(int y = 0; y < Maze.HEIGHT; y++) {
boolean value = maze.getValue(x, y);
if (value) {graphics.setColor(Color.RED);}
else{<|fim▁hole|> Maze.OFFSET + Maze.SIZE * y,
Maze.SIZE,
Maze.SIZE);
}
}
}
}<|fim▁end|> | graphics.setColor(Color.WHITE);
}
graphics.fillRect(Maze.OFFSET + Maze.SIZE * x, |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp');
var connect = require('gulp-connect');
var uglify = require('gulp-uglify');
var concat = require('gulp-concat');
var opn = require('opn');
var config = {
rootDir: '.',
servingPort: 8080,
servingDir: './dist',<|fim▁hole|> styles: './src/**/*.css',
images: '',
index: './src/index.html',
partials: ['./src/**/*.html', '!./index.html'],
},
distDev: './dist.dev',
distProd: './dist.prod'
}
}
gulp.task('build', function() {
});
gulp.task('serve', ['connect'], function() {
return opn('http://localhost:' + config.servingPort);
});
gulp.task('livereload', function() {
console.log('reloading')
    gulp.src([config.paths.src.scripts, config.paths.src.styles, config.paths.src.index])
.pipe(connect.reload());
});
gulp.task('connect', function() {
connect.server({
root: config.servingDir,
port: config.servingPort,
livereload: false,
fallback: config.servingDir + '/index.html'
});
});
gulp.task('watch', function() {
    gulp.watch([config.paths.src.styles, config.paths.src.index, config.paths.src.scripts].concat(config.paths.src.partials), ['livereload']);
});
gulp.task('default', ['serve']);
//default: clean-build-prod
//serve dev<|fim▁end|> | paths: {
src: {
scripts: './src/**/*.js', |
<|file_name|>UnitGroup.cpp<|end_file_name|><|fim▁begin|>#include <PrecompiledHeader.h>
#include "Macro/UnitGroup.h"
using namespace BWAPI;
using namespace std;
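// passesFlag: evaluates a single UnitGroup filter flag against a BWAPI unit.
// A negative flag value is treated as the negation of the corresponding
// positive flag (see the recursive call below).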
bool passesFlag(Unit* u, int f)
{
if (f<0)
return !passesFlag(u,-f);
switch(f)
{
case exists:
if (u->exists()) return true;
break;
case isAccelerating:
if (u->isAccelerating()) return true;
break;
case isAttacking:
if (u->isAttacking()) return true;
break;
case isBeingConstructed:
if (u->isBeingConstructed()) return true;
break;
case isBeingGathered:
if (u->isBeingGathered()) return true;
break;
case isBeingHealed:
if (u->isBeingHealed()) return true;
break;
case isBlind:
if (u->isBlind()) return true;
break;
case isBraking:
if (u->isBraking()) return true;
break;
case isBurrowed:
if (u->isBurrowed()) return true;
break;
case isCarryingGas:
if (u->isCarryingGas()) return true;
break;
case isCarryingMinerals:
if (u->isCarryingMinerals()) return true;
break;
case isCloaked:
if (u->isCloaked()) return true;
break;
case isCompleted:
if (u->isCompleted()) return true;
break;
case isConstructing:
if (u->isConstructing()) return true;
break;
case isDefenseMatrixed:
if (u->isDefenseMatrixed()) return true;
break;
case isDetected:
if (u->isDetected()) return true;
break;
case isEnsnared:
if (u->isEnsnared()) return true;
break;
case isFollowing:
if (u->isFollowing()) return true;
break;
case isGatheringGas:
if (u->isGatheringGas()) return true;
break;
case isGatheringMinerals:
if (u->isGatheringMinerals()) return true;
break;
case isHallucination:
if (u->isHallucination()) return true;
break;
case isHoldingPosition:
if (u->isHoldingPosition()) return true;
break;
case isIdle:
if (u->isIdle()) return true;
break;
case isInterruptible:
if (u->isInterruptible()) return true;
break;
case isIrradiated:
if (u->isIrradiated()) return true;
break;
case isLifted:
if (u->isLifted()) return true;
break;
case isLoaded:
if (u->isLoaded()) return true;
break;
case isLockedDown:
if (u->isLockedDown()) return true;
break;
case isMaelstrommed:
if (u->isMaelstrommed()) return true;
break;
case isMorphing:
if (u->isMorphing()) return true;
break;
case isMoving:
if (u->isMoving()) return true;
break;
case isParasited:
if (u->isParasited()) return true;
break;
case isPatrolling:
if (u->isPatrolling()) return true;
break;
case isPlagued:
if (u->isPlagued()) return true;
break;
case isRepairing:
if (u->isRepairing()) return true;
break;
case isResearching:
if (u->isResearching()) return true;
break;
case isSelected:
if (u->isSelected()) return true;
break;
case isSieged:
if (u->isSieged()) return true;
break;
case isStartingAttack:
if (u->isStartingAttack()) return true;
break;
case isStasised:
if (u->isStasised()) return true;
break;
case isStimmed:
if (u->isStimmed()) return true;
break;
case isStuck:
if (u->isStuck()) return true;
break;
case isTraining:
if (u->isTraining()) return true;
break;
case isUnderStorm:
if (u->isUnderStorm()) return true;
break;
case isUnpowered:
if (u->isUnpowered()) return true;
break;
case isUpgrading:
if (u->isUpgrading()) return true;
break;
case isVisible:
if (u->isVisible()) return true;
break;
case canProduce:
if (u->getType().canProduce()) return true;
break;
case canAttack:
if (u->getType().canAttack()) return true;
break;
case canMove:
if (u->getType().canMove()) return true;
break;
case isFlyer:
if (u->getType().isFlyer()) return true;
break;
case regeneratesHP:
if (u->getType().regeneratesHP()) return true;
break;
case isSpellcaster:
if (u->getType().isSpellcaster()) return true;
break;
case hasPermanentCloak:
if (u->getType().hasPermanentCloak()) return true;
break;
case isInvincible:
if (u->getType().isInvincible()) return true;
break;
case isOrganic:
if (u->getType().isOrganic()) return true;
break;
case isMechanical:
if (u->getType().isMechanical()) return true;
break;
case isRobotic:
if (u->getType().isRobotic()) return true;
break;
case isDetector:
if (u->getType().isDetector()) return true;
break;
case isResourceContainer:
if (u->getType().isResourceContainer()) return true;
break;
case isResourceDepot:
if (u->getType().isResourceDepot()) return true;
break;
case isRefinery:
if (u->getType().isRefinery()) return true;
break;
case isWorker:
if (u->getType().isWorker()) return true;
break;
case requiresPsi:
if (u->getType().requiresPsi()) return true;
break;
case requiresCreep:
if (u->getType().requiresCreep()) return true;
break;
case isTwoUnitsInOneEgg:
if (u->getType().isTwoUnitsInOneEgg()) return true;
break;
case isBurrowable:
if (u->getType().isBurrowable()) return true;
break;
case isCloakable:
if (u->getType().isCloakable()) return true;
break;
case isBuilding:
if (u->getType().isBuilding()) return true;
break;
case isAddon:
if (u->getType().isAddon()) return true;
break;
case isFlyingBuilding:
if (u->getType().isFlyingBuilding()) return true;
break;
case isNeutral:
if (u->getType().isNeutral()) return true;
break;
case isHero:
if (u->getType().isHero()) return true;
break;
case isPowerup:
if (u->getType().isPowerup()) return true;
break;
case isBeacon:
if (u->getType().isBeacon()) return true;
break;
case isFlagBeacon:
if (u->getType().isFlagBeacon()) return true;
break;
case isSpecialBuilding:
if (u->getType().isSpecialBuilding()) return true;
break;
case isSpell:
if (u->getType().isSpell()) return true;
break;
case Firebat:
if(u->getType()==UnitTypes::Terran_Firebat) return true;
break;
case Ghost:
if(u->getType()==UnitTypes::Terran_Ghost) return true;
break;
case Goliath:
if(u->getType()==UnitTypes::Terran_Goliath) return true;
break;
case Marine:
if(u->getType()==UnitTypes::Terran_Marine) return true;
break;
case Medic:
if(u->getType()==UnitTypes::Terran_Medic) return true;
break;
case SCV:
if(u->getType()==UnitTypes::Terran_SCV) return true;
break;
case Siege_Tank:
if(u->getType()==UnitTypes::Terran_Siege_Tank_Tank_Mode) return true;
if(u->getType()==UnitTypes::Terran_Siege_Tank_Siege_Mode) return true;
break;
case Vulture:
if(u->getType()==UnitTypes::Terran_Vulture) return true;
break;
case Vulture_Spider_Mine:
if(u->getType()==UnitTypes::Terran_Vulture_Spider_Mine) return true;
break;
case Battlecruiser:
if(u->getType()==UnitTypes::Terran_Battlecruiser) return true;
break;
case Dropship:
if(u->getType()==UnitTypes::Terran_Dropship) return true;
break;
case Nuclear_Missile:
if(u->getType()==UnitTypes::Terran_Nuclear_Missile) return true;
break;
case Science_Vessel:
if(u->getType()==UnitTypes::Terran_Science_Vessel) return true;
break;
case Valkyrie:
if(u->getType()==UnitTypes::Terran_Valkyrie) return true;
break;
case Wraith:
if(u->getType()==UnitTypes::Terran_Wraith) return true;
break;
case Alan_Schezar:
if(u->getType()==UnitTypes::Hero_Alan_Schezar) return true;
break;
case Alexei_Stukov:
if(u->getType()==UnitTypes::Hero_Alexei_Stukov) return true;
break;
case Arcturus_Mengsk:
if(u->getType()==UnitTypes::Hero_Arcturus_Mengsk) return true;
break;
case Edmund_Duke:
if(u->getType()==UnitTypes::Hero_Edmund_Duke_Siege_Mode) return true;
if(u->getType()==UnitTypes::Hero_Edmund_Duke_Tank_Mode) return true;
break;
case Gerard_DuGalle:
if(u->getType()==UnitTypes::Hero_Gerard_DuGalle) return true;
break;
case Gui_Montag:
if(u->getType()==UnitTypes::Hero_Gui_Montag) return true;
break;
case Hyperion:
if(u->getType()==UnitTypes::Hero_Hyperion) return true;
break;
case Jim_Raynor_Marine:
if(u->getType()==UnitTypes::Hero_Jim_Raynor_Marine) return true;
break;
case Jim_Raynor_Vulture:
if(u->getType()==UnitTypes::Hero_Jim_Raynor_Vulture) return true;
break;
case Magellan:
if(u->getType()==UnitTypes::Hero_Magellan) return true;
break;
case Norad_II:
if(u->getType()==UnitTypes::Hero_Norad_II) return true;
break;
case Samir_Duran:
if(u->getType()==UnitTypes::Hero_Samir_Duran) return true;
break;
case Sarah_Kerrigan:
if(u->getType()==UnitTypes::Hero_Sarah_Kerrigan) return true;
break;
case Tom_Kazansky:
if(u->getType()==UnitTypes::Hero_Tom_Kazansky) return true;
break;
case Civilian:
if(u->getType()==UnitTypes::Terran_Civilian) return true;
break;
case Academy:
if(u->getType()==UnitTypes::Terran_Academy) return true;
break;
case Armory:
if(u->getType()==UnitTypes::Terran_Armory) return true;
break;
case Barracks:
if(u->getType()==UnitTypes::Terran_Barracks) return true;
break;
case Bunker:
if(u->getType()==UnitTypes::Terran_Bunker) return true;
break;
case Command_Center:
if(u->getType()==UnitTypes::Terran_Command_Center) return true;
break;
case Engineering_Bay:
if(u->getType()==UnitTypes::Terran_Engineering_Bay) return true;
break;
case Factory:
if(u->getType()==UnitTypes::Terran_Factory) return true;
break;
case Missile_Turret:
if(u->getType()==UnitTypes::Terran_Missile_Turret) return true;
break;
case Refinery:
if(u->getType()==UnitTypes::Terran_Refinery) return true;
break;
case Science_Facility:
if(u->getType()==UnitTypes::Terran_Science_Facility) return true;
break;
case Starport:
if(u->getType()==UnitTypes::Terran_Starport) return true;
break;
case Supply_Depot:
if(u->getType()==UnitTypes::Terran_Supply_Depot) return true;
break;
case Comsat_Station:
if(u->getType()==UnitTypes::Terran_Comsat_Station) return true;
break;
case Control_Tower:
if(u->getType()==UnitTypes::Terran_Control_Tower) return true;
break;
case Covert_Ops:
if(u->getType()==UnitTypes::Terran_Covert_Ops) return true;
break;
case Machine_Shop:
if(u->getType()==UnitTypes::Terran_Machine_Shop) return true;
break;
case Nuclear_Silo:
if(u->getType()==UnitTypes::Terran_Nuclear_Silo) return true;
break;
case Physics_Lab:
if(u->getType()==UnitTypes::Terran_Physics_Lab) return true;
break;
case Crashed_Norad_II:
if(u->getType()==UnitTypes::Special_Crashed_Norad_II) return true;
break;
case Ion_Cannon:
if(u->getType()==UnitTypes::Special_Ion_Cannon) return true;
break;
case Power_Generator:
if(u->getType()==UnitTypes::Special_Power_Generator) return true;
break;
case Psi_Disrupter:
if(u->getType()==UnitTypes::Special_Psi_Disrupter) return true;
break;
case Archon:
if(u->getType()==UnitTypes::Protoss_Archon) return true;
break;
case Dark_Archon:
if(u->getType()==UnitTypes::Protoss_Dark_Archon) return true;
break;
case Dark_Templar:
if(u->getType()==UnitTypes::Protoss_Dark_Templar) return true;
break;
case Dragoon:
if(u->getType()==UnitTypes::Protoss_Dragoon) return true;
break;
case High_Templar:
if(u->getType()==UnitTypes::Protoss_High_Templar) return true;
break;
case Probe:
if(u->getType()==UnitTypes::Protoss_Probe) return true;
break;
case Reaver:
if(u->getType()==UnitTypes::Protoss_Reaver) return true;
break;
case Scarab:
if(u->getType()==UnitTypes::Protoss_Scarab) return true;
break;
case Zealot:
if(u->getType()==UnitTypes::Protoss_Zealot) return true;
break;
case Arbiter:
if(u->getType()==UnitTypes::Protoss_Arbiter) return true;
break;
case Carrier:
if(u->getType()==UnitTypes::Protoss_Carrier) return true;
break;
case Corsair:
if(u->getType()==UnitTypes::Protoss_Corsair) return true;
break;
case Interceptor:
if(u->getType()==UnitTypes::Protoss_Interceptor) return true;
break;
case Observer:
if(u->getType()==UnitTypes::Protoss_Observer) return true;
break;
case Scout:
if(u->getType()==UnitTypes::Protoss_Scout) return true;
break;
case Shuttle:
if(u->getType()==UnitTypes::Protoss_Shuttle) return true;
break;
case Aldaris:
if(u->getType()==UnitTypes::Hero_Aldaris) return true;
break;
case Artanis:
if(u->getType()==UnitTypes::Hero_Artanis) return true;
break;
case Danimoth:
if(u->getType()==UnitTypes::Hero_Danimoth) return true;
break;
case Hero_Dark_Templar:
if(u->getType()==UnitTypes::Hero_Dark_Templar) return true;
break;
case Fenix_Dragoon:
if(u->getType()==UnitTypes::Hero_Fenix_Dragoon) return true;
break;
case Fenix_Zealot:
if(u->getType()==UnitTypes::Hero_Fenix_Zealot) return true;
break;
case Gantrithor:
if(u->getType()==UnitTypes::Hero_Gantrithor) return true;
break;
case Mojo:
if(u->getType()==UnitTypes::Hero_Mojo) return true;
break;
case Raszagal:
if(u->getType()==UnitTypes::Hero_Raszagal) return true;
break;
case Tassadar:
if(u->getType()==UnitTypes::Hero_Tassadar) return true;
break;
case Tassadar_Zeratul_Archon:
if(u->getType()==UnitTypes::Hero_Tassadar_Zeratul_Archon) return true;
break;
case Warbringer:
if(u->getType()==UnitTypes::Hero_Warbringer) return true;
break;
case Zeratul:
if(u->getType()==UnitTypes::Hero_Zeratul) return true;
break;
case Arbiter_Tribunal:
if(u->getType()==UnitTypes::Protoss_Arbiter_Tribunal) return true;
break;
case Assimilator:
if(u->getType()==UnitTypes::Protoss_Assimilator) return true;
break;
case Citadel_of_Adun:
if(u->getType()==UnitTypes::Protoss_Citadel_of_Adun) return true;
break;
case Cybernetics_Core:
if(u->getType()==UnitTypes::Protoss_Cybernetics_Core) return true;
break;
case Fleet_Beacon:
if(u->getType()==UnitTypes::Protoss_Fleet_Beacon) return true;
break;
case Forge:
if(u->getType()==UnitTypes::Protoss_Forge) return true;
break;
case Gateway:
if(u->getType()==UnitTypes::Protoss_Gateway) return true;
break;
case Nexus:
if(u->getType()==UnitTypes::Protoss_Nexus) return true;
break;
case Observatory:
if(u->getType()==UnitTypes::Protoss_Observatory) return true;
break;
case Photon_Cannon:
if(u->getType()==UnitTypes::Protoss_Photon_Cannon) return true;
break;
case Pylon:
if(u->getType()==UnitTypes::Protoss_Pylon) return true;
break;
case Robotics_Facility:
if(u->getType()==UnitTypes::Protoss_Robotics_Facility) return true;
break;
case Robotics_Support_Bay:
if(u->getType()==UnitTypes::Protoss_Robotics_Support_Bay) return true;
break;
case Shield_Battery:
if(u->getType()==UnitTypes::Protoss_Shield_Battery) return true;
break;
case Stargate:
if(u->getType()==UnitTypes::Protoss_Stargate) return true;
break;
case Templar_Archives:
if(u->getType()==UnitTypes::Protoss_Templar_Archives) return true;
break;
case Khaydarin_Crystal_Form:
if(u->getType()==UnitTypes::Special_Khaydarin_Crystal_Form) return true;
break;
case Protoss_Temple:
if(u->getType()==UnitTypes::Special_Protoss_Temple) return true;
break;
case Stasis_Cell_Prison:
if(u->getType()==UnitTypes::Special_Stasis_Cell_Prison) return true;
break;
case Warp_Gate:
if(u->getType()==UnitTypes::Special_Warp_Gate) return true;
break;
case XelNaga_Temple:
if(u->getType()==UnitTypes::Special_XelNaga_Temple) return true;
break;
case Broodling:
if(u->getType()==UnitTypes::Zerg_Broodling) return true;
break;
case Defiler:
if(u->getType()==UnitTypes::Zerg_Defiler) return true;
break;
case Drone:
if(u->getType()==UnitTypes::Zerg_Drone) return true;
break;
case Egg:
if(u->getType()==UnitTypes::Zerg_Egg) return true;
break;
case Hydralisk:
if(u->getType()==UnitTypes::Zerg_Hydralisk) return true;
break;
case Infested_Terran:
if(u->getType()==UnitTypes::Zerg_Infested_Terran) return true;
break;
case Larva:
if(u->getType()==UnitTypes::Zerg_Larva) return true;
break;
case Lurker:
if(u->getType()==UnitTypes::Zerg_Lurker) return true;
break;
case Lurker_Egg:
if(u->getType()==UnitTypes::Zerg_Lurker_Egg) return true;
break;
case Ultralisk:
if(u->getType()==UnitTypes::Zerg_Ultralisk) return true;
break;
case Zergling:
if(u->getType()==UnitTypes::Zerg_Zergling) return true;
break;
case Cocoon:
if(u->getType()==UnitTypes::Zerg_Cocoon) return true;
break;
case Devourer:
if(u->getType()==UnitTypes::Zerg_Devourer) return true;
break;
case Guardian:
if(u->getType()==UnitTypes::Zerg_Guardian) return true;
break;
case Mutalisk:
if(u->getType()==UnitTypes::Zerg_Mutalisk) return true;
break;
case Overlord:
if(u->getType()==UnitTypes::Zerg_Overlord) return true;
break;
case Queen:
if(u->getType()==UnitTypes::Zerg_Queen) return true;
break;
case Scourge:
if(u->getType()==UnitTypes::Zerg_Scourge) return true;
break;
case Devouring_One:
if(u->getType()==UnitTypes::Hero_Devouring_One) return true;
break;
case Hunter_Killer:
if(u->getType()==UnitTypes::Hero_Hunter_Killer) return true;
break;
case Infested_Duran:
if(u->getType()==UnitTypes::Hero_Infested_Duran) return true;
break;
case Infested_Kerrigan:
if(u->getType()==UnitTypes::Hero_Infested_Kerrigan) return true;
break;
case Kukulza_Guardian:
if(u->getType()==UnitTypes::Hero_Kukulza_Guardian) return true;
break;
case Kukulza_Mutalisk:
if(u->getType()==UnitTypes::Hero_Kukulza_Mutalisk) return true;
break;
case Matriarch:
if(u->getType()==UnitTypes::Hero_Matriarch) return true;
break;
case Torrasque:
if(u->getType()==UnitTypes::Hero_Torrasque) return true;
break;
case Unclean_One:
if(u->getType()==UnitTypes::Hero_Unclean_One) return true;
break;
case Yggdrasill:
if(u->getType()==UnitTypes::Hero_Yggdrasill) return true;
break;
case Creep_Colony:
if(u->getType()==UnitTypes::Zerg_Creep_Colony) return true;
break;
case Defiler_Mound:
if(u->getType()==UnitTypes::Zerg_Defiler_Mound) return true;
break;
case Evolution_Chamber:
if(u->getType()==UnitTypes::Zerg_Evolution_Chamber) return true;
break;
case Extractor:
if(u->getType()==UnitTypes::Zerg_Extractor) return true;
break;
case Greater_Spire:
if(u->getType()==UnitTypes::Zerg_Greater_Spire) return true;
break;
case Hatchery:
if(u->getType()==UnitTypes::Zerg_Hatchery) return true;
break;
case Hive:
if(u->getType()==UnitTypes::Zerg_Hive) return true;
break;
case Hydralisk_Den:
if(u->getType()==UnitTypes::Zerg_Hydralisk_Den) return true;
break;
case Infested_Command_Center:
if(u->getType()==UnitTypes::Zerg_Infested_Command_Center) return true;
break;
case Lair:
if(u->getType()==UnitTypes::Zerg_Lair) return true;
break;
case Nydus_Canal:
if(u->getType()==UnitTypes::Zerg_Nydus_Canal) return true;
break;
case Queens_Nest:
if(u->getType()==UnitTypes::Zerg_Queens_Nest) return true;
break;
case Spawning_Pool:
if(u->getType()==UnitTypes::Zerg_Spawning_Pool) return true;
break;
case Spire:
if(u->getType()==UnitTypes::Zerg_Spire) return true;
break;
case Spore_Colony:
if(u->getType()==UnitTypes::Zerg_Spore_Colony) return true;
break;
case Sunken_Colony:
if(u->getType()==UnitTypes::Zerg_Sunken_Colony) return true;
break;
case Ultralisk_Cavern:
if(u->getType()==UnitTypes::Zerg_Ultralisk_Cavern) return true;
break;
case Cerebrate:
if(u->getType()==UnitTypes::Special_Cerebrate) return true;
break;
case Cerebrate_Daggoth:
if(u->getType()==UnitTypes::Special_Cerebrate_Daggoth) return true;
break;
case Mature_Chrysalis:
if(u->getType()==UnitTypes::Special_Mature_Chrysalis) return true;
break;
case Overmind:
if(u->getType()==UnitTypes::Special_Overmind) return true;
break;
case Overmind_Cocoon:
if(u->getType()==UnitTypes::Special_Overmind_Cocoon) return true;
break;
case Overmind_With_Shell:
if(u->getType()==UnitTypes::Special_Overmind_With_Shell) return true;
break;
case Bengalaas:
if(u->getType()==UnitTypes::Critter_Bengalaas) return true;
break;
case Kakaru:
if(u->getType()==UnitTypes::Critter_Kakaru) return true;
break;
case Ragnasaur:
if(u->getType()==UnitTypes::Critter_Ragnasaur) return true;
break;
case Rhynadon:
if(u->getType()==UnitTypes::Critter_Rhynadon) return true;
break;
case Scantid:
if(u->getType()==UnitTypes::Critter_Scantid) return true;
break;
case Ursadon:
if(u->getType()==UnitTypes::Critter_Ursadon) return true;
break;
case Mineral_Field:
if(u->getType()==UnitTypes::Resource_Mineral_Field) return true;
break;
case Vespene_Geyser:
if(u->getType()==UnitTypes::Resource_Vespene_Geyser) return true;
break;
case Dark_Swarm:
if(u->getType()==UnitTypes::Spell_Dark_Swarm) return true;
break;
case Disruption_Web:
if(u->getType()==UnitTypes::Spell_Disruption_Web) return true;
break;
case Scanner_Sweep:
if(u->getType()==UnitTypes::Spell_Scanner_Sweep) return true;
break;
case Protoss_Beacon:
if(u->getType()==UnitTypes::Special_Protoss_Beacon) return true;
break;
case Protoss_Flag_Beacon:
if(u->getType()==UnitTypes::Special_Protoss_Flag_Beacon) return true;
break;
case Terran_Beacon:
if(u->getType()==UnitTypes::Special_Terran_Beacon) return true;
break;
case Terran_Flag_Beacon:
if(u->getType()==UnitTypes::Special_Terran_Flag_Beacon) return true;
break;
case Zerg_Beacon:
if(u->getType()==UnitTypes::Special_Zerg_Beacon) return true;
break;
case Zerg_Flag_Beacon:
if(u->getType()==UnitTypes::Special_Zerg_Flag_Beacon) return true;
break;
case Powerup_Data_Disk:
if(u->getType()==UnitTypes::Powerup_Data_Disk) return true;
break;
case Powerup_Flag:
if(u->getType()==UnitTypes::Powerup_Flag) return true;
break;
case Powerup_Khalis_Crystal:
if(u->getType()==UnitTypes::Powerup_Khalis_Crystal) return true;
break;
case Powerup_Khaydarin_Crystal:
if(u->getType()==UnitTypes::Powerup_Khaydarin_Crystal) return true;
break;
case Powerup_Psi_Emitter:
if(u->getType()==UnitTypes::Powerup_Psi_Emitter) return true;
break;
case Powerup_Uraj_Crystal:
if(u->getType()==UnitTypes::Powerup_Uraj_Crystal) return true;
break;
case Powerup_Young_Chrysalis:
if(u->getType()==UnitTypes::Powerup_Young_Chrysalis) return true;
break;
case None:
if(u->getType()==UnitTypes::None) return true;
break;
case Unknown_Unit:
if(u->getType()==UnitTypes::Unknown) return true;
break;
}
return false;
}
<|fim▁hole|>{
switch(a)
{
case HitPoints:
return u->getHitPoints();
break;
case InitialHitPoints:
return u->getInitialHitPoints();
break;
case Shields:
return u->getShields();
break;
case Energy:
return u->getEnergy();
break;
case Resources:
return u->getResources();
break;
case InitialResources:
return u->getInitialResources();
break;
case KillCount:
return u->getKillCount();
break;
case GroundWeaponCooldown:
return u->getGroundWeaponCooldown();
break;
case AirWeaponCooldown:
return u->getAirWeaponCooldown();
break;
case SpellCooldown:
return u->getSpellCooldown();
break;
case DefenseMatrixPoints:
return u->getDefenseMatrixPoints();
break;
case DefenseMatrixTimer:
return u->getDefenseMatrixTimer();
break;
case EnsnareTimer:
return u->getEnsnareTimer();
break;
case IrradiateTimer:
return u->getIrradiateTimer();
break;
case LockdownTimer:
return u->getLockdownTimer();
break;
case MaelstromTimer:
return u->getMaelstromTimer();
break;
case PlagueTimer:
return u->getPlagueTimer();
break;
case RemoveTimer:
return u->getRemoveTimer();
break;
case StasisTimer:
return u->getStasisTimer();
break;
case StimTimer:
return u->getStimTimer();
break;
case PositionX:
return u->getPosition().x();
break;
case PositionY:
return u->getPosition().y();
break;
case InitialPositionX:
return u->getInitialPosition().x();
break;
case InitialPositionY:
return u->getInitialPosition().y();
break;
case TilePositionX:
return u->getTilePosition().x();
break;
case TilePositionY:
return u->getTilePosition().y();
break;
case InitialTilePositionX:
return u->getInitialTilePosition().x();
break;
case InitialTilePositionY:
return u->getInitialTilePosition().y();
break;
case Angle:
return u->getAngle();
break;
case VelocityX:
return u->getVelocityX();
break;
case VelocityY:
return u->getVelocityY();
break;
case TargetPositionX:
return u->getTargetPosition().x();
break;
case TargetPositionY:
return u->getTargetPosition().y();
break;
case OrderTimer:
return u->getOrderTimer();
break;
case RemainingBuildTime:
return u->getRemainingBuildTime();
break;
case RemainingTrainTime:
return u->getRemainingTrainTime();
break;
case TrainingQueueCount:
return u->getTrainingQueue().size();
break;
case LoadedUnitsCount:
return u->getLoadedUnits().size();
break;
case InterceptorCount:
return u->getInterceptorCount();
break;
case ScarabCount:
return u->getScarabCount();
break;
case SpiderMineCount:
return u->getSpiderMineCount();
break;
case RemainingResearchTime:
return u->getRemainingResearchTime();
break;
case RemainingUpgradeTime:
return u->getRemainingUpgradeTime();
break;
case RallyPositionX:
return u->getRallyPosition().x();
break;
case RallyPositionY:
return u->getRallyPosition().y();
break;
}
return 0;
}
Unit* getUnit(Unit* u,FilterAttributeUnit a)
{
switch(a)
{
case GetTarget:
return u->getTarget();
break;
case GetOrderTarget:
return u->getOrderTarget();
break;
case GetBuildUnit:
return u->getBuildUnit();
break;
case GetTransport:
return u->getTransport();
break;
case GetRallyUnit:
return u->getRallyUnit();
break;
case GetAddon:
return u->getAddon();
break;
}
return u;
}
UnitGroup UnitGroup::operator+(const UnitGroup& other) const
{
UnitGroup result=*this;
result+=other;
return result;
}
UnitGroup UnitGroup::operator*(const UnitGroup& other) const
{
UnitGroup result=*this;
result*=other;
return result;
}
UnitGroup UnitGroup::operator^(const UnitGroup& other) const
{
UnitGroup result=*this;
result^=other;
return result;
}
UnitGroup UnitGroup::operator-(const UnitGroup& other) const
{
UnitGroup result=*this;
result-=other;
return result;
}
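// Note on the flag-filter overloads below: a single call with several flags
// keeps units that match ANY of the given flags (logical OR), while chaining
// separate calls, e.g. group(f1)(f2), intersects the filters (logical AND).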
UnitGroup UnitGroup::operator()(int f1) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (passesFlag(*i,f1))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(int f1, int f2) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (passesFlag(*i,f1) || passesFlag(*i,f2))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(int f1, int f2, int f3) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (passesFlag(*i,f1) || passesFlag(*i,f2) || passesFlag(*i,f3))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(int f1, int f2, int f3, int f4) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (passesFlag(*i,f1) || passesFlag(*i,f2) || passesFlag(*i,f3) || passesFlag(*i,f4))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(int f1, int f2, int f3, int f4, int f5) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (passesFlag(*i,f1) || passesFlag(*i,f2) || passesFlag(*i,f3) || passesFlag(*i,f4) || passesFlag(*i,f5))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FliterAttributeScalar a, const char* compare, double value) const
{
UnitGroup result;
string cmp(compare);
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
double val=getAttribute(*i,a);
bool passes=false;
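			// The comparator string is treated as a union of elementary checks:
			// "<=" passes via the "==" branch below or the "<" branch, ">=" via
			// "==" or ">", and "!=" / "<>" via the "<" and ">" branches.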
if (cmp=="=" || cmp=="==" || cmp=="<=" || cmp==">=")
if (val==value)
passes=true;
if (cmp=="<" || cmp=="<=" || cmp=="!=" || cmp=="<>")
if (val<value)
passes=true;
if (cmp==">" || cmp==">=" || cmp=="!=" || cmp=="<>")
if (val>value)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FliterAttributeScalar a, const char* compare, int value) const
{
UnitGroup result;
string cmp(compare);
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
int val=(int)getAttribute(*i,a);
bool passes=false;
if (cmp=="=" || cmp=="==" || cmp=="<=" || cmp==">=")
if (val==value)
passes=true;
if (cmp=="<" || cmp=="<=" || cmp=="!=" || cmp=="<>")
if (val<value)
passes=true;
if (cmp==">" || cmp==">=" || cmp=="!=" || cmp=="<>")
if (val>value)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(BWAPI::Player* player) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if ((*i)->getPlayer()==player)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributeUnit a, BWAPI::Unit* unit) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
Unit* target=getUnit(*i,a);
if (target==unit)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributeType a, BWAPI::UnitType type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetType)
if ((*i)->getType()==type)
passes=true;
if (a==GetInitialType)
if ((*i)->getInitialType()==type)
passes=true;
if (a==GetBuildType)
if ((*i)->getBuildType()==type)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributeType a, BWAPI::TechType type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetTech)
if ((*i)->getTech()==type)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributeOrder a, BWAPI::Order type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetOrder)
if ((*i)->getOrder()==type)
passes=true;
if (a==GetSecondaryOrder)
if ((*i)->getSecondaryOrder()==type)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributeType a, BWAPI::UpgradeType type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetUpgrade)
if ((*i)->getUpgrade()==type)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributePosition a, BWAPI::Position position) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetPosition)
if ((*i)->getPosition()==position)
passes=true;
if (a==GetInitialPosition)
if ((*i)->getInitialPosition()==position)
passes=true;
if (a==GetTargetPosition)
if ((*i)->getTargetPosition()==position)
passes=true;
if (a==GetRallyPosition)
if ((*i)->getRallyPosition()==position)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::operator()(FilterAttributeTilePosition a, BWAPI::TilePosition position) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetPosition)
if ((*i)->getTilePosition()==position)
passes=true;
if (a==GetInitialPosition)
if ((*i)->getInitialTilePosition()==position)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(int f1) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (!(passesFlag(*i,f1)))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(int f1, int f2) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (!(passesFlag(*i,f1) || passesFlag(*i,f2)))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(int f1, int f2, int f3) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (!(passesFlag(*i,f1) || passesFlag(*i,f2) || passesFlag(*i,f3)))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(int f1, int f2, int f3, int f4) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (!(passesFlag(*i,f1) || passesFlag(*i,f2) || passesFlag(*i,f3) || passesFlag(*i,f4)))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(int f1, int f2, int f3, int f4, int f5) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (!(passesFlag(*i,f1) || passesFlag(*i,f2) || passesFlag(*i,f3) || passesFlag(*i,f4) || passesFlag(*i,f5)))
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FliterAttributeScalar a, const char* compare, double value) const
{
UnitGroup result;
string cmp(compare);
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
double val=getAttribute(*i,a);
bool passes=false;
if (cmp=="=" || cmp=="==" || cmp=="<=" || cmp==">=")
if (val==value)
passes=true;
if (cmp=="<" || cmp=="<=" || cmp=="!=" || cmp=="<>")
if (val<value)
passes=true;
if (cmp==">" || cmp==">=" || cmp=="!=" || cmp=="<>")
if (val>value)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FliterAttributeScalar a, const char* compare, int value) const
{
UnitGroup result;
string cmp(compare);
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
int val=(int)getAttribute(*i,a);
bool passes=false;
if (cmp=="=" || cmp=="==" || cmp=="<=" || cmp==">=")
if (val==value)
passes=true;
if (cmp=="<" || cmp=="<=" || cmp=="!=" || cmp=="<>")
if (val<value)
passes=true;
if (cmp==">" || cmp==">=" || cmp=="!=" || cmp=="<>")
if (val>value)
passes=true;
if (passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(BWAPI::Player* player) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if ((*i)->getPlayer()!=player)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributeUnit a, BWAPI::Unit* unit) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
Unit* target=getUnit(*i,a);
if (target!=unit)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributeType a, BWAPI::UnitType type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetType)
if ((*i)->getType()==type)
passes=true;
if (a==GetInitialType)
if ((*i)->getInitialType()==type)
passes=true;
if (a==GetBuildType)
if ((*i)->getBuildType()==type)
passes=true;
if (!passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributeType a, BWAPI::TechType type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetTech)
if ((*i)->getTech()==type)
passes=true;
if (!passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributeOrder a, BWAPI::Order type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetOrder)
if ((*i)->getOrder()==type)
passes=true;
if (a==GetSecondaryOrder)
if ((*i)->getSecondaryOrder()==type)
passes=true;
if (!passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributeType a, BWAPI::UpgradeType type) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetUpgrade)
if ((*i)->getUpgrade()==type)
passes=true;
if (!passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributePosition a, BWAPI::Position position) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetPosition)
if ((*i)->getPosition()==position)
passes=true;
if (a==GetInitialPosition)
if ((*i)->getInitialPosition()==position)
passes=true;
if (a==GetTargetPosition)
if ((*i)->getTargetPosition()==position)
passes=true;
if (a==GetRallyPosition)
if ((*i)->getRallyPosition()==position)
passes=true;
if (!passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::not(FilterAttributeTilePosition a, BWAPI::TilePosition position) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
bool passes=false;
if (a==GetPosition)
if ((*i)->getTilePosition()==position)
passes=true;
if (a==GetInitialPosition)
if ((*i)->getInitialTilePosition()==position)
passes=true;
if (!passes)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::inRadius(double radius,BWAPI::Position position) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if ((*i)->getDistance(position)<=radius)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::inRegion(BWTA::Region* region) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (BWTA::getRegion((*i)->getTilePosition())==region)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::onlyNearestChokepoint(BWTA::Chokepoint* choke) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (BWTA::getNearestChokepoint((*i)->getTilePosition())==choke)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::onlyNearestBaseLocation(BWTA::BaseLocation* location) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (BWTA::getNearestBaseLocation((*i)->getTilePosition())==location)
result.insert(*i);
}
return result;
}
UnitGroup UnitGroup::onlyNearestUnwalkablePolygon(BWTA::Polygon* polygon) const
{
UnitGroup result;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
if (BWTA::getNearestUnwalkablePolygon((*i)->getTilePosition())==polygon)
result.insert(*i);
}
return result;
}
UnitGroup& UnitGroup::operator+=(const UnitGroup& other)
{
for(set<Unit*>::const_iterator i=other.begin();i!=other.end();i++)
this->insert(*i);
return *this;
}
UnitGroup& UnitGroup::operator*=(const UnitGroup& other)
{
set<Unit*>::iterator i2;
for(set<Unit*>::iterator i=this->begin();i!=this->end();i=i2)
{
i2=i;
i2++;
if (!other.contains(*i))
this->erase(*i);
}
return *this;
}
UnitGroup& UnitGroup::operator^=(const UnitGroup& other)
{
UnitGroup result=*this;
for(set<Unit*>::const_iterator i=other.begin();i!=other.end();i++)
{
if (this->contains(*i))
this->erase(*i);
else
this->insert(*i);
}
return *this;
}
UnitGroup& UnitGroup::operator-=(const UnitGroup& other)
{
for(set<Unit*>::const_iterator i=other.begin();i!=other.end();i++)
this->erase(*i);
return *this;
}
BWAPI::Unit* UnitGroup::getNearest(BWAPI::Position position) const
{
if (this->empty()) return NULL;
set<Unit*>::const_iterator i=this->begin();
Unit* result=*i;
double d=(*i)->getDistance(position);
i++;
for(;i!=this->end();i++)
{
double d2=(*i)->getDistance(position);
if (d2<d)
{
d=d2;
result=*i;
}
}
return result;
}
bool UnitGroup::contains(BWAPI::Unit* u) const
{
return this->find(u)!=this->end();
}
Position UnitGroup::getCenter() const
{
if (this->empty())
return Positions::None;
if (this->size()==1)
return ((*this->begin())->getPosition());
int count=0;
double x=0;
double y=0;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
Position p((*i)->getPosition());
if (p!=Positions::None && p!=Positions::Unknown)
{
count++;
x+=p.x();
y+=p.y();
}
}
if (count==0)
{
return Positions::None;
}
return Position((int)(x/count),(int)(y/count));
}
bool UnitGroup::attack(Position position) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->attack(position);
}
return retval;
}
bool UnitGroup::attackUnit(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->attack(target);
}
return retval;
}
bool UnitGroup::rightClick(Position position) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->rightClick(position);
}
return retval;
}
bool UnitGroup::rightClick(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->rightClick(target);
}
return retval;
}
bool UnitGroup::train(UnitType type) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->train(type);
}
return retval;
}
bool UnitGroup::build(TilePosition position, UnitType type) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->build(position,type);
}
return retval;
}
bool UnitGroup::buildAddon(UnitType type) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->buildAddon(type);
}
return retval;
}
bool UnitGroup::research(TechType tech) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->research(tech);
}
return retval;
}
bool UnitGroup::upgrade(UpgradeType upgrade) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->upgrade(upgrade);
}
return retval;
}
bool UnitGroup::stop() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->stop();
}
return retval;
}
bool UnitGroup::holdPosition() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->holdPosition();
}
return retval;
}
bool UnitGroup::patrol(Position position) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->patrol(position);
}
return retval;
}
bool UnitGroup::follow(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->follow(target);
}
return retval;
}
bool UnitGroup::setRallyPoint(Position target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->setRallyPoint(target);
}
return retval;
}
bool UnitGroup::setRallyPoint(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->setRallyPoint(target);
}
return retval;
}
bool UnitGroup::repair(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->repair(target);
}
return retval;
}
bool UnitGroup::morph(UnitType type) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->morph(type);
}
return retval;
}
bool UnitGroup::burrow() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->burrow();
}
return retval;
}
bool UnitGroup::unburrow() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->unburrow();
}
return retval;
}
bool UnitGroup::siege() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->siege();
}
return retval;
}
bool UnitGroup::unsiege() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->unsiege();
}
return retval;
}
bool UnitGroup::cloak() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cloak();
}
return retval;
}
bool UnitGroup::decloak() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->decloak();
}
return retval;
}
bool UnitGroup::lift() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->lift();
}
return retval;
}
bool UnitGroup::land(TilePosition position) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->land(position);
}
return retval;
}
bool UnitGroup::load(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->load(target);
}
return retval;
}
bool UnitGroup::unload(Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->unload(target);
}
return retval;
}
bool UnitGroup::unloadAll() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->unloadAll();
}
return retval;
}
bool UnitGroup::unloadAll(Position position) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->unloadAll(position);
}
return retval;
}
bool UnitGroup::cancelConstruction() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelConstruction();
}
return retval;
}
bool UnitGroup::haltConstruction() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->haltConstruction();
}
return retval;
}
bool UnitGroup::cancelMorph() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelMorph();
}
return retval;
}
bool UnitGroup::cancelTrain() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelTrain();
}
return retval;
}
bool UnitGroup::cancelTrain(int slot) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelTrain(slot);
}
return retval;
}
bool UnitGroup::cancelAddon() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelAddon();
}
return retval;
}
bool UnitGroup::cancelResearch() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelResearch();
}
return retval;
}
bool UnitGroup::cancelUpgrade() const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->cancelUpgrade();
}
return retval;
}
bool UnitGroup::useTech(TechType tech) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->useTech(tech);
}
return retval;
}
bool UnitGroup::useTech(TechType tech, Position position) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->useTech(tech,position);
}
return retval;
}
bool UnitGroup::useTech(TechType tech, Unit* target) const
{
bool retval=true;
for(set<Unit*>::const_iterator i=this->begin();i!=this->end();i++)
{
retval = retval && (*i)->useTech(tech,target);
}
return retval;
}<|fim▁end|> |
double getAttribute(Unit* u, FliterAttributeScalar a)
|
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for first_app project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/<|fim▁hole|>os.environ.setdefault("DJANGO_SETTINGS_MODULE", "first_app.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()<|fim▁end|> | """
import os |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># $Id: __init__.py 6141 2009-09-25 18:50:30Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
This is ``docutils.parsers.rst`` package. It exports a single class, `Parser`,
the reStructuredText parser.
Usage
=====
1. Create a parser::
parser = docutils.parsers.rst.Parser()
Several optional arguments may be passed to modify the parser's behavior.
Please see `Customizing the Parser`_ below for details.
2. Gather input (a multi-line string), by reading a file or the standard
input::
input = sys.stdin.read()
3. Create a new empty `docutils.nodes.document` tree::
document = docutils.utils.new_document(source, settings)
See `docutils.utils.new_document()` for parameter details.
4. Run the parser, populating the document tree::
parser.parse(input, document)
Parser Overview
===============
The reStructuredText parser is implemented as a state machine, examining its
input one line at a time. To understand how the parser works, please first
become familiar with the `docutils.statemachine` module, then see the
`states` module.
Customizing the Parser
----------------------
Anything that isn't already customizable is that way simply because that type
of customizability hasn't been implemented yet. Patches welcome!
When instantiating an object of the `Parser` class, two parameters may be
passed: ``rfc2822`` and ``inliner``. Pass ``rfc2822=1`` to enable an initial
RFC-2822 style header block, parsed as a "field_list" element (with "class"
attribute set to "rfc2822"). Currently this is the only body-level element
which is customizable without subclassing. (Tip: subclass `Parser` and change
its "state_classes" and "initial_state" attributes to refer to new classes.
Contact the author if you need more details.)
The ``inliner`` parameter takes an instance of `states.Inliner` or a subclass.
It handles inline markup recognition. A common extension is the addition of
further implicit hyperlinks, like "RFC 2822". This can be done by subclassing
`states.Inliner`, adding a new method for the implicit markup, and adding a
``(pattern, method)`` pair to the "implicit_dispatch" attribute of the
subclass. See `states.Inliner.implicit_inline()` for details. Explicit
inline markup can be customized in a `states.Inliner` subclass via the
``patterns.initial`` and ``dispatch`` attributes (and new methods as
appropriate).
"""
__docformat__ = 'reStructuredText'
import docutils.parsers
import docutils.statemachine
from docutils.parsers.rst import states
from docutils import frontend, nodes
class Parser(docutils.parsers.Parser):
"""The reStructuredText parser."""
supported = ('restructuredtext', 'rst', 'rest', 'restx', 'rtxt', 'rstx')
"""Aliases this parser supports."""
settings_spec = (
'reStructuredText Parser Options',
None,
(('Recognize and link to standalone PEP references (like "PEP 258").',
['--pep-references'],
{'action': 'store_true', 'validator': frontend.validate_boolean}),
('Base URL for PEP references '
'(default "http://www.python.org/dev/peps/").',
['--pep-base-url'],
{'metavar': '<URL>', 'default': 'http://www.python.org/dev/peps/',
'validator': frontend.validate_url_trailing_slash}),
('Template for PEP file part of URL. (default "pep-%04d")',
['--pep-file-url-template'],
{'metavar': '<URL>', 'default': 'pep-%04d'}),
('Recognize and link to standalone RFC references (like "RFC 822").',
['--rfc-references'],
{'action': 'store_true', 'validator': frontend.validate_boolean}),
('Base URL for RFC references (default "http://www.faqs.org/rfcs/").',
['--rfc-base-url'],
{'metavar': '<URL>', 'default': 'http://www.faqs.org/rfcs/',
'validator': frontend.validate_url_trailing_slash}),
('Set number of spaces for tab expansion (default 8).',
['--tab-width'],
{'metavar': '<width>', 'type': 'int', 'default': 8,
'validator': frontend.validate_nonnegative_int}),
('Remove spaces before footnote references.',
['--trim-footnote-reference-space'],
{'action': 'store_true', 'validator': frontend.validate_boolean}),
('Leave spaces before footnote references.',
['--leave-footnote-reference-space'],
{'action': 'store_false', 'dest': 'trim_footnote_reference_space'}),
('Disable directives that insert the contents of external file '
'("include" & "raw"); replaced with a "warning" system message.',
['--no-file-insertion'],
{'action': 'store_false', 'default': 1,
'dest': 'file_insertion_enabled',
'validator': frontend.validate_boolean}),
('Enable directives that insert the contents of external file '
'("include" & "raw"). Enabled by default.',
['--file-insertion-enabled'],
{'action': 'store_true'}),
('Disable the "raw" directives; replaced with a "warning" '
'system message.',
['--no-raw'],
{'action': 'store_false', 'default': 1, 'dest': 'raw_enabled',
'validator': frontend.validate_boolean}),
('Enable the "raw" directive. Enabled by default.',
['--raw-enabled'],
{'action': 'store_true'}),))
config_section = 'restructuredtext parser'
config_section_dependencies = ('parsers',)
def __init__(self, rfc2822=None, inliner=None):
if rfc2822:
self.initial_state = 'RFC2822Body'
else:
self.initial_state = 'Body'
self.state_classes = states.state_classes
self.inliner = inliner
def parse(self, inputstring, document):
"""Parse `inputstring` and populate `document`, a document tree."""
self.setup_parse(inputstring, document)
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,
debug=document.reporter.debug_flag)
inputlines = docutils.statemachine.string2lines(
inputstring, tab_width=document.settings.tab_width,
convert_whitespace=1)
self.statemachine.run(inputlines, document, inliner=self.inliner)
self.finish_parse()
class DirectiveError(Exception):
"""
Store a message and a system message level.
To be thrown from inside directive code.
Do not instantiate directly -- use `Directive.directive_error()`
instead!
"""
def __init__(self, level, message, source, line):
"""
Initialize with message `message`. `level` is a system message level.
"""
Exception.__init__(self)
self.level = level
self.msg = message
self.source = source
self.line = line
class Directive(object):
"""
Base class for reStructuredText directives.
The following attributes may be set by subclasses. They are
interpreted by the directive parser (which runs the directive
class):
- `required_arguments`: The number of required arguments (default:
0).
- `optional_arguments`: The number of optional arguments (default:
0).
- `final_argument_whitespace`: A boolean, indicating if the final
argument may contain whitespace (default: False).
- `option_spec`: A dictionary, mapping known option names to
conversion functions such as `int` or `float` (default: {}, no
options). Several conversion functions are defined in the
directives/__init__.py module.
Option conversion functions take a single parameter, the option
argument (a string or ``None``), validate it and/or convert it
to the appropriate form. Conversion functions may raise
`ValueError` and `TypeError` exceptions.
- `has_content`: A boolean; True if content is allowed. Client
code must handle the case where content is required but not
supplied (an empty content list will be supplied).
Arguments are normally single whitespace-separated words. The
final argument may contain whitespace and/or newlines if
`final_argument_whitespace` is True.
If the form of the arguments is more complex, specify only one
argument (either required or optional) and set
`final_argument_whitespace` to True; the client code must do any
context-sensitive parsing.
When a directive implementation is being run, the directive class
is instantiated, and the `run()` method is executed. During
instantiation, the following instance variables are set:
- ``name`` is the directive type or name (string).
- ``arguments`` is the list of positional arguments (strings).
- ``options`` is a dictionary mapping option names (strings) to
values (type depends on option conversion functions; see
`option_spec` above).
- ``content`` is a list of strings, the directive content line by line.
- ``lineno`` is the line number of the first line of the directive.
- ``content_offset`` is the line offset of the first line of the content from
the beginning of the current input. Used when initiating a nested parse.
- ``block_text`` is a string containing the entire directive.
- ``state`` is the state which called the directive function.
- ``state_machine`` is the state machine which controls the state which called
the directive function.
Directive functions return a list of nodes which will be inserted
into the document tree at the point where the directive was
encountered. This can be an empty list if there is nothing to
insert.
For ordinary directives, the list must contain body elements or
structural elements. Some directives are intended specifically
for substitution definitions, and must return a list of `Text`
nodes and/or inline elements (suitable for inline insertion, in
place of the substitution reference). Such directives must verify
substitution definition context, typically using code like this::
if not isinstance(state, states.SubstitutionDef):
error = state_machine.reporter.error(
'Invalid context: the "%s" directive can only be used '
'within a substitution definition.' % (name),
nodes.literal_block(block_text, block_text), line=lineno)
return [error]
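    As a further illustration, a minimal body-level directive might look like
    this (the class name, node type, and directive name chosen here are only
    an example, not something registered by this module)::

        from docutils import nodes
        from docutils.parsers.rst import Directive, directives

        class FancyNote(Directive):

            has_content = True

            def run(self):
                self.assert_has_content()
                node = nodes.note('\n'.join(self.content))
                self.state.nested_parse(self.content, self.content_offset,
                                        node)
                return [node]

        directives.register_directive('fancy-note', FancyNote)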
"""
# There is a "Creating reStructuredText Directives" how-to at
# <http://docutils.sf.net/docs/howto/rst-directives.html>. If you
# update this docstring, please update the how-to as well.
required_arguments = 0
"""Number of required directive arguments."""
optional_arguments = 0
"""Number of optional arguments after the required arguments."""
final_argument_whitespace = False
"""May the final argument contain whitespace?"""
option_spec = None
"""Mapping of option names to validator functions."""
has_content = False
"""May the directive have content?"""
def __init__(self, name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):<|fim▁hole|> self.name = name
self.arguments = arguments
self.options = options
self.content = content
self.lineno = lineno
self.content_offset = content_offset
self.block_text = block_text
self.state = state
self.state_machine = state_machine
def run(self):
        raise NotImplementedError('Must override run() in subclass.')
# Directive errors:
def directive_error(self, level, message):
"""
Return a DirectiveError suitable for being thrown as an exception.
Call "raise self.directive_error(level, message)" from within
a directive implementation to return one single system message
at level `level`, which automatically gets the directive block
and the line number added.
You'd often use self.error(message) instead, which will
generate an ERROR-level directive error.
"""
# source = self.state_machine.get_source(self.lineno - 1)
try:
(source, line) = self.state_machine.input_lines.info(self.lineno)
except IndexError:
source = self.state_machine.get_source(self.lineno - 1)
line = self.lineno
return DirectiveError(level, message, source, line)
def debug(self, message):
return self.directive_error(0, message)
def info(self, message):
return self.directive_error(1, message)
def warning(self, message):
return self.directive_error(2, message)
def error(self, message):
return self.directive_error(3, message)
def severe(self, message):
return self.directive_error(4, message)
# Convenience methods:
def assert_has_content(self):
"""
Throw an ERROR-level DirectiveError if the directive doesn't
have contents.
"""
if not self.content:
raise self.error('Content block expected for the "%s" directive; '
'none found.' % self.name)
def convert_directive_function(directive_fn):
"""
Define & return a directive class generated from `directive_fn`.
`directive_fn` uses the old-style, functional interface.
"""
class FunctionalDirective(Directive):
option_spec = getattr(directive_fn, 'options', None)
has_content = getattr(directive_fn, 'content', False)
_argument_spec = getattr(directive_fn, 'arguments', (0, 0, False))
required_arguments, optional_arguments, final_argument_whitespace \
= _argument_spec
def run(self):
return directive_fn(
self.name, self.arguments, self.options, self.content,
self.lineno, self.content_offset, self.block_text,
self.state, self.state_machine)
# Return new-style directive.
return FunctionalDirective<|fim▁end|> | |
<|file_name|>metadata.py<|end_file_name|><|fim▁begin|>from urllib2 import (
urlopen,
HTTPError,
URLError,
)
BASEURL = 'http://169.254.169.254/'
<|fim▁hole|>class MetadataError(Exception):
pass
def path(path=None, api_version=DEFAULT_API_VERSION, timeout=DEFAULT_TIMEOUT):
if not api_version:
api_version = 'latest'
md_path = api_version
if path:
md_path = md_path + "/" + path
try:
u = urlopen(BASEURL + md_path, timeout=timeout)
except HTTPError as e:
if e.code == 404:
raise MetadataError("Path not found: /%s" % path)
else:
raise MetadataError(e)
except URLError as e:
raise MetadataError(e)
if not path:
return "\n".join(map(lambda p: p.strip() + "/", u.readlines()))
return u.read()
class ShortNames(object):
'''Provide commonly-used metadata values by name'''
names = {
'az': '/meta-data/placement/availability-zone',
'instance-id': '/meta-data/instance-id',
}
def __init__(self, api_version=None, timeout=DEFAULT_TIMEOUT):
self.api_version = api_version
self.timeout = timeout
def list(self):
return self.names.keys()
def get(self, name):
if name not in self.names:
raise MetadataError('The shortname "{}" is not defined'.format(name))
return path(self.names[name], self.api_version, self.timeout)<|fim▁end|> | DEFAULT_TIMEOUT = 2
DEFAULT_API_VERSION = 'latest'
|
<|file_name|>celery.py<|end_file_name|><|fim▁begin|>""":mod:`cliche.celery` --- Celery_-backed task queue worker
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes web app should provide time-consuming features that cannot
immediately respond to user (and we define "immediately" as "shorter than
a second or two seconds" in here). Such things should be queued and then
processed by background workers. Celery_ does that in natural way.
We use this at serveral points like resampling images to make thumbnails,
or crawling ontology data from other services. Such tasks are definitely
cannot "immediately" respond.
.. seealso::
:ref:`faq-when-to-use` --- Celery FAQ
Answer to what kinds of benefits are there in Celery.
`Queue everything and delight everyone`__
This article describes why you should use a queue in a web application.
__ http://decafbad.com/blog/2008/07/04/queue-everything-and-delight-everyone
.. _Celery: http://celeryproject.org/
How to define tasks
-------------------
In order to defer some types of tasks, you have to make these functions
a task. It's not a big deal, just attach a decorator to them::
@celery.task(ignore_result=True)
def do_heavy_work(some, inputs):
'''Do something heavy work.'''
...
How to defer tasks
------------------
It's similar to ordinary function calls except it uses :meth:`delay()
<celery.app.task.Task.delay>` method (or :meth:`apply_async()
<celery.app.task.Task.apply_async>` method) instead of calling operator::
do_heavy_work.delay('some', inputs='...')
That command will be queued and sent to one of distributed workers.
That means these argument values are serialized using :mod:`json`.
If any argument value isn't serializable it will error.
Simple objects like numbers, strings, tuples, lists, dictionaries are
safe to serialize.
In the other hand, entity objects (that an instance of :class:`cliche.orm.Base`
and its subtypes) mostly fail to serialize, so use primary key values like
entity id instead of object itself.
What things are ready for task?
-------------------------------
Every deferred call of task share equivalent inital state:
- You can get a database session using :func:`get_session()`.
- You also can get a database engine using :func:`get_database_engine()`.
While there are several things not ready either:
- Flask's request context isn't ready for each task. You should explicitly
deal with it using :meth:`~flask.Flask.request_context()` method
to use context locals like :class:`flask.request`.
See also :ref:`request-context`.
- Physical computers would differ from web environment. Total memory,
CPU capacity, the number of processors, IP address, operating system,
Python VM (which of PyPy or CPython), and other many environments also
can vary. Assume nothing on these variables.
- Hence global states (e.g. module-level global variables) are completely
isolated from web environment which called the task. Don't depend on
such global states.
How to run Celery worker
------------------------
:program:`celery worker` (formerly :program:`celeryd`) takes Celery app object
as its endpoint, and Cliche's endpoint is :data:`cliche.celery.celery`.
You can omit the latter variable name and module name: :mod:`cliche`.
Execute the following command in the shell:
.. sourcecode:: console
$ celery worker -A cliche --config dev.cfg.yml
-------------- celery@localhost v3.1.13 (Cipater)
---- **** -----
--- * *** * -- Darwin-13.3.0-x86_64-i386-64bit
-- * - **** ---
- ** ---------- [config]
- ** ---------- .> app: cliche.celery:0x1... (cliche.celery.Loader)
- ** ---------- .> transport: redis://localhost:6379/5
- ** ---------- .> results: disabled
- *** --- * --- .> concurrency: 4 (prefork)
-- ******* ----
--- ***** ----- [queues]
-------------- .> celery exchange=celery(direct) key=celery
[2014-09-12 00:31:25,150: WARNING/MainProcess] celery@localhost ready.
Note that you should pass the same configuration file (``--config`` option)
to the WSGI application. It should contain ``DATABASE_URL`` and so on.
References
----------
"""
import os
import pathlib
from celery import Celery, current_app, current_task
from celery.loaders.base import BaseLoader
from celery.signals import celeryd_init, task_failure, task_postrun<|fim▁hole|>from raven.handlers.logging import SentryHandler
from sqlalchemy.engine import Engine, create_engine
from .config import ConfigDict, read_config
from .orm import Session, import_all_modules
__all__ = (
'Loader',
'get_database_engine',
'get_session',
'get_raven_client',
'app',
)
app = Celery(__name__, loader=__name__ + ':Loader')
class Loader(BaseLoader):
"""The loader used by Cliche app."""
def read_configuration(self):
config = ConfigDict()
config_path = os.environ.get(
'CELERY_CONFIG_MODULE',
os.environ.get('CLICHE_CONFIG')
)
if config_path is not None:
config = read_config(pathlib.Path(config_path))
config['CELERY_IMPORTS'] = import_all_modules()
config['CELERY_ACCEPT_CONTENT'] = ['pickle', 'json']
return config
def get_database_engine() -> Engine:
"""Get a database engine.
:returns: a database engine
:rtype: :class:`sqlalchemy.engine.base.Engine`
"""
config = current_app.conf
if 'DATABASE_ENGINE' not in config:
db_url = config['DATABASE_URL']
config['DATABASE_ENGINE'] = create_engine(db_url)
if 'BROKER_URL' not in config:
config['BROKER_URL'] = 'sqla+' + db_url
if 'CELERY_RESULT_BACKEND' not in config and \
'CELERY_RESULT_DBURI' not in config:
config['CELERY_RESULT_BACKEND'] = 'database'
config['CELERY_RESULT_DBURI'] = db_url
return config['DATABASE_ENGINE']
def get_session() -> Session:
"""Get a database session.
:returns: a database session
:rtype: :class:`~.orm.Session`
"""
task = current_task._get_current_object()
request = task.request
if getattr(request, 'db_session', None) is None:
request.db_session = Session(bind=get_database_engine())
return request.db_session
@task_postrun.connect
def close_session(task_id, task, *args, **kwargs):
"""Close the session if there's the opened session."""
session = getattr(task.request, 'db_session', None)
if session is not None:
session.close()
def get_raven_client() -> Client:
"""Get a raven client.
:returns: a raven client
:rtype: :class:`raven.Client`
"""
config = current_app.conf
if 'SENTRY_DSN' in config:
if 'RAVEN_CLIENT' not in config:
sentry_dsn = config['SENTRY_DSN']
config['RAVEN_CLIENT'] = Client(
dsn=sentry_dsn,
include_paths=[
'cliche',
],
)
return config['RAVEN_CLIENT']
else:
return None
@celeryd_init.connect
def setup_raven_logging(conf=None, **kwargs):
client = get_raven_client()
if client is not None:
handler = SentryHandler(client)
setup_logging(handler)
@task_failure.connect
def report_task_failure(task_id, exception, args, kwargs,
traceback, einfo, sender):
client = get_raven_client()
client.captureException(einfo.exc_info)<|fim▁end|> | from raven import Client
from raven.conf import setup_logging |
<|file_name|>pattern-macro.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
macro_rules! foo { () => ( x ) }<|fim▁hole|> let foo!() = 2;
x + 1; //~ ERROR cannot find value `x` in this scope
}<|fim▁end|> |
fn main() { |
<|file_name|>app.py<|end_file_name|><|fim▁begin|># coding: utf-8
from datetime import datetime<|fim▁hole|>from flask import Flask
from flask import render_template
from views.todos import todos_view
app = Flask(__name__)
app.register_blueprint(todos_view, url_prefix='/todos')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/time')
def time():
return str(datetime.now())
@app.route('/1/ping')
def ping():
"""健康监测
LeanEngine 会根据 `/1/ping` 判断应用是否正常运行。
如果返回状态码为 200 则认为正常。
其他状态码或者超过 5 秒没响应则认为应用运行异常。
"""
return 'pong'<|fim▁end|> | |
<|file_name|>test_architecture.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""Test class for Architecture UI"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo.datafactory import generate_strings_list, invalid_values_list
from robottelo.decorators import run_only_on, tier1
from robottelo.test import UITestCase
from robottelo.ui.factory import make_arch
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
def valid_arch_os_names():
"""Returns a tuple of arch/os names for creation tests"""
return(<|fim▁hole|> {u'name': gen_string('alpha'), u'os_name': gen_string('alpha')},
{u'name': gen_string('html'), u'os_name': gen_string('html')},
{u'name': gen_string('utf8'), u'os_name': gen_string('utf8')},
{u'name': gen_string('alphanumeric'),
u'os_name': gen_string('alphanumeric')}
)
class ArchitectureTestCase(UITestCase):
"""Implements Architecture tests from UI"""
@run_only_on('sat')
@tier1
def test_positive_create_with_os(self):
"""@Test: Create a new Architecture with OS
@Feature: Architecture - Positive Create
@Assert: Architecture is created
"""
with Session(self.browser) as session:
for test_data in valid_arch_os_names():
with self.subTest(test_data):
entities.OperatingSystem(
name=test_data['os_name']).create()
make_arch(session, name=test_data['name'],
os_names=[test_data['os_name']])
self.assertIsNotNone(
self.architecture.search(test_data['name']))
@run_only_on('sat')
@tier1
def test_positive_create_with_name(self):
"""@Test: Create a new Architecture with different data
@Feature: Architecture - Positive Create
@Assert: Architecture is created
"""
with Session(self.browser) as session:
for name in generate_strings_list():
with self.subTest(name):
make_arch(session, name=name)
self.assertIsNotNone(self.architecture.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_name(self):
"""@Test: Try to create architecture and use whitespace, blank, tab
symbol or too long string of different types as its name value
@Feature: Architecture - Negative Create
@Assert: Architecture is not created
"""
with Session(self.browser) as session:
for invalid_name in invalid_values_list(interface='ui'):
with self.subTest(invalid_name):
make_arch(session, name=invalid_name)
self.assertIsNotNone(self.architecture.wait_until_element(
common_locators['name_haserror']))
@run_only_on('sat')
@tier1
def test_negative_create_with_same_name(self):
"""@Test: Create a new Architecture with same name
@Feature: Architecture - Negative Create
@Assert: Architecture is not created
"""
with Session(self.browser) as session:
for name in generate_strings_list():
with self.subTest(name):
make_arch(session, name=name)
self.assertIsNotNone(self.architecture.search(name))
make_arch(session, name=name)
self.assertIsNotNone(self.architecture.wait_until_element(
common_locators['name_haserror']))
@run_only_on('sat')
@tier1
def test_positive_delete(self):
"""@Test: Delete an existing Architecture
@Feature: Architecture - Delete
@Assert: Architecture is deleted
"""
os = entities.OperatingSystem(name=gen_string('alpha')).create()
with Session(self.browser) as session:
for name in generate_strings_list():
with self.subTest(name):
entities.Architecture(
name=name, operatingsystem=[os]).create()
session.nav.go_to_architectures()
self.architecture.delete(name)
@run_only_on('sat')
@tier1
def test_positive_update_name_and_os(self):
"""@Test: Update Architecture with new name and OS
@Feature: Architecture - Update
@Assert: Architecture is updated
"""
old_name = gen_string('alpha')
with Session(self.browser) as session:
make_arch(session, name=old_name)
self.assertIsNotNone(self.architecture.search(old_name))
for new_name in generate_strings_list():
with self.subTest(new_name):
os_name = gen_string('alpha')
entities.OperatingSystem(name=os_name).create()
self.architecture.update(
old_name, new_name, new_os_names=[os_name])
self.assertIsNotNone(self.architecture.search(new_name))
old_name = new_name # for next iteration<|fim▁end|> | |
<|file_name|>task_18.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
from pyrob.api import *
@task
def task_8_28():
if wall_is_above() != True:
while (wall_is_above() != True):
move_up()
while (wall_is_on_the_left() != True):
move_left()
while(wall_is_on_the_right() != True and wall_is_beneath() and wall_is_above()):
move_right()
if wall_is_above() != True:
while (wall_is_above() != True):
move_up()
while(wall_is_on_the_left() != True):
move_left()
while (wall_is_on_the_left() != True and wall_is_beneath() and wall_is_above()):
move_left()
if wall_is_above() != True:
while (wall_is_above() != True):
move_up()
while (wall_is_on_the_left() != True):<|fim▁hole|>if __name__ == '__main__':
run_tasks()<|fim▁end|> | move_left()
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import PyQt5 # this force pyqtgraph to deal with Qt5
# For matplotlib to Qt5 :
# * this avoid tinker problem when not installed
# * work better with GUI
# * trigger a warning on notebook
import matplotlib
import warnings
with warnings.catch_warnings():
try:
warnings.simplefilter("ignore")
matplotlib.use('Qt5Agg')
except:
# on server without screen this is not possible.
pass
from .myqt import QT,mkQApp
#for catalogue window
from .cataloguecontroller import CatalogueController
from .traceviewer import CatalogueTraceViewer
from .peaklists import PeakList, ClusterPeakList
from .ndscatter import NDScatter
from .waveformviewer import WaveformViewer
from .similarity import SpikeSimilarityView, ClusterSimilarityView, ClusterRatioSimilarityView
from .pairlist import PairList
from .silhouette import Silhouette
from .waveformhistviewer import WaveformHistViewer
from .featuretimeviewer import FeatureTimeViewer
from .cataloguewindow import CatalogueWindow
#for peeler window
from .peelercontroller import PeelerController
from .traceviewer import PeelerTraceViewer
from .spikelists import SpikeList, ClusterSpikeList
from .waveformviewer import PeelerWaveformViewer
from .isiviewer import ISIViewer
from .crosscorrelogramviewer import CrossCorrelogramViewer<|fim▁hole|>
#main window
from .mainwindow import MainWindow
from .initializedatasetwindow import InitializeDatasetWindow, ChannelGroupWidget
from .probegeometryview import ProbeGeometryView
from .gpuselector import GpuSelector<|fim▁end|> |
from .peelerwindow import PeelerWindow
|
<|file_name|>getFileList.test.js<|end_file_name|><|fim▁begin|><|fim▁hole|>describe('injector', function() {
it('returns a function returning a promise', function() {
var fn = subject({});
expect(fn('name', [])).to.be.instanceOf(Promise);
});
});<|fim▁end|> | var subject = require('../../lib/helpers/injector');
var Promise = require('bluebird');
|
<|file_name|>cnttransformnote.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the Qt Mobility Components.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial Usage
** Licensees holding valid Qt Commercial licenses may use this file in
** accordance with the Qt Solutions Commercial License Agreement provided
** with the Software or, alternatively, in accordance with the terms
** contained in a written agreement between you and Nokia.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
** Please note Third Party Software included with Qt Solutions may impose
** additional restrictions and it is the user's responsibility to ensure
** that they have met the licensing requirements of the GPL, LGPL, or Qt
** Solutions Commercial license and the relevant license of the Third
** Party Software they are using.
**
** If you are unsure which license is appropriate for your use, please
** contact the sales department at [email protected].
** $QT_END_LICENSE$
**
****************************************************************************/
#include "cnttransformnote.h"
QList<CContactItemField *> CntTransformNote::transformDetailL(const QContactDetail &detail)
{
if(detail.definitionName() != QContactNote::DefinitionName)
User::Leave(KErrArgument);
QList<CContactItemField *> fieldList;
//cast to note
const QContactNote ¬e(static_cast<const QContactNote&>(detail));
//create new fields without contexts
transformToTextFieldL(note, fieldList, note.note(), KUidContactFieldNote, KUidContactFieldVCardMapNOTE, false);<|fim▁hole|> return fieldList;
}
QContactDetail *CntTransformNote::transformItemField(const CContactItemField& field, const QContact &contact)
{
Q_UNUSED(contact);
QContactNote *note = new QContactNote();
CContactTextField* storage = field.TextStorage();
QString noteString = QString::fromUtf16(storage->Text().Ptr(), storage->Text().Length());
note->setNote(noteString);
return note;
}
bool CntTransformNote::supportsDetail(QString detailName) const
{
bool ret = false;
if (detailName == QContactNote::DefinitionName) {
ret = true;
}
return ret;
}
QList<TUid> CntTransformNote::supportedFields() const
{
return QList<TUid>()
<< KUidContactFieldNote;
}
QList<TUid> CntTransformNote::supportedSortingFieldTypes(QString detailFieldName) const
{
QList<TUid> uids;
if (detailFieldName == QContactNote::FieldNote)
uids << KUidContactFieldNote;
return uids;
}
/*!
* Checks whether the subtype is supported
*
* \a subType The subtype to be checked
* \return True if this subtype is supported
*/
bool CntTransformNote::supportsSubType(const QString& subType) const
{
Q_UNUSED(subType);
return false;
}
/*!
 * Returns the field id corresponding to a field
*
* \a fieldName The name of the supported field
* \return fieldId for the fieldName, 0 if not supported
*/
quint32 CntTransformNote::getIdForField(const QString& fieldName) const
{
if (QContactNote::FieldNote == fieldName)
return KUidContactFieldNote.iUid;
else
return 0;
}
/*!
* Modifies the detail definitions. The default detail definitions are
* queried from QContactManagerEngine::schemaDefinitions and then modified
* with this function in the transform leaf classes.
*
* \a definitions The detail definitions to modify.
* \a contactType The contact type the definitions apply for.
*/
void CntTransformNote::detailDefinitions(QMap<QString, QContactDetailDefinition> &definitions, const QString& contactType) const
{
Q_UNUSED(contactType);
if(definitions.contains(QContactNote::DefinitionName)) {
QContactDetailDefinition d = definitions.value(QContactNote::DefinitionName);
QMap<QString, QContactDetailFieldDefinition> fields = d.fields();
// Context not supported in symbian back-end, remove
fields.remove(QContactNote::FieldContext);
d.setFields(fields);
// Replace original definitions
definitions.insert(d.name(), d);
}
}<|fim▁end|> | |
<|file_name|>np.py<|end_file_name|><|fim▁begin|>from os.path import dirname
import numpy as np
from ..os import open_file, exists_isdir, makedirs
from ..log import get_logger
logger = get_logger()
def read_or_write(data_f, fallback=None):
"""Loads the data file if it exists. Otherwise, if fallback is provided,
call fallback and save its return to disk.
Args:<|fim▁hole|> deciding how to load the data.
fallback (function, optional): Fallback function used if data file
            doesn't exist. Its return value will be saved to ``data_f`` for
            future loads. It should take no arguments; if yours requires
            arguments, just wrap it with::
fallback=lambda: your_fancy_func(var0, var1)
Returns:
Data loaded if ``data_f`` exists; otherwise, ``fallback``'s return
(``None`` if no fallback).
    Writes
        - Return value of the fallback, if provided.
"""
# Decide data file type
ext = data_f.split('.')[-1].lower()
def load_func(path):
with open_file(path, 'rb') as h:
data = np.load(h)
return data
def save_func(data, path):
if ext == 'npy':
save = np.save
elif ext == 'npz':
save = np.savez
else:
raise NotImplementedError(ext)
with open_file(path, 'wb') as h:
save(h, data)
# Load or call fallback
if exists_isdir(data_f)[0]:
data = load_func(data_f)
msg = "Loaded: "
else:
msg = "File doesn't exist "
if fallback is None:
data = None
msg += "(fallback not provided): "
else:
data = fallback()
out_dir = dirname(data_f)
makedirs(out_dir)
save_func(data, data_f)
msg += "(fallback provided); fallback return now saved to: "
msg += data_f
logger.info(msg)
return data<|fim▁end|> | data_f (str): Path to the data file, whose extension will be used for |
<|file_name|>suggest-path-for-tuple-struct.rs<|end_file_name|><|fim▁begin|>mod module {
pub struct SomeTupleStruct(u8);
pub struct SomeRegularStruct {
foo: u8
}
impl SomeTupleStruct {
pub fn new() -> Self {
Self(0)
}
}
impl SomeRegularStruct {
pub fn new() -> Self {
Self { foo: 0 }
}
}
}
<|fim▁hole|>use module::{SomeTupleStruct, SomeRegularStruct};
fn main() {
let _ = SomeTupleStruct.new();
//~^ ERROR expected value, found struct `SomeTupleStruct`
let _ = SomeRegularStruct.new();
//~^ ERROR expected value, found struct `SomeRegularStruct`
}<|fim▁end|> | |
<|file_name|>windows.rs<|end_file_name|><|fim▁begin|>//! Windows specific definitions
use std::io::{self, Stdout, Write};
use std::mem;
use std::sync::atomic;
use unicode_width::UnicodeWidthChar;
use winapi::shared::minwindef::{DWORD, WORD};
use winapi::um::winnt::{CHAR, HANDLE};
use winapi::um::{consoleapi, handleapi, processenv, winbase, wincon, winuser};
use super::{truncate, Position, RawMode, RawReader, Renderer, Term};
use config::{ColorMode, Config};
use error;
use highlight::Highlighter;
use keys::{self, KeyPress};
use line_buffer::LineBuffer;
use Result;
const STDIN_FILENO: DWORD = winbase::STD_INPUT_HANDLE;
const STDOUT_FILENO: DWORD = winbase::STD_OUTPUT_HANDLE;
fn get_std_handle(fd: DWORD) -> Result<HANDLE> {
let handle = unsafe { processenv::GetStdHandle(fd) };
if handle == handleapi::INVALID_HANDLE_VALUE {
try!(Err(io::Error::last_os_error()));
} else if handle.is_null() {
try!(Err(io::Error::new(
io::ErrorKind::Other,
"no stdio handle available for this process",
),));
}
Ok(handle)
}
#[macro_export]
macro_rules! check {
($funcall:expr) => {{
let rc = unsafe { $funcall };
if rc == 0 {
try!(Err(io::Error::last_os_error()));
}
rc
}};
}
fn get_win_size(handle: HANDLE) -> (usize, usize) {
let mut info = unsafe { mem::zeroed() };
match unsafe { wincon::GetConsoleScreenBufferInfo(handle, &mut info) } {
0 => (80, 24),
_ => (
info.dwSize.X as usize,
(1 + info.srWindow.Bottom - info.srWindow.Top) as usize,
), // (info.srWindow.Right - info.srWindow.Left + 1)
}
}
fn get_console_mode(handle: HANDLE) -> Result<DWORD> {
let mut original_mode = 0;
check!(consoleapi::GetConsoleMode(handle, &mut original_mode));
Ok(original_mode)
}
pub type Mode = ConsoleMode;
#[derive(Clone, Copy, Debug)]
pub struct ConsoleMode {
original_stdin_mode: DWORD,
stdin_handle: HANDLE,
original_stdout_mode: Option<DWORD>,
stdout_handle: HANDLE,
}
impl RawMode for Mode {
/// Disable RAW mode for the terminal.
fn disable_raw_mode(&self) -> Result<()> {
check!(consoleapi::SetConsoleMode(
self.stdin_handle,
self.original_stdin_mode,
));
if let Some(original_stdout_mode) = self.original_stdout_mode {
check!(consoleapi::SetConsoleMode(
self.stdout_handle,
original_stdout_mode,
));
}
Ok(())
}
}
/// Console input reader
pub struct ConsoleRawReader {
handle: HANDLE,
buf: [u16; 2],
}
impl ConsoleRawReader {
pub fn new() -> Result<ConsoleRawReader> {
let handle = try!(get_std_handle(STDIN_FILENO));
Ok(ConsoleRawReader {
handle,
buf: [0; 2],
})
}
}
impl RawReader for ConsoleRawReader {
fn next_key(&mut self, _: bool) -> Result<KeyPress> {
use std::char::decode_utf16;
use winapi::um::wincon::{
LEFT_ALT_PRESSED, LEFT_CTRL_PRESSED, RIGHT_ALT_PRESSED, RIGHT_CTRL_PRESSED,
SHIFT_PRESSED,
};
let mut rec: wincon::INPUT_RECORD = unsafe { mem::zeroed() };
let mut count = 0;
let mut surrogate = false;
loop {
// TODO GetNumberOfConsoleInputEvents
check!(consoleapi::ReadConsoleInputW(
self.handle,
&mut rec,
1 as DWORD,
&mut count,
));
if rec.EventType == wincon::WINDOW_BUFFER_SIZE_EVENT {
SIGWINCH.store(true, atomic::Ordering::SeqCst);
debug!(target: "rustyline", "SIGWINCH");
return Err(error::ReadlineError::WindowResize); // sigwinch + err => err ignored
} else if rec.EventType != wincon::KEY_EVENT {
continue;
}
let key_event = unsafe { rec.Event.KeyEvent() };
// writeln!(io::stderr(), "key_event: {:?}", key_event).unwrap();
if key_event.bKeyDown == 0 && key_event.wVirtualKeyCode != winuser::VK_MENU as WORD {
continue;
}
// key_event.wRepeatCount seems to be always set to 1 (maybe because we only
// read one character at a time)
let alt_gr = key_event.dwControlKeyState & (LEFT_CTRL_PRESSED | RIGHT_ALT_PRESSED)
== (LEFT_CTRL_PRESSED | RIGHT_ALT_PRESSED);
let alt = key_event.dwControlKeyState & (LEFT_ALT_PRESSED | RIGHT_ALT_PRESSED) != 0;
let ctrl = key_event.dwControlKeyState & (LEFT_CTRL_PRESSED | RIGHT_CTRL_PRESSED) != 0;
let meta = alt && !alt_gr;
let shift = key_event.dwControlKeyState & SHIFT_PRESSED != 0;
let utf16 = unsafe { *key_event.uChar.UnicodeChar() };
if utf16 == 0 {
match key_event.wVirtualKeyCode as i32 {
winuser::VK_LEFT => {
return Ok(if ctrl {
KeyPress::ControlLeft
} else if shift {
KeyPress::ShiftLeft
} else {
KeyPress::Left
})
}
winuser::VK_RIGHT => {
return Ok(if ctrl {
KeyPress::ControlRight
} else if shift {
KeyPress::ShiftRight
} else {
KeyPress::Right
})
}
winuser::VK_UP => {
return Ok(if ctrl {
KeyPress::ControlUp
} else if shift {
KeyPress::ShiftUp
} else {
KeyPress::Up
})
}
winuser::VK_DOWN => {
return Ok(if ctrl {
KeyPress::ControlDown
} else if shift {
KeyPress::ShiftDown
} else {
KeyPress::Down
})
}
winuser::VK_DELETE => return Ok(KeyPress::Delete),
winuser::VK_HOME => return Ok(KeyPress::Home),
winuser::VK_END => return Ok(KeyPress::End),
winuser::VK_PRIOR => return Ok(KeyPress::PageUp),
winuser::VK_NEXT => return Ok(KeyPress::PageDown),
winuser::VK_INSERT => return Ok(KeyPress::Insert),
winuser::VK_F1 => return Ok(KeyPress::F(1)),
winuser::VK_F2 => return Ok(KeyPress::F(2)),
winuser::VK_F3 => return Ok(KeyPress::F(3)),
winuser::VK_F4 => return Ok(KeyPress::F(4)),
winuser::VK_F5 => return Ok(KeyPress::F(5)),
winuser::VK_F6 => return Ok(KeyPress::F(6)),
winuser::VK_F7 => return Ok(KeyPress::F(7)),
winuser::VK_F8 => return Ok(KeyPress::F(8)),
winuser::VK_F9 => return Ok(KeyPress::F(9)),
winuser::VK_F10 => return Ok(KeyPress::F(10)),
winuser::VK_F11 => return Ok(KeyPress::F(11)),
winuser::VK_F12 => return Ok(KeyPress::F(12)),
// winuser::VK_BACK is correctly handled because the key_event.UnicodeChar is
// also set.
_ => continue,
};
} else if utf16 == 27 {
return Ok(KeyPress::Esc);
} else {
if utf16 >= 0xD800 && utf16 < 0xDC00 {
surrogate = true;
self.buf[0] = utf16;
continue;
}
let buf = if surrogate {
self.buf[1] = utf16;
&self.buf[..]
} else {
self.buf[0] = utf16;
&self.buf[..1]
};
let orc = decode_utf16(buf.iter().cloned()).next();
if orc.is_none() {
return Err(error::ReadlineError::Eof);
}
let c = try!(orc.unwrap());
if meta {
return Ok(KeyPress::Meta(c));
} else {
let mut key = keys::char_to_key_press(c);
if key == KeyPress::Tab && shift {
key = KeyPress::BackTab;
} else if key == KeyPress::Char(' ') && ctrl {
key = KeyPress::Ctrl(' ');
}
return Ok(key);
}
}
}
}
}
pub struct ConsoleRenderer {
out: Stdout,
handle: HANDLE,
cols: usize, // Number of columns in terminal
buffer: String,
}
impl ConsoleRenderer {
fn new(handle: HANDLE) -> ConsoleRenderer {
// Multi line editing is enabled by ENABLE_WRAP_AT_EOL_OUTPUT mode
let (cols, _) = get_win_size(handle);
ConsoleRenderer {
out: io::stdout(),
handle,
cols,
buffer: String::with_capacity(1024),
}
}
fn get_console_screen_buffer_info(&self) -> Result<wincon::CONSOLE_SCREEN_BUFFER_INFO> {
let mut info = unsafe { mem::zeroed() };
check!(wincon::GetConsoleScreenBufferInfo(self.handle, &mut info));
Ok(info)
}
fn set_console_cursor_position(&mut self, pos: wincon::COORD) -> Result<()> {
check!(wincon::SetConsoleCursorPosition(self.handle, pos));
Ok(())
}
fn clear(&mut self, length: DWORD, pos: wincon::COORD) -> Result<()> {
let mut _count = 0;
check!(wincon::FillConsoleOutputCharacterA(
self.handle,
' ' as CHAR,
length,
pos,
&mut _count,
));
Ok(())
}
}
impl Renderer for ConsoleRenderer {
fn move_cursor(&mut self, old: Position, new: Position) -> Result<()> {
let mut info = try!(self.get_console_screen_buffer_info());
if new.row > old.row {
info.dwCursorPosition.Y += (new.row - old.row) as i16;
} else {
info.dwCursorPosition.Y -= (old.row - new.row) as i16;
}
if new.col > old.col {
info.dwCursorPosition.X += (new.col - old.col) as i16;
} else {
info.dwCursorPosition.X -= (old.col - new.col) as i16;
}
self.set_console_cursor_position(info.dwCursorPosition)
}
fn refresh_line(
&mut self,
prompt: &str,
prompt_size: Position,
line: &LineBuffer,
hint: Option<String>,
current_row: usize,
old_rows: usize,
highlighter: Option<&Highlighter>,
) -> Result<(Position, Position)> {
// calculate the position of the end of the input line
let end_pos = self.calculate_position(line, prompt_size);
// calculate the desired position of the cursor
let cursor = self.calculate_position(&line[..line.pos()], prompt_size);
// position at the start of the prompt, clear to end of previous input
let mut info = try!(self.get_console_screen_buffer_info());
info.dwCursorPosition.X = 0;
info.dwCursorPosition.Y -= current_row as i16;
try!(self.set_console_cursor_position(info.dwCursorPosition));
try!(self.clear(
(info.dwSize.X * (old_rows as i16 + 1)) as DWORD,
info.dwCursorPosition,
));
self.buffer.clear();
if let Some(highlighter) = highlighter {
// TODO handle ansi escape code (SetConsoleTextAttribute)
// display the prompt
self.buffer.push_str(&highlighter.highlight_prompt(prompt));
// display the input line
self.buffer
.push_str(&highlighter.highlight(line, line.pos()));
} else {
// display the prompt
self.buffer.push_str(prompt);
// display the input line
self.buffer.push_str(line);
}
// display hint
if let Some(hint) = hint {
let truncate = truncate(&hint, end_pos.col, self.cols);
if let Some(highlighter) = highlighter {
self.buffer.push_str(&highlighter.highlight_hint(truncate));
} else {
self.buffer.push_str(truncate);
}
}
try!(self.out.write_all(self.buffer.as_bytes()));
try!(self.out.flush());
// position the cursor
let mut info = try!(self.get_console_screen_buffer_info());
info.dwCursorPosition.X = cursor.col as i16;
info.dwCursorPosition.Y -= (end_pos.row - cursor.row) as i16;
try!(self.set_console_cursor_position(info.dwCursorPosition));
Ok((cursor, end_pos))
}
fn write_and_flush(&mut self, buf: &[u8]) -> Result<()> {
try!(self.out.write_all(buf));
try!(self.out.flush());
Ok(())
}
    /// Characters with a 2-column width are correctly handled (not split).
fn calculate_position(&self, s: &str, orig: Position) -> Position {
let mut pos = orig;
for c in s.chars() {
let cw = if c == '\n' {
pos.col = 0;
pos.row += 1;
None
} else {
c.width()
};
if let Some(cw) = cw {
pos.col += cw;
if pos.col > self.cols {
pos.row += 1;
pos.col = cw;
}
}
}
if pos.col == self.cols {
pos.col = 0;
pos.row += 1;
}
pos
}
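    // Illustrative example (a sketch, not taken from the original sources):
    // with `self.cols == 4` and `orig == Position { col: 0, row: 0 }`, the
    // string "ab汉" ends exactly at column 4, so the result wraps to
    // `Position { col: 0, row: 1 }` because the wide character occupies two
    // columns.
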
/// Clear the screen. Used to handle ctrl+l
fn clear_screen(&mut self) -> Result<()> {
let info = try!(self.get_console_screen_buffer_info());
let coord = wincon::COORD { X: 0, Y: 0 };
check!(wincon::SetConsoleCursorPosition(self.handle, coord));
let n = info.dwSize.X as DWORD * info.dwSize.Y as DWORD;
self.clear(n, coord)
}
fn sigwinch(&self) -> bool {
SIGWINCH.compare_and_swap(true, false, atomic::Ordering::SeqCst)
}
/// Try to get the number of columns in the current terminal,
/// or assume 80 if it fails.
fn update_size(&mut self) {
let (cols, _) = get_win_size(self.handle);
self.cols = cols;
}
fn get_columns(&self) -> usize {
self.cols
}
/// Try to get the number of rows in the current terminal,
/// or assume 24 if it fails.
fn get_rows(&self) -> usize {
let (_, rows) = get_win_size(self.handle);
rows<|fim▁hole|>static SIGWINCH: atomic::AtomicBool = atomic::ATOMIC_BOOL_INIT;
pub type Terminal = Console;
#[derive(Clone, Debug)]
pub struct Console {
stdin_isatty: bool,
stdin_handle: HANDLE,
stdout_isatty: bool,
stdout_handle: HANDLE,
pub(crate) color_mode: ColorMode,
ansi_colors_supported: bool,
}
impl Console {}
impl Term for Console {
type Mode = Mode;
type Reader = ConsoleRawReader;
type Writer = ConsoleRenderer;
fn new(color_mode: ColorMode) -> Console {
use std::ptr;
let stdin_handle = get_std_handle(STDIN_FILENO);
let stdin_isatty = match stdin_handle {
Ok(handle) => {
// If this function doesn't fail then fd is a TTY
get_console_mode(handle).is_ok()
}
Err(_) => false,
};
let stdout_handle = get_std_handle(STDOUT_FILENO);
let stdout_isatty = match stdout_handle {
Ok(handle) => {
// If this function doesn't fail then fd is a TTY
get_console_mode(handle).is_ok()
}
Err(_) => false,
};
Console {
stdin_isatty,
stdin_handle: stdin_handle.unwrap_or(ptr::null_mut()),
stdout_isatty,
stdout_handle: stdout_handle.unwrap_or(ptr::null_mut()),
color_mode,
ansi_colors_supported: false,
}
}
/// Checking for an unsupported TERM in windows is a no-op
fn is_unsupported(&self) -> bool {
false
}
fn is_stdin_tty(&self) -> bool {
self.stdin_isatty
}
fn colors_enabled(&self) -> bool {
// TODO ANSI Colors & Windows <10
match self.color_mode {
ColorMode::Enabled => self.stdout_isatty && self.ansi_colors_supported,
ColorMode::Forced => true,
ColorMode::Disabled => false,
}
}
// pub fn install_sigwinch_handler(&mut self) {
// See ReadConsoleInputW && WINDOW_BUFFER_SIZE_EVENT
// }
/// Enable RAW mode for the terminal.
fn enable_raw_mode(&mut self) -> Result<Mode> {
if !self.stdin_isatty {
try!(Err(io::Error::new(
io::ErrorKind::Other,
"no stdio handle available for this process",
),));
}
let original_stdin_mode = try!(get_console_mode(self.stdin_handle));
// Disable these modes
let mut raw = original_stdin_mode & !(wincon::ENABLE_LINE_INPUT
| wincon::ENABLE_ECHO_INPUT
| wincon::ENABLE_PROCESSED_INPUT);
// Enable these modes
raw |= wincon::ENABLE_EXTENDED_FLAGS;
raw |= wincon::ENABLE_INSERT_MODE;
raw |= wincon::ENABLE_QUICK_EDIT_MODE;
raw |= wincon::ENABLE_WINDOW_INPUT;
check!(consoleapi::SetConsoleMode(self.stdin_handle, raw));
let original_stdout_mode = if self.stdout_isatty {
let original_stdout_mode = try!(get_console_mode(self.stdout_handle));
// To enable ANSI colors (Windows 10 only):
// https://docs.microsoft.com/en-us/windows/console/setconsolemode
if original_stdout_mode & wincon::ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0 {
let raw = original_stdout_mode | wincon::ENABLE_VIRTUAL_TERMINAL_PROCESSING;
self.ansi_colors_supported =
unsafe { consoleapi::SetConsoleMode(self.stdout_handle, raw) != 0 };
}
Some(original_stdout_mode)
} else {
None
};
Ok(Mode {
original_stdin_mode,
stdin_handle: self.stdin_handle,
original_stdout_mode,
stdout_handle: self.stdout_handle,
})
}
fn create_reader(&self, _: &Config) -> Result<ConsoleRawReader> {
ConsoleRawReader::new()
}
fn create_writer(&self) -> ConsoleRenderer {
ConsoleRenderer::new(self.stdout_handle)
}
}<|fim▁end|> | }
}
|
<|file_name|>test_dwt_idwt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (assert_allclose, assert_, assert_raises,
assert_array_equal)
import pywt
# Check that float32, float64, complex64, complex128 are preserved.
# Other real types get converted to float64.
# complex256 gets converted to complex128
dtypes_in = [np.int8, np.float16, np.float32, np.float64, np.complex64,
np.complex128]
dtypes_out = [np.float64, np.float32, np.float32, np.float64, np.complex64,
np.complex128]
# test complex256 as well if it is available
try:
dtypes_in += [np.complex256, ]
dtypes_out += [np.complex128, ]
except AttributeError:
pass
def test_dwt_idwt_basic():
x = [3, 7, 1, 1, -2, 5, 4, 6]
cA, cD = pywt.dwt(x, 'db2')
cA_expect = [5.65685425, 7.39923721, 0.22414387, 3.33677403, 7.77817459]
cD_expect = [-2.44948974, -1.60368225, -4.44140056, -0.41361256,
1.22474487]
assert_allclose(cA, cA_expect)
assert_allclose(cD, cD_expect)
x_roundtrip = pywt.idwt(cA, cD, 'db2')
assert_allclose(x_roundtrip, x, rtol=1e-10)
# mismatched dtypes OK
x_roundtrip2 = pywt.idwt(cA.astype(np.float64), cD.astype(np.float32),
'db2')
assert_allclose(x_roundtrip2, x, rtol=1e-7, atol=1e-7)
assert_(x_roundtrip2.dtype == np.float64)
def test_idwt_mixed_complex_dtype():
x = np.arange(8).astype(float)
x = x + 1j*x[::-1]
cA, cD = pywt.dwt(x, 'db2')
x_roundtrip = pywt.idwt(cA, cD, 'db2')
assert_allclose(x_roundtrip, x, rtol=1e-10)
# mismatched dtypes OK
x_roundtrip2 = pywt.idwt(cA.astype(np.complex128), cD.astype(np.complex64),
'db2')
assert_allclose(x_roundtrip2, x, rtol=1e-7, atol=1e-7)
assert_(x_roundtrip2.dtype == np.complex128)
def test_dwt_idwt_dtypes():
wavelet = pywt.Wavelet('haar')
for dt_in, dt_out in zip(dtypes_in, dtypes_out):
x = np.ones(4, dtype=dt_in)
errmsg = "wrong dtype returned for {0} input".format(dt_in)
cA, cD = pywt.dwt(x, wavelet)
assert_(cA.dtype == cD.dtype == dt_out, "dwt: " + errmsg)
x_roundtrip = pywt.idwt(cA, cD, wavelet)
assert_(x_roundtrip.dtype == dt_out, "idwt: " + errmsg)
def test_dwt_idwt_basic_complex():
x = np.asarray([3, 7, 1, 1, -2, 5, 4, 6])
x = x + 0.5j*x
cA, cD = pywt.dwt(x, 'db2')
cA_expect = np.asarray([5.65685425, 7.39923721, 0.22414387, 3.33677403,
7.77817459])
cA_expect = cA_expect + 0.5j*cA_expect
cD_expect = np.asarray([-2.44948974, -1.60368225, -4.44140056, -0.41361256,
1.22474487])
cD_expect = cD_expect + 0.5j*cD_expect
assert_allclose(cA, cA_expect)
assert_allclose(cD, cD_expect)
x_roundtrip = pywt.idwt(cA, cD, 'db2')
assert_allclose(x_roundtrip, x, rtol=1e-10)
def test_dwt_idwt_partial_complex():
x = np.asarray([3, 7, 1, 1, -2, 5, 4, 6])
x = x + 0.5j*x
cA, cD = pywt.dwt(x, 'haar')
cA_rec_expect = np.array([5.0+2.5j, 5.0+2.5j, 1.0+0.5j, 1.0+0.5j,
1.5+0.75j, 1.5+0.75j, 5.0+2.5j, 5.0+2.5j])
cA_rec = pywt.idwt(cA, None, 'haar')
assert_allclose(cA_rec, cA_rec_expect)
cD_rec_expect = np.array([-2.0-1.0j, 2.0+1.0j, 0.0+0.0j, 0.0+0.0j,
-3.5-1.75j, 3.5+1.75j, -1.0-0.5j, 1.0+0.5j])
cD_rec = pywt.idwt(None, cD, 'haar')
assert_allclose(cD_rec, cD_rec_expect)
assert_allclose(cA_rec + cD_rec, x)
def test_dwt_wavelet_kwd():
x = np.array([3, 7, 1, 1, -2, 5, 4, 6])
w = pywt.Wavelet('sym3')
cA, cD = pywt.dwt(x, wavelet=w, mode='constant')
cA_expect = [4.38354585, 3.80302657, 7.31813271, -0.58565539, 4.09727044,
7.81994027]
cD_expect = [-1.33068221, -2.78795192, -3.16825651, -0.67715519,
-0.09722957, -0.07045258]
assert_allclose(cA, cA_expect)
assert_allclose(cD, cD_expect)
def test_dwt_coeff_len():
x = np.array([3, 7, 1, 1, -2, 5, 4, 6])
w = pywt.Wavelet('sym3')
ln_modes = [pywt.dwt_coeff_len(len(x), w.dec_len, mode) for mode in
pywt.Modes.modes]
expected_result = [6, ] * len(pywt.Modes.modes)
expected_result[pywt.Modes.modes.index('periodization')] = 4<|fim▁hole|> ln_modes = [pywt.dwt_coeff_len(len(x), w, mode) for mode in
pywt.Modes.modes]
assert_allclose(ln_modes, expected_result)
def test_idwt_none_input():
# None input equals arrays of zeros of the right length
res1 = pywt.idwt([1, 2, 0, 1], None, 'db2', 'symmetric')
res2 = pywt.idwt([1, 2, 0, 1], [0, 0, 0, 0], 'db2', 'symmetric')
assert_allclose(res1, res2, rtol=1e-15, atol=1e-15)
res1 = pywt.idwt(None, [1, 2, 0, 1], 'db2', 'symmetric')
res2 = pywt.idwt([0, 0, 0, 0], [1, 2, 0, 1], 'db2', 'symmetric')
assert_allclose(res1, res2, rtol=1e-15, atol=1e-15)
# Only one argument at a time can be None
assert_raises(ValueError, pywt.idwt, None, None, 'db2', 'symmetric')
def test_idwt_invalid_input():
# Too short, min length is 4 for 'db4':
assert_raises(ValueError, pywt.idwt, [1, 2, 4], [4, 1, 3], 'db4', 'symmetric')
def test_dwt_single_axis():
x = [[3, 7, 1, 1],
[-2, 5, 4, 6]]
cA, cD = pywt.dwt(x, 'db2', axis=-1)
cA0, cD0 = pywt.dwt(x[0], 'db2')
cA1, cD1 = pywt.dwt(x[1], 'db2')
assert_allclose(cA[0], cA0)
assert_allclose(cA[1], cA1)
assert_allclose(cD[0], cD0)
assert_allclose(cD[1], cD1)
def test_idwt_single_axis():
x = [[3, 7, 1, 1],
[-2, 5, 4, 6]]
x = np.asarray(x)
x = x + 1j*x # test with complex data
cA, cD = pywt.dwt(x, 'db2', axis=-1)
x0 = pywt.idwt(cA[0], cD[0], 'db2', axis=-1)
x1 = pywt.idwt(cA[1], cD[1], 'db2', axis=-1)
assert_allclose(x[0], x0)
assert_allclose(x[1], x1)
def test_dwt_invalid_input():
x = np.arange(1)
assert_raises(ValueError, pywt.dwt, x, 'db2', 'reflect')
assert_raises(ValueError, pywt.dwt, x, 'haar', 'antireflect')
def test_dwt_axis_arg():
x = [[3, 7, 1, 1],
[-2, 5, 4, 6]]
cA_, cD_ = pywt.dwt(x, 'db2', axis=-1)
cA, cD = pywt.dwt(x, 'db2', axis=1)
assert_allclose(cA_, cA)
assert_allclose(cD_, cD)
def test_dwt_axis_invalid_input():
x = np.ones((3,1))
assert_raises(ValueError, pywt.dwt, x, 'db2', 'reflect')
def test_idwt_axis_arg():
x = [[3, 7, 1, 1],
[-2, 5, 4, 6]]
cA, cD = pywt.dwt(x, 'db2', axis=1)
x_ = pywt.idwt(cA, cD, 'db2', axis=-1)
x = pywt.idwt(cA, cD, 'db2', axis=1)
assert_allclose(x_, x)
def test_dwt_idwt_axis_excess():
x = [[3, 7, 1, 1],
[-2, 5, 4, 6]]
# can't transform over axes that aren't there
assert_raises(ValueError,
pywt.dwt, x, 'db2', 'symmetric', axis=2)
assert_raises(ValueError,
pywt.idwt, [1, 2, 4], [4, 1, 3], 'db2', 'symmetric', axis=1)
def test_error_on_continuous_wavelet():
# A ValueError is raised if a Continuous wavelet is selected
data = np.ones((32, ))
for cwave in ['morl', pywt.DiscreteContinuousWavelet('morl')]:
assert_raises(ValueError, pywt.dwt, data, cwave)
cA, cD = pywt.dwt(data, 'db1')
assert_raises(ValueError, pywt.idwt, cA, cD, cwave)
def test_dwt_zero_size_axes():
# raise on empty input array
assert_raises(ValueError, pywt.dwt, [], 'db2')
# >1D case uses a different code path so check there as well
x = np.ones((1, 4))[0:0, :] # 2D with a size zero axis
assert_raises(ValueError, pywt.dwt, x, 'db2', axis=0)
def test_pad_1d():
x = [1, 2, 3]
assert_array_equal(pywt.pad(x, (4, 6), 'periodization'),
[1, 2, 3, 3, 1, 2, 3, 3, 1, 2, 3, 3, 1, 2])
assert_array_equal(pywt.pad(x, (4, 6), 'periodic'),
[3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3])
assert_array_equal(pywt.pad(x, (4, 6), 'constant'),
[1, 1, 1, 1, 1, 2, 3, 3, 3, 3, 3, 3, 3])
assert_array_equal(pywt.pad(x, (4, 6), 'zero'),
[0, 0, 0, 0, 1, 2, 3, 0, 0, 0, 0, 0, 0])
assert_array_equal(pywt.pad(x, (4, 6), 'smooth'),
[-3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
assert_array_equal(pywt.pad(x, (4, 6), 'symmetric'),
[3, 3, 2, 1, 1, 2, 3, 3, 2, 1, 1, 2, 3])
assert_array_equal(pywt.pad(x, (4, 6), 'antisymmetric'),
[3, -3, -2, -1, 1, 2, 3, -3, -2, -1, 1, 2, 3])
assert_array_equal(pywt.pad(x, (4, 6), 'reflect'),
[1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 3, 2, 1])
assert_array_equal(pywt.pad(x, (4, 6), 'antireflect'),
[-3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
# equivalence of various pad_width formats
assert_array_equal(pywt.pad(x, 4, 'periodic'),
pywt.pad(x, (4, 4), 'periodic'))
assert_array_equal(pywt.pad(x, (4, ), 'periodic'),
pywt.pad(x, (4, 4), 'periodic'))
assert_array_equal(pywt.pad(x, [(4, 4)], 'periodic'),
pywt.pad(x, (4, 4), 'periodic'))
def test_pad_errors():
# negative pad width
x = [1, 2, 3]
assert_raises(ValueError, pywt.pad, x, -2, 'periodic')
# wrong length pad width
assert_raises(ValueError, pywt.pad, x, (1, 1, 1), 'periodic')
# invalid mode name
assert_raises(ValueError, pywt.pad, x, 2, 'bad_mode')
def test_pad_nd():
for ndim in [2, 3]:
x = np.arange(4**ndim).reshape((4, ) * ndim)
if ndim == 2:
pad_widths = [(2, 1), (2, 3)]
else:
pad_widths = [(2, 1), ] * ndim
for mode in pywt.Modes.modes:
xp = pywt.pad(x, pad_widths, mode)
# expected result is the same as applying along axes separably
xp_expected = x.copy()
for ax in range(ndim):
xp_expected = np.apply_along_axis(pywt.pad,
ax,
xp_expected,
pad_widths=[pad_widths[ax]],
mode=mode)
assert_array_equal(xp, xp_expected)<|fim▁end|> |
assert_allclose(ln_modes, expected_result) |
<|file_name|>time.go<|end_file_name|><|fim▁begin|>// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import (
"bytes"
"fmt"
"math"
"regexp"
"strconv"
"strings"
gotime "time"
"unicode"
"github.com/pingcap/errors"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/parser/terror"
"github.com/pingcap/tidb/sessionctx/stmtctx"
"github.com/pingcap/tidb/util/logutil"
tidbMath "github.com/pingcap/tidb/util/math"
"github.com/pingcap/tidb/util/parser"
)
// Time format without fractional seconds precision.
const (
DateFormat = "2006-01-02"
TimeFormat = "2006-01-02 15:04:05"
// TimeFSPFormat is time format with fractional seconds precision.
TimeFSPFormat = "2006-01-02 15:04:05.000000"
// UTCTimeFormat is used to parse and format gotime.
UTCTimeFormat = "2006-01-02 15:04:05 UTC"
)
const (
// MinYear is the minimum for mysql year type.
MinYear int16 = 1901
// MaxYear is the maximum for mysql year type.
MaxYear int16 = 2155
// MaxDuration is the maximum for duration.
MaxDuration int64 = 838*10000 + 59*100 + 59
// MinTime is the minimum for mysql time type.
MinTime = -(838*gotime.Hour + 59*gotime.Minute + 59*gotime.Second)
// MaxTime is the maximum for mysql time type.
MaxTime = 838*gotime.Hour + 59*gotime.Minute + 59*gotime.Second
// ZeroDatetimeStr is the string representation of a zero datetime.
ZeroDatetimeStr = "0000-00-00 00:00:00"
// ZeroDateStr is the string representation of a zero date.
ZeroDateStr = "0000-00-00"
// TimeMaxHour is the max hour for mysql time type.
TimeMaxHour = 838
// TimeMaxMinute is the max minute for mysql time type.
TimeMaxMinute = 59
// TimeMaxSecond is the max second for mysql time type.
TimeMaxSecond = 59
// TimeMaxValue is the maximum value for mysql time type.
TimeMaxValue = TimeMaxHour*10000 + TimeMaxMinute*100 + TimeMaxSecond
// TimeMaxValueSeconds is the maximum second value for mysql time type.
TimeMaxValueSeconds = TimeMaxHour*3600 + TimeMaxMinute*60 + TimeMaxSecond
)
const (
// YearIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
YearIndex = 0 + iota
// MonthIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
MonthIndex
// DayIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
DayIndex
// HourIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
HourIndex
// MinuteIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
MinuteIndex
// SecondIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
SecondIndex
// MicrosecondIndex is index of 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
MicrosecondIndex
)
const (
// YearMonthMaxCnt is max parameters count 'YEARS-MONTHS' expr Format allowed
YearMonthMaxCnt = 2
// DayHourMaxCnt is max parameters count 'DAYS HOURS' expr Format allowed
DayHourMaxCnt = 2
// DayMinuteMaxCnt is max parameters count 'DAYS HOURS:MINUTES' expr Format allowed
DayMinuteMaxCnt = 3
// DaySecondMaxCnt is max parameters count 'DAYS HOURS:MINUTES:SECONDS' expr Format allowed
DaySecondMaxCnt = 4
// DayMicrosecondMaxCnt is max parameters count 'DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format allowed
DayMicrosecondMaxCnt = 5
// HourMinuteMaxCnt is max parameters count 'HOURS:MINUTES' expr Format allowed
HourMinuteMaxCnt = 2
// HourSecondMaxCnt is max parameters count 'HOURS:MINUTES:SECONDS' expr Format allowed
HourSecondMaxCnt = 3
// HourMicrosecondMaxCnt is max parameters count 'HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format allowed
HourMicrosecondMaxCnt = 4
// MinuteSecondMaxCnt is max parameters count 'MINUTES:SECONDS' expr Format allowed
MinuteSecondMaxCnt = 2
// MinuteMicrosecondMaxCnt is max parameters count 'MINUTES:SECONDS.MICROSECONDS' expr Format allowed
MinuteMicrosecondMaxCnt = 3
// SecondMicrosecondMaxCnt is max parameters count 'SECONDS.MICROSECONDS' expr Format allowed
SecondMicrosecondMaxCnt = 2
// TimeValueCnt is parameters count 'YEARS-MONTHS DAYS HOURS:MINUTES:SECONDS.MICROSECONDS' expr Format
TimeValueCnt = 7
)
// Zero values for different types.
var (
// ZeroDuration is the zero value for Duration type.
ZeroDuration = Duration{Duration: gotime.Duration(0), Fsp: DefaultFsp}
	// ZeroTime is the zero value for Time type.
ZeroTime = Time{}
// ZeroDatetime is the zero value for datetime Time.
ZeroDatetime = NewTime(ZeroCoreTime, mysql.TypeDatetime, DefaultFsp)
// ZeroTimestamp is the zero value for timestamp Time.
ZeroTimestamp = NewTime(ZeroCoreTime, mysql.TypeTimestamp, DefaultFsp)
// ZeroDate is the zero value for date Time.
ZeroDate = NewTime(ZeroCoreTime, mysql.TypeDate, DefaultFsp)
)
var (
// MinDatetime is the minimum for Golang Time type.
MinDatetime = FromDate(1, 1, 1, 0, 0, 0, 0)
// MaxDatetime is the maximum for mysql datetime type.
MaxDatetime = FromDate(9999, 12, 31, 23, 59, 59, 999999)
// BoundTimezone is the timezone for min and max timestamp.
BoundTimezone = gotime.UTC
// MinTimestamp is the minimum for mysql timestamp type.
MinTimestamp = NewTime(FromDate(1970, 1, 1, 0, 0, 1, 0), mysql.TypeTimestamp, DefaultFsp)
// MaxTimestamp is the maximum for mysql timestamp type.
MaxTimestamp = NewTime(FromDate(2038, 1, 19, 3, 14, 7, 999999), mysql.TypeTimestamp, DefaultFsp)
// WeekdayNames lists names of weekdays, which are used in builtin time function `dayname`.
WeekdayNames = []string{
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday",
"Sunday",
}
// MonthNames lists names of months, which are used in builtin time function `monthname`.
MonthNames = []string{
"January", "February",
"March", "April",
"May", "June",
"July", "August",
"September", "October",
"November", "December",
}
)
const (
	// GoDurationDay is the gotime.Duration which equals a Day.
	GoDurationDay = gotime.Hour * 24
	// GoDurationWeek is the gotime.Duration which equals a Week.
GoDurationWeek = GoDurationDay * 7
)
// FromGoTime translates time.Time to mysql time internal representation.
func FromGoTime(t gotime.Time) CoreTime {
	// Plus 500 nanoseconds for rounding of the microsecond part.
t = t.Add(500 * gotime.Nanosecond)
year, month, day := t.Date()
hour, minute, second := t.Clock()
microsecond := t.Nanosecond() / 1000
return FromDate(year, int(month), day, hour, minute, second, microsecond)
}
// FromDate makes an internal time representation from the given date.
func FromDate(year int, month int, day int, hour int, minute int, second int, microsecond int) CoreTime {
v := uint64(ZeroCoreTime)
v |= (uint64(microsecond) << microsecondBitFieldOffset) & microsecondBitFieldMask
v |= (uint64(second) << secondBitFieldOffset) & secondBitFieldMask
v |= (uint64(minute) << minuteBitFieldOffset) & minuteBitFieldMask
v |= (uint64(hour) << hourBitFieldOffset) & hourBitFieldMask
v |= (uint64(day) << dayBitFieldOffset) & dayBitFieldMask
v |= (uint64(month) << monthBitFieldOffset) & monthBitFieldMask
v |= (uint64(year) << yearBitFieldOffset) & yearBitFieldMask
return CoreTime(v)
}
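// For instance (an illustrative sketch of the bit-field packing defined below):
// FromDate(2021, 3, 5, 12, 30, 45, 500000) stores year=2021, month=3, day=5,
// hour=12, minute=30, second=45 and microsecond=500000 in a single uint64,
// and the Year()/Month()/Day()/... accessors read the same fields back.
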
// FromDateChecked makes an internal time representation from the given date with field overflow check.
func FromDateChecked(year, month, day, hour, minute, second, microsecond int) (CoreTime, bool) {
if uint64(year) >= (1<<yearBitFieldWidth) ||
uint64(month) >= (1<<monthBitFieldWidth) ||
uint64(day) >= (1<<dayBitFieldWidth) ||
uint64(hour) >= (1<<hourBitFieldWidth) ||
uint64(minute) >= (1<<minuteBitFieldWidth) ||
uint64(second) >= (1<<secondBitFieldWidth) ||
uint64(microsecond) >= (1<<microsecondBitFieldWidth) {
return ZeroCoreTime, false
}
return FromDate(year, month, day, hour, minute, second, microsecond), true
}
// coreTime is an alias to CoreTime, embedded in Time.
type coreTime = CoreTime
// Time is the struct for handling datetime, timestamp and date.
type Time struct {
coreTime
}
// Clock returns the hour, minute, and second within the day specified by t.
func (t Time) Clock() (hour int, minute int, second int) {
return t.Hour(), t.Minute(), t.Second()
}
const (
// Core time bit fields.
yearBitFieldOffset, yearBitFieldWidth uint64 = 50, 14
monthBitFieldOffset, monthBitFieldWidth uint64 = 46, 4
dayBitFieldOffset, dayBitFieldWidth uint64 = 41, 5
hourBitFieldOffset, hourBitFieldWidth uint64 = 36, 5
minuteBitFieldOffset, minuteBitFieldWidth uint64 = 30, 6
secondBitFieldOffset, secondBitFieldWidth uint64 = 24, 6
microsecondBitFieldOffset, microsecondBitFieldWidth uint64 = 4, 20
// fspTt bit field.
// `fspTt` format:
// | fsp: 3 bits | type: 1 bit |
// When `fsp` is valid (in range [0, 6]):
	// 1. `type` bit 0 represents `DateTime`
	// 2. `type` bit 1 represents `Timestamp`
	//
	// Since `Date` does not require `fsp`, we could use `fspTt` == 0b1110 to represent it.
fspTtBitFieldOffset, fspTtBitFieldWidth uint64 = 0, 4
yearBitFieldMask uint64 = ((1 << yearBitFieldWidth) - 1) << yearBitFieldOffset
monthBitFieldMask uint64 = ((1 << monthBitFieldWidth) - 1) << monthBitFieldOffset
dayBitFieldMask uint64 = ((1 << dayBitFieldWidth) - 1) << dayBitFieldOffset
hourBitFieldMask uint64 = ((1 << hourBitFieldWidth) - 1) << hourBitFieldOffset
minuteBitFieldMask uint64 = ((1 << minuteBitFieldWidth) - 1) << minuteBitFieldOffset
secondBitFieldMask uint64 = ((1 << secondBitFieldWidth) - 1) << secondBitFieldOffset
microsecondBitFieldMask uint64 = ((1 << microsecondBitFieldWidth) - 1) << microsecondBitFieldOffset
fspTtBitFieldMask uint64 = ((1 << fspTtBitFieldWidth) - 1) << fspTtBitFieldOffset
fspTtForDate uint = 0b1110
fspBitFieldMask uint64 = 0b1110
coreTimeBitFieldMask = ^fspTtBitFieldMask
)
// NewTime constructs time from core time, type and fsp.
func NewTime(coreTime CoreTime, tp uint8, fsp int) Time {
t := ZeroTime
p := (*uint64)(&t.coreTime)
*p |= uint64(coreTime) & coreTimeBitFieldMask
if tp == mysql.TypeDate {
*p |= uint64(fspTtForDate)
return t
}
if fsp == UnspecifiedFsp {
fsp = DefaultFsp
}
*p |= uint64(fsp) << 1
if tp == mysql.TypeTimestamp {
*p |= 1
}
return t
}
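// Example (a sketch of the fspTt encoding described above):
//
//	t := NewTime(FromDate(2021, 3, 5, 0, 0, 0, 0), mysql.TypeDate, 3)
//	t.Type() // mysql.TypeDate
//	t.Fsp()  // 0, because date values never carry an fsp
//
// whereas mysql.TypeDatetime and mysql.TypeTimestamp keep the requested fsp.
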
func (t Time) getFspTt() uint {
return uint(uint64(t.coreTime) & fspTtBitFieldMask)
}
func (t *Time) setFspTt(fspTt uint) {
*(*uint64)(&t.coreTime) &= ^(fspTtBitFieldMask)
*(*uint64)(&t.coreTime) |= uint64(fspTt)
}
// Type returns type value.
func (t Time) Type() uint8 {
if t.getFspTt() == fspTtForDate {
return mysql.TypeDate
}
if uint64(t.coreTime)&1 == 1 {
return mysql.TypeTimestamp
}
return mysql.TypeDatetime
}
// Fsp returns fsp value.
func (t Time) Fsp() int {
fspTt := t.getFspTt()
if fspTt == fspTtForDate {
return 0
}
return int(fspTt >> 1)
}
// SetType updates the type in Time.
// Only DateTime/Date/Timestamp is valid.
func (t *Time) SetType(tp uint8) {
fspTt := t.getFspTt()
if fspTt == fspTtForDate && tp != mysql.TypeDate {
fspTt = 0
}
switch tp {
case mysql.TypeDate:
fspTt = fspTtForDate
case mysql.TypeTimestamp:
fspTt |= 1
case mysql.TypeDatetime:
fspTt &= ^(uint(1))
}
t.setFspTt(fspTt)
}
// SetFsp updates the fsp in Time.
func (t *Time) SetFsp(fsp int) {
if t.getFspTt() == fspTtForDate {
return
}
if fsp == UnspecifiedFsp {
fsp = DefaultFsp
}
*(*uint64)(&t.coreTime) &= ^(fspBitFieldMask)
*(*uint64)(&t.coreTime) |= uint64(fsp) << 1
}
// CoreTime returns core time.
func (t Time) CoreTime() CoreTime {
return CoreTime(uint64(t.coreTime) & coreTimeBitFieldMask)
}
// SetCoreTime updates core time.
func (t *Time) SetCoreTime(ct CoreTime) {
*(*uint64)(&t.coreTime) &= ^coreTimeBitFieldMask
*(*uint64)(&t.coreTime) |= uint64(ct) & coreTimeBitFieldMask
}
// CurrentTime returns current time with type tp.
func CurrentTime(tp uint8) Time {
return NewTime(FromGoTime(gotime.Now()), tp, 0)
}
// ConvertTimeZone converts the time value from one timezone to another.
// The input time should be a valid timestamp.
func (t *Time) ConvertTimeZone(from, to *gotime.Location) error {
if !t.IsZero() {
raw, err := t.GoTime(from)
if err != nil {
return errors.Trace(err)
}
converted := raw.In(to)
t.SetCoreTime(FromGoTime(converted))
}
return nil
}
func (t Time) String() string {
if t.Type() == mysql.TypeDate {
// We control the format, so no error would occur.
str, err := t.DateFormat("%Y-%m-%d")
terror.Log(errors.Trace(err))
return str
}
str, err := t.DateFormat("%Y-%m-%d %H:%i:%s")
terror.Log(errors.Trace(err))
fsp := t.Fsp()
if fsp > 0 {
tmp := fmt.Sprintf(".%06d", t.Microsecond())
str = str + tmp[:1+fsp]
}
return str
}
// IsZero returns a boolean indicating whether the time is equal to ZeroCoreTime.
func (t Time) IsZero() bool {
return compareTime(t.coreTime, ZeroCoreTime) == 0
}
// InvalidZero returns a boolean indicating whether the month or day is zero.
func (t Time) InvalidZero() bool {
return t.Month() == 0 || t.Day() == 0
}
const numberFormat = "%Y%m%d%H%i%s"
const dateFormat = "%Y%m%d"
// ToNumber returns a formatted number.
// e.g,
// 2012-12-12 -> 20121212
// 2012-12-12T10:10:10 -> 20121212101010
// 2012-12-12T10:10:10.123456 -> 20121212101010.123456
func (t Time) ToNumber() *MyDecimal {
dec := new(MyDecimal)
t.FillNumber(dec)
return dec
}
// FillNumber is the same as ToNumber,
// but reuses input decimal instead of allocating one.
func (t Time) FillNumber(dec *MyDecimal) {
if t.IsZero() {
dec.FromInt(0)
return
}
// Fix issue #1046
	// Prevents converting 2012-12-12 to 20121212000000
var tfStr string
if t.Type() == mysql.TypeDate {
tfStr = dateFormat
} else {
tfStr = numberFormat
}
s, err := t.DateFormat(tfStr)
if err != nil {
logutil.BgLogger().Error("[fatal] never happen because we've control the format!")
}
fsp := t.Fsp()
if fsp > 0 {
s1 := fmt.Sprintf("%s.%06d", s, t.Microsecond())
s = s1[:len(s)+fsp+1]
}
	// We skip checking the error here because the formatted time string can certainly be parsed.
err = dec.FromString([]byte(s))
terror.Log(errors.Trace(err))
}
// Convert converts t with type tp.
func (t Time) Convert(sc *stmtctx.StatementContext, tp uint8) (Time, error) {
t1 := t
if t.Type() == tp || t.IsZero() {
t1.SetType(tp)
return t1, nil
}
t1.SetType(tp)
err := t1.check(sc)
return t1, errors.Trace(err)
}
// ConvertToDuration converts mysql datetime, timestamp and date to mysql time type.
// e.g,
// 2012-12-12T10:10:10 -> 10:10:10
// 2012-12-12 -> 0
func (t Time) ConvertToDuration() (Duration, error) {
if t.IsZero() {
return ZeroDuration, nil
}
hour, minute, second := t.Clock()
frac := t.Microsecond() * 1000
d := gotime.Duration(hour*3600+minute*60+second)*gotime.Second + gotime.Duration(frac) //nolint:durationcheck
// TODO: check convert validation
return Duration{Duration: d, Fsp: t.Fsp()}, nil
}
// Compare returns an integer comparing the time instant t to o.
// If t is after o, returns 1, equal o, returns 0, before o, returns -1.
func (t Time) Compare(o Time) int {
return compareTime(t.coreTime, o.coreTime)
}
// CompareString is like Compare,
// but parses string to Time then compares.
func (t Time) CompareString(sc *stmtctx.StatementContext, str string) (int, error) {
// use MaxFsp to parse the string
o, err := ParseTime(sc, str, t.Type(), MaxFsp)
if err != nil {
return 0, errors.Trace(err)
}
return t.Compare(o), nil
}
// roundTime rounds the time value according to the digit count specified by fsp.
func roundTime(t gotime.Time, fsp int) gotime.Time {
d := gotime.Duration(math.Pow10(9 - fsp))
return t.Round(d)
}
// RoundFrac rounds the fraction part of a time-type value according to `fsp`.
func (t Time) RoundFrac(sc *stmtctx.StatementContext, fsp int) (Time, error) {
if t.Type() == mysql.TypeDate || t.IsZero() {
// date type has no fsp
return t, nil
}
fsp, err := CheckFsp(fsp)
if err != nil {
return t, errors.Trace(err)
}
if fsp == t.Fsp() {
// have same fsp
return t, nil
}
var nt CoreTime
if t1, err := t.GoTime(sc.TimeZone); err == nil {
t1 = roundTime(t1, fsp)
nt = FromGoTime(t1)
} else {
		// Take the hh:mm:ss part out to avoid handling month or day = 0.
hour, minute, second, microsecond := t.Hour(), t.Minute(), t.Second(), t.Microsecond()
t1 := gotime.Date(1, 1, 1, hour, minute, second, microsecond*1000, sc.TimeZone)
t2 := roundTime(t1, fsp)
hour, minute, second = t2.Clock()
microsecond = t2.Nanosecond() / 1000
		// TODO: when hh:mm:ss overflows one day after rounding, it should be added to the yy:mm:dd part,
		// but mm:dd may contain 0, which makes the code complex, so we ignore it here.
if t2.Day()-1 > 0 {
return t, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, t.String()))
}
var ok bool
nt, ok = FromDateChecked(t.Year(), t.Month(), t.Day(), hour, minute, second, microsecond)
if !ok {
return t, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, t.String()))
}
}
return NewTime(nt, t.Type(), fsp), nil
}
// GetFsp gets the fsp of a string.
func GetFsp(s string) int {
index := GetFracIndex(s)
var fsp int
if index < 0 {
fsp = 0
} else {
fsp = len(s) - index - 1
}
if fsp > 6 {
fsp = 6
}
return fsp
}
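// Illustrative examples of the rule above (not an exhaustive list):
//
//	GetFsp("2019-01-01 00:00:00")         // 0, no fractional part
//	GetFsp("2019-01-01 00:00:00.1234")    // 4, four fractional digits
//	GetFsp("2019-01-01 00:00:00.1234567") // 6, capped at the maximum of 6
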
// GetFracIndex finds the last '.' used to get fracStr; index = -1 means fracStr was not found.
// Note that for a format like '2019.01.01 00:00:00', the index should be -1.
func GetFracIndex(s string) (index int) {
index = -1
for i := len(s) - 1; i >= 0; i-- {
if unicode.IsPunct(rune(s[i])) {
if s[i] == '.' {
index = i
}
break
}
}
return index
}
// RoundFrac rounds fractional seconds precision with new fsp and returns a new one.
// We will use the “round half up” rule, e.g., >= 0.5 -> 1, < 0.5 -> 0,
// so 2011:11:11 10:10:10.888888 round 0 -> 2011:11:11 10:10:11
// and 2011:11:11 10:10:10.111111 round 0 -> 2011:11:11 10:10:10
func RoundFrac(t gotime.Time, fsp int) (gotime.Time, error) {
_, err := CheckFsp(fsp)
if err != nil {
return t, errors.Trace(err)
}
return t.Round(gotime.Duration(math.Pow10(9-fsp)) * gotime.Nanosecond), nil //nolint:durationcheck
}
// TruncateFrac truncates fractional seconds precision with new fsp and returns a new one.
// 2011:11:11 10:10:10.888888 round 0 -> 2011:11:11 10:10:10
// 2011:11:11 10:10:10.111111 round 0 -> 2011:11:11 10:10:10
func TruncateFrac(t gotime.Time, fsp int) (gotime.Time, error) {
if _, err := CheckFsp(fsp); err != nil {
return t, err
}
return t.Truncate(gotime.Duration(math.Pow10(9-fsp)) * gotime.Nanosecond), nil //nolint:durationcheck
}
// ToPackedUint encodes Time to a packed uint64 value.
//
// 1 bit 0
// 17 bits year*13+month (year 0-9999, month 0-12)
// 5 bits day (0-31)
// 5 bits hour (0-23)
// 6 bits minute (0-59)
// 6 bits second (0-59)
// 24 bits microseconds (0-999999)
//
// Total: 64 bits = 8 bytes
//
// 0YYYYYYY.YYYYYYYY.YYdddddh.hhhhmmmm.mmssssss.ffffffff.ffffffff.ffffffff
//
func (t Time) ToPackedUint() (uint64, error) {
tm := t
if t.IsZero() {
return 0, nil
}
year, month, day := tm.Year(), tm.Month(), tm.Day()
hour, minute, sec := tm.Hour(), tm.Minute(), tm.Second()
ymd := uint64(((year*13 + month) << 5) | day)
hms := uint64(hour<<12 | minute<<6 | sec)
micro := uint64(tm.Microsecond())
return ((ymd<<17 | hms) << 24) | micro, nil
}
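// A worked example of the packing above (a sketch, not an exhaustive spec):
// for "2010-10-10 00:00:00" the fields are
//
//	ymd    = ((2010*13 + 10) << 5) | 10
//	hms    = 0<<12 | 0<<6 | 0
//	packed = ((ymd<<17 | hms) << 24) | 0
//
// and FromPackedUint reverses exactly this encoding.
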
// FromPackedUint decodes Time from a packed uint64 value.
func (t *Time) FromPackedUint(packed uint64) error {
if packed == 0 {
t.SetCoreTime(ZeroCoreTime)
return nil
}
ymdhms := packed >> 24
ymd := ymdhms >> 17
day := int(ymd & (1<<5 - 1))
ym := ymd >> 5
month := int(ym % 13)
year := int(ym / 13)
hms := ymdhms & (1<<17 - 1)
second := int(hms & (1<<6 - 1))
minute := int((hms >> 6) & (1<<6 - 1))
hour := int(hms >> 12)
microsec := int(packed % (1 << 24))
t.SetCoreTime(FromDate(year, month, day, hour, minute, second, microsec))
return nil
}
// check whether t matches valid Time format.
// If allowZeroInDate is false, it returns ErrZeroDate when month or day is zero.
// FIXME: See https://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_no_zero_in_date
func (t Time) check(sc *stmtctx.StatementContext) error {
allowZeroInDate := false
allowInvalidDate := false
// We should avoid passing sc as nil here as far as possible.
if sc != nil {
allowZeroInDate = sc.IgnoreZeroInDate
allowInvalidDate = sc.AllowInvalidDate
}
var err error
switch t.Type() {
case mysql.TypeTimestamp:
err = checkTimestampType(sc, t.coreTime)
case mysql.TypeDatetime, mysql.TypeDate:
err = checkDatetimeType(t.coreTime, allowZeroInDate, allowInvalidDate)
}
return errors.Trace(err)
}
// Check if 't' is valid
func (t *Time) Check(sc *stmtctx.StatementContext) error {
return t.check(sc)
}
// Sub subtracts t1 from t, returns a duration value.
// Note that sub should not be done on different time types.
func (t *Time) Sub(sc *stmtctx.StatementContext, t1 *Time) Duration {
var duration gotime.Duration
if t.Type() == mysql.TypeTimestamp && t1.Type() == mysql.TypeTimestamp {
a, err := t.GoTime(sc.TimeZone)
terror.Log(errors.Trace(err))
b, err := t1.GoTime(sc.TimeZone)
terror.Log(errors.Trace(err))
duration = a.Sub(b)
} else {
seconds, microseconds, neg := calcTimeTimeDiff(t.coreTime, t1.coreTime, 1)
duration = gotime.Duration(seconds*1e9 + microseconds*1e3)
if neg {
duration = -duration
}
}
fsp := t.Fsp()
fsp1 := t1.Fsp()
if fsp < fsp1 {
fsp = fsp1
}
return Duration{
Duration: duration,
Fsp: fsp,
}
}
// Add adds d to t, returns the result time value.
func (t *Time) Add(sc *stmtctx.StatementContext, d Duration) (Time, error) {
seconds, microseconds, _ := calcTimeDurationDiff(t.coreTime, d)
days := seconds / secondsIn24Hour
year, month, day := getDateFromDaynr(uint(days))
var tm Time
tm.setYear(uint16(year))
tm.setMonth(uint8(month))
tm.setDay(uint8(day))
calcTimeFromSec(&tm.coreTime, seconds%secondsIn24Hour, microseconds)
if t.Type() == mysql.TypeDate {
tm.setHour(0)
tm.setMinute(0)
tm.setSecond(0)
tm.setMicrosecond(0)
}
fsp := t.Fsp()
if d.Fsp > fsp {
fsp = d.Fsp
}
ret := NewTime(tm.coreTime, t.Type(), fsp)
return ret, ret.Check(sc)
}
// TimestampDiff returns t2 - t1 where t1 and t2 are date or datetime expressions.
// The unit for the result (an integer) is given by the unit argument.
// The legal values for unit are "YEAR" "QUARTER" "MONTH" "DAY" "HOUR" "SECOND" and so on.
func TimestampDiff(unit string, t1 Time, t2 Time) int64 {
return timestampDiff(unit, t1.coreTime, t2.coreTime)
}
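// For instance (illustrative values only): with t1 = '2002-05-01 00:00:00' and
// t2 = '2002-05-01 12:05:55', TimestampDiff("SECOND", t1, t2) is 43555
// (12*3600 + 5*60 + 55) and TimestampDiff("DAY", t1, t2) is 0.
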
// ParseDateFormat parses a formatted date string and returns separated components.<|fim▁hole|> return nil
}
// Date format must start with number.
if !isDigit(format[0]) {
return nil
}
start := 0
// Initialize `seps` with capacity of 6. The input `format` is typically
// a date time of the form "2006-01-02 15:04:05", which has 6 numeric parts
// (the fractional second part is usually removed by `splitDateTime`).
// Setting `seps`'s capacity to 6 avoids reallocation in this common case.
seps := make([]string, 0, 6)
for i := 1; i < len(format)-1; i++ {
if isValidSeparator(format[i], len(seps)) {
prevParts := len(seps)
seps = append(seps, format[start:i])
start = i + 1
// consume further consecutive separators
for j := i + 1; j < len(format); j++ {
if !isValidSeparator(format[j], prevParts) {
break
}
start++
i++
}
continue
}
if !isDigit(format[i]) {
return nil
}
}
seps = append(seps, format[start:])
return seps
}
// helper for date part splitting, punctuation characters are valid separators anywhere,
// while space and 'T' are valid separators only between date and time.
func isValidSeparator(c byte, prevParts int) bool {
if isPunctuation(c) {
return true
}
// for https://github.com/pingcap/tidb/issues/32232
if prevParts == 2 && (c == 'T' || c == ' ' || c == '\t' || c == '\n' || c == '\v' || c == '\f' || c == '\r') {
return true
}
if prevParts > 4 && !isDigit(c) {
return true
}
return false
}
var validIdxCombinations = map[int]struct {
h int
m int
}{
100: {0, 0}, // 23:59:59Z
30: {2, 0}, // 23:59:59+08
50: {4, 2}, // 23:59:59+0800
63: {5, 2}, // 23:59:59+08:00
	// postgres supports the following additional syntax that deviates from ISO8601; although we don't support it
	// currently, it would be fairly easy to add in the current parsing framework
// 23:59:59Z+08
// 23:59:59Z+08:00
}
// GetTimezone parses the trailing timezone information of a given time string literal. If idx = -1 is returned, it
// means timezone information not found, otherwise it indicates the index of the starting index of the timezone
// information. If the timezone contains sign, hour part and/or minute part, it will be returned as is, otherwise an
// empty string will be returned.
//
// Supported syntax:
// MySQL compatible: ((?P<tz_sign>[-+])(?P<tz_hour>[0-9]{2}):(?P<tz_minute>[0-9]{2})){0,1}$, see
// https://dev.mysql.com/doc/refman/8.0/en/time-zone-support.html and https://dev.mysql.com/doc/refman/8.0/en/datetime.html
// the first link specified that timezone information should be in "[H]H:MM, prefixed with a + or -" while the
//   second link specified that for string literals, "hour values less than 10, a leading zero is required.".
// ISO-8601: Z|((((?P<tz_sign>[-+])(?P<tz_hour>[0-9]{2})(:(?P<tz_minute>[0-9]{2}){0,1}){0,1})|((?P<tz_minute>[0-9]{2}){0,1}){0,1}))$
// see https://www.cl.cam.ac.uk/~mgk25/iso-time.html
func GetTimezone(lit string) (idx int, tzSign, tzHour, tzSep, tzMinute string) {
idx, zidx, sidx, spidx := -1, -1, -1, -1
// idx is for the position of the starting of the timezone information
// zidx is for the z symbol
// sidx is for the sign
// spidx is for the separator
l := len(lit)
// the following loop finds the first index of Z, sign, and separator from backwards.
for i := l - 1; 0 <= i; i-- {
if lit[i] == 'Z' {
zidx = i
break
}
if sidx == -1 && (lit[i] == '-' || lit[i] == '+') {
sidx = i
}
if spidx == -1 && lit[i] == ':' {
spidx = i
}
}
// we could enumerate all valid combinations of these values and look it up in a table, see validIdxCombinations
// zidx can be -1 (23:59:59+08:00), l-1 (23:59:59Z)
// sidx can be -1, l-3, l-5, l-6
// spidx can be -1, l-3
k := 0
if l-zidx == 1 {
k += 100
}
if t := l - sidx; t == 3 || t == 5 || t == 6 {
k += t * 10
}
if l-spidx == 3 {
k += 3
}
if v, ok := validIdxCombinations[k]; ok {
hidx, midx := l-v.h, l-v.m
valid := func(v string) bool {
return '0' <= v[0] && v[0] <= '9' && '0' <= v[1] && v[1] <= '9'
}
if sidx != -1 {
tzSign = lit[sidx : sidx+1]
idx = sidx
}
if zidx != -1 {
idx = zidx
}
if (l - spidx) == 3 {
tzSep = lit[spidx : spidx+1]
}
if v.h != 0 {
tzHour = lit[hidx : hidx+2]
if !valid(tzHour) {
return -1, "", "", "", ""
}
}
if v.m != 0 {
tzMinute = lit[midx : midx+2]
if !valid(tzMinute) {
return -1, "", "", "", ""
}
}
return
}
return -1, "", "", "", ""
}
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-literals.html.
// splitDateTime splits the string literal into 3 parts, date & time, FSP(Fractional Seconds Precision) and time zone.
// For FSP, the only delimiter recognized between a date & time part and a fractional seconds part is the decimal point,
// therefore we look backwards through the literal to find the index of the decimal point.
// For time zone, the possible delimiter could be +/- (w.r.t. MySQL 8.0, see
// https://dev.mysql.com/doc/refman/8.0/en/datetime.html) and Z/z (w.r.t. ISO 8601, see section Time zone in
// https://www.cl.cam.ac.uk/~mgk25/iso-time.html). We also look backwards for the delimiter, see GetTimezone.
func splitDateTime(format string) (seps []string, fracStr string, hasTZ bool, tzSign, tzHour, tzSep, tzMinute string, truncated bool) {
tzIndex, tzSign, tzHour, tzSep, tzMinute := GetTimezone(format)
if tzIndex > 0 {
hasTZ = true
for ; tzIndex > 0 && isPunctuation(format[tzIndex-1]); tzIndex-- {
// In case of multiple separators, e.g. 2020-10--10
}
format = format[:tzIndex]
}
fracIndex := GetFracIndex(format)
if fracIndex > 0 {
// Only contain digits
fracEnd := fracIndex + 1
for fracEnd < len(format) && isDigit(format[fracEnd]) {
fracEnd++
}
truncated = (fracEnd != len(format))
fracStr = format[fracIndex+1 : fracEnd]
for ; fracIndex > 0 && isPunctuation(format[fracIndex-1]); fracIndex-- {
// In case of multiple separators, e.g. 2020-10..10
}
format = format[:fracIndex]
}
seps = ParseDateFormat(format)
return
}
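// Illustrative example (not from the original source, assuming GetFracIndex returns the index of the decimal
// point): splitDateTime("2017-01-02 11:22:33.456789+08:00") is expected to yield
// seps = ["2017", "01", "02", "11", "22", "33"], fracStr = "456789", hasTZ = true,
// tzSign = "+", tzHour = "08", tzSep = ":", tzMinute = "00", truncated = false.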
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-literals.html.
func parseDatetime(sc *stmtctx.StatementContext, str string, fsp int, isFloat bool) (Time, error) {
var (
year, month, day, hour, minute, second, deltaHour, deltaMinute int
fracStr string
tzSign, tzHour, tzSep, tzMinute string
hasTZ, hhmmss bool
err error
)
seps, fracStr, hasTZ, tzSign, tzHour, tzSep, tzMinute, truncatedOrIncorrect := splitDateTime(str)
if truncatedOrIncorrect {
sc.AppendWarning(ErrTruncatedWrongVal.GenWithStackByArgs("datetime", str))
}
/*
		if we have a timezone parsed, the following cases need to be considered; some of them are wrongly parsed as timezone, and we should consider absorbing them back into seps.
1. Z, then it must be time zone information, and we should not tamper with it
2. -HH, it might be from
1. no fracStr
1. YYYY-MM-DD
2. YYYY-MM-DD-HH
3. YYYY-MM-DD HH-MM
4. YYYY-MM-DD HH:MM-SS
5. YYYY-MM-DD HH:MM:SS-HH (correct, no need absorb)
2. with fracStr
1. YYYY.MM-DD
2. YYYY-MM.DD-HH
3. YYYY-MM-DD.HH-MM
4. YYYY-MM-DD HH.MM-SS
5. YYYY-MM-DD HH:MM.SS-HH (correct, no need absorb)
3. -HH:MM, similarly it might be from
1. no fracStr
1. YYYY-MM:DD
2. YYYY-MM-DD:HH
3. YYYY-MM-DD-HH:MM
4. YYYY-MM-DD HH-MM:SS
5. YYYY-MM-DD HH:MM-SS:HH (invalid)
6. YYYY-MM-DD HH:MM:SS-HH:MM (correct, no need absorb)
2. with fracStr
1. YYYY.MM-DD:HH
2. YYYY-MM.DD-HH:MM
3. YYYY-MM-DD.HH-MM:SS
4. YYYY-MM-DD HH.MM-SS:HH (invalid)
5. YYYY-MM-DD HH:MM.SS-HH:MM (correct, no need absorb)
4. -HHMM, there should only be one case, that is both the date and time part have existed, only then could we have fracStr or time zone
1. YYYY-MM-DD HH:MM:SS.FSP-HHMM (correct, no need absorb)
		to summarize, FSP and timezone are only valid if both date and time are present; otherwise we should consider absorbing
		FSP or timezone into seps. additionally, if we want to absorb the timezone, we either absorb all of it or none of it,
		meaning we never absorb tzHour while leaving tzMinute behind.
		an additional case to consider is when the time literal is presented as a float string (e.g. `YYYYMMDD.HHMMSS`); in
		this case, FSP should not be absorbed and only `+HH:MM` would be allowed (i.e. Z, +HHMM and +HH, which come from ISO 8601,
		should be rejected), because such a literal only conforms to MySQL's timezone parsing logic and is not valid ISO 8601.
		However, it is generally acceptable to allow a wider spectrum of timezone formats in string literals.
*/
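// Illustrative example of the absorption described above (not from the original source): for "2020-10-10-10",
// GetTimezone tentatively reports tzSign = "-" and tzHour = "10"; since seps is only ["2020", "10", "10"] and
// tzMinute is empty, the trailing "10" is absorbed back into seps, giving ["2020", "10", "10", "10"], which is
// then parsed as YYYY-MM-DD HH, i.e. 2020-10-10 10:00:00.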
// noAbsorb reports whether FSP or TZ can NOT be absorbed into seps
noAbsorb := func(seps []string) bool {
// if we have more than 5 parts (i.e. 6), the trailing part can't be absorbed;
// or if we only have 1 part, but its length is longer than 4, then it is at least YYMMD; in this case, FSP can
// not be absorbed and will be handled later, and the leading sign prevents TZ from being absorbed, because
// if the date part has no separators, we can't use -/+ as separators between date & time.
return len(seps) > 5 || (len(seps) == 1 && len(seps[0]) > 4)
}
if len(fracStr) != 0 && !isFloat {
if !noAbsorb(seps) {
seps = append(seps, fracStr)
fracStr = ""
}
}
if hasTZ && tzSign != "" {
// if tzSign is empty but hasTZ is true, the literal must end with 'Z' (such as 2010-10-10T10:10:10Z), so it is
// unambiguously a timezone and this branch can be safely skipped.
if !noAbsorb(seps) && !(tzMinute != "" && tzSep == "") {
// we can't absorb the timezone if there is no separator between tzHour and tzMinute
if len(tzHour) != 0 {
seps = append(seps, tzHour)
}
if len(tzMinute) != 0 {
seps = append(seps, tzMinute)
}
hasTZ = false
}
}
switch len(seps) {
case 0:
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
case 1:
l := len(seps[0])
// Values specified as numbers
if isFloat {
numOfTime, err := StrToInt(sc, seps[0], false)
if err != nil {
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
}
dateTime, err := ParseDatetimeFromNum(sc, numOfTime)
if err != nil {
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
}
year, month, day, hour, minute, second =
dateTime.Year(), dateTime.Month(), dateTime.Day(), dateTime.Hour(), dateTime.Minute(), dateTime.Second()
// case: 0.XXX or like "20170118.999"
if seps[0] == "0" || (l >= 9 && l <= 14) {
hhmmss = true
}
break
}
// Values specified as strings
switch l {
case 14: // No delimiter.
// YYYYMMDDHHMMSS
_, err = fmt.Sscanf(seps[0], "%4d%2d%2d%2d%2d%2d", &year, &month, &day, &hour, &minute, &second)
hhmmss = true
case 12: // YYMMDDHHMMSS
_, err = fmt.Sscanf(seps[0], "%2d%2d%2d%2d%2d%2d", &year, &month, &day, &hour, &minute, &second)
year = adjustYear(year)
hhmmss = true
case 11: // YYMMDDHHMMS
_, err = fmt.Sscanf(seps[0], "%2d%2d%2d%2d%2d%1d", &year, &month, &day, &hour, &minute, &second)
year = adjustYear(year)
hhmmss = true
case 10: // YYMMDDHHMM
_, err = fmt.Sscanf(seps[0], "%2d%2d%2d%2d%2d", &year, &month, &day, &hour, &minute)
year = adjustYear(year)
case 9: // YYMMDDHHM
_, err = fmt.Sscanf(seps[0], "%2d%2d%2d%2d%1d", &year, &month, &day, &hour, &minute)
year = adjustYear(year)
case 8: // YYYYMMDD
_, err = fmt.Sscanf(seps[0], "%4d%2d%2d", &year, &month, &day)
case 7: // YYMMDDH
_, err = fmt.Sscanf(seps[0], "%2d%2d%2d%1d", &year, &month, &day, &hour)
year = adjustYear(year)
case 6, 5:
// YYMMDD && YYMMD
_, err = fmt.Sscanf(seps[0], "%2d%2d%2d", &year, &month, &day)
year = adjustYear(year)
default:
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, str))
}
if l == 5 || l == 6 || l == 8 {
// YYMMDD or YYYYMMDD
// We must handle float => string => datetime, the difference is that fractional
// part of float type is discarded directly, while fractional part of string type
// is parsed to HH:MM:SS.
if isFloat {
// 20170118.123423 => 2017-01-18 00:00:00
} else {
// '20170118.123423' => 2017-01-18 12:34:23.234
switch len(fracStr) {
case 0:
case 1, 2:
_, err = fmt.Sscanf(fracStr, "%2d ", &hour)
case 3, 4:
_, err = fmt.Sscanf(fracStr, "%2d%2d ", &hour, &minute)
default:
_, err = fmt.Sscanf(fracStr, "%2d%2d%2d ", &hour, &minute, &second)
}
truncatedOrIncorrect = err != nil
}
}
if l == 9 || l == 10 {
if len(fracStr) == 0 {
second = 0
} else {
_, err = fmt.Sscanf(fracStr, "%2d ", &second)
}
truncatedOrIncorrect = err != nil
}
if truncatedOrIncorrect && sc != nil {
sc.AppendWarning(ErrTruncatedWrongVal.GenWithStackByArgs("datetime", str))
err = nil
}
case 2:
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
case 3:
// YYYY-MM-DD
err = scanTimeArgs(seps, &year, &month, &day)
case 4:
// YYYY-MM-DD HH
err = scanTimeArgs(seps, &year, &month, &day, &hour)
case 5:
// YYYY-MM-DD HH-MM
err = scanTimeArgs(seps, &year, &month, &day, &hour, &minute)
case 6:
// We don't have fractional seconds part.
// YYYY-MM-DD HH-MM-SS
err = scanTimeArgs(seps, &year, &month, &day, &hour, &minute, &second)
hhmmss = true
default:
// For a case like `2020-05-28 23:59:59 00:00:00`, len(seps) would be > 6; the redundant parts should be truncated.
seps = seps[:6]
// YYYY-MM-DD HH-MM-SS
if sc != nil {
sc.AppendWarning(ErrTruncatedWrongVal.GenWithStackByArgs("datetime", str))
}
err = scanTimeArgs(seps, &year, &month, &day, &hour, &minute, &second)
hhmmss = true
}
if err != nil {
return ZeroDatetime, errors.Trace(err)
}
// If str is separated by delimiters, the first part is the year, and if the year has only 1 or 2 digits,
// we should adjust it.
// TODO: adjust year is very complex, now we only consider the simplest way.
if len(seps[0]) <= 2 && !isFloat {
if year == 0 && month == 0 && day == 0 && hour == 0 && minute == 0 && second == 0 && fracStr == "" {
// Skip a special case "00-00-00".
} else {
year = adjustYear(year)
}
}
var microsecond int
var overflow bool
if hhmmss {
// If input string is "20170118.999", without hhmmss, fsp is meaningless.
// TODO: this case is not only meaningless, but erroneous, please confirm.
microsecond, overflow, err = ParseFrac(fracStr, fsp)
if err != nil {
return ZeroDatetime, errors.Trace(err)
}
}
tmp, ok := FromDateChecked(year, month, day, hour, minute, second, microsecond)
if !ok {
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
}
if overflow {
// Convert to Go time and add 1 second, to handle input like 2017-01-05 08:40:59.575601
t1, err := tmp.GoTime(sc.TimeZone)
if err != nil {
return ZeroDatetime, errors.Trace(err)
}
tmp = FromGoTime(t1.Add(gotime.Second))
}
if hasTZ {
// without hhmmss, timezone is also meaningless
if !hhmmss {
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
}
if len(tzHour) != 0 {
deltaHour = int((tzHour[0]-'0')*10 + (tzHour[1] - '0'))
}
if len(tzMinute) != 0 {
deltaMinute = int((tzMinute[0]-'0')*10 + (tzMinute[1] - '0'))
}
// allowed delta range is [-14:00, 14:00], and we will intentionally reject -00:00
if deltaHour > 14 || deltaMinute > 59 || (deltaHour == 14 && deltaMinute != 0) || (tzSign == "-" && deltaHour == 0 && deltaMinute == 0) {
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, str))
}
// by default, if the temporal string literal does not contain timezone information, it will be in the timezone
// specified by the time_zone system variable. However, if the timezone is specified in the string literal, we
// will use the specified timezone to interpret the string literal and convert it into the system timezone.
offset := deltaHour*60*60 + deltaMinute*60
if tzSign == "-" {
offset = -offset
}
loc := gotime.FixedZone(fmt.Sprintf("UTC%s%s:%s", tzSign, tzHour, tzMinute), offset)
t1, err := tmp.GoTime(loc)
if err != nil {
return ZeroDatetime, errors.Trace(err)
}
t1 = t1.In(sc.TimeZone)
tmp = FromGoTime(t1)
}
nt := NewTime(tmp, mysql.TypeDatetime, fsp)
return nt, nil
}
func scanTimeArgs(seps []string, args ...*int) error {
if len(seps) != len(args) {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, seps))
}
var err error
for i, s := range seps {
*args[i], err = strconv.Atoi(s)
if err != nil {
return errors.Trace(err)
}
}
return nil
}
// ParseYear parses a formatted string and returns a year number.
func ParseYear(str string) (int16, error) {
v, err := strconv.ParseInt(str, 10, 16)
if err != nil {
return 0, errors.Trace(err)
}
y := int16(v)
if len(str) == 4 {
// Nothing to do.
} else if len(str) == 2 || len(str) == 1 {
y = int16(adjustYear(int(y)))
} else {
return 0, errors.Trace(ErrInvalidYearFormat)
}
if y < MinYear || y > MaxYear {
return 0, errors.Trace(ErrInvalidYearFormat)
}
return y, nil
}
// adjustYear adjusts year according to y.
// See https://dev.mysql.com/doc/refman/5.7/en/two-digit-years.html
func adjustYear(y int) int {
if y >= 0 && y <= 69 {
y = 2000 + y
} else if y >= 70 && y <= 99 {
y = 1900 + y
}
return y
}
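// Illustrative values (not from the original source):
//
//	adjustYear(0)    // 2000
//	adjustYear(69)   // 2069
//	adjustYear(70)   // 1970
//	adjustYear(99)   // 1999
//	adjustYear(1999) // 1999 (already a full year, unchanged)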
// AdjustYear is used for adjusting year and checking its validation.
func AdjustYear(y int64, adjustZero bool) (int64, error) {
if y == 0 && !adjustZero {
return y, nil
}
y = int64(adjustYear(int(y)))
if y < 0 {
return 0, errors.Trace(ErrWarnDataOutOfRange)
}
if y < int64(MinYear) {
return int64(MinYear), errors.Trace(ErrWarnDataOutOfRange)
}
if y > int64(MaxYear) {
return int64(MaxYear), errors.Trace(ErrWarnDataOutOfRange)
}
return y, nil
}
// NewDuration constructs a Duration from the given time components.
func NewDuration(hour, minute, second, microsecond int, fsp int) Duration {
return Duration{
Duration: gotime.Duration(hour)*gotime.Hour + gotime.Duration(minute)*gotime.Minute + gotime.Duration(second)*gotime.Second + gotime.Duration(microsecond)*gotime.Microsecond, //nolint:durationcheck
Fsp: fsp,
}
}
// Duration is the type for MySQL TIME type.
type Duration struct {
gotime.Duration
// Fsp is short for Fractional Seconds Precision.
// See http://dev.mysql.com/doc/refman/5.7/en/fractional-seconds.html
Fsp int
}
// MaxMySQLDuration returns Duration with maximum mysql time.
func MaxMySQLDuration(fsp int) Duration {
return NewDuration(TimeMaxHour, TimeMaxMinute, TimeMaxSecond, 0, fsp)
}
// Neg negative d, returns a duration value.
func (d Duration) Neg() Duration {
return Duration{
Duration: -d.Duration,
Fsp: d.Fsp,
}
}
// Add adds v to d and returns the resulting duration value.
func (d Duration) Add(v Duration) (Duration, error) {
if v == (Duration{}) {
return d, nil
}
dsum, err := AddInt64(int64(d.Duration), int64(v.Duration))
if err != nil {
return Duration{}, errors.Trace(err)
}
if d.Fsp >= v.Fsp {
return Duration{Duration: gotime.Duration(dsum), Fsp: d.Fsp}, nil
}
return Duration{Duration: gotime.Duration(dsum), Fsp: v.Fsp}, nil
}
// Sub subtracts v from d and returns the resulting duration value.
func (d Duration) Sub(v Duration) (Duration, error) {
if v == (Duration{}) {
return d, nil
}
dsum, err := SubInt64(int64(d.Duration), int64(v.Duration))
if err != nil {
return Duration{}, errors.Trace(err)
}
if d.Fsp >= v.Fsp {
return Duration{Duration: gotime.Duration(dsum), Fsp: d.Fsp}, nil
}
return Duration{Duration: gotime.Duration(dsum), Fsp: v.Fsp}, nil
}
// DurationFormat returns a textual representation of the duration value formatted
// according to layout.
// See http://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_date-format
func (d Duration) DurationFormat(layout string) (string, error) {
var buf bytes.Buffer
inPatternMatch := false
for _, b := range layout {
if inPatternMatch {
if err := d.convertDateFormat(b, &buf); err != nil {
return "", errors.Trace(err)
}
inPatternMatch = false
continue
}
// It's not in pattern match now.
if b == '%' {
inPatternMatch = true
} else {
buf.WriteRune(b)
}
}
return buf.String(), nil
}
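// Illustrative usage sketch (not from the original source; assumes the package helpers behave as documented):
//
//	d := NewDuration(11, 30, 45, 0, 0)
//	s, _ := d.DurationFormat("%H:%i:%s") // "11:30:45"
//	s, _ = d.DurationFormat("%r")        // "11:30:45 AM"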
func (d Duration) convertDateFormat(b rune, buf *bytes.Buffer) error {
switch b {
case 'H':
buf.WriteString(FormatIntWidthN(d.Hour(), 2))
case 'k':
buf.WriteString(strconv.FormatInt(int64(d.Hour()), 10))
case 'h', 'I':
t := d.Hour()
if t%12 == 0 {
buf.WriteString("12")
} else {
buf.WriteString(FormatIntWidthN(t%12, 2))
}
case 'l':
t := d.Hour()
if t%12 == 0 {
buf.WriteString("12")
} else {
buf.WriteString(strconv.FormatInt(int64(t%12), 10))
}
case 'i':
buf.WriteString(FormatIntWidthN(d.Minute(), 2))
case 'p':
hour := d.Hour()
if hour/12%2 == 0 {
buf.WriteString("AM")
} else {
buf.WriteString("PM")
}
case 'r':
h := d.Hour()
h %= 24
switch {
case h == 0:
fmt.Fprintf(buf, "%02d:%02d:%02d AM", 12, d.Minute(), d.Second())
case h == 12:
fmt.Fprintf(buf, "%02d:%02d:%02d PM", 12, d.Minute(), d.Second())
case h < 12:
fmt.Fprintf(buf, "%02d:%02d:%02d AM", h, d.Minute(), d.Second())
default:
fmt.Fprintf(buf, "%02d:%02d:%02d PM", h-12, d.Minute(), d.Second())
}
case 'T':
fmt.Fprintf(buf, "%02d:%02d:%02d", d.Hour(), d.Minute(), d.Second())
case 'S', 's':
buf.WriteString(FormatIntWidthN(d.Second(), 2))
case 'f':
fmt.Fprintf(buf, "%06d", d.MicroSecond())
default:
buf.WriteRune(b)
}
return nil
}
// String returns the time formatted using default TimeFormat and fsp.
func (d Duration) String() string {
var buf bytes.Buffer
sign, hours, minutes, seconds, fraction := splitDuration(d.Duration)
if sign < 0 {
buf.WriteByte('-')
}
fmt.Fprintf(&buf, "%02d:%02d:%02d", hours, minutes, seconds)
if d.Fsp > 0 {
buf.WriteString(".")
buf.WriteString(d.formatFrac(fraction))
}
p := buf.String()
return p
}
func (d Duration) formatFrac(frac int) string {
s := fmt.Sprintf("%06d", frac)
return s[0:d.Fsp]
}
// ToNumber changes duration to number format.
// e.g,
// 10:10:10 -> 101010
func (d Duration) ToNumber() *MyDecimal {
sign, hours, minutes, seconds, fraction := splitDuration(d.Duration)
var (
s string
signStr string
)
if sign < 0 {
signStr = "-"
}
if d.Fsp == 0 {
s = fmt.Sprintf("%s%02d%02d%02d", signStr, hours, minutes, seconds)
} else {
s = fmt.Sprintf("%s%02d%02d%02d.%s", signStr, hours, minutes, seconds, d.formatFrac(fraction))
}
// We skip checking error here because time formatted string can be parsed certainly.
dec := new(MyDecimal)
err := dec.FromString([]byte(s))
terror.Log(errors.Trace(err))
return dec
}
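// Illustrative example (not from the original source): a Duration of -11:30:45.5 with Fsp = 1 is rendered as
// "-113045.5" and parsed into the returned MyDecimal; with Fsp = 0 the fractional part is dropped and the
// result is -113045.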
// ConvertToTime converts duration to Time.
// Tp is TypeDatetime, TypeTimestamp and TypeDate.
func (d Duration) ConvertToTime(sc *stmtctx.StatementContext, tp uint8) (Time, error) {
year, month, day := gotime.Now().In(sc.TimeZone).Date()
datePart := FromDate(year, int(month), day, 0, 0, 0, 0)
mixDateAndDuration(&datePart, d)
t := NewTime(datePart, mysql.TypeDatetime, d.Fsp)
return t.Convert(sc, tp)
}
// RoundFrac rounds fractional seconds precision with new fsp and returns a new one.
// We will use the “round half up” rule, e.g, >= 0.5 -> 1, < 0.5 -> 0,
// so 10:10:10.999999 round 0 -> 10:10:11
// and 10:10:10.000000 round 0 -> 10:10:10
func (d Duration) RoundFrac(fsp int, loc *gotime.Location) (Duration, error) {
tz := loc
if tz == nil {
logutil.BgLogger().Warn("use gotime.local because sc.timezone is nil")
tz = gotime.Local
}
fsp, err := CheckFsp(fsp)
if err != nil {
return d, errors.Trace(err)
}
if fsp == d.Fsp {
return d, nil
}
n := gotime.Date(0, 0, 0, 0, 0, 0, 0, tz)
nd := n.Add(d.Duration).Round(gotime.Duration(math.Pow10(9-fsp)) * gotime.Nanosecond).Sub(n) //nolint:durationcheck
return Duration{Duration: nd, Fsp: fsp}, nil
}
// Compare returns an integer comparing the Duration instant t to o.
// If d is after o, returns 1, equal o, returns 0, before o, returns -1.
func (d Duration) Compare(o Duration) int {
if d.Duration > o.Duration {
return 1
} else if d.Duration == o.Duration {
return 0
} else {
return -1
}
}
// CompareString is like Compare,
// but parses str to Duration then compares.
func (d Duration) CompareString(sc *stmtctx.StatementContext, str string) (int, error) {
// use MaxFsp to parse the string
o, err := ParseDuration(sc, str, MaxFsp)
if err != nil {
return 0, err
}
return d.Compare(o), nil
}
// Hour returns current hour.
// e.g, hour("11:11:11") -> 11
func (d Duration) Hour() int {
_, hour, _, _, _ := splitDuration(d.Duration)
return hour
}
// Minute returns current minute.
// e.g, hour("11:11:11") -> 11
func (d Duration) Minute() int {
_, _, minute, _, _ := splitDuration(d.Duration)
return minute
}
// Second returns current second.
// e.g, hour("11:11:11") -> 11
func (d Duration) Second() int {
_, _, _, second, _ := splitDuration(d.Duration)
return second
}
// MicroSecond returns current microsecond.
// e.g, hour("11:11:11.11") -> 110000
func (d Duration) MicroSecond() int {
_, _, _, _, frac := splitDuration(d.Duration)
return frac
}
func isNegativeDuration(str string) (bool, string) {
rest, err := parser.Char(str, '-')
if err != nil {
return false, str
}
return true, rest
}
func matchColon(str string) (string, error) {
rest := parser.Space0(str)
rest, err := parser.Char(rest, ':')
if err != nil {
return str, err
}
rest = parser.Space0(rest)
return rest, nil
}
func matchDayHHMMSS(str string) (int, [3]int, string, error) {
day, rest, err := parser.Number(str)
if err != nil {
return 0, [3]int{}, str, err
}
rest, err = parser.Space(rest, 1)
if err != nil {
return 0, [3]int{}, str, err
}
hhmmss, rest, err := matchHHMMSSDelimited(rest, false)
if err != nil {
return 0, [3]int{}, str, err
}
return day, hhmmss, rest, nil
}
func matchHHMMSSDelimited(str string, requireColon bool) ([3]int, string, error) {
hhmmss := [3]int{}
hour, rest, err := parser.Number(str)
if err != nil {
return [3]int{}, str, err
}
hhmmss[0] = hour
for i := 1; i < 3; i++ {
if remain, err := matchColon(rest); err == nil {
num, remain, err := parser.Number(remain)
if err != nil {
return [3]int{}, str, err
}
hhmmss[i] = num
rest = remain
} else {
if i == 1 && requireColon {
return [3]int{}, str, err
}
break
}
}
return hhmmss, rest, nil
}
func matchHHMMSSCompact(str string) ([3]int, string, error) {
num, rest, err := parser.Number(str)
if err != nil {
return [3]int{}, str, err
}
hhmmss := [3]int{num / 10000, (num / 100) % 100, num % 100}
return hhmmss, rest, nil
}
func hhmmssAddOverflow(hms []int, overflow bool) {
mod := []int{-1, 60, 60}
for i := 2; i >= 0 && overflow; i-- {
hms[i]++
if hms[i] == mod[i] {
overflow = true
hms[i] = 0
} else {
overflow = false
}
}
}
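// Illustrative trace (not from the original source): with hms = [10, 59, 59] and overflow = true, the carry
// ripples upwards: seconds 59 -> 0, minutes 59 -> 0, hours 10 -> 11, leaving [11, 0, 0].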
func checkHHMMSS(hms [3]int) bool {
m, s := hms[1], hms[2]
return m < 60 && s < 60
}
// matchFrac returns overflow, fraction, rest, error
func matchFrac(str string, fsp int) (bool, int, string, error) {
rest, err := parser.Char(str, '.')
if err != nil {
return false, 0, str, nil
}
digits, rest, err := parser.Digit(rest, 0)
if err != nil {
return false, 0, str, err
}
frac, overflow, err := ParseFrac(digits, fsp)
if err != nil {
return false, 0, str, err
}
return overflow, frac, rest, nil
}
func matchDuration(str string, fsp int) (Duration, error) {
fsp, err := CheckFsp(fsp)
if err != nil {
return ZeroDuration, errors.Trace(err)
}
if len(str) == 0 {
return ZeroDuration, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
negative, rest := isNegativeDuration(str)
rest = parser.Space0(rest)
hhmmss := [3]int{}
if day, hms, remain, err := matchDayHHMMSS(rest); err == nil {
hms[0] += 24 * day
rest, hhmmss = remain, hms
} else if hms, remain, err := matchHHMMSSDelimited(rest, true); err == nil {
rest, hhmmss = remain, hms
} else if hms, remain, err := matchHHMMSSCompact(rest); err == nil {
rest, hhmmss = remain, hms
} else {
return ZeroDuration, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
rest = parser.Space0(rest)
overflow, frac, rest, err := matchFrac(rest, fsp)
if err != nil || len(rest) > 0 {
return ZeroDuration, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
if overflow {
hhmmssAddOverflow(hhmmss[:], overflow)
frac = 0
}
if !checkHHMMSS(hhmmss) {
return ZeroDuration, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
if hhmmss[0] > TimeMaxHour {
var t gotime.Duration
if negative {
t = MinTime
} else {
t = MaxTime
}
return Duration{t, fsp}, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
d := gotime.Duration(hhmmss[0]*3600+hhmmss[1]*60+hhmmss[2])*gotime.Second + gotime.Duration(frac)*gotime.Microsecond //nolint:durationcheck
if negative {
d = -d
}
d, err = TruncateOverflowMySQLTime(d)
return Duration{d, fsp}, errors.Trace(err)
}
// canFallbackToDateTime returns true when
// 1. the string fails to be parsed by `matchDuration`, and
// 2. the string starts with a series of digits whose length matches the full format of a DateTime literal (12 or 14),
// or the string starts with a date literal.
func canFallbackToDateTime(str string) bool {
digits, rest, err := parser.Digit(str, 1)
if err != nil {
return false
}
if len(digits) == 12 || len(digits) == 14 {
return true
}
rest, err = parser.AnyPunct(rest)
if err != nil {
return false
}
_, rest, err = parser.Digit(rest, 1)
if err != nil {
return false
}
rest, err = parser.AnyPunct(rest)
if err != nil {
return false
}
_, rest, err = parser.Digit(rest, 1)
if err != nil {
return false
}
return len(rest) > 0 && (rest[0] == ' ' || rest[0] == 'T')
}
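// Illustrative examples (not from the original source): "20121231113045" (14 digits) and "121231113045"
// (12 digits) can fall back to datetime parsing, as can "2012-12-31 11:30:45" because it starts with a date
// literal followed by a space; "11:30:45" cannot, because nothing follows its final group of digits.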
// ParseDuration parses the time from a formatted string with a fractional seconds part,
// returns the duration type Time value.
// See http://dev.mysql.com/doc/refman/5.7/en/fractional-seconds.html
func ParseDuration(sc *stmtctx.StatementContext, str string, fsp int) (Duration, error) {
rest := strings.TrimSpace(str)
d, err := matchDuration(rest, fsp)
if err == nil {
return d, nil
}
if !canFallbackToDateTime(rest) {
return d, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
datetime, err := ParseDatetime(sc, rest)
if err != nil {
return ZeroDuration, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
d, err = datetime.ConvertToDuration()
if err != nil {
return ZeroDuration, ErrTruncatedWrongVal.GenWithStackByArgs("time", str)
}
return d.RoundFrac(fsp, sc.TimeZone)
}
// TruncateOverflowMySQLTime truncates d when it overflows, and returns ErrTruncatedWrongVal.
func TruncateOverflowMySQLTime(d gotime.Duration) (gotime.Duration, error) {
if d > MaxTime {
return MaxTime, ErrTruncatedWrongVal.GenWithStackByArgs("time", d)
} else if d < MinTime {
return MinTime, ErrTruncatedWrongVal.GenWithStackByArgs("time", d)
}
return d, nil
}
func splitDuration(t gotime.Duration) (int, int, int, int, int) {
sign := 1
if t < 0 {
t = -t
sign = -1
}
hours := t / gotime.Hour
t -= hours * gotime.Hour //nolint:durationcheck
minutes := t / gotime.Minute
t -= minutes * gotime.Minute //nolint:durationcheck
seconds := t / gotime.Second
t -= seconds * gotime.Second //nolint:durationcheck
fraction := t / gotime.Microsecond
return sign, int(hours), int(minutes), int(seconds), int(fraction)
}
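// Illustrative example (not from the original source): for a duration of -(11h30m45s + 500ms), splitDuration
// returns sign = -1, hours = 11, minutes = 30, seconds = 45, fraction = 500000 (microseconds).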
var maxDaysInMonth = []int{31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}
func getTime(sc *stmtctx.StatementContext, num, originNum int64, tp byte) (Time, error) {
s1 := num / 1000000
s2 := num - s1*1000000
year := int(s1 / 10000)
s1 %= 10000
month := int(s1 / 100)
day := int(s1 % 100)
hour := int(s2 / 10000)
s2 %= 10000
minute := int(s2 / 100)
second := int(s2 % 100)
ct, ok := FromDateChecked(year, month, day, hour, minute, second, 0)
if !ok {
numStr := strconv.FormatInt(originNum, 10)
return ZeroDatetime, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, numStr))
}
t := NewTime(ct, tp, DefaultFsp)
err := t.check(sc)
return t, errors.Trace(err)
}
// parseDateTimeFromNum parses date time from num.
// See number_to_datetime function.
// https://github.com/mysql/mysql-server/blob/5.7/sql-common/my_time.c
func parseDateTimeFromNum(sc *stmtctx.StatementContext, num int64) (Time, error) {
t := ZeroDate
// Check zero.
if num == 0 {
return t, nil
}
originNum := num
// Check datetime type.
if num >= 10000101000000 {
t.SetType(mysql.TypeDatetime)
return getTime(sc, num, originNum, t.Type())
}
// Check MMDD.
if num < 101 {
return t, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.FormatInt(num, 10)))
}
// Adjust year
// YYMMDD, year: 2000-2069
if num <= (70-1)*10000+1231 {
num = (num + 20000000) * 1000000
return getTime(sc, num, originNum, t.Type())
}
// Check YYMMDD.
if num < 70*10000+101 {
return t, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.FormatInt(num, 10)))
}
// Adjust year
// YYMMDD, year: 1970-1999
if num <= 991231 {
num = (num + 19000000) * 1000000
return getTime(sc, num, originNum, t.Type())
}
// Adjust hour/min/second.
if num <= 99991231 {
num = num * 1000000
return getTime(sc, num, originNum, t.Type())
}
// Check MMDDHHMMSS.
if num < 101000000 {
return t, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.FormatInt(num, 10)))
}
// Set TypeDatetime type.
t.SetType(mysql.TypeDatetime)
// Adjust year
// YYMMDDHHMMSS, 2000-2069
if num <= 69*10000000000+1231235959 {
num = num + 20000000000000
return getTime(sc, num, originNum, t.Type())
}
// Check YYYYMMDDHHMMSS.
if num < 70*10000000000+101000000 {
return t, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.FormatInt(num, 10)))
}
// Adjust year
// YYMMDDHHMMSS, 1970-1999
if num <= 991231235959 {
num = num + 19000000000000
return getTime(sc, num, originNum, t.Type())
}
return getTime(sc, num, originNum, t.Type())
}
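// Illustrative examples (not from the original source):
//
//	parseDateTimeFromNum(sc, 691231)         // 2069-12-31 (YYMMDD, years 00-69 map to 20xx)
//	parseDateTimeFromNum(sc, 700101)         // 1970-01-01 (YYMMDD, years 70-99 map to 19xx)
//	parseDateTimeFromNum(sc, 20101010101010) // 2010-10-10 10:10:10 (full YYYYMMDDHHMMSS)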
// ParseTime parses a formatted string with type tp and specific fsp.
// Type is TypeDatetime, TypeTimestamp and TypeDate.
// Fsp is in range [0, 6].
// MySQL supports many valid datetime format, but still has some limitation.
// If delimiter exists, the date part and time part is separated by a space or T,
// other punctuation character can be used as the delimiter between date parts or time parts.
// If no delimiter, the format must be YYYYMMDDHHMMSS or YYMMDDHHMMSS
// If we have fractional seconds part, we must use decimal points as the delimiter.
// The valid datetime range is from '1000-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999'.
// The valid timestamp range is from '1970-01-01 00:00:01.000000' to '2038-01-19 03:14:07.999999'.
// The valid date range is from '1000-01-01' to '9999-12-31'
func ParseTime(sc *stmtctx.StatementContext, str string, tp byte, fsp int) (Time, error) {
return parseTime(sc, str, tp, fsp, false)
}
// ParseTimeFromFloatString is similar to ParseTime, except that it's used to parse a float converted string.
func ParseTimeFromFloatString(sc *stmtctx.StatementContext, str string, tp byte, fsp int) (Time, error) {
// MySQL compatibility: 0.0 should not be converted to null, see #11203
if len(str) >= 3 && str[:3] == "0.0" {
return NewTime(ZeroCoreTime, tp, DefaultFsp), nil
}
return parseTime(sc, str, tp, fsp, true)
}
func parseTime(sc *stmtctx.StatementContext, str string, tp byte, fsp int, isFloat bool) (Time, error) {
fsp, err := CheckFsp(fsp)
if err != nil {
return NewTime(ZeroCoreTime, tp, DefaultFsp), errors.Trace(err)
}
t, err := parseDatetime(sc, str, fsp, isFloat)
if err != nil {
return NewTime(ZeroCoreTime, tp, DefaultFsp), errors.Trace(err)
}
t.SetType(tp)
if err = t.check(sc); err != nil {
return NewTime(ZeroCoreTime, tp, DefaultFsp), errors.Trace(err)
}
return t, nil
}
// ParseDatetime is a helper function wrapping ParseTime with datetime type and default fsp.
func ParseDatetime(sc *stmtctx.StatementContext, str string) (Time, error) {
return ParseTime(sc, str, mysql.TypeDatetime, GetFsp(str))
}
// ParseTimestamp is a helper function wrapping ParseTime with timestamp type and default fsp.
func ParseTimestamp(sc *stmtctx.StatementContext, str string) (Time, error) {
return ParseTime(sc, str, mysql.TypeTimestamp, GetFsp(str))
}
// ParseDate is a helper function wrapping ParseTime with date type.
func ParseDate(sc *stmtctx.StatementContext, str string) (Time, error) {
// date has no fractional seconds precision
return ParseTime(sc, str, mysql.TypeDate, MinFsp)
}
// ParseTimeFromYear parses a `YYYY` formatted year into the corresponding Datetime type.
// Note: the invoker must promise the `year` is in the range [MinYear, MaxYear].
func ParseTimeFromYear(sc *stmtctx.StatementContext, year int64) (Time, error) {
if year == 0 {
return NewTime(ZeroCoreTime, mysql.TypeDate, DefaultFsp), nil
}
dt := FromDate(int(year), 0, 0, 0, 0, 0, 0)
return NewTime(dt, mysql.TypeDatetime, DefaultFsp), nil
}
// ParseTimeFromNum parses a formatted int64,
// returns the value which type is tp.
func ParseTimeFromNum(sc *stmtctx.StatementContext, num int64, tp byte, fsp int) (Time, error) {
// MySQL compatibility: 0 should not be converted to null, see #11203
if num == 0 {
zt := NewTime(ZeroCoreTime, tp, DefaultFsp)
if sc != nil && sc.InCreateOrAlterStmt && !sc.TruncateAsWarning && sc.NoZeroDate {
switch tp {
case mysql.TypeTimestamp:
return zt, ErrTruncatedWrongVal.GenWithStackByArgs(TimestampStr, "0")
case mysql.TypeDate:
return zt, ErrTruncatedWrongVal.GenWithStackByArgs(DateStr, "0")
case mysql.TypeDatetime:
return zt, ErrTruncatedWrongVal.GenWithStackByArgs(DateTimeStr, "0")
}
}
return zt, nil
}
fsp, err := CheckFsp(fsp)
if err != nil {
return NewTime(ZeroCoreTime, tp, DefaultFsp), errors.Trace(err)
}
t, err := parseDateTimeFromNum(sc, num)
if err != nil {
return NewTime(ZeroCoreTime, tp, DefaultFsp), errors.Trace(err)
}
t.SetType(tp)
t.SetFsp(fsp)
if err := t.check(sc); err != nil {
return NewTime(ZeroCoreTime, tp, DefaultFsp), errors.Trace(err)
}
return t, nil
}
// ParseDatetimeFromNum is a helper function wrapping ParseTimeFromNum with datetime type and default fsp.
func ParseDatetimeFromNum(sc *stmtctx.StatementContext, num int64) (Time, error) {
return ParseTimeFromNum(sc, num, mysql.TypeDatetime, DefaultFsp)
}
// ParseTimestampFromNum is a helper function wrapping ParseTimeFromNum with timestamp type and default fsp.
func ParseTimestampFromNum(sc *stmtctx.StatementContext, num int64) (Time, error) {
return ParseTimeFromNum(sc, num, mysql.TypeTimestamp, DefaultFsp)
}
// ParseDateFromNum is a helper function wrapping ParseTimeFromNum with date type.
func ParseDateFromNum(sc *stmtctx.StatementContext, num int64) (Time, error) {
// date has no fractional seconds precision
return ParseTimeFromNum(sc, num, mysql.TypeDate, MinFsp)
}
// TimeFromDays converts a day number to a date.
func TimeFromDays(num int64) Time {
if num < 0 {
return NewTime(FromDate(0, 0, 0, 0, 0, 0, 0), mysql.TypeDate, 0)
}
year, month, day := getDateFromDaynr(uint(num))
ct, ok := FromDateChecked(int(year), int(month), int(day), 0, 0, 0, 0)
if !ok {
return NewTime(FromDate(0, 0, 0, 0, 0, 0, 0), mysql.TypeDate, 0)
}
return NewTime(ct, mysql.TypeDate, 0)
}
func checkDateType(t CoreTime, allowZeroInDate, allowInvalidDate bool) error {
year, month, day := t.Year(), t.Month(), t.Day()
if year == 0 && month == 0 && day == 0 {
return nil
}
if !allowZeroInDate && (month == 0 || day == 0) {
return ErrWrongValue.GenWithStackByArgs(DateTimeStr, fmt.Sprintf("%04d-%02d-%02d", year, month, day))
}
if err := checkDateRange(t); err != nil {
return errors.Trace(err)
}
if err := checkMonthDay(year, month, day, allowInvalidDate); err != nil {
return errors.Trace(err)
}
return nil
}
func checkDateRange(t CoreTime) error {
// Oddly enough, the MySQL documentation says the date range should be larger than '1000-01-01',
// but we can actually insert '0001-01-01'.
if t.Year() < 0 || t.Month() < 0 || t.Day() < 0 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, t))
}
if compareTime(t, MaxDatetime) > 0 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, t))
}
return nil
}
func checkMonthDay(year, month, day int, allowInvalidDate bool) error {
if month < 0 || month > 12 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, fmt.Sprintf("%d-%d-%d", year, month, day)))
}
maxDay := 31
if !allowInvalidDate {
if month > 0 {
maxDay = maxDaysInMonth[month-1]
}
if month == 2 && !isLeapYear(uint16(year)) {
maxDay = 28
}
}
if day < 0 || day > maxDay {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, fmt.Sprintf("%d-%d-%d", year, month, day)))
}
return nil
}
func checkTimestampType(sc *stmtctx.StatementContext, t CoreTime) error {
if compareTime(t, ZeroCoreTime) == 0 {
return nil
}
if sc == nil {
return errors.New("statementContext is required during checkTimestampType")
}
var checkTime CoreTime
if sc.TimeZone != BoundTimezone {
convertTime := NewTime(t, mysql.TypeTimestamp, DefaultFsp)
err := convertTime.ConvertTimeZone(sc.TimeZone, BoundTimezone)
if err != nil {
return err
}
checkTime = convertTime.coreTime
} else {
checkTime = t
}
if compareTime(checkTime, MaxTimestamp.coreTime) > 0 || compareTime(checkTime, MinTimestamp.coreTime) < 0 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, t))
}
if _, err := t.GoTime(sc.TimeZone); err != nil {
return errors.Trace(err)
}
return nil
}
func checkDatetimeType(t CoreTime, allowZeroInDate, allowInvalidDate bool) error {
if err := checkDateType(t, allowZeroInDate, allowInvalidDate); err != nil {
return errors.Trace(err)
}
hour, minute, second := t.Hour(), t.Minute(), t.Second()
if hour < 0 || hour >= 24 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.Itoa(hour)))
}
if minute < 0 || minute >= 60 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.Itoa(minute)))
}
if second < 0 || second >= 60 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.Itoa(second)))
}
return nil
}
// ExtractDatetimeNum extracts time value number from datetime unit and format.
func ExtractDatetimeNum(t *Time, unit string) (int64, error) {
// TODO: Consider time_zone variable.
switch strings.ToUpper(unit) {
case "DAY":
return int64(t.Day()), nil
case "WEEK":
week := t.Week(0)
return int64(week), nil
case "MONTH":
return int64(t.Month()), nil
case "QUARTER":
m := int64(t.Month())
// 1 - 3 -> 1
// 4 - 6 -> 2
// 7 - 9 -> 3
// 10 - 12 -> 4
return (m + 2) / 3, nil
case "YEAR":
return int64(t.Year()), nil
case "DAY_MICROSECOND":
h, m, s := t.Clock()
d := t.Day()
return int64(d*1000000+h*10000+m*100+s)*1000000 + int64(t.Microsecond()), nil
case "DAY_SECOND":
h, m, s := t.Clock()
d := t.Day()
return int64(d)*1000000 + int64(h)*10000 + int64(m)*100 + int64(s), nil
case "DAY_MINUTE":
h, m, _ := t.Clock()
d := t.Day()
return int64(d)*10000 + int64(h)*100 + int64(m), nil
case "DAY_HOUR":
h, _, _ := t.Clock()
d := t.Day()
return int64(d)*100 + int64(h), nil
case "YEAR_MONTH":
y, m := t.Year(), t.Month()
return int64(y)*100 + int64(m), nil
default:
return 0, errors.Errorf("invalid unit %s", unit)
}
}
// ExtractDurationNum extracts duration value number from duration unit and format.
func ExtractDurationNum(d *Duration, unit string) (res int64, err error) {
switch strings.ToUpper(unit) {
case "MICROSECOND":
res = int64(d.MicroSecond())
case "SECOND":
res = int64(d.Second())
case "MINUTE":
res = int64(d.Minute())
case "HOUR":
res = int64(d.Hour())
case "SECOND_MICROSECOND":
res = int64(d.Second())*1000000 + int64(d.MicroSecond())
case "MINUTE_MICROSECOND":
res = int64(d.Minute())*100000000 + int64(d.Second())*1000000 + int64(d.MicroSecond())
case "MINUTE_SECOND":
res = int64(d.Minute()*100 + d.Second())
case "HOUR_MICROSECOND":
res = int64(d.Hour())*10000000000 + int64(d.Minute())*100000000 + int64(d.Second())*1000000 + int64(d.MicroSecond())
case "HOUR_SECOND":
res = int64(d.Hour())*10000 + int64(d.Minute())*100 + int64(d.Second())
case "HOUR_MINUTE":
res = int64(d.Hour())*100 + int64(d.Minute())
case "DAY_MICROSECOND":
res = int64(d.Hour()*10000+d.Minute()*100+d.Second())*1000000 + int64(d.MicroSecond())
case "DAY_SECOND":
res = int64(d.Hour())*10000 + int64(d.Minute())*100 + int64(d.Second())
case "DAY_MINUTE":
res = int64(d.Hour())*100 + int64(d.Minute())
case "DAY_HOUR":
res = int64(d.Hour())
default:
return 0, errors.Errorf("invalid unit %s", unit)
}
if d.Duration < 0 {
res = -res
}
return res, nil
}
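// Illustrative example (not from the original source): for d = 11:30:45, ExtractDurationNum(&d, "HOUR_MINUTE")
// returns 1130 and ExtractDurationNum(&d, "MINUTE_SECOND") returns 3045; for a negative duration the result
// is negated.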
// parseSingleTimeValue parses the format according to the given unit. If strictCheck is true, we check whether
// the converted value exceeds the range of MySQL's TIME type.
// The first four returned values are year, month, day and nanosecond.
func parseSingleTimeValue(unit string, format string, strictCheck bool) (int64, int64, int64, int64, error) {
// Format is a preformatted number; its format should be A[.[B]].
decimalPointPos := strings.IndexRune(format, '.')
if decimalPointPos == -1 {
decimalPointPos = len(format)
}
sign := int64(1)
if len(format) > 0 && format[0] == '-' {
sign = int64(-1)
}
iv, err := strconv.ParseInt(format[0:decimalPointPos], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, format)
}
riv := iv // Rounded integer value
dv := int64(0)
lf := len(format) - 1
// Has fraction part
if decimalPointPos < lf {
dvPre := oneToSixDigitRegex.FindString(format[decimalPointPos+1:]) // the numerical prefix of the fraction part
dvPreLen := len(dvPre)
if dvPreLen >= 6 {
// MySQL rounds down to 1e-6.
if dv, err = strconv.ParseInt(dvPre[0:6], 10, 64); err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, format)
}
} else {
if dv, err = strconv.ParseInt(dvPre+"000000"[:6-dvPreLen], 10, 64); err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, format)
}
}
if dv >= 500000 { // Round up, and we should keep 6 digits for microsecond, so dv should be in [000000, 999999].
riv += sign
}
if unit != "SECOND" {
err = ErrTruncatedWrongVal.GenWithStackByArgs(format)
}
dv *= sign
}
switch strings.ToUpper(unit) {
case "MICROSECOND":
if strictCheck && tidbMath.Abs(riv) > TimeMaxValueSeconds*1000 {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
dayCount := riv / int64(GoDurationDay/gotime.Microsecond)
riv %= int64(GoDurationDay / gotime.Microsecond)
return 0, 0, dayCount, riv * int64(gotime.Microsecond), err
case "SECOND":
if strictCheck && tidbMath.Abs(iv) > TimeMaxValueSeconds {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
dayCount := iv / int64(GoDurationDay/gotime.Second)
iv %= int64(GoDurationDay / gotime.Second)
return 0, 0, dayCount, iv*int64(gotime.Second) + dv*int64(gotime.Microsecond), err
case "MINUTE":
if strictCheck && tidbMath.Abs(riv) > TimeMaxHour*60+TimeMaxMinute {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
dayCount := riv / int64(GoDurationDay/gotime.Minute)
riv %= int64(GoDurationDay / gotime.Minute)
return 0, 0, dayCount, riv * int64(gotime.Minute), err
case "HOUR":
if strictCheck && tidbMath.Abs(riv) > TimeMaxHour {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
dayCount := riv / 24
riv %= 24
return 0, 0, dayCount, riv * int64(gotime.Hour), err
case "DAY":
if strictCheck && tidbMath.Abs(riv) > TimeMaxHour/24 {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
return 0, 0, riv, 0, err
case "WEEK":
if strictCheck && 7*tidbMath.Abs(riv) > TimeMaxHour/24 {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
return 0, 0, 7 * riv, 0, err
case "MONTH":
if strictCheck && tidbMath.Abs(riv) > 1 {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
return 0, riv, 0, 0, err
case "QUARTER":
if strictCheck {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
return 0, 3 * riv, 0, 0, err
case "YEAR":
if strictCheck {
return 0, 0, 0, 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
return riv, 0, 0, 0, err
}
return 0, 0, 0, 0, errors.Errorf("invalid singel timeunit - %s", unit)
}
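// Illustrative examples (not from the original source): for unit "MINUTE" and format "61.5", the fraction
// rounds the value up to 62 minutes, so the result is (0, 0, 0, 62 minutes in nanoseconds) together with a
// truncation warning, because a fraction is only meaningful for "SECOND"; for unit "SECOND" and format "1.5",
// the fraction is kept and the result is 1 second plus 500000 microseconds, expressed in nanoseconds.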
// parseTimeValue gets years, months, days and nanoseconds from a string.
// The nanosecond part will not exceed the length of a single day.
// MySQL permits any punctuation delimiter in the expr format.
// See https://dev.mysql.com/doc/refman/8.0/en/expressions.html#temporal-intervals
func parseTimeValue(format string, index, cnt int) (int64, int64, int64, int64, error) {
neg := false
originalFmt := format
format = strings.TrimSpace(format)
if len(format) > 0 && format[0] == '-' {
neg = true
format = format[1:]
}
fields := make([]string, TimeValueCnt)
for i := range fields {
fields[i] = "0"
}
matches := numericRegex.FindAllString(format, -1)
if len(matches) > cnt {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
for i := range matches {
if neg {
fields[index] = "-" + matches[len(matches)-1-i]
} else {
fields[index] = matches[len(matches)-1-i]
}
index--
}
years, err := strconv.ParseInt(fields[YearIndex], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
months, err := strconv.ParseInt(fields[MonthIndex], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
days, err := strconv.ParseInt(fields[DayIndex], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
hours, err := strconv.ParseInt(fields[HourIndex], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
minutes, err := strconv.ParseInt(fields[MinuteIndex], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
seconds, err := strconv.ParseInt(fields[SecondIndex], 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
microseconds, err := strconv.ParseInt(alignFrac(fields[MicrosecondIndex], MaxFsp), 10, 64)
if err != nil {
return 0, 0, 0, 0, ErrWrongValue.GenWithStackByArgs(DateTimeStr, originalFmt)
}
seconds = hours*3600 + minutes*60 + seconds
days += seconds / (3600 * 24)
seconds %= 3600 * 24
return years, months, days, seconds*int64(gotime.Second) + microseconds*int64(gotime.Microsecond), nil
}
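// Illustrative example (not from the original source): for format "1:02:03" with index = SecondIndex and
// cnt = HourSecondMaxCnt, the numeric matches are assigned from right to left, giving hours = 1, minutes = 2,
// seconds = 3, so the function returns (0, 0, 0, 3723 seconds expressed in nanoseconds).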
func parseAndValidateDurationValue(format string, index, cnt int) (int64, error) {
year, month, day, nano, err := parseTimeValue(format, index, cnt)
if err != nil {
return 0, err
}
if year != 0 || month != 0 || tidbMath.Abs(day) > TimeMaxHour/24 {
return 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
dur := day*int64(GoDurationDay) + nano
if tidbMath.Abs(dur) > int64(MaxTime) {
return 0, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
}
return dur, nil
}
// ParseDurationValue parses time value from time unit and format.
// Returns y years m months d days + n nanoseconds
// Nanoseconds will be no longer than one day.
func ParseDurationValue(unit string, format string) (y int64, m int64, d int64, n int64, _ error) {
switch strings.ToUpper(unit) {
case "MICROSECOND", "SECOND", "MINUTE", "HOUR", "DAY", "WEEK", "MONTH", "QUARTER", "YEAR":
return parseSingleTimeValue(unit, format, false)
case "SECOND_MICROSECOND":
return parseTimeValue(format, MicrosecondIndex, SecondMicrosecondMaxCnt)
case "MINUTE_MICROSECOND":
return parseTimeValue(format, MicrosecondIndex, MinuteMicrosecondMaxCnt)
case "MINUTE_SECOND":
return parseTimeValue(format, SecondIndex, MinuteSecondMaxCnt)
case "HOUR_MICROSECOND":
return parseTimeValue(format, MicrosecondIndex, HourMicrosecondMaxCnt)
case "HOUR_SECOND":
return parseTimeValue(format, SecondIndex, HourSecondMaxCnt)
case "HOUR_MINUTE":
return parseTimeValue(format, MinuteIndex, HourMinuteMaxCnt)
case "DAY_MICROSECOND":
return parseTimeValue(format, MicrosecondIndex, DayMicrosecondMaxCnt)
case "DAY_SECOND":
return parseTimeValue(format, SecondIndex, DaySecondMaxCnt)
case "DAY_MINUTE":
return parseTimeValue(format, MinuteIndex, DayMinuteMaxCnt)
case "DAY_HOUR":
return parseTimeValue(format, HourIndex, DayHourMaxCnt)
case "YEAR_MONTH":
return parseTimeValue(format, MonthIndex, YearMonthMaxCnt)
default:
return 0, 0, 0, 0, errors.Errorf("invalid single timeunit - %s", unit)
}
}
// ExtractDurationValue extract the value from format to Duration.
func ExtractDurationValue(unit string, format string) (Duration, error) {
unit = strings.ToUpper(unit)
switch unit {
case "MICROSECOND", "SECOND", "MINUTE", "HOUR", "DAY", "WEEK", "MONTH", "QUARTER", "YEAR":
_, month, day, nano, err := parseSingleTimeValue(unit, format, true)
if err != nil {
return ZeroDuration, err
}
dur := Duration{Duration: gotime.Duration((month*30+day)*int64(GoDurationDay) + nano)}
if unit == "MICROSECOND" {
dur.Fsp = MaxFsp
}
return dur, err
case "SECOND_MICROSECOND":
d, err := parseAndValidateDurationValue(format, MicrosecondIndex, SecondMicrosecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "MINUTE_MICROSECOND":
d, err := parseAndValidateDurationValue(format, MicrosecondIndex, MinuteMicrosecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "MINUTE_SECOND":
d, err := parseAndValidateDurationValue(format, SecondIndex, MinuteSecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "HOUR_MICROSECOND":
d, err := parseAndValidateDurationValue(format, MicrosecondIndex, HourMicrosecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "HOUR_SECOND":
d, err := parseAndValidateDurationValue(format, SecondIndex, HourSecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "HOUR_MINUTE":
d, err := parseAndValidateDurationValue(format, MinuteIndex, HourMinuteMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: 0}, nil
case "DAY_MICROSECOND":
d, err := parseAndValidateDurationValue(format, MicrosecondIndex, DayMicrosecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "DAY_SECOND":
d, err := parseAndValidateDurationValue(format, SecondIndex, DaySecondMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: MaxFsp}, nil
case "DAY_MINUTE":
d, err := parseAndValidateDurationValue(format, MinuteIndex, DayMinuteMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: 0}, nil
case "DAY_HOUR":
d, err := parseAndValidateDurationValue(format, HourIndex, DayHourMaxCnt)
if err != nil {
return ZeroDuration, err
}
return Duration{Duration: gotime.Duration(d), Fsp: 0}, nil
case "YEAR_MONTH":
_, err := parseAndValidateDurationValue(format, MonthIndex, YearMonthMaxCnt)
if err != nil {
return ZeroDuration, err
}
// A YEAR_MONTH interval always exceeds the limit of MySQL's TIME type, so just return an overflow error.
return ZeroDuration, ErrDatetimeFunctionOverflow.GenWithStackByArgs("time")
default:
return ZeroDuration, errors.Errorf("invalid single timeunit - %s", unit)
}
}
// IsClockUnit returns true when the unit is an interval unit that contains an hour, minute, second or microsecond part.
func IsClockUnit(unit string) bool {
switch strings.ToUpper(unit) {
case "MICROSECOND", "SECOND", "MINUTE", "HOUR",
"SECOND_MICROSECOND", "MINUTE_MICROSECOND", "MINUTE_SECOND",
"HOUR_MICROSECOND", "HOUR_SECOND", "HOUR_MINUTE",
"DAY_MICROSECOND", "DAY_SECOND", "DAY_MINUTE", "DAY_HOUR":
return true
default:
return false
}
}
// IsDateFormat returns true when the specified time format could contain only date.
func IsDateFormat(format string) bool {
format = strings.TrimSpace(format)
seps := ParseDateFormat(format)
length := len(format)
switch len(seps) {
case 1:
// "20129" will be parsed to 2020-12-09, which is date format.
if (length == 8) || (length == 6) || (length == 5) {
return true
}
case 3:
return true
}
return false
}
// ParseTimeFromInt64 parses mysql time value from int64.
func ParseTimeFromInt64(sc *stmtctx.StatementContext, num int64) (Time, error) {
return parseDateTimeFromNum(sc, num)
}
// DateFormat returns a textual representation of the time value formatted
// according to layout.
// See http://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_date-format
func (t Time) DateFormat(layout string) (string, error) {
var buf bytes.Buffer
inPatternMatch := false
for _, b := range layout {
if inPatternMatch {
if err := t.convertDateFormat(b, &buf); err != nil {
return "", errors.Trace(err)
}
inPatternMatch = false
continue
}
// It's not in pattern match now.
if b == '%' {
inPatternMatch = true
} else {
buf.WriteRune(b)
}
}
return buf.String(), nil
}
var abbrevWeekdayName = []string{
"Sun", "Mon", "Tue",
"Wed", "Thu", "Fri", "Sat",
}
func (t Time) convertDateFormat(b rune, buf *bytes.Buffer) error {
switch b {
case 'b':
m := t.Month()
if m == 0 || m > 12 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.Itoa(m)))
}
buf.WriteString(MonthNames[m-1][:3])
case 'M':
m := t.Month()
if m == 0 || m > 12 {
return errors.Trace(ErrWrongValue.GenWithStackByArgs(TimeStr, strconv.Itoa(m)))
}
buf.WriteString(MonthNames[m-1])
case 'm':
buf.WriteString(FormatIntWidthN(t.Month(), 2))
case 'c':
buf.WriteString(strconv.FormatInt(int64(t.Month()), 10))
case 'D':
buf.WriteString(strconv.FormatInt(int64(t.Day()), 10))
buf.WriteString(abbrDayOfMonth(t.Day()))
case 'd':
buf.WriteString(FormatIntWidthN(t.Day(), 2))
case 'e':
buf.WriteString(strconv.FormatInt(int64(t.Day()), 10))
case 'j':
fmt.Fprintf(buf, "%03d", t.YearDay())
case 'H':
buf.WriteString(FormatIntWidthN(t.Hour(), 2))
case 'k':
buf.WriteString(strconv.FormatInt(int64(t.Hour()), 10))
case 'h', 'I':
t := t.Hour()
if t%12 == 0 {
buf.WriteString("12")
} else {
buf.WriteString(FormatIntWidthN(t%12, 2))
}
case 'l':
t := t.Hour()
if t%12 == 0 {
buf.WriteString("12")
} else {
buf.WriteString(strconv.FormatInt(int64(t%12), 10))
}
case 'i':
buf.WriteString(FormatIntWidthN(t.Minute(), 2))
case 'p':
hour := t.Hour()
if hour/12%2 == 0 {
buf.WriteString("AM")
} else {
buf.WriteString("PM")
}
case 'r':
h := t.Hour()
h %= 24
switch {
case h == 0:
fmt.Fprintf(buf, "%02d:%02d:%02d AM", 12, t.Minute(), t.Second())
case h == 12:
fmt.Fprintf(buf, "%02d:%02d:%02d PM", 12, t.Minute(), t.Second())
case h < 12:
fmt.Fprintf(buf, "%02d:%02d:%02d AM", h, t.Minute(), t.Second())
default:
fmt.Fprintf(buf, "%02d:%02d:%02d PM", h-12, t.Minute(), t.Second())
}
case 'T':
fmt.Fprintf(buf, "%02d:%02d:%02d", t.Hour(), t.Minute(), t.Second())
case 'S', 's':
buf.WriteString(FormatIntWidthN(t.Second(), 2))
case 'f':
fmt.Fprintf(buf, "%06d", t.Microsecond())
case 'U':
w := t.Week(0)
buf.WriteString(FormatIntWidthN(w, 2))
case 'u':
w := t.Week(1)
buf.WriteString(FormatIntWidthN(w, 2))
case 'V':
w := t.Week(2)
buf.WriteString(FormatIntWidthN(w, 2))
case 'v':
_, w := t.YearWeek(3)
buf.WriteString(FormatIntWidthN(w, 2))
case 'a':
weekday := t.Weekday()
buf.WriteString(abbrevWeekdayName[weekday])
case 'W':
buf.WriteString(t.Weekday().String())
case 'w':
buf.WriteString(strconv.FormatInt(int64(t.Weekday()), 10))
case 'X':
year, _ := t.YearWeek(2)
if year < 0 {
buf.WriteString(strconv.FormatUint(uint64(math.MaxUint32), 10))
} else {
buf.WriteString(FormatIntWidthN(year, 4))
}
case 'x':
year, _ := t.YearWeek(3)
if year < 0 {
buf.WriteString(strconv.FormatUint(uint64(math.MaxUint32), 10))
} else {
buf.WriteString(FormatIntWidthN(year, 4))
}
case 'Y':
buf.WriteString(FormatIntWidthN(t.Year(), 4))
case 'y':
str := FormatIntWidthN(t.Year(), 4)
buf.WriteString(str[2:])
default:
buf.WriteRune(b)
}
return nil
}
// FormatIntWidthN formats an int to the given width, left-padding with zeros when there are not enough digits.
func FormatIntWidthN(num, n int) string {
numString := strconv.FormatInt(int64(num), 10)
if len(numString) >= n {
return numString
}
padBytes := make([]byte, n-len(numString))
for i := range padBytes {
padBytes[i] = '0'
}
return string(padBytes) + numString
}
func abbrDayOfMonth(day int) string {
var str string
switch day {
case 1, 21, 31:
str = "st"
case 2, 22:
str = "nd"
case 3, 23:
str = "rd"
default:
str = "th"
}
return str
}
// StrToDate converts date string according to format.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_date-format
func (t *Time) StrToDate(sc *stmtctx.StatementContext, date, format string) bool {
ctx := make(map[string]int)
var tm CoreTime
success, warning := strToDate(&tm, date, format, ctx)
if !success {
t.SetCoreTime(ZeroCoreTime)
t.SetType(mysql.TypeDatetime)
t.SetFsp(0)
return false
}
if err := mysqlTimeFix(&tm, ctx); err != nil {
return false
}
t.SetCoreTime(tm)
t.SetType(mysql.TypeDatetime)
if t.check(sc) != nil {
return false
}
if warning {
// Only append this warning when parsing succeeds but a warning is still needed.
// Currently this only happens when `date` has extra characters at the end.
sc.AppendWarning(ErrTruncatedWrongVal.GenWithStackByArgs(DateTimeStr, date))
}
return true
}
// mysqlTimeFix fixes the Time using the values in the context.
func mysqlTimeFix(t *CoreTime, ctx map[string]int) error {
// Key of the ctx is the format char, such as `%j` `%p` and so on.
if yearOfDay, ok := ctx["%j"]; ok {
// TODO: Implement the function that converts day of year to yy:mm:dd.
_ = yearOfDay
}
if valueAMorPm, ok := ctx["%p"]; ok {
if _, ok := ctx["%H"]; ok {
return ErrWrongValue.GenWithStackByArgs(TimeStr, t)
}
if t.Hour() == 0 {
return ErrWrongValue.GenWithStackByArgs(TimeStr, t)
}
if t.Hour() == 12 {
// 12 is a special hour.
switch valueAMorPm {
case constForAM:
t.setHour(0)
case constForPM:
t.setHour(12)
}
return nil
}
if valueAMorPm == constForPM {
t.setHour(t.getHour() + 12)
}
} else {
if _, ok := ctx["%h"]; ok && t.Hour() == 12 {
t.setHour(0)
}
}
return nil
}
// strToDate converts a date string according to format;
// the value will be stored in argument t or ctx.
// The second return value is true when parsing succeeds but a warning still needs to be appended.
func strToDate(t *CoreTime, date string, format string, ctx map[string]int) (success bool, warning bool) {
date = skipWhiteSpace(date)
format = skipWhiteSpace(format)
token, formatRemain, succ := getFormatToken(format)
if !succ {
return false, false
}
if token == "" {
if len(date) != 0 {
			// Extra characters at the end of date are ignored, but a warning should be reported in this case.
return true, true
}
// Normal case. Both token and date are empty now.
return true, false
}
if len(date) == 0 {
ctx[token] = 0
return true, false
}
dateRemain, succ := matchDateWithToken(t, date, token, ctx)
if !succ {
return false, false
}
return strToDate(t, dateRemain, formatRemain, ctx)
}
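// Illustrative walkthrough (added for clarity): parsing date "2023-01-02" with
// format "%Y-%m-%d" first consumes token "%Y" and stores year 2023, then matches
// the literal "-", then "%m" stores month 1, the next "-", and finally "%d"
// stores day 2; at that point both the remaining date and format are empty and
// strToDate returns (true, false).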
// getFormatToken takes one format control token from the string.
// For example, format "%d %H %m" yields token "%d" with remainder " %H %m".
func getFormatToken(format string) (token string, remain string, succ bool) {
if len(format) == 0 {
return "", "", true
}
// Just one character.
if len(format) == 1 {
if format[0] == '%' {
return "", "", false
}
return format, "", true
}
// More than one character.
if format[0] == '%' {
return format[:2], format[2:], true
}
return format[:1], format[1:], true
}
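// Illustrative examples based on the rules above: getFormatToken("%d %H") returns
// ("%d", " %H", true), getFormatToken("-%m") returns ("-", "%m", true), and a lone
// trailing percent sign, getFormatToken("%"), returns ("", "", false).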
func skipWhiteSpace(input string) string {
for i, c := range input {
if !unicode.IsSpace(c) {
return input[i:]
}
}
return ""
}
var monthAbbrev = map[string]gotime.Month{
"jan": gotime.January,
"feb": gotime.February,
"mar": gotime.March,
"apr": gotime.April,
"may": gotime.May,
"jun": gotime.June,
"jul": gotime.July,
"aug": gotime.August,
"sep": gotime.September,
"oct": gotime.October,
"nov": gotime.November,
"dec": gotime.December,
}
type dateFormatParser func(t *CoreTime, date string, ctx map[string]int) (remain string, succ bool)
var dateFormatParserTable = map[string]dateFormatParser{
"%b": abbreviatedMonth, // Abbreviated month name (Jan..Dec)
"%c": monthNumeric, // Month, numeric (0..12)
"%d": dayOfMonthNumeric, // Day of the month, numeric (0..31)
"%e": dayOfMonthNumeric, // Day of the month, numeric (0..31)
"%f": microSeconds, // Microseconds (000000..999999)
"%h": hour12Numeric, // Hour (01..12)
"%H": hour24Numeric, // Hour (00..23)
"%I": hour12Numeric, // Hour (01..12)
"%i": minutesNumeric, // Minutes, numeric (00..59)
"%j": dayOfYearNumeric, // Day of year (001..366)
"%k": hour24Numeric, // Hour (0..23)
"%l": hour12Numeric, // Hour (1..12)
"%M": fullNameMonth, // Month name (January..December)
"%m": monthNumeric, // Month, numeric (00..12)
"%p": isAMOrPM, // AM or PM
"%r": time12Hour, // Time, 12-hour (hh:mm:ss followed by AM or PM)
"%s": secondsNumeric, // Seconds (00..59)
"%S": secondsNumeric, // Seconds (00..59)
"%T": time24Hour, // Time, 24-hour (hh:mm:ss)
"%Y": yearNumericFourDigits, // Year, numeric, four digits
"%#": skipAllNums, // Skip all numbers
"%.": skipAllPunct, // Skip all punctation characters
"%@": skipAllAlpha, // Skip all alpha characters
// Deprecated since MySQL 5.7.5
"%y": yearNumericTwoDigits, // Year, numeric (two digits)
// TODO: Add the following...
// "%a": abbreviatedWeekday, // Abbreviated weekday name (Sun..Sat)
// "%D": dayOfMonthWithSuffix, // Day of the month with English suffix (0th, 1st, 2nd, 3rd)
// "%U": weekMode0, // Week (00..53), where Sunday is the first day of the week; WEEK() mode 0
// "%u": weekMode1, // Week (00..53), where Monday is the first day of the week; WEEK() mode 1
// "%V": weekMode2, // Week (01..53), where Sunday is the first day of the week; WEEK() mode 2; used with %X
// "%v": weekMode3, // Week (01..53), where Monday is the first day of the week; WEEK() mode 3; used with %x
// "%W": weekdayName, // Weekday name (Sunday..Saturday)
// "%w": dayOfWeek, // Day of the week (0=Sunday..6=Saturday)
// "%X": yearOfWeek, // Year for the week where Sunday is the first day of the week, numeric, four digits; used with %V
// "%x": yearOfWeek, // Year for the week, where Monday is the first day of the week, numeric, four digits; used with %v
}
// GetFormatType checks the type(Duration, Date or Datetime) of a format string.
func GetFormatType(format string) (isDuration, isDate bool) {
format = skipWhiteSpace(format)
var token string
var succ bool
for {
token, format, succ = getFormatToken(format)
if len(token) == 0 {
break
}
if !succ {
isDuration, isDate = false, false
break
}
if len(token) >= 2 && token[0] == '%' {
switch token[1] {
case 'h', 'H', 'i', 'I', 's', 'S', 'k', 'l', 'f', 'r', 'T':
isDuration = true
case 'y', 'Y', 'm', 'M', 'c', 'b', 'D', 'd', 'e':
isDate = true
}
}
if isDuration && isDate {
break
}
}
return
}
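// Illustrative examples (added for clarity): GetFormatType("%H:%i:%s") yields
// isDuration=true and isDate=false, while GetFormatType("%Y-%m-%d %H:%i:%s")
// yields isDuration=true and isDate=true.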
func matchDateWithToken(t *CoreTime, date string, token string, ctx map[string]int) (remain string, succ bool) {
if parse, ok := dateFormatParserTable[token]; ok {
return parse(t, date, ctx)
}
if strings.HasPrefix(date, token) {
return date[len(token):], true
}
return date, false
}
// parseNDigits tries to parse up to `limit` digits from the start of `input`.
// It returns <number, number of characters consumed> on success,
// and <_, 0> on failure.
func parseNDigits(input string, limit int) (int, int) {
if limit <= 0 {
return 0, 0
}
var num uint64 = 0
var step = 0
for ; step < len(input) && step < limit && '0' <= input[step] && input[step] <= '9'; step++ {
num = num*10 + uint64(input[step]-'0')
}
return int(num), step
}
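// Illustrative examples: parseNDigits("2023-01", 4) returns (2023, 4),
// parseNDigits("7x", 2) returns (7, 1), and parseNDigits("abc", 2) returns (0, 0).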
func secondsNumeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 2)
if step <= 0 || v >= 60 {
return input, false
}
t.setSecond(uint8(v))
return input[step:], true
}
func minutesNumeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 2)
if step <= 0 || v >= 60 {
return input, false
}
t.setMinute(uint8(v))
return input[step:], true
}
type parseState int32
const (
parseStateNormal parseState = 1
parseStateFail parseState = 2
parseStateEndOfLine parseState = 3
)
func parseSep(input string) (string, parseState) {
input = skipWhiteSpace(input)
if len(input) == 0 {
return input, parseStateEndOfLine
}
if input[0] != ':' {
return input, parseStateFail
}
if input = skipWhiteSpace(input[1:]); len(input) == 0 {
return input, parseStateEndOfLine
}
return input, parseStateNormal
}
func time12Hour(t *CoreTime, input string, ctx map[string]int) (string, bool) {
tryParse := func(input string) (string, parseState) {
// hh:mm:ss AM
		/// Note that we should update `t` as soon as possible, or we
		/// cannot get the correct result for incomplete input like "12:13",
		/// which is shorter than "hh:mm:ss".
hour, step := parseNDigits(input, 2) // 1..12
if step <= 0 || hour > 12 || hour == 0 {
return input, parseStateFail
}
// Handle special case: 12:34:56 AM -> 00:34:56
		// For PM, we will add 12 to it later
if hour == 12 {
hour = 0
}
t.setHour(uint8(hour))
// ':'
var state parseState
if input, state = parseSep(input[step:]); state != parseStateNormal {
return input, state
}
minute, step := parseNDigits(input, 2) // 0..59
if step <= 0 || minute > 59 {
return input, parseStateFail
}
t.setMinute(uint8(minute))
// ':'
if input, state = parseSep(input[step:]); state != parseStateNormal {
return input, state
}
second, step := parseNDigits(input, 2) // 0..59
if step <= 0 || second > 59 {
return input, parseStateFail
}
t.setSecond(uint8(second))
input = skipWhiteSpace(input[step:])
if len(input) == 0 {
// No "AM"/"PM" suffix, it is ok
return input, parseStateEndOfLine
} else if len(input) < 2 {
// some broken char, fail
return input, parseStateFail
}
switch {
case hasCaseInsensitivePrefix(input, "AM"):
t.setHour(uint8(hour))
case hasCaseInsensitivePrefix(input, "PM"):
t.setHour(uint8(hour + 12))
default:
return input, parseStateFail
}
return input[2:], parseStateNormal
}
remain, state := tryParse(input)
if state == parseStateFail {
return input, false
}
return remain, true
}
func time24Hour(t *CoreTime, input string, ctx map[string]int) (string, bool) {
tryParse := func(input string) (string, parseState) {
// hh:mm:ss
		/// Note that we should update `t` as soon as possible, or we
		/// cannot get the correct result for incomplete input like "12:13",
		/// which is shorter than "hh:mm:ss".
hour, step := parseNDigits(input, 2) // 0..23
if step <= 0 || hour > 23 {
return input, parseStateFail
}
t.setHour(uint8(hour))
// ':'
var state parseState
if input, state = parseSep(input[step:]); state != parseStateNormal {
return input, state
}
minute, step := parseNDigits(input, 2) // 0..59
if step <= 0 || minute > 59 {
return input, parseStateFail
}
t.setMinute(uint8(minute))
// ':'
if input, state = parseSep(input[step:]); state != parseStateNormal {
return input, state
}
second, step := parseNDigits(input, 2) // 0..59
if step <= 0 || second > 59 {
return input, parseStateFail
}
t.setSecond(uint8(second))
return input[step:], parseStateNormal
}
remain, state := tryParse(input)
if state == parseStateFail {
return input, false
}
return remain, true
}
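// Illustrative example for the incomplete-input note above: time24Hour on "12:13"
// sets hour 12 and minute 13, then parseSep reports end of line for the missing
// seconds part, so the call still returns successfully.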
const (
constForAM = 1 + iota
constForPM
)
func isAMOrPM(t *CoreTime, input string, ctx map[string]int) (string, bool) {
if len(input) < 2 {
return input, false
}
s := strings.ToLower(input[:2])
switch s {
case "am":
ctx["%p"] = constForAM
case "pm":
ctx["%p"] = constForPM
default:
return input, false
}
return input[2:], true
}
// oneToSixDigitRegex matches zero to six digits, i.e. values in [0, 999999].
var oneToSixDigitRegex = regexp.MustCompile("^[0-9]{0,6}")
// numericRegex matches any run of numeric characters.
var numericRegex = regexp.MustCompile("[0-9]+")
func dayOfMonthNumeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 2) // 0..31
if step <= 0 || v > 31 {
return input, false
}
t.setDay(uint8(v))
return input[step:], true
}
func hour24Numeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 2) // 0..23
if step <= 0 || v > 23 {
return input, false
}
t.setHour(uint8(v))
ctx["%H"] = v
return input[step:], true
}
func hour12Numeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 2) // 1..12
if step <= 0 || v > 12 || v == 0 {
return input, false
}
t.setHour(uint8(v))
ctx["%h"] = v
return input[step:], true
}
func microSeconds(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 6)
if step <= 0 {
t.setMicrosecond(0)
return input, true
}
for i := step; i < 6; i++ {
v *= 10
}
t.setMicrosecond(uint32(v))
return input[step:], true
}
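// Illustrative example of the right-padding above: input "45" for %f parses v=45
// in two steps, then the loop multiplies by 10 four more times, storing 450000
// microseconds (i.e. 0.45 seconds).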
func yearNumericFourDigits(t *CoreTime, input string, ctx map[string]int) (string, bool) {
return yearNumericNDigits(t, input, ctx, 4)
}
func yearNumericTwoDigits(t *CoreTime, input string, ctx map[string]int) (string, bool) {
return yearNumericNDigits(t, input, ctx, 2)
}
func yearNumericNDigits(t *CoreTime, input string, ctx map[string]int, n int) (string, bool) {
year, step := parseNDigits(input, n)
if step <= 0 {
return input, false
} else if step <= 2 {
year = adjustYear(year)
}
t.setYear(uint16(year))
return input[step:], true
}
func dayOfYearNumeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
	// MySQL declares that "%j" should be "Day of year (001..366)". But actually,
	// it accepts a number of up to three digits, whose range is [1, 999].
v, step := parseNDigits(input, 3)
if step <= 0 || v == 0 {
return input, false
}
ctx["%j"] = v
return input[step:], true
}
func abbreviatedMonth(t *CoreTime, input string, ctx map[string]int) (string, bool) {
if len(input) >= 3 {
monthName := strings.ToLower(input[:3])
if month, ok := monthAbbrev[monthName]; ok {
t.setMonth(uint8(month))
return input[len(monthName):], true
}
}
return input, false
}
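// Illustrative example: abbreviatedMonth on "Sep 2023" lowercases the first three
// characters to "sep", sets the month to September (9), and returns " 2023".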
func hasCaseInsensitivePrefix(input, prefix string) bool {
if len(input) < len(prefix) {
return false
}
return strings.EqualFold(input[:len(prefix)], prefix)
}
func fullNameMonth(t *CoreTime, input string, ctx map[string]int) (string, bool) {
for i, month := range MonthNames {
if hasCaseInsensitivePrefix(input, month) {
t.setMonth(uint8(i + 1))
return input[len(month):], true
}
}
return input, false
}
func monthNumeric(t *CoreTime, input string, ctx map[string]int) (string, bool) {
v, step := parseNDigits(input, 2) // 1..12
if step <= 0 || v > 12 {
return input, false
}
t.setMonth(uint8(v))
return input[step:], true
}
// DateFSP gets fsp from date string.
func DateFSP(date string) (fsp int) {
i := strings.LastIndex(date, ".")
if i != -1 {
fsp = len(date) - i - 1
}
return
}
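// Illustrative examples: DateFSP("12:23:34.1234") returns 4, while a string
// without a fractional part such as "2020-01-01" returns 0.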
// DateTimeIsOverflow returns whether this date is outside the representable range.
// See: https://dev.mysql.com/doc/refman/8.0/en/datetime.html
func DateTimeIsOverflow(sc *stmtctx.StatementContext, date Time) (bool, error) {
tz := sc.TimeZone
if tz == nil {
logutil.BgLogger().Warn("use gotime.local because sc.timezone is nil")
tz = gotime.Local
}
var err error
var b, e, t gotime.Time
switch date.Type() {
case mysql.TypeDate, mysql.TypeDatetime:
if b, err = MinDatetime.GoTime(tz); err != nil {
return false, err
}
if e, err = MaxDatetime.GoTime(tz); err != nil {
return false, err
}
case mysql.TypeTimestamp:
minTS, maxTS := MinTimestamp, MaxTimestamp
if tz != gotime.UTC {
if err = minTS.ConvertTimeZone(gotime.UTC, tz); err != nil {
return false, err
}
if err = maxTS.ConvertTimeZone(gotime.UTC, tz); err != nil {
return false, err
}
}
if b, err = minTS.GoTime(tz); err != nil {
return false, err
}
if e, err = maxTS.GoTime(tz); err != nil {
return false, err
}
default:
return false, nil
}
if t, err = date.GoTime(tz); err != nil {
return false, err
}
inRange := (t.After(b) || t.Equal(b)) && (t.Before(e) || t.Equal(e))
return !inRange, nil
}
func skipAllNums(t *CoreTime, input string, ctx map[string]int) (string, bool) {
retIdx := 0
for i, ch := range input {
if unicode.IsNumber(ch) {
retIdx = i + 1
} else {
break
}
}
return input[retIdx:], true
}
func skipAllPunct(t *CoreTime, input string, ctx map[string]int) (string, bool) {
retIdx := 0
for i, ch := range input {
if unicode.IsPunct(ch) {
retIdx = i + 1
} else {
break
}
}
return input[retIdx:], true
}
func skipAllAlpha(t *CoreTime, input string, ctx map[string]int) (string, bool) {
retIdx := 0
for i, ch := range input {
if unicode.IsLetter(ch) {
retIdx = i + 1
} else {
break
}
}
return input[retIdx:], true
}<|fim▁end|> | func ParseDateFormat(format string) []string {
format = strings.TrimSpace(format)
if len(format) == 0 { |
<|file_name|>CellRelayEnd.java<|end_file_name|><|fim▁begin|>/**
* OnionCoffee - Anonymous Communication through TOR Network
* Copyright (C) 2005-2007 RWTH Aachen University, Informatik IV
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* version 2 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*/
package cf.monteux.silvertunnel.netlib.layer.tor.circuit.cells;
import cf.monteux.silvertunnel.netlib.layer.tor.circuit.Stream;
/**
 * sends an END cell, needed to close a TCP stream.
*
* @author Lexi Pimenidis
*/
public class CellRelayEnd extends CellRelay<|fim▁hole|> *
* @param s
* the stream that shall be closed
* @param reason
* a reason
*/
public CellRelayEnd(final Stream s, final byte reason)
{
// initialize a new Relay-cell
super(s, CellRelay.RELAY_END);
// set length
setLength(1);
data[0] = reason;
}
}<|fim▁end|> | {
/**
	 * constructor to build an END cell.
<|file_name|>sites.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import sys
import threading
from contextlib import contextmanager
from django.contrib.sites.models import Site
from mezzanine.conf import settings
from mezzanine.core.request import current_request
from mezzanine.utils.conf import middlewares_or_subclasses_installed
SITE_PERMISSION_MIDDLEWARE = \
"mezzanine.core.middleware.SitePermissionMiddleware"
def current_site_id():
"""
Responsible for determining the current ``Site`` instance to use
when retrieving data for any ``SiteRelated`` models. If we're inside an
override_current_site_id context manager, return the overriding site ID.
Otherwise, try to determine the site using the following methods in order:
- ``site_id`` in session. Used in the admin so that admin users
can switch sites and stay on the same domain for the admin.
- The id of the Site object corresponding to the hostname in the current
request. This result is cached.
- ``MEZZANINE_SITE_ID`` environment variable, so management
commands or anything else outside of a request can specify a
site.
- ``SITE_ID`` setting.
If a current request exists and the current site is not overridden, the
site ID is stored on the request object to speed up subsequent calls.
"""
if hasattr(override_current_site_id.thread_local, "site_id"):
return override_current_site_id.thread_local.site_id
from mezzanine.utils.cache import cache_installed, cache_get, cache_set
request = current_request()
site_id = getattr(request, "site_id", None)
if request and not site_id:
site_id = request.session.get("site_id", None)
if not site_id:
domain = request.get_host().lower()
if cache_installed():
# Don't use Mezzanine's cache_key_prefix here, since it
# uses this very function we're in right now to create a
# per-site cache key.
bits = (settings.CACHE_MIDDLEWARE_KEY_PREFIX, domain)
cache_key = "%s.site_id.%s" % bits
site_id = cache_get(cache_key)
if not site_id:
try:
site = Site.objects.get(domain__iexact=domain)
except Site.DoesNotExist:
pass
else:
site_id = site.id
if cache_installed():
cache_set(cache_key, site_id)
if not site_id:
site_id = os.environ.get("MEZZANINE_SITE_ID", settings.SITE_ID)
if request and site_id and not getattr(settings, "TESTING", False):
request.site_id = site_id
return site_id
@contextmanager
def override_current_site_id(site_id):
"""
Context manager that overrides the current site id for code executed
within it. Used to access SiteRelated objects outside the current site.
"""
override_current_site_id.thread_local.site_id = site_id
yield
del override_current_site_id.thread_local.site_id
override_current_site_id.thread_local = threading.local()
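# Illustrative usage of the context manager above (added for clarity): any code
# that calls current_site_id() inside the block sees the overridden value.
#
#     with override_current_site_id(2):
#         assert current_site_id() == 2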
def has_site_permission(user):
"""
Checks if a staff user has staff-level access for the current site.
The actual permission lookup occurs in ``SitePermissionMiddleware``
which then marks the request with the ``has_site_permission`` flag,
so that we only query the db once per request, so this function<|fim▁hole|> if not middlewares_or_subclasses_installed([SITE_PERMISSION_MIDDLEWARE]):
return user.is_staff and user.is_active
return getattr(user, "has_site_permission", False)
def host_theme_path():
"""
Returns the directory of the theme associated with the given host.
"""
# Set domain to None, which we'll then query for in the first
# iteration of HOST_THEMES. We use the current site_id rather
# than a request object here, as it may differ for admin users.
domain = None
for (host, theme) in settings.HOST_THEMES:
if domain is None:
domain = Site.objects.get(id=current_site_id()).domain
if host.lower() == domain.lower():
try:
__import__(theme)
module = sys.modules[theme]
except ImportError:
pass
else:
return os.path.dirname(os.path.abspath(module.__file__))
return ""<|fim▁end|> | serves as the entry point for everything else to check access. We
also fall back to an ``is_staff`` check if the middleware is not
installed, to ease migration.
""" |
<|file_name|>net.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Copyright (c) 2011-2012 Litecoin Developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "irc.h"
#include "db.h"
#include "net.h"
#include "init.h"
#include "strlcpy.h"
#include "addrman.h"
#include "ui_interface.h"
#ifdef WIN32
#include <string.h>
#endif
#ifdef USE_UPNP
#include <miniupnpc/miniwget.h>
#include <miniupnpc/miniupnpc.h>
#include <miniupnpc/upnpcommands.h>
#include <miniupnpc/upnperrors.h>
#endif
using namespace std;
using namespace boost;
static const int MAX_OUTBOUND_CONNECTIONS = 8;
void ThreadMessageHandler2(void* parg);
void ThreadSocketHandler2(void* parg);
void ThreadOpenConnections2(void* parg);
void ThreadOpenAddedConnections2(void* parg);
#ifdef USE_UPNP
void ThreadMapPort2(void* parg);
#endif
void ThreadDNSAddressSeed2(void* parg);
bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound = NULL, const char *strDest = NULL, bool fOneShot = false);
struct LocalServiceInfo {
int nScore;
int nPort;
};
//
// Global state variables
//
bool fClient = false;
bool fDiscover = true;
bool fUseUPnP = false;
uint64 nLocalServices = (fClient ? 0 : NODE_NETWORK);
static CCriticalSection cs_mapLocalHost;
static map<CNetAddr, LocalServiceInfo> mapLocalHost;
static bool vfReachable[NET_MAX] = {};
static bool vfLimited[NET_MAX] = {};
static CNode* pnodeLocalHost = NULL;
uint64 nLocalHostNonce = 0;
array<int, THREAD_MAX> vnThreadsRunning;
static std::vector<SOCKET> vhListenSocket;
CAddrMan addrman;
vector<CNode*> vNodes;
CCriticalSection cs_vNodes;
map<CInv, CDataStream> mapRelay;
deque<pair<int64, CInv> > vRelayExpiration;
CCriticalSection cs_mapRelay;
map<CInv, int64> mapAlreadyAskedFor;
static deque<string> vOneShots;
CCriticalSection cs_vOneShots;
set<CNetAddr> setservAddNodeAddresses;
CCriticalSection cs_setservAddNodeAddresses;
static CSemaphore *semOutbound = NULL;
void AddOneShot(string strDest)
{
LOCK(cs_vOneShots);
vOneShots.push_back(strDest);
}
unsigned short GetListenPort()
{
return (unsigned short)(GetArg("-port", GetDefaultPort()));
}
void CNode::PushGetBlocks(CBlockIndex* pindexBegin, uint256 hashEnd)
{
// Filter out duplicate requests
if (pindexBegin == pindexLastGetBlocksBegin && hashEnd == hashLastGetBlocksEnd)
return;
pindexLastGetBlocksBegin = pindexBegin;
hashLastGetBlocksEnd = hashEnd;
PushMessage("getblocks", CBlockLocator(pindexBegin), hashEnd);
}
// find 'best' local address for a particular peer
bool GetLocal(CService& addr, const CNetAddr *paddrPeer)
{
if (fNoListen)
return false;
int nBestScore = -1;
int nBestReachability = -1;
{
LOCK(cs_mapLocalHost);
for (map<CNetAddr, LocalServiceInfo>::iterator it = mapLocalHost.begin(); it != mapLocalHost.end(); it++)
{
int nScore = (*it).second.nScore;
int nReachability = (*it).first.GetReachabilityFrom(paddrPeer);
if (nReachability > nBestReachability || (nReachability == nBestReachability && nScore > nBestScore))
{
addr = CService((*it).first, (*it).second.nPort);
nBestReachability = nReachability;
nBestScore = nScore;
}
}
}
return nBestScore >= 0;
}
// get best local address for a particular peer as a CAddress
CAddress GetLocalAddress(const CNetAddr *paddrPeer)
{
CAddress ret(CService("0.0.0.0",0),0);
CService addr;
if (GetLocal(addr, paddrPeer))
{
ret = CAddress(addr);
ret.nServices = nLocalServices;
ret.nTime = GetAdjustedTime();
}
return ret;
}
bool RecvLine(SOCKET hSocket, string& strLine)
{
strLine = "";
loop
{
char c;
int nBytes = recv(hSocket, &c, 1, 0);
if (nBytes > 0)
{
if (c == '\n')
continue;
if (c == '\r')
return true;
strLine += c;
if (strLine.size() >= 9000)
return true;
}
else if (nBytes <= 0)
{
if (fShutdown)
return false;
if (nBytes < 0)
{
int nErr = WSAGetLastError();
if (nErr == WSAEMSGSIZE)
continue;
if (nErr == WSAEWOULDBLOCK || nErr == WSAEINTR || nErr == WSAEINPROGRESS)
{
Sleep(10);
continue;
}
}
if (!strLine.empty())
return true;
if (nBytes == 0)
{
// socket closed
printf("socket closed\n");
return false;
}
else
{
// socket error
int nErr = WSAGetLastError();
printf("recv failed: %d\n", nErr);
return false;
}
}
}
}
// used when scores of local addresses may have changed
// pushes better local address to peers
void static AdvertizeLocal()
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
{
if (pnode->fSuccessfullyConnected)
{
CAddress addrLocal = GetLocalAddress(&pnode->addr);
if (addrLocal.IsRoutable() && (CService)addrLocal != (CService)pnode->addrLocal)
{
pnode->PushAddress(addrLocal);
pnode->addrLocal = addrLocal;
}
}
}
}
void SetReachable(enum Network net, bool fFlag)
{
LOCK(cs_mapLocalHost);
vfReachable[net] = fFlag;
if (net == NET_IPV6 && fFlag)
vfReachable[NET_IPV4] = true;
}
// learn a new local address
bool AddLocal(const CService& addr, int nScore)
{
if (!addr.IsRoutable())
return false;
if (!fDiscover && nScore < LOCAL_MANUAL)
return false;
if (IsLimited(addr))
return false;
printf("AddLocal(%s,%i)\n", addr.ToString().c_str(), nScore);
{
LOCK(cs_mapLocalHost);
bool fAlready = mapLocalHost.count(addr) > 0;
LocalServiceInfo &info = mapLocalHost[addr];
if (!fAlready || nScore >= info.nScore) {
            info.nScore = nScore + (fAlready ? 1 : 0);
            info.nPort = addr.GetPort();
}
SetReachable(addr.GetNetwork());
}
AdvertizeLocal();
return true;
}
bool AddLocal(const CNetAddr &addr, int nScore)
{
return AddLocal(CService(addr, GetListenPort()), nScore);
}
/** Make a particular network entirely off-limits (no automatic connects to it) */
void SetLimited(enum Network net, bool fLimited)
{
if (net == NET_UNROUTABLE)
return;
LOCK(cs_mapLocalHost);
vfLimited[net] = fLimited;
}
bool IsLimited(enum Network net)
{
LOCK(cs_mapLocalHost);
return vfLimited[net];
}
bool IsLimited(const CNetAddr &addr)
{
return IsLimited(addr.GetNetwork());
}
/** vote for a local address */
bool SeenLocal(const CService& addr)
{
{
LOCK(cs_mapLocalHost);
if (mapLocalHost.count(addr) == 0)
return false;
mapLocalHost[addr].nScore++;
}
AdvertizeLocal();
return true;
}
/** check whether a given address is potentially local */
bool IsLocal(const CService& addr)
{
LOCK(cs_mapLocalHost);
return mapLocalHost.count(addr) > 0;
}
/** check whether a given address is in a network we can probably connect to */
bool IsReachable(const CNetAddr& addr)
{
LOCK(cs_mapLocalHost);
enum Network net = addr.GetNetwork();
return vfReachable[net] && !vfLimited[net];
}<|fim▁hole|> if (!ConnectSocket(addrConnect, hSocket))
return error("GetMyExternalIP() : connection to %s failed", addrConnect.ToString().c_str());
send(hSocket, pszGet, strlen(pszGet), MSG_NOSIGNAL);
string strLine;
while (RecvLine(hSocket, strLine))
{
if (strLine.empty()) // HTTP response is separated from headers by blank line
{
loop
{
if (!RecvLine(hSocket, strLine))
{
closesocket(hSocket);
return false;
}
if (pszKeyword == NULL)
break;
if (strLine.find(pszKeyword) != string::npos)
{
strLine = strLine.substr(strLine.find(pszKeyword) + strlen(pszKeyword));
break;
}
}
closesocket(hSocket);
if (strLine.find("<") != string::npos)
strLine = strLine.substr(0, strLine.find("<"));
strLine = strLine.substr(strspn(strLine.c_str(), " \t\n\r"));
while (strLine.size() > 0 && isspace(strLine[strLine.size()-1]))
strLine.resize(strLine.size()-1);
CService addr(strLine,0,true);
printf("GetMyExternalIP() received [%s] %s\n", strLine.c_str(), addr.ToString().c_str());
if (!addr.IsValid() || !addr.IsRoutable())
return false;
ipRet.SetIP(addr);
return true;
}
}
closesocket(hSocket);
return error("GetMyExternalIP() : connection closed");
}
// We now get our external IP from the IRC server first and only use this as a backup
bool GetMyExternalIP(CNetAddr& ipRet)
{
CService addrConnect;
const char* pszGet;
const char* pszKeyword;
for (int nLookup = 0; nLookup <= 1; nLookup++)
for (int nHost = 1; nHost <= 2; nHost++)
{
// We should be phasing out our use of sites like these. If we need
// replacements, we should ask for volunteers to put this simple
// php file on their webserver that prints the client IP:
// <?php echo $_SERVER["REMOTE_ADDR"]; ?>
if (nHost == 1)
{
addrConnect = CService("91.198.22.70",80); // checkip.dyndns.org
if (nLookup == 1)
{
CService addrIP("checkip.dyndns.org", 80, true);
if (addrIP.IsValid())
addrConnect = addrIP;
}
pszGet = "GET / HTTP/1.1\r\n"
"Host: checkip.dyndns.org\r\n"
"User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n"
"Connection: close\r\n"
"\r\n";
pszKeyword = "Address:";
}
else if (nHost == 2)
{
addrConnect = CService("74.208.43.192", 80); // www.showmyip.com
if (nLookup == 1)
{
CService addrIP("www.showmyip.com", 80, true);
if (addrIP.IsValid())
addrConnect = addrIP;
}
pszGet = "GET /simple/ HTTP/1.1\r\n"
"Host: www.showmyip.com\r\n"
"User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n"
"Connection: close\r\n"
"\r\n";
pszKeyword = NULL; // Returns just IP address
}
if (GetMyExternalIP2(addrConnect, pszGet, pszKeyword, ipRet))
return true;
}
return false;
}
void ThreadGetMyExternalIP(void* parg)
{
// Make this thread recognisable as the external IP detection thread
RenameThread("bitcoin-ext-ip");
CNetAddr addrLocalHost;
if (GetMyExternalIP(addrLocalHost))
{
printf("GetMyExternalIP() returned %s\n", addrLocalHost.ToStringIP().c_str());
AddLocal(addrLocalHost, LOCAL_HTTP);
}
}
void AddressCurrentlyConnected(const CService& addr)
{
addrman.Connected(addr);
}
CNode* FindNode(const CNetAddr& ip)
{
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if ((CNetAddr)pnode->addr == ip)
return (pnode);
}
return NULL;
}
CNode* FindNode(std::string addrName)
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if (pnode->addrName == addrName)
return (pnode);
return NULL;
}
CNode* FindNode(const CService& addr)
{
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if ((CService)pnode->addr == addr)
return (pnode);
}
return NULL;
}
CNode* ConnectNode(CAddress addrConnect, const char *pszDest, int64 nTimeout)
{
if (pszDest == NULL) {
if (IsLocal(addrConnect))
return NULL;
// Look for an existing connection
CNode* pnode = FindNode((CService)addrConnect);
if (pnode)
{
if (nTimeout != 0)
pnode->AddRef(nTimeout);
else
pnode->AddRef();
return pnode;
}
}
/// debug print
printf("trying connection %s lastseen=%.1fhrs\n",
pszDest ? pszDest : addrConnect.ToString().c_str(),
pszDest ? 0 : (double)(GetAdjustedTime() - addrConnect.nTime)/3600.0);
// Connect
SOCKET hSocket;
if (pszDest ? ConnectSocketByName(addrConnect, hSocket, pszDest, GetDefaultPort()) : ConnectSocket(addrConnect, hSocket))
{
addrman.Attempt(addrConnect);
/// debug print
printf("connected %s\n", pszDest ? pszDest : addrConnect.ToString().c_str());
// Set to nonblocking
#ifdef WIN32
u_long nOne = 1;
if (ioctlsocket(hSocket, FIONBIO, &nOne) == SOCKET_ERROR)
printf("ConnectSocket() : ioctlsocket nonblocking setting failed, error %d\n", WSAGetLastError());
#else
if (fcntl(hSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR)
printf("ConnectSocket() : fcntl nonblocking setting failed, error %d\n", errno);
#endif
// Add node
CNode* pnode = new CNode(hSocket, addrConnect, pszDest ? pszDest : "", false);
if (nTimeout != 0)
pnode->AddRef(nTimeout);
else
pnode->AddRef();
{
LOCK(cs_vNodes);
vNodes.push_back(pnode);
}
pnode->nTimeConnected = GetTime();
return pnode;
}
else
{
return NULL;
}
}
void CNode::CloseSocketDisconnect()
{
fDisconnect = true;
if (hSocket != INVALID_SOCKET)
{
printf("disconnecting node %s\n", addrName.c_str());
closesocket(hSocket);
hSocket = INVALID_SOCKET;
vRecv.clear();
}
}
void CNode::Cleanup()
{
}
void CNode::PushVersion()
{
/// when NTP implemented, change to just nTime = GetAdjustedTime()
int64 nTime = (fInbound ? GetAdjustedTime() : GetTime());
CAddress addrYou = (addr.IsRoutable() && !IsProxy(addr) ? addr : CAddress(CService("0.0.0.0",0)));
CAddress addrMe = GetLocalAddress(&addr);
RAND_bytes((unsigned char*)&nLocalHostNonce, sizeof(nLocalHostNonce));
printf("send version message: version %d, blocks=%d, us=%s, them=%s, peer=%s\n", PROTOCOL_VERSION, nBestHeight, addrMe.ToString().c_str(), addrYou.ToString().c_str(), addr.ToString().c_str());
PushMessage("version", PROTOCOL_VERSION, nLocalServices, nTime, addrYou, addrMe,
nLocalHostNonce, FormatSubVersion(CLIENT_NAME, CLIENT_VERSION, std::vector<string>()), nBestHeight);
}
std::map<CNetAddr, int64> CNode::setBanned;
CCriticalSection CNode::cs_setBanned;
void CNode::ClearBanned()
{
setBanned.clear();
}
bool CNode::IsBanned(CNetAddr ip)
{
bool fResult = false;
{
LOCK(cs_setBanned);
std::map<CNetAddr, int64>::iterator i = setBanned.find(ip);
if (i != setBanned.end())
{
int64 t = (*i).second;
if (GetTime() < t)
fResult = true;
}
}
return fResult;
}
bool CNode::Misbehaving(int howmuch)
{
if (addr.IsLocal())
{
printf("Warning: local node %s misbehaving\n", addrName.c_str());
return false;
}
nMisbehavior += howmuch;
if (nMisbehavior >= GetArg("-banscore", 100))
{
int64 banTime = GetTime()+GetArg("-bantime", 60*60*24); // Default 24-hour ban
{
LOCK(cs_setBanned);
if (setBanned[addr] < banTime)
setBanned[addr] = banTime;
}
CloseSocketDisconnect();
printf("Disconnected %s for misbehavior (score=%d)\n", addrName.c_str(), nMisbehavior);
return true;
}
return false;
}
#undef X
#define X(name) stats.name = name
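// Illustrative note on the X macro above: a line such as X(nServices) in
// copyStats() expands to stats.nServices = nServices; copying the member
// into the CNodeStats struct.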
void CNode::copyStats(CNodeStats &stats)
{
X(nServices);
X(nLastSend);
X(nLastRecv);
X(nTimeConnected);
X(addrName);
X(nVersion);
X(strSubVer);
X(fInbound);
X(nReleaseTime);
X(nStartingHeight);
X(nMisbehavior);
}
#undef X
void ThreadSocketHandler(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadSocketHandler(parg));
// Make this thread recognisable as the networking thread
RenameThread("bitcoin-net");
try
{
vnThreadsRunning[THREAD_SOCKETHANDLER]++;
ThreadSocketHandler2(parg);
vnThreadsRunning[THREAD_SOCKETHANDLER]--;
}
catch (std::exception& e) {
vnThreadsRunning[THREAD_SOCKETHANDLER]--;
PrintException(&e, "ThreadSocketHandler()");
} catch (...) {
vnThreadsRunning[THREAD_SOCKETHANDLER]--;
throw; // support pthread_cancel()
}
printf("ThreadSocketHandler exited\n");
}
void ThreadSocketHandler2(void* parg)
{
printf("ThreadSocketHandler started\n");
list<CNode*> vNodesDisconnected;
unsigned int nPrevNodeCount = 0;
loop
{
//
// Disconnect nodes
//
{
LOCK(cs_vNodes);
// Disconnect unused nodes
vector<CNode*> vNodesCopy = vNodes;
BOOST_FOREACH(CNode* pnode, vNodesCopy)
{
if (pnode->fDisconnect ||
(pnode->GetRefCount() <= 0 && pnode->vRecv.empty() && pnode->vSend.empty()))
{
// remove from vNodes
vNodes.erase(remove(vNodes.begin(), vNodes.end(), pnode), vNodes.end());
// release outbound grant (if any)
pnode->grantOutbound.Release();
// close socket and cleanup
pnode->CloseSocketDisconnect();
pnode->Cleanup();
// hold in disconnected pool until all refs are released
pnode->nReleaseTime = max(pnode->nReleaseTime, GetTime() + 15 * 60);
if (pnode->fNetworkNode || pnode->fInbound)
pnode->Release();
vNodesDisconnected.push_back(pnode);
}
}
// Delete disconnected nodes
list<CNode*> vNodesDisconnectedCopy = vNodesDisconnected;
BOOST_FOREACH(CNode* pnode, vNodesDisconnectedCopy)
{
// wait until threads are done using it
if (pnode->GetRefCount() <= 0)
{
bool fDelete = false;
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend)
{
TRY_LOCK(pnode->cs_vRecv, lockRecv);
if (lockRecv)
{
TRY_LOCK(pnode->cs_mapRequests, lockReq);
if (lockReq)
{
TRY_LOCK(pnode->cs_inventory, lockInv);
if (lockInv)
fDelete = true;
}
}
}
}
if (fDelete)
{
vNodesDisconnected.remove(pnode);
delete pnode;
}
}
}
}
if (vNodes.size() != nPrevNodeCount)
{
nPrevNodeCount = vNodes.size();
uiInterface.NotifyNumConnectionsChanged(vNodes.size());
}
//
// Find which sockets have data to receive
//
struct timeval timeout;
timeout.tv_sec = 0;
timeout.tv_usec = 50000; // frequency to poll pnode->vSend
fd_set fdsetRecv;
fd_set fdsetSend;
fd_set fdsetError;
FD_ZERO(&fdsetRecv);
FD_ZERO(&fdsetSend);
FD_ZERO(&fdsetError);
SOCKET hSocketMax = 0;
BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket) {
FD_SET(hListenSocket, &fdsetRecv);
hSocketMax = max(hSocketMax, hListenSocket);
}
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
{
if (pnode->hSocket == INVALID_SOCKET)
continue;
FD_SET(pnode->hSocket, &fdsetRecv);
FD_SET(pnode->hSocket, &fdsetError);
hSocketMax = max(hSocketMax, pnode->hSocket);
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend && !pnode->vSend.empty())
FD_SET(pnode->hSocket, &fdsetSend);
}
}
}
vnThreadsRunning[THREAD_SOCKETHANDLER]--;
int nSelect = select(hSocketMax + 1, &fdsetRecv, &fdsetSend, &fdsetError, &timeout);
vnThreadsRunning[THREAD_SOCKETHANDLER]++;
if (fShutdown)
return;
if (nSelect == SOCKET_ERROR)
{
int nErr = WSAGetLastError();
if (hSocketMax != INVALID_SOCKET)
{
printf("socket select error %d\n", nErr);
for (unsigned int i = 0; i <= hSocketMax; i++)
FD_SET(i, &fdsetRecv);
}
FD_ZERO(&fdsetSend);
FD_ZERO(&fdsetError);
Sleep(timeout.tv_usec/1000);
}
//
// Accept new connections
//
BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket)
if (hListenSocket != INVALID_SOCKET && FD_ISSET(hListenSocket, &fdsetRecv))
{
#ifdef USE_IPV6
struct sockaddr_storage sockaddr;
#else
struct sockaddr sockaddr;
#endif
socklen_t len = sizeof(sockaddr);
SOCKET hSocket = accept(hListenSocket, (struct sockaddr*)&sockaddr, &len);
CAddress addr;
int nInbound = 0;
if (hSocket != INVALID_SOCKET)
if (!addr.SetSockAddr((const struct sockaddr*)&sockaddr))
printf("warning: unknown socket family\n");
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if (pnode->fInbound)
nInbound++;
}
if (hSocket == INVALID_SOCKET)
{
if (WSAGetLastError() != WSAEWOULDBLOCK)
printf("socket error accept failed: %d\n", WSAGetLastError());
}
else if (nInbound >= GetArg("-maxconnections", 125) - MAX_OUTBOUND_CONNECTIONS)
{
{
LOCK(cs_setservAddNodeAddresses);
if (!setservAddNodeAddresses.count(addr))
closesocket(hSocket);
}
}
else if (CNode::IsBanned(addr))
{
printf("connection from %s dropped (banned)\n", addr.ToString().c_str());
closesocket(hSocket);
}
else
{
printf("accepted connection %s\n", addr.ToString().c_str());
CNode* pnode = new CNode(hSocket, addr, "", true);
pnode->AddRef();
{
LOCK(cs_vNodes);
vNodes.push_back(pnode);
}
}
}
//
// Service each socket
//
vector<CNode*> vNodesCopy;
{
LOCK(cs_vNodes);
vNodesCopy = vNodes;
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->AddRef();
}
BOOST_FOREACH(CNode* pnode, vNodesCopy)
{
if (fShutdown)
return;
//
// Receive
//
if (pnode->hSocket == INVALID_SOCKET)
continue;
if (FD_ISSET(pnode->hSocket, &fdsetRecv) || FD_ISSET(pnode->hSocket, &fdsetError))
{
TRY_LOCK(pnode->cs_vRecv, lockRecv);
if (lockRecv)
{
CDataStream& vRecv = pnode->vRecv;
unsigned int nPos = vRecv.size();
if (nPos > ReceiveBufferSize()) {
if (!pnode->fDisconnect)
printf("socket recv flood control disconnect (%d bytes)\n", vRecv.size());
pnode->CloseSocketDisconnect();
}
else {
// typical socket buffer is 8K-64K
char pchBuf[0x10000];
int nBytes = recv(pnode->hSocket, pchBuf, sizeof(pchBuf), MSG_DONTWAIT);
if (nBytes > 0)
{
vRecv.resize(nPos + nBytes);
memcpy(&vRecv[nPos], pchBuf, nBytes);
pnode->nLastRecv = GetTime();
}
else if (nBytes == 0)
{
// socket closed gracefully
if (!pnode->fDisconnect)
printf("socket closed\n");
pnode->CloseSocketDisconnect();
}
else if (nBytes < 0)
{
// error
int nErr = WSAGetLastError();
if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS)
{
if (!pnode->fDisconnect)
printf("socket recv error %d\n", nErr);
pnode->CloseSocketDisconnect();
}
}
}
}
}
//
// Send
//
if (pnode->hSocket == INVALID_SOCKET)
continue;
if (FD_ISSET(pnode->hSocket, &fdsetSend))
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend)
{
CDataStream& vSend = pnode->vSend;
if (!vSend.empty())
{
int nBytes = send(pnode->hSocket, &vSend[0], vSend.size(), MSG_NOSIGNAL | MSG_DONTWAIT);
if (nBytes > 0)
{
vSend.erase(vSend.begin(), vSend.begin() + nBytes);
pnode->nLastSend = GetTime();
}
else if (nBytes < 0)
{
// error
int nErr = WSAGetLastError();
if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS)
{
printf("socket send error %d\n", nErr);
pnode->CloseSocketDisconnect();
}
}
}
}
}
//
// Inactivity checking
//
if (pnode->vSend.empty())
pnode->nLastSendEmpty = GetTime();
if (GetTime() - pnode->nTimeConnected > 60)
{
if (pnode->nLastRecv == 0 || pnode->nLastSend == 0)
{
printf("socket no message in first 60 seconds, %d %d\n", pnode->nLastRecv != 0, pnode->nLastSend != 0);
pnode->fDisconnect = true;
}
else if (GetTime() - pnode->nLastSend > 90*60 && GetTime() - pnode->nLastSendEmpty > 90*60)
{
printf("socket not sending\n");
pnode->fDisconnect = true;
}
else if (GetTime() - pnode->nLastRecv > 90*60)
{
printf("socket inactivity timeout\n");
pnode->fDisconnect = true;
}
}
}
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->Release();
}
Sleep(10);
}
}
#ifdef USE_UPNP
void ThreadMapPort(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadMapPort(parg));
// Make this thread recognisable as the UPnP thread
RenameThread("bitcoin-UPnP");
try
{
vnThreadsRunning[THREAD_UPNP]++;
ThreadMapPort2(parg);
vnThreadsRunning[THREAD_UPNP]--;
}
catch (std::exception& e) {
vnThreadsRunning[THREAD_UPNP]--;
PrintException(&e, "ThreadMapPort()");
} catch (...) {
vnThreadsRunning[THREAD_UPNP]--;
PrintException(NULL, "ThreadMapPort()");
}
printf("ThreadMapPort exited\n");
}
void ThreadMapPort2(void* parg)
{
printf("ThreadMapPort started\n");
char port[6];
sprintf(port, "%d", GetListenPort());
const char * multicastif = 0;
const char * minissdpdpath = 0;
struct UPNPDev * devlist = 0;
char lanaddr[64];
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0);
#else
/* miniupnpc 1.6 */
int error = 0;
devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0, 0, &error);
#endif
struct UPNPUrls urls;
struct IGDdatas data;
int r;
r = UPNP_GetValidIGD(devlist, &urls, &data, lanaddr, sizeof(lanaddr));
if (r == 1)
{
if (fDiscover) {
char externalIPAddress[40];
r = UPNP_GetExternalIPAddress(urls.controlURL, data.first.servicetype, externalIPAddress);
if(r != UPNPCOMMAND_SUCCESS)
printf("UPnP: GetExternalIPAddress() returned %d\n", r);
else
{
if(externalIPAddress[0])
{
printf("UPnP: ExternalIPAddress = %s\n", externalIPAddress);
AddLocal(CNetAddr(externalIPAddress), LOCAL_UPNP);
}
else
printf("UPnP: GetExternalIPAddress failed.\n");
}
}
string strDesc = "LeproCoin " + FormatFullVersion();
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port, port, lanaddr, strDesc.c_str(), "TCP", 0);
#else
/* miniupnpc 1.6 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port, port, lanaddr, strDesc.c_str(), "TCP", 0, "0");
#endif
if(r!=UPNPCOMMAND_SUCCESS)
printf("AddPortMapping(%s, %s, %s) failed with code %d (%s)\n",
port, port, lanaddr, r, strupnperror(r));
else
printf("UPnP Port Mapping successful.\n");
int i = 1;
loop {
if (fShutdown || !fUseUPnP)
{
r = UPNP_DeletePortMapping(urls.controlURL, data.first.servicetype, port, "TCP", 0);
printf("UPNP_DeletePortMapping() returned : %d\n", r);
freeUPNPDevlist(devlist); devlist = 0;
FreeUPNPUrls(&urls);
return;
}
if (i % 600 == 0) // Refresh every 20 minutes
{
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port, port, lanaddr, strDesc.c_str(), "TCP", 0);
#else
/* miniupnpc 1.6 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port, port, lanaddr, strDesc.c_str(), "TCP", 0, "0");
#endif
if(r!=UPNPCOMMAND_SUCCESS)
printf("AddPortMapping(%s, %s, %s) failed with code %d (%s)\n",
port, port, lanaddr, r, strupnperror(r));
else
printf("UPnP Port Mapping successful.\n");;
}
Sleep(2000);
i++;
}
} else {
printf("No valid UPnP IGDs found\n");
freeUPNPDevlist(devlist); devlist = 0;
if (r != 0)
FreeUPNPUrls(&urls);
loop {
if (fShutdown || !fUseUPnP)
return;
Sleep(2000);
}
}
}
void MapPort()
{
if (fUseUPnP && vnThreadsRunning[THREAD_UPNP] < 1)
{
if (!CreateThread(ThreadMapPort, NULL))
printf("Error: ThreadMapPort(ThreadMapPort) failed\n");
}
}
#else
void MapPort()
{
// Intentionally left blank.
}
#endif
// DNS seeds
// Each pair gives a source name and a seed name.
// The first name is used as information source for addrman.
// The second name should resolve to a list of seed addresses.
static const char *strDNSSeed[][2] = {
//{"litecoinpool.org", "dnsseed.litecoinpool.org"},
//{"bytesized-vps.com", "dnsseed.bytesized-vps.com"},
//{"xurious.com", "dnsseed.ltc.xurious.com"},
};
void ThreadDNSAddressSeed(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadDNSAddressSeed(parg));
// Make this thread recognisable as the DNS seeding thread
RenameThread("bitcoin-dnsseed");
try
{
vnThreadsRunning[THREAD_DNSSEED]++;
ThreadDNSAddressSeed2(parg);
vnThreadsRunning[THREAD_DNSSEED]--;
}
catch (std::exception& e) {
vnThreadsRunning[THREAD_DNSSEED]--;
PrintException(&e, "ThreadDNSAddressSeed()");
} catch (...) {
vnThreadsRunning[THREAD_DNSSEED]--;
throw; // support pthread_cancel()
}
printf("ThreadDNSAddressSeed exited\n");
}
void ThreadDNSAddressSeed2(void* parg)
{
printf("ThreadDNSAddressSeed started\n");
int found = 0;
if (!fTestNet)
{
printf("Loading addresses from DNS seeds (could take a while)\n");
for (unsigned int seed_idx = 0; seed_idx < ARRAYLEN(strDNSSeed); seed_idx++) {
if (GetNameProxy()) {
AddOneShot(strDNSSeed[seed_idx][1]);
} else {
vector<CNetAddr> vaddr;
vector<CAddress> vAdd;
if (LookupHost(strDNSSeed[seed_idx][1], vaddr))
{
BOOST_FOREACH(CNetAddr& ip, vaddr)
{
int nOneDay = 24*3600;
CAddress addr = CAddress(CService(ip, GetDefaultPort()));
addr.nTime = GetTime() - 3*nOneDay - GetRand(4*nOneDay); // use a random age between 3 and 7 days old
vAdd.push_back(addr);
found++;
}
}
addrman.Add(vAdd, CNetAddr(strDNSSeed[seed_idx][0], true));
}
}
}
printf("%d addresses found from DNS seeds\n", found);
}
unsigned int pnSeed[] =
{
0x2EFDCB71, 0xCC1B3AD6, 0xADA77149,
};
void DumpAddresses()
{
int64 nStart = GetTimeMillis();
CAddrDB adb;
adb.Write(addrman);
printf("Flushed %d addresses to peers.dat %"PRI64d"ms\n",
addrman.size(), GetTimeMillis() - nStart);
}
void ThreadDumpAddress2(void* parg)
{
vnThreadsRunning[THREAD_DUMPADDRESS]++;
while (!fShutdown)
{
DumpAddresses();
vnThreadsRunning[THREAD_DUMPADDRESS]--;
Sleep(100000);
vnThreadsRunning[THREAD_DUMPADDRESS]++;
}
vnThreadsRunning[THREAD_DUMPADDRESS]--;
}
void ThreadDumpAddress(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadDumpAddress(parg));
// Make this thread recognisable as the address dumping thread
RenameThread("bitcoin-adrdump");
try
{
ThreadDumpAddress2(parg);
}
catch (std::exception& e) {
PrintException(&e, "ThreadDumpAddress()");
}
printf("ThreadDumpAddress exited\n");
}
void ThreadOpenConnections(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadOpenConnections(parg));
// Make this thread recognisable as the connection opening thread
RenameThread("bitcoin-opencon");
try
{
vnThreadsRunning[THREAD_OPENCONNECTIONS]++;
ThreadOpenConnections2(parg);
vnThreadsRunning[THREAD_OPENCONNECTIONS]--;
}
catch (std::exception& e) {
vnThreadsRunning[THREAD_OPENCONNECTIONS]--;
PrintException(&e, "ThreadOpenConnections()");
} catch (...) {
vnThreadsRunning[THREAD_OPENCONNECTIONS]--;
PrintException(NULL, "ThreadOpenConnections()");
}
printf("ThreadOpenConnections exited\n");
}
void static ProcessOneShot()
{
string strDest;
{
LOCK(cs_vOneShots);
if (vOneShots.empty())
return;
strDest = vOneShots.front();
vOneShots.pop_front();
}
CAddress addr;
CSemaphoreGrant grant(*semOutbound, true);
if (grant) {
if (!OpenNetworkConnection(addr, &grant, strDest.c_str(), true))
AddOneShot(strDest);
}
}
void ThreadOpenConnections2(void* parg)
{
printf("ThreadOpenConnections started\n");
// Connect to specific addresses
if (mapArgs.count("-connect"))
{
for (int64 nLoop = 0;; nLoop++)
{
ProcessOneShot();
BOOST_FOREACH(string strAddr, mapMultiArgs["-connect"])
{
CAddress addr;
OpenNetworkConnection(addr, NULL, strAddr.c_str());
for (int i = 0; i < 10 && i < nLoop; i++)
{
Sleep(500);
if (fShutdown)
return;
}
}
}
}
// Initiate network connections
int64 nStart = GetTime();
loop
{
ProcessOneShot();
vnThreadsRunning[THREAD_OPENCONNECTIONS]--;
Sleep(500);
vnThreadsRunning[THREAD_OPENCONNECTIONS]++;
if (fShutdown)
return;
vnThreadsRunning[THREAD_OPENCONNECTIONS]--;
CSemaphoreGrant grant(*semOutbound);
vnThreadsRunning[THREAD_OPENCONNECTIONS]++;
if (fShutdown)
return;
// Add seed nodes if IRC isn't working
if (addrman.size()==0 && (GetTime() - nStart > 60) && !fTestNet)
{
std::vector<CAddress> vAdd;
for (unsigned int i = 0; i < ARRAYLEN(pnSeed); i++)
{
// It'll only connect to one or two seed nodes because once it connects,
// it'll get a pile of addresses with newer timestamps.
// Seed nodes are given a random 'last seen time' of between one and two
// weeks ago.
const int64 nOneWeek = 7*24*60*60;
struct in_addr ip;
memcpy(&ip, &pnSeed[i], sizeof(ip));
CAddress addr(CService(ip, GetDefaultPort()));
addr.nTime = GetTime()-GetRand(nOneWeek)-nOneWeek;
vAdd.push_back(addr);
}
addrman.Add(vAdd, CNetAddr("127.0.0.1"));
}
//
// Choose an address to connect to based on most recently seen
//
CAddress addrConnect;
// Only connect out to one peer per network group (/16 for IPv4).
// Do this here so we don't have to critsect vNodes inside mapAddresses critsect.
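        // Illustrative example (added for clarity): two peers at 203.0.113.5 and
        // 203.0.113.200 fall into the same /16 group (203.0.x.x), so at most one
        // outbound connection is kept to that group.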
int nOutbound = 0;
set<vector<unsigned char> > setConnected;
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes) {
if (!pnode->fInbound) {
setConnected.insert(pnode->addr.GetGroup());
nOutbound++;
}
}
}
int64 nANow = GetAdjustedTime();
int nTries = 0;
loop
{
// use an nUnkBias between 10 (no outgoing connections) and 90 (8 outgoing connections)
CAddress addr = addrman.Select(10 + min(nOutbound,8)*10);
// if we selected an invalid address, restart
if (!addr.IsValid() || setConnected.count(addr.GetGroup()) || IsLocal(addr))
break;
nTries++;
if (IsLimited(addr))
continue;
// only consider very recently tried nodes after 30 failed attempts
if (nANow - addr.nLastTry < 600 && nTries < 30)
continue;
// do not allow non-default ports, unless after 50 invalid addresses selected already
if (addr.GetPort() != GetDefaultPort() && nTries < 50)
continue;
addrConnect = addr;
break;
}
if (addrConnect.IsValid())
OpenNetworkConnection(addrConnect, &grant);
}
}
void ThreadOpenAddedConnections(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadOpenAddedConnections(parg));
// Make this thread recognisable as the connection opening thread
RenameThread("bitcoin-opencon");
try
{
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]++;
ThreadOpenAddedConnections2(parg);
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]--;
}
catch (std::exception& e) {
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]--;
PrintException(&e, "ThreadOpenAddedConnections()");
} catch (...) {
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]--;
PrintException(NULL, "ThreadOpenAddedConnections()");
}
printf("ThreadOpenAddedConnections exited\n");
}
void ThreadOpenAddedConnections2(void* parg)
{
printf("ThreadOpenAddedConnections started\n");
if (mapArgs.count("-addnode") == 0)
return;
if (GetNameProxy()) {
while(!fShutdown) {
BOOST_FOREACH(string& strAddNode, mapMultiArgs["-addnode"]) {
CAddress addr;
CSemaphoreGrant grant(*semOutbound);
OpenNetworkConnection(addr, &grant, strAddNode.c_str());
Sleep(500);
}
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]--;
Sleep(120000); // Retry every 2 minutes
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]++;
}
return;
}
vector<vector<CService> > vservAddressesToAdd(0);
BOOST_FOREACH(string& strAddNode, mapMultiArgs["-addnode"])
{
vector<CService> vservNode(0);
if(Lookup(strAddNode.c_str(), vservNode, GetDefaultPort(), fNameLookup, 0))
{
vservAddressesToAdd.push_back(vservNode);
{
LOCK(cs_setservAddNodeAddresses);
BOOST_FOREACH(CService& serv, vservNode)
setservAddNodeAddresses.insert(serv);
}
}
}
loop
{
vector<vector<CService> > vservConnectAddresses = vservAddressesToAdd;
// Attempt to connect to each IP for each addnode entry until at least one is successful per addnode entry
// (keeping in mind that addnode entries can have many IPs if fNameLookup)
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
for (vector<vector<CService> >::iterator it = vservConnectAddresses.begin(); it != vservConnectAddresses.end(); it++)
BOOST_FOREACH(CService& addrNode, *(it))
if (pnode->addr == addrNode)
{
it = vservConnectAddresses.erase(it);
it--;
break;
}
}
BOOST_FOREACH(vector<CService>& vserv, vservConnectAddresses)
{
CSemaphoreGrant grant(*semOutbound);
OpenNetworkConnection(CAddress(*(vserv.begin())), &grant);
Sleep(500);
if (fShutdown)
return;
}
if (fShutdown)
return;
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]--;
Sleep(120000); // Retry every 2 minutes
vnThreadsRunning[THREAD_ADDEDCONNECTIONS]++;
if (fShutdown)
return;
}
}
// if successful, this moves the passed grant to the constructed node
bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound, const char *strDest, bool fOneShot)
{
//
// Initiate outbound network connection
//
if (fShutdown)
return false;
if (!strDest)
if (IsLocal(addrConnect) ||
FindNode((CNetAddr)addrConnect) || CNode::IsBanned(addrConnect) ||
FindNode(addrConnect.ToStringIPPort().c_str()))
return false;
if (strDest && FindNode(strDest))
return false;
vnThreadsRunning[THREAD_OPENCONNECTIONS]--;
CNode* pnode = ConnectNode(addrConnect, strDest);
vnThreadsRunning[THREAD_OPENCONNECTIONS]++;
if (fShutdown)
return false;
if (!pnode)
return false;
if (grantOutbound)
grantOutbound->MoveTo(pnode->grantOutbound);
pnode->fNetworkNode = true;
if (fOneShot)
pnode->fOneShot = true;
return true;
}
void ThreadMessageHandler(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadMessageHandler(parg));
// Make this thread recognisable as the message handling thread
RenameThread("bitcoin-msghand");
try
{
vnThreadsRunning[THREAD_MESSAGEHANDLER]++;
ThreadMessageHandler2(parg);
vnThreadsRunning[THREAD_MESSAGEHANDLER]--;
}
catch (std::exception& e) {
vnThreadsRunning[THREAD_MESSAGEHANDLER]--;
PrintException(&e, "ThreadMessageHandler()");
} catch (...) {
vnThreadsRunning[THREAD_MESSAGEHANDLER]--;
PrintException(NULL, "ThreadMessageHandler()");
}
printf("ThreadMessageHandler exited\n");
}
void ThreadMessageHandler2(void* parg)
{
printf("ThreadMessageHandler started\n");
SetThreadPriority(THREAD_PRIORITY_BELOW_NORMAL);
while (!fShutdown)
{
vector<CNode*> vNodesCopy;
{
LOCK(cs_vNodes);
vNodesCopy = vNodes;
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->AddRef();
}
// Poll the connected nodes for messages
CNode* pnodeTrickle = NULL;
if (!vNodesCopy.empty())
pnodeTrickle = vNodesCopy[GetRand(vNodesCopy.size())];
BOOST_FOREACH(CNode* pnode, vNodesCopy)
{
// Receive messages
{
TRY_LOCK(pnode->cs_vRecv, lockRecv);
if (lockRecv)
ProcessMessages(pnode);
}
if (fShutdown)
return;
// Send messages
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend)
SendMessages(pnode, pnode == pnodeTrickle);
}
if (fShutdown)
return;
}
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->Release();
}
// Wait and allow messages to bunch up.
// Reduce vnThreadsRunning so StopNode has permission to exit while
// we're sleeping, but we must always check fShutdown after doing this.
vnThreadsRunning[THREAD_MESSAGEHANDLER]--;
Sleep(100);
if (fRequestShutdown)
StartShutdown();
vnThreadsRunning[THREAD_MESSAGEHANDLER]++;
if (fShutdown)
return;
}
}
bool BindListenPort(const CService &addrBind, string& strError)
{
strError = "";
int nOne = 1;
#ifdef WIN32
// Initialize Windows Sockets
WSADATA wsadata;
int ret = WSAStartup(MAKEWORD(2,2), &wsadata);
if (ret != NO_ERROR)
{
strError = strprintf("Error: TCP/IP socket library failed to start (WSAStartup returned error %d)", ret);
printf("%s\n", strError.c_str());
return false;
}
#endif
// Create socket for listening for incoming connections
#ifdef USE_IPV6
struct sockaddr_storage sockaddr;
#else
struct sockaddr sockaddr;
#endif
socklen_t len = sizeof(sockaddr);
if (!addrBind.GetSockAddr((struct sockaddr*)&sockaddr, &len))
{
strError = strprintf("Error: bind address family for %s not supported", addrBind.ToString().c_str());
printf("%s\n", strError.c_str());
return false;
}
SOCKET hListenSocket = socket(((struct sockaddr*)&sockaddr)->sa_family, SOCK_STREAM, IPPROTO_TCP);
if (hListenSocket == INVALID_SOCKET)
{
strError = strprintf("Error: Couldn't open socket for incoming connections (socket returned error %d)", WSAGetLastError());
printf("%s\n", strError.c_str());
return false;
}
#ifdef SO_NOSIGPIPE
// Different way of disabling SIGPIPE on BSD
setsockopt(hListenSocket, SOL_SOCKET, SO_NOSIGPIPE, (void*)&nOne, sizeof(int));
#endif
#ifndef WIN32
// Allow binding if the port is still in TIME_WAIT state after
// the program was closed and restarted. Not an issue on windows.
setsockopt(hListenSocket, SOL_SOCKET, SO_REUSEADDR, (void*)&nOne, sizeof(int));
#endif
#ifdef WIN32
// Set to nonblocking, incoming connections will also inherit this
if (ioctlsocket(hListenSocket, FIONBIO, (u_long*)&nOne) == SOCKET_ERROR)
#else
if (fcntl(hListenSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR)
#endif
{
strError = strprintf("Error: Couldn't set properties on socket for incoming connections (error %d)", WSAGetLastError());
printf("%s\n", strError.c_str());
return false;
}
#ifdef USE_IPV6
// some systems don't have IPV6_V6ONLY but are always v6only; others do have the option
// and enable it by default or not. Try to enable it, if possible.
if (addrBind.IsIPv6()) {
#ifdef IPV6_V6ONLY
setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (void*)&nOne, sizeof(int));
#endif
#ifdef WIN32
int nProtLevel = 10 /* PROTECTION_LEVEL_UNRESTRICTED */;
        int nParameterId = 23 /* IPV6_PROTECTION_LEVEL */;
// this call is allowed to fail
setsockopt(hListenSocket, IPPROTO_IPV6, nParameterId, (const char*)&nProtLevel, sizeof(int));
#endif
}
#endif
if (::bind(hListenSocket, (struct sockaddr*)&sockaddr, len) == SOCKET_ERROR)
{
int nErr = WSAGetLastError();
if (nErr == WSAEADDRINUSE)
strError = strprintf(_("Unable to bind to %s on this computer. LeproCoin is probably already running."), addrBind.ToString().c_str());
else
strError = strprintf(_("Unable to bind to %s on this computer (bind returned error %d, %s)"), addrBind.ToString().c_str(), nErr, strerror(nErr));
printf("%s\n", strError.c_str());
return false;
}
printf("Bound to %s\n", addrBind.ToString().c_str());
// Listen for incoming connections
if (listen(hListenSocket, SOMAXCONN) == SOCKET_ERROR)
{
strError = strprintf("Error: Listening for incoming connections failed (listen returned error %d)", WSAGetLastError());
printf("%s\n", strError.c_str());
return false;
}
vhListenSocket.push_back(hListenSocket);
if (addrBind.IsRoutable() && fDiscover)
AddLocal(addrBind, LOCAL_BIND);
return true;
}
void static Discover()
{
if (!fDiscover)
return;
#ifdef WIN32
// Get local host ip
char pszHostName[1000] = "";
if (gethostname(pszHostName, sizeof(pszHostName)) != SOCKET_ERROR)
{
vector<CNetAddr> vaddr;
if (LookupHost(pszHostName, vaddr))
{
BOOST_FOREACH (const CNetAddr &addr, vaddr)
{
AddLocal(addr, LOCAL_IF);
}
}
}
#else
// Get local host ip
struct ifaddrs* myaddrs;
if (getifaddrs(&myaddrs) == 0)
{
for (struct ifaddrs* ifa = myaddrs; ifa != NULL; ifa = ifa->ifa_next)
{
if (ifa->ifa_addr == NULL) continue;
if ((ifa->ifa_flags & IFF_UP) == 0) continue;
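            // Skip loopback interfaces ("lo" on Linux, "lo0" on BSD/OS X); they are not useful as local addresses.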
if (strcmp(ifa->ifa_name, "lo") == 0) continue;
if (strcmp(ifa->ifa_name, "lo0") == 0) continue;
if (ifa->ifa_addr->sa_family == AF_INET)
{
struct sockaddr_in* s4 = (struct sockaddr_in*)(ifa->ifa_addr);
CNetAddr addr(s4->sin_addr);
if (AddLocal(addr, LOCAL_IF))
printf("IPv4 %s: %s\n", ifa->ifa_name, addr.ToString().c_str());
}
#ifdef USE_IPV6
else if (ifa->ifa_addr->sa_family == AF_INET6)
{
struct sockaddr_in6* s6 = (struct sockaddr_in6*)(ifa->ifa_addr);
CNetAddr addr(s6->sin6_addr);
if (AddLocal(addr, LOCAL_IF))
printf("IPv6 %s: %s\n", ifa->ifa_name, addr.ToString().c_str());
}
#endif
}
freeifaddrs(myaddrs);
}
#endif
CreateThread(ThreadGetMyExternalIP, NULL);
}
void StartNode(void* parg)
{
// Make this thread recognisable as the startup thread
RenameThread("bitcoin-start");
if (semOutbound == NULL) {
// initialize semaphore
int nMaxOutbound = min(MAX_OUTBOUND_CONNECTIONS, (int)GetArg("-maxconnections", 125));
semOutbound = new CSemaphore(nMaxOutbound);
}
if (pnodeLocalHost == NULL)
pnodeLocalHost = new CNode(INVALID_SOCKET, CAddress(CService("127.0.0.1", 0), nLocalServices));
Discover();
//
// Start threads
//
if (!GetBoolArg("-dnsseed", true))
printf("DNS seeding disabled\n");
else
if (!CreateThread(ThreadDNSAddressSeed, NULL))
printf("Error: CreateThread(ThreadDNSAddressSeed) failed\n");
// Map ports with UPnP
if (fUseUPnP)
MapPort();
// Get addresses from IRC and advertise ours
if (!CreateThread(ThreadIRCSeed, NULL))
printf("Error: CreateThread(ThreadIRCSeed) failed\n");
// Send and receive from sockets, accept connections
if (!CreateThread(ThreadSocketHandler, NULL))
printf("Error: CreateThread(ThreadSocketHandler) failed\n");
// Initiate outbound connections from -addnode
if (!CreateThread(ThreadOpenAddedConnections, NULL))
printf("Error: CreateThread(ThreadOpenAddedConnections) failed\n");
// Initiate outbound connections
if (!CreateThread(ThreadOpenConnections, NULL))
printf("Error: CreateThread(ThreadOpenConnections) failed\n");
// Process messages
if (!CreateThread(ThreadMessageHandler, NULL))
printf("Error: CreateThread(ThreadMessageHandler) failed\n");
// Dump network addresses
if (!CreateThread(ThreadDumpAddress, NULL))
printf("Error; CreateThread(ThreadDumpAddress) failed\n");
// Generate coins in the background
GenerateBitcoins(GetBoolArg("-gen", false), pwalletMain);
}
bool StopNode()
{
printf("StopNode()\n");
fShutdown = true;
nTransactionsUpdated++;
int64 nStart = GetTime();
if (semOutbound)
for (int i=0; i<MAX_OUTBOUND_CONNECTIONS; i++)
semOutbound->post();
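    // Give the network threads up to 20 seconds to exit before reporting any that are still running.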
do
{
int nThreadsRunning = 0;
for (int n = 0; n < THREAD_MAX; n++)
nThreadsRunning += vnThreadsRunning[n];
if (nThreadsRunning == 0)
break;
if (GetTime() - nStart > 20)
break;
Sleep(20);
} while(true);
if (vnThreadsRunning[THREAD_SOCKETHANDLER] > 0) printf("ThreadSocketHandler still running\n");
if (vnThreadsRunning[THREAD_OPENCONNECTIONS] > 0) printf("ThreadOpenConnections still running\n");
if (vnThreadsRunning[THREAD_MESSAGEHANDLER] > 0) printf("ThreadMessageHandler still running\n");
if (vnThreadsRunning[THREAD_MINER] > 0) printf("ThreadBitcoinMiner still running\n");
if (vnThreadsRunning[THREAD_RPCLISTENER] > 0) printf("ThreadRPCListener still running\n");
if (vnThreadsRunning[THREAD_RPCHANDLER] > 0) printf("ThreadsRPCServer still running\n");
#ifdef USE_UPNP
if (vnThreadsRunning[THREAD_UPNP] > 0) printf("ThreadMapPort still running\n");
#endif
if (vnThreadsRunning[THREAD_DNSSEED] > 0) printf("ThreadDNSAddressSeed still running\n");
if (vnThreadsRunning[THREAD_ADDEDCONNECTIONS] > 0) printf("ThreadOpenAddedConnections still running\n");
if (vnThreadsRunning[THREAD_DUMPADDRESS] > 0) printf("ThreadDumpAddresses still running\n");
while (vnThreadsRunning[THREAD_MESSAGEHANDLER] > 0 || vnThreadsRunning[THREAD_RPCHANDLER] > 0)
Sleep(20);
Sleep(50);
DumpAddresses();
return true;
}
class CNetCleanup
{
public:
CNetCleanup()
{
}
~CNetCleanup()
{
// Close sockets
BOOST_FOREACH(CNode* pnode, vNodes)
if (pnode->hSocket != INVALID_SOCKET)
closesocket(pnode->hSocket);
BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket)
if (hListenSocket != INVALID_SOCKET)
if (closesocket(hListenSocket) == SOCKET_ERROR)
printf("closesocket(hListenSocket) failed with error %d\n", WSAGetLastError());
#ifdef WIN32
// Shutdown Windows Sockets
WSACleanup();
#endif
}
}
instance_of_cnetcleanup;<|fim▁end|> |
bool GetMyExternalIP2(const CService& addrConnect, const char* pszGet, const char* pszKeyword, CNetAddr& ipRet)
{
SOCKET hSocket; |
<|file_name|>wistia.py<|end_file_name|><|fim▁begin|># Miro - an RSS based video player application
# Copyright 2009 - Participatory Culture Foundation
#
# This file is part of vidscraper.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import re<|fim▁hole|>import urllib
from lxml import builder
from lxml import etree
from lxml.html import builder as E
from lxml.html import tostring
import oauth2
import simplejson
from vidscraper.decorators import provide_shortmem, parse_url, returns_unicode
from vidscraper import util
from vidscraper.errors import Error
from django.conf import settings
class WistiaError(Error):
pass
WISTIA_OEMBED_API_URL = 'http://fast.wistia.com/oembed?embedType=seo&url='
#'http://fast.wistia.com/oembed?url=http://home.wistia.com/medias/'
EMaker = builder.ElementMaker()
EMBED = EMaker.embed
EMBED_WIDTH = 425
EMBED_HEIGHT = 344
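# Fetch the oEmbed metadata for a Wistia URL, retrying up to three times with
# randomized exponential backoff before giving up.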
def get_shortmem(url):
shortmem = {}
video_id = WISTIA_REGEX.match(url).groupdict()['video_id']
    apiurl = '%s%s' % (WISTIA_OEMBED_API_URL, urllib.quote(url))
finalexcept = None
backoff = util.random_exponential_backoff(2)
for i in range(3):
try:
            response = urllib.urlopen(apiurl)
            api_raw_data = response.read()
api_data = simplejson.loads(api_raw_data)
except Exception as e:
finalexcept = e
continue
else:
shortmem['oembed'] = api_data
break
backoff.next()
if 'oembed' in shortmem:
return shortmem
errmsg = u'Wistia API error : '
if finalexcept is not None:
"""if isinstance(finalexcept, urllib.HTTPError):
errmsg += finalexcept.code + " - " + HTTPResponseMessages[ finalexcept.code ][0]
elif isinstance(finalexcept, urllib.URLError):
errmsg += "Could not connect - " + finalexcept.reason
else:"""
errmsg += str(finalexcept)
else:
errmsg += u' Unrecognized error. Sorry about that, chief.'
return None
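# Decorator: populate shortmem from the oEmbed API before calling the wrapped
# scraper function.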
def parse_api(scraper_func, shortmem=None):
def new_scraper_func(url, shortmem={}, *args, **kwargs):
if not shortmem:
shortmem = get_shortmem(url)
return scraper_func(url, shortmem=shortmem, *args, **kwargs)
return new_scraper_func
@parse_api
@returns_unicode
def scrape_title(url, shortmem={}):
try:
return shortmem['oembed']['title'] or u''
except KeyError:
return u''
@parse_api
@returns_unicode
def scrape_description(url, shortmem={}):
try:
description = shortmem['oembed']['title'] # No desc provided in oembed. Use title.
except KeyError:
description = ''
return util.clean_description_html(description)
@parse_api
@returns_unicode
def get_embed(url, shortmem={}, width=EMBED_WIDTH, height=EMBED_HEIGHT):
return shortmem['oembed']['html']
@parse_api
@returns_unicode
def get_thumbnail_url(url, shortmem={}):
return shortmem['oembed']['thumbnail_url']
@parse_api
@returns_unicode
def get_user(url, shortmem={}):
return shortmem['oembed']['provider_name']
@parse_api
@returns_unicode
def get_user_url(url, shortmem={}):
return shortmem['oembed']['provider_url']
@parse_api
@returns_unicode
def get_duration(url, shortmem={}):
return shortmem['oembed']['duration']
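# Matches wistia.com / wi.st / wistia.net media and iframe-embed URLs and captures
# the video id in the 'video_id' group.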
WISTIA_REGEX = re.compile(r'https?://(.+)?(wistia\.com|wi\.st|wistia\.net)/(medias|embed/iframe)/(?P<video_id>\w+)')
SUITE = {
'regex': WISTIA_REGEX,
'funcs': {
'title': scrape_title,
'description': scrape_description,
'embed': get_embed,
'thumbnail_url': get_thumbnail_url,
'user': get_user,
'user_url': get_user_url,
'duration': get_duration
},
'order': ['title', 'description', 'file_url', 'embed']}<|fim▁end|> | |
<|file_name|>UDPConnection.hpp<|end_file_name|><|fim▁begin|>//
// UDPConnection.hpp for server in /home/galibe_s/rendu/Spider/server/core<|fim▁hole|>// Login <[email protected]>
//
// Started on Sun Nov 6 17:00:50 2016 stephane galibert
// Last update Thu Nov 10 12:34:21 2016 stephane galibert
//
#pragma once
#include <iostream>
#include <string>
#include <queue>
#include <memory>
#include <boost/bind.hpp>
#include "AConnection.hpp"
class ConnectionManager;
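// AConnection implementation backed by a UDP socket: outgoing packets are queued in _toWrites and sent by the asynchronous write handlers.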
class UDPConnection : public AConnection
{
public:
UDPConnection(boost::asio::io_service &io_service,
RequestHandler &reqHandler,
PluginManager &pluginManager,
ConnectionManager &cm,
ServerConfig &config,
int port);
virtual ~UDPConnection(void);
virtual void start(void);
virtual void write(Packet *packet);
virtual void addLog(std::string const& toadd);
virtual void connectDB(void);
virtual void disconnectDB(void);
virtual void broadcast(std::string const& msg);
virtual void kill(void);
protected:
virtual void do_write(boost::system::error_code const& ec, size_t len);
virtual void do_read(boost::system::error_code const& ec, size_t len);
virtual void do_handshake(boost::system::error_code const& ec);
void write(void);
void read(void);
boost::asio::ip::udp::socket _socket;
boost::asio::ip::udp::endpoint _endpoint;
boost::asio::streambuf _read;
std::queue<Packet *> _toWrites;
};<|fim▁end|> | //
// Made by stephane galibert |
<|file_name|>p1.rs<|end_file_name|><|fim▁begin|>use serialize::base64::{self, ToBase64};
use serialize::hex::FromHex;
<|fim▁hole|>fn run() {
let hex = "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f697 \
36f6e6f7573206d757368726f6f6d";
let b64_exp = "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t";
let b64_act = hex.from_hex().unwrap().to_base64(base64::STANDARD);
assert_eq!(b64_exp, b64_act);
}<|fim▁end|> | #[test] |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os
import re
import yaml
try:
from packaging.version import parse as parse_version
except ImportError:
from pkg_resources import parse_version
from toolbox.config.common import BUTTON_CONFIG_KEYS, CRP_TYPES, CURRENT_MAX_VERSION, CURRENT_MIN_VERSION, PROTOCOLS
from .utils import counted_error, fatal_error
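# Returns -1 if the config version is below min_version, 1 if it is above max_version, and 0 otherwise.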
def compare_version(config: dict, min_version: str, max_version: str):
version = parse_version(config['version'])
if version < parse_version(min_version):
return -1
if version > parse_version(max_version):
return 1
return 0
def validate_version(config: dict):
cmp = compare_version(config, CURRENT_MIN_VERSION, CURRENT_MAX_VERSION)
if cmp < 0:
fatal_error('Please, upgrade to version %s with upgrade.py!', CURRENT_MIN_VERSION)
if cmp > 0:
fatal_error('Please, use a newer toolbox for version %s!', config['version'])
def get_crp_type(config: dict) -> str:
crp_type = config.get('crp_type') or 'static'
if crp_type not in CRP_TYPES:
fatal_error("Unknown crp_type: '%s' / %s", crp_type, CRP_TYPES)
return crp_type
def read_config(path: str, *, pre_validate: bool = True) -> dict:
"""
Read the config.yml file
:param path: path to the file or the base directory
:param pre_validate: check version and crp_type fields
:return: dict
"""
if os.path.isdir(path):
path = os.path.join(path, 'config.yml')
try:
with open(path, 'r') as f:
config = yaml.safe_load(f)
if pre_validate:
validate_version(config)
get_crp_type(config)
return config
except Exception as e:
fatal_error('%s(%s)', type(e).__name__, e)
def parse_bool(value) -> bool:
return str(value).lower() in ('true', '1')
def validate_bool(key, value):
if str(value).lower() not in ('true', 'false', '1', '0'):
counted_error('Invalid %s value. It must be boolean.', key)
def validate_flag(config: dict, flag_required: bool = False):
validate_bool('enable_flag_input', config.get('enable_flag_input'))
if config.get('flag'):
try:
if config['flag'][0:6] == 'regex:':
re.compile(config['flag'][6:])
except TypeError:
counted_error('Invalid flag value. It must be string.')
except Exception:
counted_error('Failed to compile regex flag.')
if not parse_bool(config.get('enable_flag_input')):
counted_error('enable_flag_input must be true for static flags.')
elif flag_required:
counted_error('A static (or regex) flag must be set.')
def validate_ports(ports: list, buttons: dict = None): # pylint: disable=too-many-branches
unique_ports = set()
ssh_ports_count = 0
for port in ports:
try:
port, protocol = port.split('/', 1)
unique_ports.add(port)
try:
if not 0 < int(port) < 65536:
raise ValueError
except Exception:<|fim▁hole|> counted_error('Invalid protocol in config.yml: %s. Valid protocols: %s', protocol, PROTOCOLS)
if protocol == 'ssh':
ssh_ports_count += 1
except Exception:
counted_error('Invalid port format. [port/protocol]')
if len(unique_ports) != len(ports):
counted_error('Duplicate port numbers found.')
if ssh_ports_count > 1:
counted_error('More than one SSH ports. Please, use a single SSH connection.')
if buttons is not None:
if not isinstance(buttons, dict):
counted_error('The buttons field must be a dict.')
else:
for button_key, button in buttons.items():
if button_key not in ports:
counted_error('Button key %s is not found in ports.', button_key)
for key in button.keys():
if key not in BUTTON_CONFIG_KEYS:
counted_error('Key %s is invalid for button %s.', key, button_key)<|fim▁end|> | counted_error('Invalid port number: %s. Ports must be numbers between 1 and 65535.', port)
if protocol not in PROTOCOLS: |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import PageTemplate from '../../components/PageTemplate';<|fim▁hole|>import Carousel from '../../components/Carousel';
import dadosIniciais from '../../data/dados_iniciais.json';
function Home() {
return (
<PageTemplate>
<BannerMain
videoTitle={dadosIniciais.categorias[0].videos[0].titulo}
url={dadosIniciais.categorias[0].videos[0].url}
videoDescription={'O que é front end?'}
/>
<Carousel
ignoreFirstVideo
category={dadosIniciais.categorias[0]}
/>
<Carousel
category={dadosIniciais.categorias[1]}
/>
<Carousel
category={dadosIniciais.categorias[2]}
/>
<Carousel
category={dadosIniciais.categorias[3]}
/>
<Carousel
category={dadosIniciais.categorias[4]}
/>
<Carousel
category={dadosIniciais.categorias[5]}
/>
</PageTemplate>
);
}
export default Home;<|fim▁end|> | import BannerMain from '../../components/BannerMain'; |
<|file_name|>run_combined.py<|end_file_name|><|fim▁begin|>import os
import csv
import pickle<|fim▁hole|>from indra.literature import id_lookup
from indra.sources import trips, reach, index_cards
from assembly_eval import have_file, run_assembly
if __name__ == '__main__':
pmc_ids = [s.strip() for s in open('pmcids.txt', 'rt').readlines()]
# Load the REACH reading output
with open('reach/reach_stmts_batch_4_eval.pkl') as f:
reach_stmts = pickle.load(f)
# Load the PMID to PMCID map
pmcid_to_pmid = {}
with open('pmc_batch_4_id_map.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
pmcid_to_pmid[row[0]] = row[1]
for pmcid in pmc_ids:
print 'Processing %s...' % pmcid
# Process TRIPS
trips_fname = 'trips/' + pmcid + '.ekb'
tp = trips.process_xml(open(trips_fname).read())
# Get REACH statements
reach_stmts_for_pmcid = reach_stmts.get(pmcid_to_pmid[pmcid], [])
if not reach_stmts_for_pmcid:
print "No REACH statements for %s" % pmcid
# Get NACTEM/ISI statements
fname = 'nactem/' + pmcid + '.cards'
if not os.path.exists(fname):
nactem_stmts = []
else:
icp = index_cards.process_json_file(fname, 'nactem')
nactem_stmts = icp.statements
# Combine all statements
all_statements = tp.statements + reach_stmts_for_pmcid + nactem_stmts
# Run assembly
run_assembly(all_statements, 'combined', pmcid)<|fim▁end|> |