prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import core logger = core.log.getLogger("monitoring-utils") class MonitoringUtils(object): def __init__(self): pass @staticmethod def check_existing_tag_in_topology(root, node, node_type, node_urns, domain=None): tag_exists = False try: elements = [] if not isinstance(node_urns, list): node_urns = [node_urns] try: for node_urn in node_urns: if node == "link": elements.extend( MonitoringUtils. check_existing_link_tag_in_topology( root, node_type, node_urn)) else: node_elements = MonitoringUtils.\ check_existing_generic_tag_in_topology( root, node, node_type, node_urn, domain) if len(node_elements) > 0: elements = node_elements except: pass if len(elements) > 0: tag_exists = True except: pass return tag_exists @staticmethod def check_existing_generic_tag_in_topology(root, node, node_type, node_urn, domain=None): elements = [] if node_type == "tn": if domain is not None: domain = domain if "urn" in domain else \ "urn:publicid:IDN+ocf:" + domain if node_type is None: elements = root.xpath( "//topology[@name='%s']//%s[@id='%s']" % (domain, node, node_urn)) elements = root.xpath( "//topology[@name='%s']//%s[@type='%s'][@id='%s']" % (domain, node, node_type, node_urn)) else: elements = root.xpath( "//%s[@type='%s'][@id='%s']" % (node, node_type, node_urn)) if node_type is None: elements = root.xpath("//%s[@id='%s']" % (node, node_urn)) return elements @staticmethod def check_existing_link_tag_in_topology(root, node_type, node_urn): elements = [] interfaces_same_link = True elem = root.xpath( "//link[@type='%s']//interface_ref[@client_id='%s']" % (node_type, node_urn)) if node_type is None: elem = root.xpath( "//link//interface_ref[@client_id='%s']" % node_urn) for element in elements: if element.getparent() == elem[0].getparent(): interfaces_same_link &= True else: interfaces_same_link &= False if interfaces_same_link: elements.extend(elem) return elements @staticmethod def find_virtual_link_end_to_end(hybrid_links): # 
Retrieve the endpoints of the slice ("abstract link" in M/MS) e2e_link_urns = set() # 1) Check for SDNRM-SDNRM end-paths for se_link in hybrid_links: # 1) Check for SDN-SDN end paths # 2) Check for SDN-TN end paths for link_end in [":ofam", ":tnrm"]: if link_end in se_link["source"]: e2e_link_urns.add(se_link["source"]) if link_end in se_link["destination"]: e2e_link_urns.add(se_link["destination"]) return list(e2e_link_urns) @staticmethod def find_virtual_links(topology_root): links_ids = [] for link_id in topology_root.xpath("//topology//link[@id]"): links_ids.append(link_id.attrib["id"]) return links_ids @staticmethod def find_slice_name(topology_root): slice_name = "" try: slice_name = topology_root.xpath("//topology")[0].attrib["name"] except Exception as e: logger.warning("Unable to retrieve slice name for topology. \ Details: %s" % e)<|fim▁hole|><|fim▁end|>
return slice_name
<|file_name|>float.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Developers of the Rand project. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Basic floating-point number distributions use crate::distributions::utils::FloatSIMDUtils; use crate::distributions::{Distribution, Standard}; use crate::Rng; use core::mem; #[cfg(feature = "simd_support")] use packed_simd::*; #[cfg(feature = "serde1")] use serde::{Serialize, Deserialize}; /// A distribution to sample floating point numbers uniformly in the half-open /// interval `(0, 1]`, i.e. including 1 but not 0. /// /// All values that can be generated are of the form `n * ε/2`. For `f32` /// the 24 most significant random bits of a `u32` are used and for `f64` the /// 53 most significant bits of a `u64` are used. The conversion uses the /// multiplicative method. /// /// See also: [`Standard`] which samples from `[0, 1)`, [`Open01`] /// which samples from `(0, 1)` and [`Uniform`] which samples from arbitrary /// ranges. /// /// # Example /// ``` /// use rand::{thread_rng, Rng}; /// use rand::distributions::OpenClosed01; /// /// let val: f32 = thread_rng().sample(OpenClosed01); /// println!("f32 from (0, 1): {}", val); /// ``` /// /// [`Standard`]: crate::distributions::Standard /// [`Open01`]: crate::distributions::Open01 /// [`Uniform`]: crate::distributions::uniform::Uniform #[derive(Clone, Copy, Debug)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct OpenClosed01; /// A distribution to sample floating point numbers uniformly in the open /// interval `(0, 1)`, i.e. not including either endpoint. /// /// All values that can be generated are of the form `n * ε + ε/2`. 
For `f32` /// the 23 most significant random bits of an `u32` are used, for `f64` 52 from /// an `u64`. The conversion uses a transmute-based method. /// /// See also: [`Standard`] which samples from `[0, 1)`, [`OpenClosed01`] /// which samples from `(0, 1]` and [`Uniform`] which samples from arbitrary /// ranges. /// /// # Example /// ``` /// use rand::{thread_rng, Rng}; /// use rand::distributions::Open01; /// /// let val: f32 = thread_rng().sample(Open01); /// println!("f32 from (0, 1): {}", val); /// ``` /// /// [`Standard`]: crate::distributions::Standard /// [`OpenClosed01`]: crate::distributions::OpenClosed01 /// [`Uniform`]: crate::distributions::uniform::Uniform #[derive(Clone, Copy, Debug)] #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] pub struct Open01; // This trait is needed by both this lib and rand_distr hence is a hidden export #[doc(hidden)] pub trait IntoFloat { type F; /// Helper method to combine the fraction and a contant exponent into a /// float. /// /// Only the least significant bits of `self` may be set, 23 for `f32` and /// 52 for `f64`. /// The resulting value will fall in a range that depends on the exponent. /// As an example the range with exponent 0 will be /// [2<sup>0</sup>..2<sup>1</sup>), which is [1..2). fn into_float_with_exponent(self, exponent: i32) -> Self::F; } macro_rules! float_impls { ($ty:ident, $uty:ident, $f_scalar:ident, $u_scalar:ty, $fraction_bits:expr, $exponent_bias:expr) => { impl IntoFloat for $uty { type F = $ty; #[inline(always)] fn into_float_with_exponent(self, exponent: i32) -> $ty { // The exponent is encoded using an offset-binary representation let exponent_bits: $u_scalar = (($exponent_bias + exponent) as $u_scalar) << $fraction_bits; $ty::from_bits(self | exponent_bits) } } impl Distribution<$ty> for Standard { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty { // Multiply-based method; 24/53 random bits; [0, 1) interval. 
// We use the most significant bits because for simple RNGs // those are usually more random. let float_size = mem::size_of::<$f_scalar>() as u32 * 8; let precision = $fraction_bits + 1; let scale = 1.0 / ((1 as $u_scalar << precision) as $f_scalar); let value: $uty = rng.gen(); let value = value >> (float_size - precision); scale * $ty::cast_from_int(value) } } impl Distribution<$ty> for OpenClosed01 { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty { // Multiply-based method; 24/53 random bits; (0, 1] interval. // We use the most significant bits because for simple RNGs // those are usually more random. let float_size = mem::size_of::<$f_scalar>() as u32 * 8; let precision = $fraction_bits + 1; let scale = 1.0 / ((1 as $u_scalar << precision) as $f_scalar); let value: $uty = rng.gen(); let value = value >> (float_size - precision); // Add 1 to shift up; will not overflow because of right-shift: scale * $ty::cast_from_int(value + 1) } } impl Distribution<$ty> for Open01 { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty { // Transmute-based method; 23/52 random bits; (0, 1) interval. // We use the most significant bits because for simple RNGs // those are usually more random. use core::$f_scalar::EPSILON; let float_size = mem::size_of::<$f_scalar>() as u32 * 8; let value: $uty = rng.gen(); let fraction = value >> (float_size - $fraction_bits); fraction.into_float_with_exponent(0) - (1.0 - EPSILON / 2.0) } } } } float_impls! { f32, u32, f32, u32, 23, 127 } float_impls! { f64, u64, f64, u64, 52, 1023 } #[cfg(feature = "simd_support")] float_impls! { f32x2, u32x2, f32, u32, 23, 127 } #[cfg(feature = "simd_support")] float_impls! { f32x4, u32x4, f32, u32, 23, 127 } #[cfg(feature = "simd_support")] float_impls! { f32x8, u32x8, f32, u32, 23, 127 } #[cfg(feature = "simd_support")] float_impls! { f32x16, u32x16, f32, u32, 23, 127 } #[cfg(feature = "simd_support")] float_impls! { f64x2, u64x2, f64, u64, 52, 1023 } #[cfg(feature = "simd_support")] float_impls! 
{ f64x4, u64x4, f64, u64, 52, 1023 } #[cfg(feature = "simd_support")] float_impls! { f64x8, u64x8, f64, u64, 52, 1023 } #[cfg(test)] mod tests { use super::*; use crate::rngs::mock::StepRng; const EPSILON32: f32 = ::core::f32::EPSILON; const EPSILON64: f64 = ::core::f64::EPSILON; macro_rules! test_f32 { ($fnn:ident, $ty:ident, $ZERO:expr, $EPSILON:expr) => { #[test] fn $fnn() { // Standard let mut zeros = StepRng::new(0, 0); assert_eq!(zeros.gen::<$ty>(), $ZERO); let mut one = StepRng::new(1 << 8 | 1 << (8 + 32), 0); assert_eq!(one.gen::<$ty>(), $EPSILON / 2.0); let mut max = StepRng::new(!0, 0); assert_eq!(max.gen::<$ty>(), 1.0 - $EPSILON / 2.0); // OpenClosed01 let mut zeros = StepRng::new(0, 0); assert_eq!(zeros.sample::<$ty, _>(OpenClosed01), 0.0 + $EPSILON / 2.0); let mut one = StepRng::new(1 << 8 | 1 << (8 + 32), 0); assert_eq!(one.sample::<$ty, _>(OpenClosed01), $EPSILON); let mut max = StepRng::new(!0, 0); assert_eq!(max.sample::<$ty, _>(OpenClosed01), $ZERO + 1.0); // Open01 let mut zeros = StepRng::new(0, 0); assert_eq!(zeros.sample::<$ty, _>(Open01), 0.0 + $EPSILON / 2.0); let mut one = StepRng::new(1 << 9 | 1 << (9 + 32), 0); assert_eq!(one.sample::<$ty, _>(Open01), $EPSILON / 2.0 * 3.0); let mut max = StepRng::new(!0, 0); assert_eq!(max.sample::<$ty, _>(Open01), 1.0 - $EPSILON / 2.0); } }; } test_f32! { f32_edge_cases, f32, 0.0, EPSILON32 } #[cfg(feature = "simd_support")] test_f32! { f32x2_edge_cases, f32x2, f32x2::splat(0.0), f32x2::splat(EPSILON32) } #[cfg(feature = "simd_support")] test_f32! { f32x4_edge_cases, f32x4, f32x4::splat(0.0), f32x4::splat(EPSILON32) } #[cfg(feature = "simd_support")] test_f32! { f32x8_edge_cases, f32x8, f32x8::splat(0.0), f32x8::splat(EPSILON32) } #[cfg(feature = "simd_support")] test_f32! { f32x16_edge_cases, f32x16, f32x16::splat(0.0), f32x16::splat(EPSILON32) } macro_rules! 
test_f64 { ($fnn:ident, $ty:ident, $ZERO:expr, $EPSILON:expr) => { #[test] fn $fnn() { // Standard let mut zeros = StepRng::new(0, 0); assert_eq!(zeros.gen::<$ty>(), $ZERO); let mut one = StepRng::new(1 << 11, 0); assert_eq!(one.gen::<$ty>(), $EPSILON / 2.0); let mut max = StepRng::new(!0, 0); assert_eq!(max.gen::<$ty>(), 1.0 - $EPSILON / 2.0); // OpenClosed01 let mut zeros = StepRng::new(0, 0); assert_eq!(zeros.sample::<$ty, _>(OpenClosed01), 0.0 + $EPSILON / 2.0); let mut one = StepRng::new(1 << 11, 0); assert_eq!(one.sample::<$ty, _>(OpenClosed01), $EPSILON); let mut max = StepRng::new(!0, 0); assert_eq!(max.sample::<$ty, _>(OpenClosed01), $ZERO + 1.0); // Open01 let mut zeros = StepRng::new(0, 0); assert_eq!(zeros.sample::<$ty, _>(Open01), 0.0 + $EPSILON / 2.0); let mut one = StepRng::new(1 << 12, 0); assert_eq!(one.sample::<$ty, _>(Open01), $EPSILON / 2.0 * 3.0); let mut max = StepRng::new(!0, 0); assert_eq!(max.sample::<$ty, _>(Open01), 1.0 - $EPSILON / 2.0); } }; } test_f64! { f64_edge_cases, f64, 0.0, EPSILON64 } #[cfg(feature = "simd_support")] test_f64! { f64x2_edge_cases, f64x2, f64x2::splat(0.0), f64x2::splat(EPSILON64) } #[cfg(feature = "simd_support")] test_f64! { f64x4_edge_cases, f64x4, f64x4::splat(0.0), f64x4::splat(EPSILON64) } #[cfg(feature = "simd_support")] test_f64! 
{ f64x8_edge_cases, f64x8, f64x8::splat(0.0), f64x8::splat(EPSILON64) } #[test] fn value_stability() { fn test_samples<T: Copy + core::fmt::Debug + PartialEq, D: Distribution<T>>( distr: &D, zero: T, expected: &[T], ) { let mut rng = crate::test::rng(0x6f44f5646c2a7334); let mut buf = [zero; 3]; for x in &mut buf { *x = rng.sample(&distr); } assert_eq!(&buf, expected); } test_samples(&Standard, 0f32, &[0.0035963655, 0.7346052, 0.09778172]); test_samples(&Standard, 0f64, &[ 0.7346051961657583, 0.20298547462974248, 0.8166436635290655, ]); test_samples(&OpenClosed01, 0f32, &[0.003596425, 0.73460525, 0.09778178]); test_samples(&OpenClosed01, 0f64, &[ 0.7346051961657584, 0.2029854746297426, 0.8166436635290656, ]); test_samples(&Open01, 0f32, &[0.0035963655, 0.73460525, 0.09778172]); test_samples(&Open01, 0f64, &[ 0.7346051961657584, 0.20298547462974248, 0.8166436635290656, ]); #[cfg(feature = "simd_support")] { // We only test a sub-set of types here. Values are identical to // non-SIMD types; we assume this pattern continues across all // SIMD types. test_samples(&Standard, f32x2::new(0.0, 0.0), &[ f32x2::new(0.0035963655, 0.7346052), f32x2::new(0.09778172, 0.20298547), f32x2::new(0.34296435, 0.81664366), ]); test_samples(&Standard, f64x2::new(0.0, 0.0), &[<|fim▁hole|> ]); } } }<|fim▁end|>
f64x2::new(0.7346051961657583, 0.20298547462974248), f64x2::new(0.8166436635290655, 0.7423708925400552), f64x2::new(0.16387782224016323, 0.9087068770169618),
<|file_name|>httpUtils.ts<|end_file_name|><|fim▁begin|>import * as Cookies from 'js-cookie' import AppSettings from '../appSettings' import { IFetchResult } from '../interfaces' import History from './history' export class HttpUtils { parseJSON<T> (response: Response): Promise<IFetchResult<T>> { return new Promise((resolve, reject) => { if (response.status === 401) { const location = window.location History.push(`/account/logout?returnUrl=${encodeURIComponent(location.pathname + location.search)}&logout=true`) return } else if (response.status === 403) { History.push('/account/login?returnUrl=/') throw { message: 'Access Denied', status: response.status, response } } response.text() .then((text) => { if (text.length > 0) { if (!response.ok && response.status === 500) { try { const data = JSON.parse(text) as T resolve({ status: response.status, ok: response.ok, data: data }) } catch { reject({ ok: false, status: response.status, data: { errorMessage: 'Something has gone wrong on the server!', configurationData: undefined, contentData: undefined, errorMessages: ['Something has gone wrong on the server!'],<|fim▁hole|> } } else { resolve({ status: response.status, ok: response.ok, data: JSON.parse(text) as T }) } } else { resolve({ status: response.status, ok: response.ok, data: {} as T }) } }) .catch(err => { reject(err) }) }) } get<T> (url: string): Promise<IFetchResult<T>> { return this.futchGet(url) } post<T, R> (url: string, postData: T): Promise<IFetchResult<R>> { return this.futch<T, R>(url, 'POST', postData) } put<T, R> (url: string, postData: T): Promise<IFetchResult<R>> { return this.futch<T, R>(url, 'PUT', postData) } delete<T, R> (url: string, postData: T): Promise<IFetchResult<R>> { return this.futch<T, R>(url, 'DELETE', postData) } postXhr = (url: string, opts: any, onProgress: any, onComplete: any) => { return new Promise((res, rej) => { let xhr = new XMLHttpRequest() xhr.open(opts.method || 'get', url) for (let k in opts.headers || {}) { 
xhr.setRequestHeader(k, opts.headers[k]) } xhr.onload = res xhr.onerror = rej xhr.onreadystatechange = onComplete xhr.setRequestHeader('X-CSRF-TOKEN-ARRAGROCMS', this.getCSRFCookie()) if (xhr.upload && onProgress) { xhr.upload.onprogress = onProgress // event.loaded / event.total * 100 //event.lengthComputable } xhr.send(opts.body) }) } private getCSRFCookie (): string { const csrf = Cookies.get('ARRAGROCMSCSRF') return csrf === undefined ? '' : csrf } private futchGet<T> (url: string): Promise<IFetchResult<T>> { return fetch(url, { credentials: 'same-origin' }) .then((response: Response) => this.parseJSON<T>(response)) .catch((error) => { if (url !== '/api/user/current') { if (error.data && error.data.errorMessage) { AppSettings.error(`${error.data.errorMessage} - ${url}`, AppSettings.AlertSettings) } else if (error.message) { AppSettings.error(`${error.message} - ${url}`, AppSettings.AlertSettings) } } throw error }) } private futch<T, R> (url: string, verb: string, postData: T): Promise<IFetchResult<R>> { return fetch(url, { credentials: 'same-origin', method: verb, headers: { 'Content-Type': 'application/json', 'X-CSRF-TOKEN-ARRAGROCMS': this.getCSRFCookie() }, body: JSON.stringify(postData) }) .then((response: Response) => this.parseJSON<R>(response)) .catch((error) => { if (error.data && error.data.errorMessage) { AppSettings.error(`${error.data.errorMessage} - ${url}`, AppSettings.AlertSettings) } else if (error.message) { AppSettings.error(`${error.message} - ${url}`, AppSettings.AlertSettings) } throw error }) } } const httpUtils = new HttpUtils() export default httpUtils<|fim▁end|>
errors: {}, rulesExceptionListContainers: [] } })
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup with open('requirements.txt') as f: required = f.read().splitlines()<|fim▁hole|> setup( name="aloft.py", version="0.0.4", author="Nate Mara", author_email="[email protected]", description="A simple API for getting winds aloft data from NOAA", license="MIT", test_suite="tests", keywords="aviation weather winds aloft", url="https://github.com/natemara/aloft.py", packages=['aloft'], classifiers=[ "Development Status :: 3 - Alpha", "Topic :: Utilities", "License :: OSI Approved :: MIT License", ], install_requires=required, )<|fim▁end|>
<|file_name|>PropertyFactory.java<|end_file_name|><|fim▁begin|>package models.factories; import models.squares.PropertySquare; import java.util.Set; /** * @author Ani Kristo */<|fim▁hole|>}<|fim▁end|>
interface PropertyFactory { Set<? extends PropertySquare> makeSquares();
<|file_name|>source_map.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The SourceMap tracks all the source code used within a single crate, mapping //! from integer byte positions to the original source code location. Each bit //! of source parsed during crate parsing (typically files, in-memory strings, //! or various bits of macro expansion) cover a continuous range of bytes in the //! SourceMap and are represented by SourceFiles. Byte positions are stored in //! `spans` and used pervasively in the compiler. They are absolute positions //! within the SourceMap, which upon request can be converted to line and column //! information, source code snippets, etc. pub use syntax_pos::*; pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo}; pub use self::ExpnFormat::*; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::sync::{Lrc, Lock, LockGuard, MappedLockGuard}; use std::cmp; use std::hash::Hash; use std::path::{Path, PathBuf}; use std::env; use std::fs; use std::io; use errors::SourceMapper; /// Return the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. 
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site); let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site); match (call_site1, call_site2) { (None, _) => sp, (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct Spanned<T> { pub node: T, pub span: Span, }<|fim▁hole|>pub fn respan<T>(sp: Span, t: T) -> Spanned<T> { Spanned {node: t, span: sp} } pub fn dummy_spanned<T>(t: T) -> Spanned<T> { respan(DUMMY_SP, t) } // _____________________________________________________________________________ // SourceFile, MultiByteChar, FileName, FileLines // /// An abstraction over the fs operations used by the Parser. pub trait FileLoader { /// Query the existence of a file. fn file_exists(&self, path: &Path) -> bool; /// Return an absolute path to a file, if possible. fn abs_path(&self, path: &Path) -> Option<PathBuf>; /// Read the contents of an UTF-8 file into memory. fn read_file(&self, path: &Path) -> io::Result<String>; } /// A FileLoader that uses std::fs to load real files. pub struct RealFileLoader; impl FileLoader for RealFileLoader { fn file_exists(&self, path: &Path) -> bool { fs::metadata(path).is_ok() } fn abs_path(&self, path: &Path) -> Option<PathBuf> { if path.is_absolute() { Some(path.to_path_buf()) } else { env::current_dir() .ok() .map(|cwd| cwd.join(path)) } } fn read_file(&self, path: &Path) -> io::Result<String> { fs::read_to_string(path) } } // This is a SourceFile identifier that is used to correlate SourceFiles between // subsequent compilation sessions (which is something we need to do during // incremental compilation). 
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] pub struct StableSourceFileId(u128); impl StableSourceFileId { pub fn new(source_file: &SourceFile) -> StableSourceFileId { StableSourceFileId::new_from_pieces(&source_file.name, source_file.name_was_remapped, source_file.unmapped_path.as_ref()) } pub fn new_from_pieces(name: &FileName, name_was_remapped: bool, unmapped_path: Option<&FileName>) -> StableSourceFileId { let mut hasher = StableHasher::new(); name.hash(&mut hasher); name_was_remapped.hash(&mut hasher); unmapped_path.hash(&mut hasher); StableSourceFileId(hasher.finish()) } } // _____________________________________________________________________________ // SourceMap // #[derive(Default)] pub(super) struct SourceMapFiles { pub(super) source_files: Vec<Lrc<SourceFile>>, stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>> } pub struct SourceMap { pub(super) files: Lock<SourceMapFiles>, file_loader: Box<dyn FileLoader + Sync + Send>, // This is used to apply the file path remapping as specified via // --remap-path-prefix to all SourceFiles allocated within this SourceMap. 
path_mapping: FilePathMapping, } impl SourceMap { pub fn new(path_mapping: FilePathMapping) -> SourceMap { SourceMap { files: Default::default(), file_loader: Box::new(RealFileLoader), path_mapping, } } pub fn with_file_loader(file_loader: Box<dyn FileLoader + Sync + Send>, path_mapping: FilePathMapping) -> SourceMap { SourceMap { files: Default::default(), file_loader: file_loader, path_mapping, } } pub fn path_mapping(&self) -> &FilePathMapping { &self.path_mapping } pub fn file_exists(&self, path: &Path) -> bool { self.file_loader.file_exists(path) } pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> { let src = self.file_loader.read_file(path)?; let filename = path.to_owned().into(); Ok(self.new_source_file(filename, src)) } pub fn files(&self) -> MappedLockGuard<Vec<Lrc<SourceFile>>> { LockGuard::map(self.files.borrow(), |files| &mut files.source_files) } pub fn source_file_by_stable_id(&self, stable_id: StableSourceFileId) -> Option<Lrc<SourceFile>> { self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|sf| sf.clone()) } fn next_start_pos(&self) -> usize { match self.files.borrow().source_files.last() { None => 0, // Add one so there is some space between files. This lets us distinguish // positions in the source_map, even in the presence of zero-length files. Some(last) => last.end_pos.to_usize() + 1, } } /// Creates a new source_file. /// If a file already exists in the source_map with the same id, that file is returned /// unmodified pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> { let start_pos = self.next_start_pos(); // The path is used to determine the directory for loading submodules and // include files, so it must be before remapping. // Note that filename may not be a valid path, eg it may be `<anon>` etc, // but this is okay because the directory determined by `path.pop()` will // be empty, so the working directory will be used. 
let unmapped_path = filename.clone(); let (filename, was_remapped) = match filename { FileName::Real(filename) => { let (filename, was_remapped) = self.path_mapping.map_prefix(filename); (FileName::Real(filename), was_remapped) }, other => (other, false), }; let file_id = StableSourceFileId::new_from_pieces(&filename, was_remapped, Some(&unmapped_path)); return match self.source_file_by_stable_id(file_id) { Some(lrc_sf) => lrc_sf, None => { let source_file = Lrc::new(SourceFile::new( filename, was_remapped, unmapped_path, src, Pos::from_usize(start_pos), )); let mut files = self.files.borrow_mut(); files.source_files.push(source_file.clone()); files.stable_id_to_source_file.insert(file_id, source_file.clone()); source_file } } } /// Allocates a new SourceFile representing a source file from an external /// crate. The source code of such an "imported source_file" is not available, /// but we still know enough to generate accurate debuginfo location /// information for things inlined from other crates. 
pub fn new_imported_source_file( &self, filename: FileName, name_was_remapped: bool, crate_of_origin: u32, src_hash: u128, name_hash: u128, source_len: usize, mut file_local_lines: Vec<BytePos>, mut file_local_multibyte_chars: Vec<MultiByteChar>, mut file_local_non_narrow_chars: Vec<NonNarrowChar>, ) -> Lrc<SourceFile> { let start_pos = self.next_start_pos(); let end_pos = Pos::from_usize(start_pos + source_len); let start_pos = Pos::from_usize(start_pos); for pos in &mut file_local_lines { *pos = *pos + start_pos; } for mbc in &mut file_local_multibyte_chars { mbc.pos = mbc.pos + start_pos; } for swc in &mut file_local_non_narrow_chars { *swc = *swc + start_pos; } let source_file = Lrc::new(SourceFile { name: filename, name_was_remapped, unmapped_path: None, crate_of_origin, src: None, src_hash, external_src: Lock::new(ExternalSource::AbsentOk), start_pos, end_pos, lines: file_local_lines, multibyte_chars: file_local_multibyte_chars, non_narrow_chars: file_local_non_narrow_chars, name_hash, }); let mut files = self.files.borrow_mut(); files.source_files.push(source_file.clone()); files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file), source_file.clone()); source_file } pub fn mk_substr_filename(&self, sp: Span) -> String { let pos = self.lookup_char_pos(sp.lo()); format!("<{}:{}:{}>", pos.file.name, pos.line, pos.col.to_usize() + 1) } // If there is a doctest_offset, apply it to the line pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize { return match file { FileName::DocTest(_, offset) => { return if *offset >= 0 { orig + *offset as usize } else { orig - (-(*offset)) as usize } }, _ => orig } } /// Lookup source information about a BytePos pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { let chpos = self.bytepos_to_file_charpos(pos); match self.lookup_line(pos) { Ok(SourceFileAndLine { sf: f, line: a }) => { let line = a + 1; // Line numbers start at 1 let linebpos = f.lines[a]; let linechpos = 
self.bytepos_to_file_charpos(linebpos); let col = chpos - linechpos; let col_display = { let start_width_idx = f .non_narrow_chars .binary_search_by_key(&linebpos, |x| x.pos()) .unwrap_or_else(|x| x); let end_width_idx = f .non_narrow_chars .binary_search_by_key(&pos, |x| x.pos()) .unwrap_or_else(|x| x); let special_chars = end_width_idx - start_width_idx; let non_narrow: usize = f .non_narrow_chars[start_width_idx..end_width_idx] .into_iter() .map(|x| x.width()) .sum(); col.0 - special_chars + non_narrow }; debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); debug!("byte is on line: {}", line); assert!(chpos >= linechpos); Loc { file: f, line, col, col_display, } } Err(f) => { let col_display = { let end_width_idx = f .non_narrow_chars .binary_search_by_key(&pos, |x| x.pos()) .unwrap_or_else(|x| x); let non_narrow: usize = f .non_narrow_chars[0..end_width_idx] .into_iter() .map(|x| x.width()) .sum(); chpos.0 - end_width_idx + non_narrow }; Loc { file: f, line: 0, col: chpos, col_display, } } } } // If the relevant source_file is empty, we don't return a line number. pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> { let idx = self.lookup_source_file_idx(pos); let f = (*self.files.borrow().source_files)[idx].clone(); match f.lookup_line(pos) { Some(line) => Ok(SourceFileAndLine { sf: f, line: line }), None => Err(f) } } pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { let loc = self.lookup_char_pos(pos); LocWithOpt { filename: loc.file.name.clone(), line: loc.line, col: loc.col, file: Some(loc.file) } } /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If /// there are gaps between lhs and rhs, the resulting union will cross these gaps. 
/// For this to work, the spans have to be: /// /// * the ctxt of both spans much match /// * the lhs span needs to end on the same line the rhs span begins /// * the lhs span must start at or before the rhs span pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> { // make sure we're at the same expansion id if sp_lhs.ctxt() != sp_rhs.ctxt() { return None; } let lhs_end = match self.lookup_line(sp_lhs.hi()) { Ok(x) => x, Err(_) => return None }; let rhs_begin = match self.lookup_line(sp_rhs.lo()) { Ok(x) => x, Err(_) => return None }; // if we must cross lines to merge, don't merge if lhs_end.line != rhs_begin.line { return None; } // ensure these follow the expected order and we don't overlap if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) { Some(sp_lhs.to(sp_rhs)) } else { None } } pub fn span_to_string(&self, sp: Span) -> String { if self.files.borrow().source_files.is_empty() && sp.is_dummy() { return "no-location".to_string(); } let lo = self.lookup_char_pos_adj(sp.lo()); let hi = self.lookup_char_pos_adj(sp.hi()); format!("{}:{}:{}: {}:{}", lo.filename, lo.line, lo.col.to_usize() + 1, hi.line, hi.col.to_usize() + 1) } pub fn span_to_filename(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo()).file.name.clone() } pub fn span_to_unmapped_path(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo()).file.unmapped_path.clone() .expect("SourceMap::span_to_unmapped_path called for imported SourceFile?") } pub fn is_multiline(&self, sp: Span) -> bool { let lo = self.lookup_char_pos(sp.lo()); let hi = self.lookup_char_pos(sp.hi()); lo.line != hi.line } pub fn span_to_lines(&self, sp: Span) -> FileLinesResult { debug!("span_to_lines(sp={:?})", sp); if sp.lo() > sp.hi() { return Err(SpanLinesError::IllFormedSpan(sp)); } let lo = self.lookup_char_pos(sp.lo()); debug!("span_to_lines: lo={:?}", lo); let hi = self.lookup_char_pos(sp.hi()); debug!("span_to_lines: hi={:?}", hi); if lo.file.start_pos != hi.file.start_pos { 
return Err(SpanLinesError::DistinctSources(DistinctSources { begin: (lo.file.name.clone(), lo.file.start_pos), end: (hi.file.name.clone(), hi.file.start_pos), })); } assert!(hi.line >= lo.line); let mut lines = Vec::with_capacity(hi.line - lo.line + 1); // The span starts partway through the first line, // but after that it starts from offset 0. let mut start_col = lo.col; // For every line but the last, it extends from `start_col` // and to the end of the line. Be careful because the line // numbers in Loc are 1-based, so we subtract 1 to get 0-based // lines. for line_index in lo.line-1 .. hi.line-1 { let line_len = lo.file.get_line(line_index) .map(|s| s.chars().count()) .unwrap_or(0); lines.push(LineInfo { line_index, start_col, end_col: CharPos::from_usize(line_len) }); start_col = CharPos::from_usize(0); } // For the last line, it extends from `start_col` to `hi.col`: lines.push(LineInfo { line_index: hi.line - 1, start_col, end_col: hi.col }); Ok(FileLines {file: lo.file, lines: lines}) } /// Extract the source surrounding the given `Span` using the `extract_source` function. The /// extract function takes three arguments: a string slice containing the source, an index in /// the slice for the beginning of the span and an index in the slice for the end of the span. 
    // Generic snippet extraction: resolves `sp` to byte offsets within a single
    // source file and hands (src, start_index, end_index) to `extract_source`.
    // All the public snippet helpers below funnel through this.
    fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError>
        where F: Fn(&str, usize, usize) -> String
    {
        if sp.lo() > sp.hi() {
            return Err(SpanSnippetError::IllFormedSpan(sp));
        }

        let local_begin = self.lookup_byte_offset(sp.lo());
        let local_end = self.lookup_byte_offset(sp.hi());

        if local_begin.sf.start_pos != local_end.sf.start_pos {
            // Endpoints live in different source files: no contiguous snippet exists.
            return Err(SpanSnippetError::DistinctSources(DistinctSources {
                begin: (local_begin.sf.name.clone(),
                        local_begin.sf.start_pos),
                end: (local_end.sf.name.clone(),
                      local_end.sf.start_pos)
            }));
        } else {
            // Make sure external (imported) source is loaded before slicing into it.
            self.ensure_source_file_source_present(local_begin.sf.clone());

            let start_index = local_begin.pos.to_usize();
            let end_index = local_end.pos.to_usize();
            let source_len = (local_begin.sf.end_pos -
                              local_begin.sf.start_pos).to_usize();

            if start_index > end_index || end_index > source_len {
                return Err(SpanSnippetError::MalformedForSourcemap(
                    MalformedSourceMapPositions {
                        name: local_begin.sf.name.clone(),
                        source_len,
                        begin_pos: local_begin.pos,
                        end_pos: local_end.pos,
                    }));
            }

            // Prefer in-memory source; fall back to external source if available.
            if let Some(ref src) = local_begin.sf.src {
                return Ok(extract_source(src, start_index, end_index));
            } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
                return Ok(extract_source(src, start_index, end_index));
            } else {
                return Err(SpanSnippetError::SourceNotAvailable {
                    filename: local_begin.sf.name.clone()
                });
            }
        }
    }

    /// Return the source snippet as `String` corresponding to the given `Span`
    pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
        self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index]
                                                                .to_string())
    }

    /// Return the number of whitespace characters at the start of the line the
    /// span begins on (its left margin), or `None` if the source is unavailable.
    pub fn span_to_margin(&self, sp: Span) -> Option<usize> {
        match self.span_to_prev_source(sp) {
            Err(_) => None,
            // The last line of everything-before-the-span is the line `sp` starts on.
            Ok(source) => source.split('\n').last().map(|last_line| {
                last_line.len() - last_line.trim_start().len()
            })
        }
    }

    /// Return the source snippet as `String` before the given `Span`
    pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
        self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string())
    }

    /// Extend the given `Span` to just after the previous occurrence of `c`. Return the same span
    /// if no character could be found or if an error occurred while retrieving the code snippet.
    pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span {
        if let Ok(prev_source) = self.span_to_prev_source(sp) {
            // Text strictly between the last `c` and the start of `sp`.
            let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_start();
            // Only extend within a single line.
            if !prev_source.is_empty() && !prev_source.contains('\n') {
                return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
            }
        }

        sp
    }

    /// Extend the given `Span` to just after the previous occurrence of `pat` when surrounded by
    /// whitespace. Return the same span if no character could be found or if an error occurred
    /// while retrieving the code snippet.
    pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span {
        // assure that the pattern is delimited, to avoid the following
        //     fn my_fn()
        //     ^^^^ returned span without the check
        //     ---------- correct span
        for ws in &[" ", "\t", "\n"] {
            let pat = pat.to_owned() + ws;
            if let Ok(prev_source) = self.span_to_prev_source(sp) {
                let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_start();
                if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) {
                    return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
                }
            }
        }

        sp
    }

    /// Given a `Span`, try to get a shorter span ending before the first occurrence of `c` `char`
    pub fn span_until_char(&self, sp: Span, c: char) -> Span {
        match self.span_to_snippet(sp) {
            Ok(snippet) => {
                let snippet = snippet.split(c).nth(0).unwrap_or("").trim_end();
                if !snippet.is_empty() && !snippet.contains('\n') {
                    sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
                } else {
                    sp
                }
            }
            _ => sp,
        }
    }

    /// Given a `Span`, try to get a shorter span ending just after the first occurrence of `char`
    /// `c`.
    pub fn span_through_char(&self, sp: Span, c: char) -> Span {
        if let Ok(snippet) = self.span_to_snippet(sp) {
            if let Some(offset) = snippet.find(c) {
                // Include the full UTF-8 width of `c`, not just one byte.
                return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32));
            }
        }
        sp
    }

    /// Given a `Span`, get a new `Span` covering the first token and all its trailing whitespace or
    /// the original `Span`.
    ///
    /// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned.
    pub fn span_until_non_whitespace(&self, sp: Span) -> Span {
        // Tracks whether we have already passed the first run of whitespace.
        let mut whitespace_found = false;

        self.span_take_while(sp, |c| {
            if !whitespace_found && c.is_whitespace() {
                whitespace_found = true;
            }

            // Stop at the first non-whitespace character *after* whitespace,
            // i.e. keep the first token plus its trailing whitespace.
            if whitespace_found && !c.is_whitespace() {
                false
            } else {
                true
            }
        })
    }

    /// Given a `Span`, get a new `Span` covering the first token without its trailing whitespace or
    /// the original `Span` in case of error.
    ///
    /// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned.
    pub fn span_until_whitespace(&self, sp: Span) -> Span {
        self.span_take_while(sp, |c| !c.is_whitespace())
    }

    /// Given a `Span`, get a shorter one until `predicate` yields false.
    pub fn span_take_while<P>(&self, sp: Span, predicate: P) -> Span
        where P: for <'r> FnMut(&'r char) -> bool
    {
        if let Ok(snippet) = self.span_to_snippet(sp) {
            // Sum UTF-8 byte widths so the resulting offset is a valid BytePos.
            let offset = snippet.chars()
                .take_while(predicate)
                .map(|c| c.len_utf8())
                .sum::<usize>();

            sp.with_hi(BytePos(sp.lo().0 + (offset as u32)))
        } else {
            sp
        }
    }

    /// Returns the span of a definition's "head": everything before the opening `{`.
    pub fn def_span(&self, sp: Span) -> Span {
        self.span_until_char(sp, '{')
    }

    /// Returns a new span representing just the start-point of this span
    pub fn start_point(&self, sp: Span) -> Span {
        let pos = sp.lo().0;
        // Width of the first character, so multibyte characters are covered whole.
        let width = self.find_width_of_character_at_span(sp, false);
        let corrected_start_position = pos.checked_add(width).unwrap_or(pos);
        let end_point = BytePos(cmp::max(corrected_start_position, sp.lo().0));
        sp.with_hi(end_point)
    }

    /// Returns a new span representing just the end-point of this span
    pub fn end_point(&self, sp: Span) -> Span {
        let pos = sp.hi().0;

        let width = self.find_width_of_character_at_span(sp, false);
        let corrected_end_position = pos.checked_sub(width).unwrap_or(pos);

        // Never move `lo` past the span's own start.
        let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0));
        sp.with_lo(end_point)
    }

    /// Returns a new span representing the next character after the end-point of this span
    pub fn next_point(&self, sp: Span) -> Span {
        let start_of_next_point = sp.hi().0;

        let width = self.find_width_of_character_at_span(sp, true);
        // If the width is 1, then the next span should point to the same `lo` and `hi`. However,
        // in the case of a multibyte character, where the width != 1, the next span should
        // span multiple bytes to include the whole character.
        let end_of_next_point = start_of_next_point.checked_add(
            width - 1).unwrap_or(start_of_next_point);

        let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point));
        Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt())
    }

    /// Finds the width of a character, either before or after the provided span.
fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { // Disregard malformed spans and assume a one-byte wide character. if sp.lo() >= sp.hi() { debug!("find_width_of_character_at_span: early return malformed span"); return 1; } let local_begin = self.lookup_byte_offset(sp.lo()); let local_end = self.lookup_byte_offset(sp.hi()); debug!("find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`", local_begin, local_end); let start_index = local_begin.pos.to_usize(); let end_index = local_end.pos.to_usize(); debug!("find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`", start_index, end_index); // Disregard indexes that are at the start or end of their spans, they can't fit bigger // characters. if (!forwards && end_index == usize::min_value()) || (forwards && start_index == usize::max_value()) { debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte"); return 1; } let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize(); debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len); // Ensure indexes are also not malformed. if start_index > end_index || end_index > source_len { debug!("find_width_of_character_at_span: source indexes are malformed"); return 1; } let src = local_begin.sf.external_src.borrow(); // We need to extend the snippet to the end of the src rather than to end_index so when // searching forwards for boundaries we've got somewhere to search. 
let snippet = if let Some(ref src) = local_begin.sf.src { let len = src.len(); (&src[start_index..len]) } else if let Some(src) = src.get_source() { let len = src.len(); (&src[start_index..len]) } else { return 1; }; debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet); let mut target = if forwards { end_index + 1 } else { end_index - 1 }; debug!("find_width_of_character_at_span: initial target=`{:?}`", target); while !snippet.is_char_boundary(target - start_index) && target < source_len { target = if forwards { target + 1 } else { match target.checked_sub(1) { Some(target) => target, None => { break; } } }; debug!("find_width_of_character_at_span: target=`{:?}`", target); } debug!("find_width_of_character_at_span: final target=`{:?}`", target); if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 } } pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> { for sf in self.files.borrow().source_files.iter() { if *filename == sf.name { return Some(sf.clone()); } } None } /// For a global BytePos compute the local offset within the containing SourceFile pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { let idx = self.lookup_source_file_idx(bpos); let sf = (*self.files.borrow().source_files)[idx].clone(); let offset = bpos - sf.start_pos; SourceFileAndBytePos {sf: sf, pos: offset} } /// Converts an absolute BytePos to a CharPos relative to the source_file. pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { let idx = self.lookup_source_file_idx(bpos); let map = &(*self.files.borrow().source_files)[idx]; // The number of extra bytes due to multibyte chars in the SourceFile let mut total_extra_bytes = 0; for mbc in map.multibyte_chars.iter() { debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); if mbc.pos < bpos { // every character is at least one byte, so we only // count the actual extra bytes. 
total_extra_bytes += mbc.bytes as u32 - 1; // We should never see a byte position in the middle of a // character assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); } else { break; } } assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize) } // Return the index of the source_file (in self.files) which contains pos. pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize { let files = self.files.borrow(); let files = &files.source_files; let count = files.len(); // Binary search for the source_file. let mut a = 0; let mut b = count; while b - a > 1 { let m = (a + b) / 2; if files[m].start_pos > pos { b = m; } else { a = m; } } assert!(a < count, "position {} does not resolve to a source location", pos.to_usize()); return a; } pub fn count_lines(&self) -> usize { self.files().iter().fold(0, |a, f| a + f.count_lines()) } pub fn generate_fn_name_span(&self, span: Span) -> Option<Span> { let prev_span = self.span_extend_to_prev_str(span, "fn", true); self.span_to_snippet(prev_span).map(|snippet| { let len = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') .expect("no label after fn"); prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)) }).ok() } /// Take the span of a type parameter in a function signature and try to generate a span for the /// function name (with generics) and a new snippet for this span with the pointed type /// parameter as a new local type parameter. /// /// For instance: /// ```rust,ignore (pseudo-Rust) /// // Given span /// fn my_function(param: T) /// // ^ Original span /// /// // Result /// fn my_function(param: T) /// // ^^^^^^^^^^^ Generated span with snippet `my_function<T>` /// ``` /// /// Attention: The method used is very fragile since it essentially duplicates the work of the /// parser. 
If you need to use this function or something similar, please consider updating the /// source_map functions and this function to something more robust. pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> { // Try to extend the span to the previous "fn" keyword to retrieve the function // signature let sugg_span = self.span_extend_to_prev_str(span, "fn", false); if sugg_span != span { if let Ok(snippet) = self.span_to_snippet(sugg_span) { // Consume the function name let mut offset = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') .expect("no label after fn"); // Consume the generics part of the function signature let mut bracket_counter = 0; let mut last_char = None; for c in snippet[offset..].chars() { match c { '<' => bracket_counter += 1, '>' => bracket_counter -= 1, '(' => if bracket_counter == 0 { break; } _ => {} } offset += c.len_utf8(); last_char = Some(c); } // Adjust the suggestion span to encompass the function name with its generics let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32)); // Prepare the new suggested snippet to append the type parameter that triggered // the error in the generics of the function signature let mut new_snippet = if last_char == Some('>') { format!("{}, ", &snippet[..(offset - '>'.len_utf8())]) } else { format!("{}<", &snippet[..offset]) }; new_snippet.push_str( &self.span_to_snippet(span).unwrap_or_else(|_| "T".to_string())); new_snippet.push('>'); return Some((sugg_span, new_snippet)); } } None } } impl SourceMapper for SourceMap { fn lookup_char_pos(&self, pos: BytePos) -> Loc { self.lookup_char_pos(pos) } fn span_to_lines(&self, sp: Span) -> FileLinesResult { self.span_to_lines(sp) } fn span_to_string(&self, sp: Span) -> String { self.span_to_string(sp) } fn span_to_filename(&self, sp: Span) -> FileName { self.span_to_filename(sp) } fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> { self.merge_spans(sp_lhs, sp_rhs) } fn 
call_span_if_macro(&self, sp: Span) -> Span { if self.span_to_filename(sp.clone()).is_macros() { let v = sp.macro_backtrace(); if let Some(use_site) = v.last() { return use_site.call_site; } } sp } fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool { source_file.add_external_src( || match source_file.name { FileName::Real(ref name) => self.file_loader.read_file(name).ok(), _ => None, } ) } fn doctest_offset_line(&self, file: &FileName, line: usize) -> usize { self.doctest_offset_line(file, line) } } #[derive(Clone)] pub struct FilePathMapping { mapping: Vec<(PathBuf, PathBuf)>, } impl FilePathMapping { pub fn empty() -> FilePathMapping { FilePathMapping { mapping: vec![] } } pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping { FilePathMapping { mapping, } } /// Applies any path prefix substitution as defined by the mapping. /// The return value is the remapped path and a boolean indicating whether /// the path was affected by the mapping. pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) { // NOTE: We are iterating over the mapping entries from last to first // because entries specified later on the command line should // take precedence. 
for &(ref from, ref to) in self.mapping.iter().rev() { if let Ok(rest) = path.strip_prefix(from) { return (to.join(rest), true); } } (path, false) } } // _____________________________________________________________________________ // Tests // #[cfg(test)] mod tests { use super::*; use rustc_data_structures::sync::Lrc; fn init_source_map() -> SourceMap { let sm = SourceMap::new(FilePathMapping::empty()); sm.new_source_file(PathBuf::from("blork.rs").into(), "first line.\nsecond line".to_string()); sm.new_source_file(PathBuf::from("empty.rs").into(), String::new()); sm.new_source_file(PathBuf::from("blork2.rs").into(), "first line.\nsecond line".to_string()); sm } #[test] fn t3() { // Test lookup_byte_offset let sm = init_source_map(); let srcfbp1 = sm.lookup_byte_offset(BytePos(23)); assert_eq!(srcfbp1.sf.name, PathBuf::from("blork.rs").into()); assert_eq!(srcfbp1.pos, BytePos(23)); let srcfbp1 = sm.lookup_byte_offset(BytePos(24)); assert_eq!(srcfbp1.sf.name, PathBuf::from("empty.rs").into()); assert_eq!(srcfbp1.pos, BytePos(0)); let srcfbp2 = sm.lookup_byte_offset(BytePos(25)); assert_eq!(srcfbp2.sf.name, PathBuf::from("blork2.rs").into()); assert_eq!(srcfbp2.pos, BytePos(0)); } #[test] fn t4() { // Test bytepos_to_file_charpos let sm = init_source_map(); let cp1 = sm.bytepos_to_file_charpos(BytePos(22)); assert_eq!(cp1, CharPos(22)); let cp2 = sm.bytepos_to_file_charpos(BytePos(25)); assert_eq!(cp2, CharPos(0)); } #[test] fn t5() { // Test zero-length source_files. let sm = init_source_map(); let loc1 = sm.lookup_char_pos(BytePos(22)); assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into()); assert_eq!(loc1.line, 2); assert_eq!(loc1.col, CharPos(10)); let loc2 = sm.lookup_char_pos(BytePos(25)); assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into()); assert_eq!(loc2.line, 1); assert_eq!(loc2.col, CharPos(0)); } fn init_source_map_mbc() -> SourceMap { let sm = SourceMap::new(FilePathMapping::empty()); // € is a three byte utf8 char. 
sm.new_source_file(PathBuf::from("blork.rs").into(), "fir€st €€€€ line.\nsecond line".to_string()); sm.new_source_file(PathBuf::from("blork2.rs").into(), "first line€€.\n€ second line".to_string()); sm } #[test] fn t6() { // Test bytepos_to_file_charpos in the presence of multi-byte chars let sm = init_source_map_mbc(); let cp1 = sm.bytepos_to_file_charpos(BytePos(3)); assert_eq!(cp1, CharPos(3)); let cp2 = sm.bytepos_to_file_charpos(BytePos(6)); assert_eq!(cp2, CharPos(4)); let cp3 = sm.bytepos_to_file_charpos(BytePos(56)); assert_eq!(cp3, CharPos(12)); let cp4 = sm.bytepos_to_file_charpos(BytePos(61)); assert_eq!(cp4, CharPos(15)); } #[test] fn t7() { // Test span_to_lines for a span ending at the end of source_file let sm = init_source_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let file_lines = sm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into()); assert_eq!(file_lines.lines.len(), 1); assert_eq!(file_lines.lines[0].line_index, 1); } /// Given a string like " ~~~~~~~~~~~~ ", produces a span /// converting that range. The idea is that the string has the same /// length as the input, and we uncover the byte positions. Note /// that this can span lines and so on. fn span_from_selection(input: &str, selection: &str) -> Span { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) } /// Test span_to_snippet and span_to_lines for a span converting 3 /// lines in the middle of a file. 
    #[test]
    fn span_to_snippet_and_lines_spanning_multiple_lines() {
        let sm = SourceMap::new(FilePathMapping::empty());
        let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
        // The `~` characters mark the selected byte range within `inputtext`.
        let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
        sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
        let span = span_from_selection(inputtext, selection);

        // check that we are extracting the text we thought we were extracting
        assert_eq!(&sm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");

        // check that span_to_lines gives us the complete result with the lines/cols we expected
        let lines = sm.span_to_lines(span).unwrap();
        let expected = vec![
            LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) },
            LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) },
            LineInfo { line_index: 3, start_col: CharPos(0), end_col: CharPos(5) }
            ];
        assert_eq!(lines.lines, expected);
    }

    #[test]
    fn t8() {
        // Test span_to_snippet for a span ending at the end of source_file
        let sm = init_source_map();
        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
        let snippet = sm.span_to_snippet(span);

        assert_eq!(snippet, Ok("second line".to_string()));
    }

    #[test]
    fn t9() {
        // Test span_to_str for a span ending at the end of source_file
        let sm = init_source_map();
        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
        let sstr = sm.span_to_string(span);

        // Line/column numbers in the rendered string are 1-based.
        assert_eq!(sstr, "blork.rs:2:1: 2:12");
    }

    /// Test failing to merge two spans on different lines
    #[test]
    fn span_merging_fail() {
        let sm = SourceMap::new(FilePathMapping::empty());
        let inputtext = "bbbb BB\ncc CCC\n";
        // The two selections sit on different lines, so the merge must fail.
        let selection1 = "     ~~\n      \n";
        let selection2 = "       \n   ~~~\n";
        sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
        let span1 = span_from_selection(inputtext, selection1);
        let span2 = span_from_selection(inputtext, selection2);

        assert!(sm.merge_spans(span1, span2).is_none());
    }

    /// Returns the span corresponding to the `n`th occurrence of
    /// `substring` in `source_text`.
    // Test-only helper for turning "the n-th occurrence of a substring" into a Span.
    trait SourceMapExtension {
        fn span_substr(&self,
                       file: &Lrc<SourceFile>,
                       source_text: &str,
                       substring: &str,
                       n: usize)
                       -> Span;
    }

    impl SourceMapExtension for SourceMap {
        fn span_substr(&self,
                       file: &Lrc<SourceFile>,
                       source_text: &str,
                       substring: &str,
                       n: usize)
                       -> Span
        {
            println!("span_substr(file={:?}/{:?}, substring={:?}, n={})",
                     file.name, file.start_pos, substring, n);
            // `i` counts occurrences seen so far; `hi` is the search resume offset.
            let mut i = 0;
            let mut hi = 0;
            loop {
                let offset = source_text[hi..].find(substring).unwrap_or_else(|| {
                    panic!("source_text `{}` does not have {} occurrences of `{}`, only {}",
                           source_text, n, substring, i);
                });
                let lo = hi + offset;
                hi = lo + substring.len();
                if i == n {
                    // Positions are file-relative; add the file's start offset.
                    let span = Span::new(
                        BytePos(lo as u32 + file.start_pos.0),
                        BytePos(hi as u32 + file.start_pos.0),
                        NO_EXPANSION,
                    );
                    // Sanity check: the span really does cover `substring`.
                    assert_eq!(&self.span_to_snippet(span).unwrap()[..],
                               substring);
                    return span;
                }
                i += 1;
            }
        }
    }
}
<|file_name|>Indices1.cpp<|end_file_name|><|fim▁begin|>#include <seqan/sequence.h> #include <seqan/index.h> using namespace seqan; int main() { String<Dna5> genome = "TTATTAAGCGTATAGCCCTATAAATATAA"; Index<String<Dna5>, IndexEsa<> > esaIndex(genome); <|fim▁hole|> while(find(esaFinder, "TATAA")){ std::cout << position(esaFinder) << '\n'; // -> 0 } }<|fim▁end|>
Finder<Index<String<Dna5>, IndexEsa<> > > esaFinder(esaIndex);
<|file_name|>baserlib.py<|end_file_name|><|fim▁begin|># # # Copyright (C) 2006, 2007, 2008, 2012 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Remote API base resources library. """ # pylint: disable=C0103 # C0103: Invalid name, since the R_* names are not conforming import logging from ganeti import luxi from ganeti import rapi from ganeti import http from ganeti import errors from ganeti import compat from ganeti import constants from ganeti import pathutils from ganeti import utils # Dummy value to detect unchanged parameters _DEFAULT = object() #: Supported HTTP methods _SUPPORTED_METHODS = compat.UniqueFrozenset([ http.HTTP_DELETE, http.HTTP_GET, http.HTTP_POST, http.HTTP_PUT, ]) def _BuildOpcodeAttributes(): """Builds list of attributes used for per-handler opcodes. """ return [(method, "%s_OPCODE" % method, "%s_RENAME" % method, "%s_ALIASES" % method, "Get%sOpInput" % method.capitalize()) for method in _SUPPORTED_METHODS] OPCODE_ATTRS = _BuildOpcodeAttributes() def BuildUriList(ids, uri_format, uri_fields=("name", "uri")): """Builds a URI list as used by index resources. 
@param ids: list of ids as strings @param uri_format: format to be applied for URI @param uri_fields: optional parameter for field IDs """ (field_id, field_uri) = uri_fields def _MapId(m_id): return { field_id: m_id, field_uri: uri_format % m_id, } # Make sure the result is sorted, makes it nicer to look at and simplifies # unittests. ids.sort() return map(_MapId, ids) def MapFields(names, data): """Maps two lists into one dictionary. Example:: >>> MapFields(["a", "b"], ["foo", 123]) {'a': 'foo', 'b': 123} @param names: field names (list of strings) @param data: field data (list) """ if len(names) != len(data): raise AttributeError("Names and data must have the same length") return dict(zip(names, data)) def MapBulkFields(itemslist, fields): """Map value to field name in to one dictionary. @param itemslist: a list of items values @param fields: a list of items names @return: a list of mapped dictionaries """ items_details = [] for item in itemslist: mapped = MapFields(fields, item) items_details.append(mapped) return items_details def FillOpcode(opcls, body, static, rename=None): """Fills an opcode with body parameters. Parameter types are checked. 
@type opcls: L{opcodes.OpCode} @param opcls: Opcode class @type body: dict @param body: Body parameters as received from client @type static: dict @param static: Static parameters which can't be modified by client @type rename: dict @param rename: Renamed parameters, key as old name, value as new name @return: Opcode object """ if body is None: params = {} else: CheckType(body, dict, "Body contents") # Make copy to be modified params = body.copy() if rename: for old, new in rename.items(): if new in params and old in params: raise http.HttpBadRequest("Parameter '%s' was renamed to '%s', but" " both are specified" % (old, new)) if old in params: assert new not in params params[new] = params.pop(old) if static: overwritten = set(params.keys()) & set(static.keys()) if overwritten: raise http.HttpBadRequest("Can't overwrite static parameters %r" % overwritten) params.update(static) # Convert keys to strings (simplejson decodes them as unicode) params = dict((str(key), value) for (key, value) in params.items()) try: op = opcls(**params) # pylint: disable=W0142 op.Validate(False) except (errors.OpPrereqError, TypeError), err: raise http.HttpBadRequest("Invalid body parameters: %s" % err) return op def HandleItemQueryErrors(fn, *args, **kwargs): """Converts errors when querying a single item. """ try: return fn(*args, **kwargs) except errors.OpPrereqError, err: if len(err.args) == 2 and err.args[1] == errors.ECODE_NOENT: raise http.HttpNotFound() raise def FeedbackFn(msg): """Feedback logging function for jobs. We don't have a stdout for printing log messages, so log them to the http log at least. @param msg: the message """ (_, log_type, log_msg) = msg logging.info("%s: %s", log_type, log_msg) def CheckType(value, exptype, descr): """Abort request if value type doesn't match expected type. 
@param value: Value @type exptype: type @param exptype: Expected type @type descr: string @param descr: Description of value @return: Value (allows inline usage) """ if not isinstance(value, exptype): raise http.HttpBadRequest("%s: Type is '%s', but '%s' is expected" % (descr, type(value).__name__, exptype.__name__)) return value def CheckParameter(data, name, default=_DEFAULT, exptype=_DEFAULT): """Check and return the value for a given parameter. If no default value was given and the parameter doesn't exist in the input data, an error is raise. @type data: dict @param data: Dictionary containing input data @type name: string @param name: Parameter name @param default: Default value (can be None) @param exptype: Expected type (can be None) """ try: value = data[name] except KeyError: if default is not _DEFAULT: return default raise http.HttpBadRequest("Required parameter '%s' is missing" % name) if exptype is _DEFAULT: return value return CheckType(value, exptype, "'%s' parameter" % name) class ResourceBase(object): """Generic class for resources. """ # Default permission requirements GET_ACCESS = [] PUT_ACCESS = [rapi.RAPI_ACCESS_WRITE] POST_ACCESS = [rapi.RAPI_ACCESS_WRITE] DELETE_ACCESS = [rapi.RAPI_ACCESS_WRITE] def __init__(self, items, queryargs, req, _client_cls=None): """Generic resource constructor. @param items: a list with variables encoded in the URL @param queryargs: a dictionary with additional options from URL @param req: Request context @param _client_cls: L{luxi} client class (unittests only) """ assert isinstance(queryargs, dict) self.items = items self.queryargs = queryargs self._req = req if _client_cls is None: _client_cls = luxi.Client self._client_cls = _client_cls def _GetRequestBody(self): """Returns the body data. """ return self._req.private.body_data request_body = property(fget=_GetRequestBody)<|fim▁hole|> def _checkIntVariable(self, name, default=0): """Return the parsed value of an int argument. 
""" val = self.queryargs.get(name, default) if isinstance(val, list): if val: val = val[0] else: val = default try: val = int(val) except (ValueError, TypeError): raise http.HttpBadRequest("Invalid value for the" " '%s' parameter" % (name,)) return val def _checkStringVariable(self, name, default=None): """Return the parsed value of a string argument. """ val = self.queryargs.get(name, default) if isinstance(val, list): if val: val = val[0] else: val = default return val def getBodyParameter(self, name, *args): """Check and return the value for a given parameter. If a second parameter is not given, an error will be returned, otherwise this parameter specifies the default value. @param name: the required parameter """ if args: return CheckParameter(self.request_body, name, default=args[0]) return CheckParameter(self.request_body, name) def useLocking(self): """Check if the request specifies locking. """ return bool(self._checkIntVariable("lock")) def useBulk(self): """Check if the request specifies bulk querying. """ return bool(self._checkIntVariable("bulk")) def useForce(self): """Check if the request specifies a forced operation. """ return bool(self._checkIntVariable("force")) def dryRun(self): """Check if the request specifies dry-run mode. """ return bool(self._checkIntVariable("dry-run")) def GetClient(self, query=False): """Wrapper for L{luxi.Client} with HTTP-specific error handling. 
@param query: this signifies that the client will only be used for queries; if the build-time parameter enable-split-queries is enabled, then the client will be connected to the query socket instead of the masterd socket """ if query and constants.ENABLE_SPLIT_QUERY: address = pathutils.QUERY_SOCKET else: address = None # Could be a function, pylint: disable=R0201 try: return self._client_cls(address=address) except luxi.NoMasterError, err: raise http.HttpBadGateway("Can't connect to master daemon: %s" % err) except luxi.PermissionError: raise http.HttpInternalServerError("Internal error: no permission to" " connect to the master daemon") def SubmitJob(self, op, cl=None): """Generic wrapper for submit job, for better http compatibility. @type op: list @param op: the list of opcodes for the job @type cl: None or luxi.Client @param cl: optional luxi client to use @rtype: string @return: the job ID """ if cl is None: cl = self.GetClient() try: return cl.SubmitJob(op) except errors.JobQueueFull: raise http.HttpServiceUnavailable("Job queue is full, needs archiving") except errors.JobQueueDrainError: raise http.HttpServiceUnavailable("Job queue is drained, cannot submit") except luxi.NoMasterError, err: raise http.HttpBadGateway("Master seems to be unreachable: %s" % err) except luxi.PermissionError: raise http.HttpInternalServerError("Internal error: no permission to" " connect to the master daemon") except luxi.TimeoutError, err: raise http.HttpGatewayTimeout("Timeout while talking to the master" " daemon: %s" % err) def GetResourceOpcodes(cls): """Returns all opcodes used by a resource. """ return frozenset(filter(None, (getattr(cls, op_attr, None) for (_, op_attr, _, _, _) in OPCODE_ATTRS))) def GetHandlerAccess(handler, method): """Returns the access rights for a method on a handler. 
@type handler: L{ResourceBase} @type method: string @rtype: string or None """ return getattr(handler, "%s_ACCESS" % method, None) def GetHandler(get_fn, aliases): result = get_fn() if not isinstance(result, dict) or aliases is None: return result for (param, alias) in aliases.items(): if param in result: if alias in result: raise http.HttpBadRequest("Parameter '%s' has an alias of '%s', but" " both values are present in response" % (param, alias)) result[alias] = result[param] return result class _MetaOpcodeResource(type): """Meta class for RAPI resources. """ def __call__(mcs, *args, **kwargs): """Instantiates class and patches it for use by the RAPI daemon. """ # Access to private attributes of a client class, pylint: disable=W0212 obj = type.__call__(mcs, *args, **kwargs) for (method, op_attr, rename_attr, aliases_attr, fn_attr) in OPCODE_ATTRS: if hasattr(obj, method): # If the method handler is already defined, "*_RENAME" or # "Get*OpInput" shouldn't be (they're only used by the automatically # generated handler) assert not hasattr(obj, rename_attr) assert not hasattr(obj, fn_attr) # The aliases are allowed only on GET calls assert not hasattr(obj, aliases_attr) or method == http.HTTP_GET # GET methods can add aliases of values they return under a different # name if method == http.HTTP_GET and hasattr(obj, aliases_attr): setattr(obj, method, compat.partial(GetHandler, getattr(obj, method), getattr(obj, aliases_attr))) else: # Try to generate handler method on handler instance try: opcode = getattr(obj, op_attr) except AttributeError: pass else: setattr(obj, method, compat.partial(obj._GenericHandler, opcode, getattr(obj, rename_attr, None), getattr(obj, fn_attr, obj._GetDefaultData))) return obj class OpcodeResource(ResourceBase): """Base class for opcode-based RAPI resources. Instances of this class automatically gain handler functions through L{_MetaOpcodeResource} for any method for which a C{$METHOD$_OPCODE} variable is defined at class level. 
Subclasses can define a C{Get$Method$OpInput} method to do their own opcode input processing (e.g. for static values). The C{$METHOD$_RENAME} variable defines which values are renamed (see L{baserlib.FillOpcode}). Still default behavior cannot be totally overriden. There are opcode params that are available to all opcodes, e.g. "depends". In case those params (currently only "depends") are found in the original request's body, they are added to the dictionary of parsed parameters and eventually passed to the opcode. If the parsed body is not represented as a dictionary object, the values are not added. @cvar GET_OPCODE: Set this to a class derived from L{opcodes.OpCode} to automatically generate a GET handler submitting the opcode @cvar GET_RENAME: Set this to rename parameters in the GET handler (see L{baserlib.FillOpcode}) @cvar GET_ALIASES: Set this to duplicate return values in GET results (see L{baserlib.GetHandler}) @ivar GetGetOpInput: Define this to override the default method for getting opcode parameters (see L{baserlib.OpcodeResource._GetDefaultData}) @cvar PUT_OPCODE: Set this to a class derived from L{opcodes.OpCode} to automatically generate a PUT handler submitting the opcode @cvar PUT_RENAME: Set this to rename parameters in the PUT handler (see L{baserlib.FillOpcode}) @ivar GetPutOpInput: Define this to override the default method for getting opcode parameters (see L{baserlib.OpcodeResource._GetDefaultData}) @cvar POST_OPCODE: Set this to a class derived from L{opcodes.OpCode} to automatically generate a POST handler submitting the opcode @cvar POST_RENAME: Set this to rename parameters in the POST handler (see L{baserlib.FillOpcode}) @ivar GetPostOpInput: Define this to override the default method for getting opcode parameters (see L{baserlib.OpcodeResource._GetDefaultData}) @cvar DELETE_OPCODE: Set this to a class derived from L{opcodes.OpCode} to automatically generate a DELETE handler submitting the opcode @cvar DELETE_RENAME: Set this to 
rename parameters in the DELETE handler (see L{baserlib.FillOpcode}) @ivar GetDeleteOpInput: Define this to override the default method for getting opcode parameters (see L{baserlib.OpcodeResource._GetDefaultData}) """ __metaclass__ = _MetaOpcodeResource def _GetDefaultData(self): return (self.request_body, None) def _GetRapiOpName(self): """Extracts the name of the RAPI operation from the class name """ if self.__class__.__name__.startswith("R_2_"): return self.__class__.__name__[4:] return self.__class__.__name__ def _GetCommonStatic(self): """Return the static parameters common to all the RAPI calls The reason is a parameter present in all the RAPI calls, and the reason trail has to be build for all of them, so the parameter is read here and used to build the reason trail, that is the actual parameter passed forward. """ trail = [] usr_reason = self._checkStringVariable("reason", default=None) if usr_reason: trail.append((constants.OPCODE_REASON_SRC_USER, usr_reason, utils.EpochNano())) reason_src = "%s:%s" % (constants.OPCODE_REASON_SRC_RLIB2, self._GetRapiOpName()) trail.append((reason_src, "", utils.EpochNano())) common_static = { "reason": trail, } return common_static def _GetDepends(self): ret = {} if isinstance(self.request_body, dict): depends = self.getBodyParameter("depends", None) if depends: ret.update({"depends": depends}) return ret def _GenericHandler(self, opcode, rename, fn): (body, specific_static) = fn() if isinstance(body, dict): body.update(self._GetDepends()) static = self._GetCommonStatic() if specific_static: static.update(specific_static) op = FillOpcode(opcode, body, static, rename=rename) return self.SubmitJob([op])<|fim▁end|>
<|file_name|>RecursiveLexicon.py<|end_file_name|><|fim▁begin|>from SimpleLexicon import SimpleLexicon from LOTlib.Evaluation.EvaluationException import RecursionDepthException class RecursiveLexicon(SimpleLexicon): """ A lexicon where word meanings can call each other. Analogous to a RecursiveLOTHypothesis from a LOTHypothesis. To achieve this, we require the LOThypotheses in self.values to take a "recurse" call that is always passed in by default here on __call__ as the first argument. This throws a RecursionDepthException when it gets too deep. See Examples.EvenOdd """ def __init__(self, recursive_depth_bound=10, *args, **kwargs): self.recursive_depth_bound = recursive_depth_bound SimpleLexicon.__init__(self, *args, **kwargs) def __call__(self, word, *args): """ Wrap in self as a first argument that we don't have to in the grammar. This way, we can use self(word, X Y) as above. """ self.recursive_call_depth = 0 return self.value[word](self.recursive_call, *args) # pass in "self" as lex, using the recursive version<|fim▁hole|> def recursive_call(self, word, *args): """ This gets called internally on recursive calls. It keeps track of the depth to allow us to escape """ self.recursive_call_depth += 1 if self.recursive_call_depth > self.recursive_depth_bound: raise RecursionDepthException # print ">>>", self.value[word] return self.value[word](self.recursive_call, *args)<|fim▁end|>
<|file_name|>test_serialization.py<|end_file_name|><|fim▁begin|>import os import shutil import tempfile import numpy as np import pytest import torch from spotlight.cross_validation import random_train_test_split from spotlight.datasets import movielens from spotlight.evaluation import mrr_score, sequence_mrr_score from spotlight.evaluation import rmse_score from spotlight.factorization.explicit import ExplicitFactorizationModel from spotlight.factorization.implicit import ImplicitFactorizationModel from spotlight.sequence.implicit import ImplicitSequenceModel from spotlight.sequence.representations import CNNNet RANDOM_STATE = np.random.RandomState(42) CUDA = bool(os.environ.get('SPOTLIGHT_CUDA', False)) def _reload(model): dirname = tempfile.mkdtemp() try: fname = os.path.join(dirname, "model.pkl") torch.save(model, fname) model = torch.load(fname) finally: shutil.rmtree(dirname) return model @pytest.fixture(scope="module") def data(): interactions = movielens.get_movielens_dataset('100K') train, test = random_train_test_split(interactions, random_state=RANDOM_STATE) return train, test def test_explicit_serialization(data): train, test = data model = ExplicitFactorizationModel(loss='regression', n_iter=3, batch_size=1024, learning_rate=1e-3, l2=1e-5, use_cuda=CUDA) model.fit(train) rmse_original = rmse_score(model, test) rmse_recovered = rmse_score(_reload(model), test) assert rmse_original == rmse_recovered def test_implicit_serialization(data): <|fim▁hole|> model = ImplicitFactorizationModel(loss='bpr', n_iter=3, batch_size=1024, learning_rate=1e-2, l2=1e-6, use_cuda=CUDA) model.fit(train) mrr_original = mrr_score(model, test, train=train).mean() mrr_recovered = mrr_score(_reload(model), test, train=train).mean() assert mrr_original == mrr_recovered def test_implicit_sequence_serialization(data): train, test = data train = train.to_sequence(max_sequence_length=128) test = test.to_sequence(max_sequence_length=128) model = ImplicitSequenceModel(loss='bpr', 
representation=CNNNet(train.num_items, embedding_dim=32, kernel_width=3, dilation=(1, ), num_layers=1), batch_size=128, learning_rate=1e-1, l2=0.0, n_iter=5, random_state=RANDOM_STATE, use_cuda=CUDA) model.fit(train) mrr_original = sequence_mrr_score(model, test).mean() mrr_recovered = sequence_mrr_score(_reload(model), test).mean() assert mrr_original == mrr_recovered<|fim▁end|>
train, test = data
<|file_name|>task-lists.rs<|end_file_name|><|fim▁begin|>// ignore-tidy-linelength<|fim▁hole|>// Unfortunately that requires LXML, because the built-in xml module doesn't support all of xpath. // @has task_lists/index.html '//ul/li/input[@type="checkbox"]' '' // @has task_lists/index.html '//ul/li/input[@disabled]' '' // @has task_lists/index.html '//ul/li' 'a' // @has task_lists/index.html '//ul/li' 'b' //! This tests 'task list' support, a common markdown extension. //! - [ ] a //! - [x] b<|fim▁end|>
// FIXME: this doesn't test as much as I'd like; ideally it would have these query too: // has task_lists/index.html '//li/input[@type="checkbox" and disabled]/following-sibling::text()' 'a' // has task_lists/index.html '//li/input[@type="checkbox"]/following-sibling::text()' 'b'
<|file_name|>FlipDirection.java<|end_file_name|><|fim▁begin|>package ontology.effects.unary; <|fim▁hole|>import core.game.Game; import ontology.Types; import ontology.effects.Effect; import tools.Direction; import tools.Utils; import tools.Vector2d; /** * Created with IntelliJ IDEA. * User: Diego * Date: 03/12/13 * Time: 16:17 * This is a Java port from Tom Schaul's VGDL - https://github.com/schaul/py-vgdl */ public class FlipDirection extends Effect { public FlipDirection(InteractionContent cnt) { is_stochastic = true; this.parseParameters(cnt); } @Override public void execute(VGDLSprite sprite1, VGDLSprite sprite2, Game game) { sprite1.orientation = (Direction) Utils.choice(Types.DBASEDIRS, game.getRandomGenerator()); } }<|fim▁end|>
import core.VGDLSprite; import core.content.InteractionContent;
<|file_name|>kill_ships.js<|end_file_name|><|fim▁begin|>import Objective from '../objective'; export default class KillShipsObjective extends Objective { constructor (game, ships) { super(game); if (!Array.isArray(ships)) { ships = [ships]; } this.bots = _.filter(ships, ship => { return ship.alive; }); this.ships = ships; } isComplete () { var all_dead = true; for (var i = 0; i < this.ships.length; i++) { <|fim▁hole|> if (this.ships[i].alive) { all_dead = false; break; } } return all_dead; } };<|fim▁end|>
<|file_name|>to_rawdata.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: ascii -*- """ package.module ~~~~~~~~~~~~~ A description which can be long and explain the complete<|fim▁hole|>:copyright: year by my name, see AUTHORS for more details :license: license_name, see LICENSE for more details """ import struct import sys outputfilename = 'raw_audio.out' def do_convert(filename): """ """ try: f_in = open(filename, 'r') f_out = open(outputfilename, 'wb') sample = 0 for line in f_in: try: sample = int(line) data = struct.pack("i", sample) # pack integer in a binary string f_out.write(data) except: print "Cannot convert: " + line finally: f_in.close() f_out.close() if __name__=='__main__': print "Converting..." do_convert(sys.argv[1]) print "done. Written to " + outputfilename<|fim▁end|>
functionality of this module even with indented code examples. Class/Function however should not be documented here.
<|file_name|>ForwardDocumentEventProtos.java<|end_file_name|><|fim▁begin|>// Generated by the protocol buffer compiler. DO NOT EDIT! // source: ForwardDocumentEvent.proto package Diadoc.Api.Proto; public final class ForwardDocumentEventProtos { private ForwardDocumentEventProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface ForwardDocumentEventOrBuilder extends // @@protoc_insertion_point(interface_extends:Diadoc.Api.Proto.ForwardDocumentEvent) com.google.protobuf.MessageOrBuilder { /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ boolean hasTimestamp(); /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ Diadoc.Api.Proto.TimestampProtos.Timestamp getTimestamp(); /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getTimestampOrBuilder(); /** * <code>optional string ToBoxId = 2;</code> */ boolean hasToBoxId(); /** * <code>optional string ToBoxId = 2;</code> */ java.lang.String getToBoxId(); /** * <code>optional string ToBoxId = 2;</code> */ com.google.protobuf.ByteString getToBoxIdBytes(); } /** * Protobuf type {@code Diadoc.Api.Proto.ForwardDocumentEvent} */ public static final class ForwardDocumentEvent extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Diadoc.Api.Proto.ForwardDocumentEvent) ForwardDocumentEventOrBuilder { // Use ForwardDocumentEvent.newBuilder() to construct. 
private ForwardDocumentEvent(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ForwardDocumentEvent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ForwardDocumentEvent defaultInstance; public static ForwardDocumentEvent getDefaultInstance() { return defaultInstance; } public ForwardDocumentEvent getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ForwardDocumentEvent( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = timestamp_.toBuilder(); } timestamp_ = input.readMessage(Diadoc.Api.Proto.TimestampProtos.Timestamp.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timestamp_); timestamp_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; toBoxId_ = bs; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( 
e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.class, Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.Builder.class); } public static com.google.protobuf.Parser<ForwardDocumentEvent> PARSER = new com.google.protobuf.AbstractParser<ForwardDocumentEvent>() { public ForwardDocumentEvent parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ForwardDocumentEvent(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ForwardDocumentEvent> getParserForType() { return PARSER; } private int bitField0_; public static final int TIMESTAMP_FIELD_NUMBER = 1; private Diadoc.Api.Proto.TimestampProtos.Timestamp timestamp_; /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp getTimestamp() { return timestamp_; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getTimestampOrBuilder() { return timestamp_; } public static final int TOBOXID_FIELD_NUMBER = 2; private java.lang.Object toBoxId_; /** * 
<code>optional string ToBoxId = 2;</code> */ public boolean hasToBoxId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string ToBoxId = 2;</code> */ public java.lang.String getToBoxId() { java.lang.Object ref = toBoxId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { toBoxId_ = s; } return s; } } /** * <code>optional string ToBoxId = 2;</code> */ public com.google.protobuf.ByteString getToBoxIdBytes() { java.lang.Object ref = toBoxId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); toBoxId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); toBoxId_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasTimestamp()) { if (!getTimestamp().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, timestamp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getToBoxIdBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, timestamp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getToBoxIdBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public 
static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Diadoc.Api.Proto.ForwardDocumentEvent} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:Diadoc.Api.Proto.ForwardDocumentEvent) Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEventOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.class, Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.Builder.class); } // Construct using Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTimestampFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (timestampBuilder_ == null) { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); } else { timestampBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); toBoxId_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; } public Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent getDefaultInstanceForType() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.getDefaultInstance(); } public Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent build() { Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent buildPartial() { Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent result = new 
Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (timestampBuilder_ == null) { result.timestamp_ = timestamp_; } else { result.timestamp_ = timestampBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.toBoxId_ = toBoxId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent) { return mergeFrom((Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent other) { if (other == Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.getDefaultInstance()) return this; if (other.hasTimestamp()) { mergeTimestamp(other.getTimestamp()); } if (other.hasToBoxId()) { bitField0_ |= 0x00000002; toBoxId_ = other.toBoxId_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasTimestamp()) { if (!getTimestamp().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private 
Diadoc.Api.Proto.TimestampProtos.Timestamp timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder> timestampBuilder_; /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000001) == 0x00000001); } /**<|fim▁hole|> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp getTimestamp() { if (timestampBuilder_ == null) { return timestamp_; } else { return timestampBuilder_.getMessage(); } } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder setTimestamp(Diadoc.Api.Proto.TimestampProtos.Timestamp value) { if (timestampBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timestamp_ = value; onChanged(); } else { timestampBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder setTimestamp( Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder builderForValue) { if (timestampBuilder_ == null) { timestamp_ = builderForValue.build(); onChanged(); } else { timestampBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder mergeTimestamp(Diadoc.Api.Proto.TimestampProtos.Timestamp value) { if (timestampBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && timestamp_ != Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance()) { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.newBuilder(timestamp_).mergeFrom(value).buildPartial(); } else { timestamp_ = value; } onChanged(); } else { timestampBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional 
.Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder clearTimestamp() { if (timestampBuilder_ == null) { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); onChanged(); } else { timestampBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder getTimestampBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTimestampFieldBuilder().getBuilder(); } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getTimestampOrBuilder() { if (timestampBuilder_ != null) { return timestampBuilder_.getMessageOrBuilder(); } else { return timestamp_; } } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder> getTimestampFieldBuilder() { if (timestampBuilder_ == null) { timestampBuilder_ = new com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder>( getTimestamp(), getParentForChildren(), isClean()); timestamp_ = null; } return timestampBuilder_; } private java.lang.Object toBoxId_ = ""; /** * <code>optional string ToBoxId = 2;</code> */ public boolean hasToBoxId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string ToBoxId = 2;</code> */ public java.lang.String getToBoxId() { java.lang.Object ref = toBoxId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { toBoxId_ = s; } return s; } else { return 
(java.lang.String) ref; } } /** * <code>optional string ToBoxId = 2;</code> */ public com.google.protobuf.ByteString getToBoxIdBytes() { java.lang.Object ref = toBoxId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); toBoxId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string ToBoxId = 2;</code> */ public Builder setToBoxId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; toBoxId_ = value; onChanged(); return this; } /** * <code>optional string ToBoxId = 2;</code> */ public Builder clearToBoxId() { bitField0_ = (bitField0_ & ~0x00000002); toBoxId_ = getDefaultInstance().getToBoxId(); onChanged(); return this; } /** * <code>optional string ToBoxId = 2;</code> */ public Builder setToBoxIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; toBoxId_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:Diadoc.Api.Proto.ForwardDocumentEvent) } static { defaultInstance = new ForwardDocumentEvent(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:Diadoc.Api.Proto.ForwardDocumentEvent) } private static final com.google.protobuf.Descriptors.Descriptor internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\032ForwardDocumentEvent.proto\022\020Diadoc.Api" + ".Proto\032\017Timestamp.proto\"W\n\024ForwardDocume" + "ntEvent\022.\n\tTimestamp\030\001 \001(\0132\033.Diadoc.Api." 
+ "Proto.Timestamp\022\017\n\007ToBoxId\030\002 \001(\tB\034B\032Forw" + "ardDocumentEventProtos" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { Diadoc.Api.Proto.TimestampProtos.getDescriptor(), }, assigner); internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor, new java.lang.String[] { "Timestamp", "ToBoxId", }); Diadoc.Api.Proto.TimestampProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }<|fim▁end|>
* <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|> app_name = 'sms' urlpatterns = [ url(r'^$', views.index, name="index"), ]<|fim▁end|>
from django.conf.urls import url from sms import views
<|file_name|>server_test.go<|end_file_name|><|fim▁begin|>package fasthttp import ( "bufio" "bytes" "crypto/tls" "fmt" "io" "io/ioutil" "net" "os" "strings" "sync" "testing" "time" "github.com/valyala/fasthttp/fasthttputil" ) func TestServerErrSmallBuffer(t *testing.T) { logger := &customLogger{} s := &Server{ Handler: func(ctx *RequestCtx) { ctx.WriteString("shouldn't be never called") }, ReadBufferSize: 20, Logger: logger, } ln := fasthttputil.NewInmemoryListener() serverCh := make(chan error, 1) go func() { err := s.Serve(ln) serverCh <- err }() clientCh := make(chan error, 1) go func() { c, err := ln.Dial() if err != nil { clientCh <- fmt.Errorf("unexpected error: %s", err) return } _, err = c.Write([]byte("GET / HTTP/1.1\r\nHost: aabb.com\r\nVERY-long-Header: sdfdfsd dsf dsaf dsf df fsd\r\n\r\n")) if err != nil { clientCh <- fmt.Errorf("unexpected error when sending request: %s", err) return } br := bufio.NewReader(c) var resp Response if err = resp.Read(br); err != nil { clientCh <- fmt.Errorf("unexpected error: %s", err) return } statusCode := resp.StatusCode() if statusCode != StatusRequestHeaderFieldsTooLarge { clientCh <- fmt.Errorf("unexpected status code: %d. Expecting %d", statusCode, StatusRequestHeaderFieldsTooLarge) return } if !resp.ConnectionClose() { clientCh <- fmt.Errorf("missing 'Connection: close' response header") return } clientCh <- nil }() var err error // wait for the client select { case <-time.After(time.Second): t.Fatalf("timeout when waiting for the client. Server log: %q", logger.out) case err = <-clientCh: if err != nil { t.Fatalf("unexpected client error: %s. Server log: %q", err, logger.out) } } // wait for the server if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s. Server log: %q", err, logger.out) } select { case <-time.After(time.Second): t.Fatalf("timeout when waiting for the server. Server log: %q", logger.out) case err = <-serverCh: if err != nil { t.Fatalf("unexpected server error: %s. 
Server log: %q", err, logger.out) } } expectedErr := errSmallBuffer.Error() if !strings.Contains(logger.out, expectedErr) { t.Fatalf("unexpected log output: %q. Expecting %q", logger.out, expectedErr) } } func TestRequestCtxIsTLS(t *testing.T) { var ctx RequestCtx // tls.Conn ctx.c = &tls.Conn{} if !ctx.IsTLS() { t.Fatalf("IsTLS must return true") } // non-tls.Conn ctx.c = &readWriter{} if ctx.IsTLS() { t.Fatalf("IsTLS must return false") } // overridden tls.Conn ctx.c = &struct { *tls.Conn fooBar bool }{} if !ctx.IsTLS() { t.Fatalf("IsTLS must return true") } } func TestRequestCtxRedirect(t *testing.T) { testRequestCtxRedirect(t, "http://qqq/", "", "http://qqq/") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "", "http://qqq/foo/bar?baz=111") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "#aaa", "http://qqq/foo/bar?baz=111#aaa") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "?abc=de&f", "http://qqq/foo/bar?abc=de&f") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "?abc=de&f#sf", "http://qqq/foo/bar?abc=de&f#sf") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "x.html", "http://qqq/foo/x.html") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "x.html?a=1", "http://qqq/foo/x.html?a=1") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "x.html#aaa=bbb&cc=ddd", "http://qqq/foo/x.html#aaa=bbb&cc=ddd") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "x.html?b=1#aaa=bbb&cc=ddd", "http://qqq/foo/x.html?b=1#aaa=bbb&cc=ddd") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "/x.html", "http://qqq/x.html") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "/x.html#aaa=bbb&cc=ddd", "http://qqq/x.html#aaa=bbb&cc=ddd") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "../x.html", "http://qqq/x.html") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "../../x.html", "http://qqq/x.html") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "./.././../x.html", "http://qqq/x.html") 
testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "http://foo.bar/baz", "http://foo.bar/baz") testRequestCtxRedirect(t, "http://qqq/foo/bar?baz=111", "https://foo.bar/baz", "https://foo.bar/baz") testRequestCtxRedirect(t, "https://foo.com/bar?aaa", "//google.com/aaa?bb", "https://google.com/aaa?bb") } func testRequestCtxRedirect(t *testing.T, origURL, redirectURL, expectedURL string) { var ctx RequestCtx var req Request req.SetRequestURI(origURL) ctx.Init(&req, nil, nil) ctx.Redirect(redirectURL, StatusFound) loc := ctx.Response.Header.Peek("Location") if string(loc) != expectedURL { t.Fatalf("unexpected redirect url %q. Expecting %q. origURL=%q, redirectURL=%q", loc, expectedURL, origURL, redirectURL) } } func TestServerResponseServerHeader(t *testing.T) { serverName := "foobar serv" s := &Server{ Handler: func(ctx *RequestCtx) { name := ctx.Response.Header.Server() if string(name) != serverName { fmt.Fprintf(ctx, "unexpected server name: %q. Expecting %q", name, serverName) } else { ctx.WriteString("OK") } // make sure the server name is sent to the client after ctx.Response.Reset() ctx.NotFound() }, Name: serverName, } ln := fasthttputil.NewInmemoryListener() serverCh := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(serverCh) }() clientCh := make(chan struct{}) go func() { c, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } if _, err = c.Write([]byte("GET / HTTP/1.1\r\nHost: aa\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(c) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusNotFound { t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusNotFound) } if string(resp.Body()) != "404 Page not found" { t.Fatalf("unexpected body: %q. 
Expecting %q", resp.Body(), "404 Page not found") } if string(resp.Header.Server()) != serverName { t.Fatalf("unexpected server header: %q. Expecting %q", resp.Header.Server(), serverName) } if err = c.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } close(clientCh) }() select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestServerResponseBodyStream(t *testing.T) { ln := fasthttputil.NewInmemoryListener() readyCh := make(chan struct{}) h := func(ctx *RequestCtx) { ctx.SetConnectionClose() if ctx.IsBodyStream() { t.Fatalf("IsBodyStream must return false") } ctx.SetBodyStreamWriter(func(w *bufio.Writer) { fmt.Fprintf(w, "first") if err := w.Flush(); err != nil { return } <-readyCh fmt.Fprintf(w, "second") // there is no need to flush w here, since it will // be flushed automatically after returning from StreamWriter. }) if !ctx.IsBodyStream() { t.Fatalf("IsBodyStream must return true") } } serverCh := make(chan struct{}) go func() { if err := Serve(ln, h); err != nil { t.Fatalf("unexpected error: %s", err) } close(serverCh) }() clientCh := make(chan struct{}) go func() { c, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } if _, err = c.Write([]byte("GET / HTTP/1.1\r\nHost: aa\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(c) var respH ResponseHeader if err = respH.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if respH.StatusCode() != StatusOK { t.Fatalf("unexpected status code: %d. Expecting %d", respH.StatusCode(), StatusOK) } buf := make([]byte, 1024) n, err := br.Read(buf) if err != nil { t.Fatalf("unexpected error: %s", err) } b := buf[:n] if string(b) != "5\r\nfirst\r\n" { t.Fatalf("unexpected result %q. 
Expecting %q", b, "5\r\nfirst\r\n") } close(readyCh) tail, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("unexpected error: %s", err) } if string(tail) != "6\r\nsecond\r\n0\r\n\r\n" { t.Fatalf("unexpected tail %q. Expecting %q", tail, "6\r\nsecond\r\n0\r\n\r\n") } close(clientCh) }() select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestServerDisableKeepalive(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { ctx.WriteString("OK") }, DisableKeepalive: true, } ln := fasthttputil.NewInmemoryListener() serverCh := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(serverCh) }() clientCh := make(chan struct{}) go func() { c, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } if _, err = c.Write([]byte("GET / HTTP/1.1\r\nHost: aa\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(c) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusOK { t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusOK) } if !resp.ConnectionClose() { t.Fatalf("expecting 'Connection: close' response header") } if string(resp.Body()) != "OK" { t.Fatalf("unexpected body: %q. Expecting %q", resp.Body(), "OK") } // make sure the connection is closed data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("unexpected error: %s", err) } if len(data) > 0 { t.Fatalf("unexpected data read from the connection: %q. 
Expecting empty data", data) } close(clientCh) }() select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestServerMaxConnsPerIPLimit(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { ctx.WriteString("OK") }, MaxConnsPerIP: 1, Logger: &customLogger{}, } ln := fasthttputil.NewInmemoryListener() serverCh := make(chan struct{}) go func() { fakeLN := &fakeIPListener{ Listener: ln, } if err := s.Serve(fakeLN); err != nil { t.Fatalf("unexpected error: %s", err) } close(serverCh) }() clientCh := make(chan struct{}) go func() { c1, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } c2, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(c2) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusTooManyRequests { t.Fatalf("unexpected status code for the second connection: %d. Expecting %d", resp.StatusCode(), StatusTooManyRequests) } if _, err = c1.Write([]byte("GET / HTTP/1.1\r\nHost: aa\r\n\r\n")); err != nil { t.Fatalf("unexpected error when writing to the first connection: %s", err) } br = bufio.NewReader(c1) if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusOK { t.Fatalf("unexpected status code for the first connection: %d. Expecting %d", resp.StatusCode(), StatusOK) } if string(resp.Body()) != "OK" { t.Fatalf("unexpected body for the first connection: %q. 
Expecting %q", resp.Body(), "OK") } close(clientCh) }() select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } type fakeIPListener struct { net.Listener } func (ln *fakeIPListener) Accept() (net.Conn, error) { conn, err := ln.Listener.Accept() if err != nil { return nil, err } return &fakeIPConn{ Conn: conn, }, nil } type fakeIPConn struct { net.Conn } func (conn *fakeIPConn) RemoteAddr() net.Addr { addr, err := net.ResolveTCPAddr("tcp4", "1.2.3.4:5789") if err != nil { panic(fmt.Sprintf("BUG: unexpected error: %s", err)) } return addr } func TestServerConcurrencyLimit(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { ctx.WriteString("OK") }, Concurrency: 1, Logger: &customLogger{}, } ln := fasthttputil.NewInmemoryListener() serverCh := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(serverCh) }() clientCh := make(chan struct{}) go func() { c1, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } c2, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(c2) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusServiceUnavailable { t.Fatalf("unexpected status code for the second connection: %d. Expecting %d", resp.StatusCode(), StatusServiceUnavailable) } if _, err = c1.Write([]byte("GET / HTTP/1.1\r\nHost: aa\r\n\r\n")); err != nil { t.Fatalf("unexpected error when writing to the first connection: %s", err) } br = bufio.NewReader(c1) if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusOK { t.Fatalf("unexpected status code for the first connection: %d. 
Expecting %d", resp.StatusCode(), StatusOK) } if string(resp.Body()) != "OK" { t.Fatalf("unexpected body for the first connection: %q. Expecting %q", resp.Body(), "OK") } close(clientCh) }() select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestServerWriteFastError(t *testing.T) { s := &Server{ Name: "foobar", } var buf bytes.Buffer expectedBody := "access denied" s.writeFastError(&buf, StatusForbidden, expectedBody) br := bufio.NewReader(&buf) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusForbidden { t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusForbidden) } body := resp.Body() if string(body) != expectedBody { t.Fatalf("unexpected body: %q. Expecting %q", body, expectedBody) } server := string(resp.Header.Server()) if server != s.Name { t.Fatalf("unexpected server: %q. Expecting %q", server, s.Name) } contentType := string(resp.Header.ContentType()) if contentType != "text/plain" { t.Fatalf("unexpected content-type: %q. 
Expecting %q", contentType, "text/plain") } if !resp.Header.ConnectionClose() { t.Fatalf("expecting 'Connection: close' response header") } } func TestServerServeTLSEmbed(t *testing.T) { ln := fasthttputil.NewInmemoryListener() certFile := "./ssl-cert-snakeoil.pem" keyFile := "./ssl-cert-snakeoil.key" certData, err := ioutil.ReadFile(certFile) if err != nil { t.Fatalf("unexpected error when reading %q: %s", certFile, err) } keyData, err := ioutil.ReadFile(keyFile) if err != nil { t.Fatalf("unexpected error when reading %q: %s", keyFile, err) } // start the server ch := make(chan struct{}) go func() { err := ServeTLSEmbed(ln, certData, keyData, func(ctx *RequestCtx) { ctx.WriteString("success") }) if err != nil { t.Fatalf("unexpected error: %s", err) } close(ch) }() // establish connection to the server conn, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } tlsConn := tls.Client(conn, &tls.Config{ InsecureSkipVerify: true, }) // send request if _, err = tlsConn.Write([]byte("GET / HTTP/1.1\r\nHost: aaa\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } // read response respCh := make(chan struct{}) go func() { br := bufio.NewReader(tlsConn) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("unexpected error") } body := resp.Body() if string(body) != "success" { t.Fatalf("unexpected response body %q. 
Expecting %q", body, "success") } close(respCh) }() select { case <-respCh: case <-time.After(time.Second): t.Fatalf("timeout") } // close the server if err = ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestServerMultipartFormDataRequest(t *testing.T) { reqS := `POST /upload HTTP/1.1 Host: qwerty.com Content-Length: 521 Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryJwfATyF8tmxSJnLg ------WebKitFormBoundaryJwfATyF8tmxSJnLg Content-Disposition: form-data; name="f1" value1 ------WebKitFormBoundaryJwfATyF8tmxSJnLg Content-Disposition: form-data; name="fileaaa"; filename="TODO" Content-Type: application/octet-stream - SessionClient with referer and cookies support. - Client with requests' pipelining support. - ProxyHandler similar to FSHandler. - WebSockets. See https://tools.ietf.org/html/rfc6455 . - HTTP/2.0. See https://tools.ietf.org/html/rfc7540 . ------WebKitFormBoundaryJwfATyF8tmxSJnLg-- GET / HTTP/1.1 Host: asbd Connection: close ` ln := fasthttputil.NewInmemoryListener() s := &Server{ Handler: func(ctx *RequestCtx) { switch string(ctx.Path()) { case "/upload": f, err := ctx.MultipartForm() if err != nil { t.Fatalf("unexpected error: %s", err) } if len(f.Value) != 1 { t.Fatalf("unexpected values %d. Expecting %d", len(f.Value), 1) } if len(f.File) != 1 { t.Fatalf("unexpected file values %d. Expecting %d", len(f.File), 1) } fv := ctx.FormValue("f1") if string(fv) != "value1" { t.Fatalf("unexpected form value: %q. 
Expecting %q", fv, "value1") } ctx.Redirect("/", StatusSeeOther) default: ctx.WriteString("non-upload") } }, } ch := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(ch) }() conn, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } if _, err = conn.Write([]byte(reqS)); err != nil { t.Fatalf("unexpected error: %s", err) } var resp Response br := bufio.NewReader(conn) respCh := make(chan struct{}) go func() { if err := resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if resp.StatusCode() != StatusSeeOther { t.Fatalf("unexpected status code %d. Expecting %d", resp.StatusCode(), StatusSeeOther) } loc := resp.Header.Peek("Location") if string(loc) != "http://qwerty.com/" { t.Fatalf("unexpected location %q. Expecting %q", loc, "http://qwerty.com/") } if err := resp.Read(br); err != nil { t.Fatalf("error when reading the second response: %s", err) } if resp.StatusCode() != StatusOK { t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusOK) } body := resp.Body() if string(body) != "non-upload" { t.Fatalf("unexpected body %q. Expecting %q", body, "non-upload") } close(respCh) }() select { case <-respCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("error when closing listener: %s", err) } select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout when waiting for the server to stop") } } func TestServerDisableHeaderNamesNormalizing(t *testing.T) { headerName := "CASE-senSITive-HEAder-NAME" headerNameLower := strings.ToLower(headerName) headerValue := "foobar baz" s := &Server{ Handler: func(ctx *RequestCtx) { hv := ctx.Request.Header.Peek(headerName) if string(hv) != headerValue { t.Fatalf("unexpected header value for %q: %q. 
Expecting %q", headerName, hv, headerValue) } hv = ctx.Request.Header.Peek(headerNameLower) if len(hv) > 0 { t.Fatalf("unexpected header value for %q: %q. Expecting empty value", headerNameLower, hv) } ctx.Response.Header.Set(headerName, headerValue) ctx.WriteString("ok") ctx.SetContentType("aaa") }, DisableHeaderNamesNormalizing: true, } rw := &readWriter{} rw.r.WriteString(fmt.Sprintf("GET / HTTP/1.1\r\n%s: %s\r\nHost: google.com\r\n\r\n", headerName, headerValue)) ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response resp.Header.DisableNormalizing() if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } hv := resp.Header.Peek(headerName) if string(hv) != headerValue { t.Fatalf("unexpected header value for %q: %q. Expecting %q", headerName, hv, headerValue) } hv = resp.Header.Peek(headerNameLower) if len(hv) > 0 { t.Fatalf("unexpected header value for %q: %q. 
Expecting empty value", headerNameLower, hv) } } func TestServerReduceMemoryUsageSerial(t *testing.T) { ln := fasthttputil.NewInmemoryListener() s := &Server{ Handler: func(ctx *RequestCtx) {}, ReduceMemoryUsage: true, }<|fim▁hole|> go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(ch) }() testServerRequests(t, ln) if err := ln.Close(); err != nil { t.Fatalf("error when closing listener: %s", err) } select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout when waiting for the server to stop") } } func TestServerReduceMemoryUsageConcurrent(t *testing.T) { ln := fasthttputil.NewInmemoryListener() s := &Server{ Handler: func(ctx *RequestCtx) {}, ReduceMemoryUsage: true, } ch := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(ch) }() gCh := make(chan struct{}) for i := 0; i < 10; i++ { go func() { testServerRequests(t, ln) gCh <- struct{}{} }() } for i := 0; i < 10; i++ { select { case <-gCh: case <-time.After(time.Second): t.Fatalf("timeout on goroutine %d", i) } } if err := ln.Close(); err != nil { t.Fatalf("error when closing listener: %s", err) } select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout when waiting for the server to stop") } } func testServerRequests(t *testing.T, ln *fasthttputil.InmemoryListener) { conn, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(conn) var resp Response for i := 0; i < 10; i++ { if _, err = fmt.Fprintf(conn, "GET / HTTP/1.1\r\nHost: aaa\r\n\r\n"); err != nil { t.Fatalf("unexpected error on iteration %d: %s", i, err) } respCh := make(chan struct{}) go func() { if err = resp.Read(br); err != nil { t.Fatalf("unexpected error when reading response on iteration %d: %s", i, err) } close(respCh) }() select { case <-respCh: case <-time.After(time.Second): t.Fatalf("timeout on iteration %d", i) } } if err = conn.Close(); err != nil { t.Fatalf("error when 
closing the connection: %s", err) } } func TestServerHTTP10ConnectionKeepAlive(t *testing.T) { ln := fasthttputil.NewInmemoryListener() ch := make(chan struct{}) go func() { err := Serve(ln, func(ctx *RequestCtx) { if string(ctx.Path()) == "/close" { ctx.SetConnectionClose() } }) if err != nil { t.Fatalf("unexpected error: %s", err) } close(ch) }() conn, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } _, err = fmt.Fprintf(conn, "%s", "GET / HTTP/1.0\r\nHost: aaa\r\nConnection: keep-alive\r\n\r\n") if err != nil { t.Fatalf("error when writing request: %s", err) } _, err = fmt.Fprintf(conn, "%s", "GET /close HTTP/1.0\r\nHost: aaa\r\nConnection: keep-alive\r\n\r\n") if err != nil { t.Fatalf("error when writing request: %s", err) } br := bufio.NewReader(conn) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if resp.ConnectionClose() { t.Fatalf("response mustn't have 'Connection: close' header") } if err = resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if !resp.ConnectionClose() { t.Fatalf("response must have 'Connection: close' header") } tailCh := make(chan struct{}) go func() { tail, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("error when reading tail: %s", err) } if len(tail) > 0 { t.Fatalf("unexpected non-zero tail %q", tail) } close(tailCh) }() select { case <-tailCh: case <-time.After(time.Second): t.Fatalf("timeout when reading tail") } if err = conn.Close(); err != nil { t.Fatalf("error when closing the connection: %s", err) } if err = ln.Close(); err != nil { t.Fatalf("error when closing listener: %s", err) } select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout when waiting for the server to stop") } } func TestServerHTTP10ConnectionClose(t *testing.T) { ln := fasthttputil.NewInmemoryListener() ch := make(chan struct{}) go func() { err := Serve(ln, func(ctx *RequestCtx) { // The server must close the connection irregardless 
// of request and response state set inside request // handler, since the HTTP/1.0 request // had no 'Connection: keep-alive' header. ctx.Request.Header.ResetConnectionClose() ctx.Request.Header.Set("Connection", "keep-alive") ctx.Response.Header.ResetConnectionClose() ctx.Response.Header.Set("Connection", "keep-alive") }) if err != nil { t.Fatalf("unexpected error: %s", err) } close(ch) }() conn, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } _, err = fmt.Fprintf(conn, "%s", "GET / HTTP/1.0\r\nHost: aaa\r\n\r\n") if err != nil { t.Fatalf("error when writing request: %s", err) } br := bufio.NewReader(conn) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if !resp.ConnectionClose() { t.Fatalf("HTTP1.0 response must have 'Connection: close' header") } tailCh := make(chan struct{}) go func() { tail, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("error when reading tail: %s", err) } if len(tail) > 0 { t.Fatalf("unexpected non-zero tail %q", tail) } close(tailCh) }() select { case <-tailCh: case <-time.After(time.Second): t.Fatalf("timeout when reading tail") } if err = conn.Close(); err != nil { t.Fatalf("error when closing the connection: %s", err) } if err = ln.Close(); err != nil { t.Fatalf("error when closing listener: %s", err) } select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout when waiting for the server to stop") } } func TestRequestCtxFormValue(t *testing.T) { var ctx RequestCtx var req Request req.SetRequestURI("/foo/bar?baz=123&aaa=bbb") req.SetBodyString("qqq=port&mmm=sddd") req.Header.SetContentType("application/x-www-form-urlencoded") ctx.Init(&req, nil, nil) v := ctx.FormValue("baz") if string(v) != "123" { t.Fatalf("unexpected value %q. Expecting %q", v, "123") } v = ctx.FormValue("mmm") if string(v) != "sddd" { t.Fatalf("unexpected value %q. 
Expecting %q", v, "sddd") } v = ctx.FormValue("aaaasdfsdf") if len(v) > 0 { t.Fatalf("unexpected value for unknown key %q", v) } } func TestRequestCtxUserValue(t *testing.T) { var ctx RequestCtx for i := 0; i < 5; i++ { k := fmt.Sprintf("key-%d", i) ctx.SetUserValue(k, i) } for i := 5; i < 10; i++ { k := fmt.Sprintf("key-%d", i) ctx.SetUserValueBytes([]byte(k), i) } for i := 0; i < 10; i++ { k := fmt.Sprintf("key-%d", i) v := ctx.UserValue(k) n, ok := v.(int) if !ok || n != i { t.Fatalf("unexpected value obtained for key %q: %v. Expecting %d", k, v, i) } } vlen := 0 ctx.VisitUserValues(func(key []byte, value interface{}) { vlen++ v := ctx.UserValueBytes(key) if v != value { t.Fatalf("unexpected value obtained from VisitUserValues for key: %q, expecting: %#v but got: %#v", key, v, value) } }) if len(ctx.userValues) != vlen { t.Fatalf("the length of user values returned from VisitUserValues is not equal to the length of the userValues, expecting: %d but got: %d", len(ctx.userValues), vlen) } } func TestServerHeadRequest(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { fmt.Fprintf(ctx, "Request method is %q", ctx.Method()) ctx.SetContentType("aaa/bbb") }, } rw := &readWriter{} rw.r.WriteString("HEAD /foobar HTTP/1.1\r\nHost: aaa.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response resp.SkipBody = true if err := resp.Read(br); err != nil { t.Fatalf("Unexpected error when parsing response: %s", err) } if resp.Header.StatusCode() != StatusOK { t.Fatalf("unexpected status code: %d. Expecting %d", resp.Header.StatusCode(), StatusOK) } if len(resp.Body()) > 0 { t.Fatalf("Unexpected non-zero body %q", resp.Body()) } if resp.Header.ContentLength() != 24 { t.Fatalf("unexpected content-length %d. 
Expecting %d", resp.Header.ContentLength(), 24) } if string(resp.Header.ContentType()) != "aaa/bbb" { t.Fatalf("unexpected content-type %q. Expecting %q", resp.Header.ContentType(), "aaa/bbb") } data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if len(data) > 0 { t.Fatalf("unexpected remaining data %q", data) } } func TestServerExpect100Continue(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { if !ctx.IsPost() { t.Fatalf("unexpected method %q. Expecting POST", ctx.Method()) } if string(ctx.Path()) != "/foo" { t.Fatalf("unexpected path %q. Expecting %q", ctx.Path(), "/foo") } ct := ctx.Request.Header.ContentType() if string(ct) != "a/b" { t.Fatalf("unexpectected content-type: %q. Expecting %q", ct, "a/b") } if string(ctx.PostBody()) != "12345" { t.Fatalf("unexpected body: %q. Expecting %q", ctx.PostBody(), "12345") } ctx.WriteString("foobar") }, } rw := &readWriter{} rw.r.WriteString("POST /foo HTTP/1.1\r\nHost: gle.com\r\nExpect: 100-continue\r\nContent-Length: 5\r\nContent-Type: a/b\r\n\r\n12345") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, StatusOK, string(defaultContentType), "foobar") data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if len(data) > 0 { t.Fatalf("unexpected remaining data %q", data) } } func TestCompressHandler(t *testing.T) { expectedBody := "foo/bar/baz" h := CompressHandler(func(ctx *RequestCtx) { ctx.Write([]byte(expectedBody)) }) var ctx RequestCtx var resp Response // verify uncompressed response h(&ctx) s := ctx.Response.String() br := bufio.NewReader(bytes.NewBufferString(s)) if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } ce := 
resp.Header.Peek("Content-Encoding") if string(ce) != "" { t.Fatalf("unexpected Content-Encoding: %q. Expecting %q", ce, "") } body := resp.Body() if string(body) != expectedBody { t.Fatalf("unexpected body %q. Expecting %q", body, expectedBody) } // verify gzip-compressed response ctx.Request.Reset() ctx.Response.Reset() ctx.Request.Header.Set("Accept-Encoding", "gzip, deflate, sdhc") h(&ctx) s = ctx.Response.String() br = bufio.NewReader(bytes.NewBufferString(s)) if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } ce = resp.Header.Peek("Content-Encoding") if string(ce) != "gzip" { t.Fatalf("unexpected Content-Encoding: %q. Expecting %q", ce, "gzip") } body, err := resp.BodyGunzip() if err != nil { t.Fatalf("unexpected error: %s", err) } if string(body) != expectedBody { t.Fatalf("unexpected body %q. Expecting %q", body, expectedBody) } // an attempt to compress already compressed response ctx.Request.Reset() ctx.Response.Reset() ctx.Request.Header.Set("Accept-Encoding", "gzip, deflate, sdhc") hh := CompressHandler(h) hh(&ctx) s = ctx.Response.String() br = bufio.NewReader(bytes.NewBufferString(s)) if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } ce = resp.Header.Peek("Content-Encoding") if string(ce) != "gzip" { t.Fatalf("unexpected Content-Encoding: %q. Expecting %q", ce, "gzip") } body, err = resp.BodyGunzip() if err != nil { t.Fatalf("unexpected error: %s", err) } if string(body) != expectedBody { t.Fatalf("unexpected body %q. Expecting %q", body, expectedBody) } // verify deflate-compressed response ctx.Request.Reset() ctx.Response.Reset() ctx.Request.Header.Set("Accept-Encoding", "foobar, deflate, sdhc") h(&ctx) s = ctx.Response.String() br = bufio.NewReader(bytes.NewBufferString(s)) if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } ce = resp.Header.Peek("Content-Encoding") if string(ce) != "deflate" { t.Fatalf("unexpected Content-Encoding: %q. 
Expecting %q", ce, "deflate") } body, err = resp.BodyInflate() if err != nil { t.Fatalf("unexpected error: %s", err) } if string(body) != expectedBody { t.Fatalf("unexpected body %q. Expecting %q", body, expectedBody) } } func TestRequestCtxWriteString(t *testing.T) { var ctx RequestCtx n, err := ctx.WriteString("foo") if err != nil { t.Fatalf("unexpected error: %s", err) } if n != 3 { t.Fatalf("unexpected n %d. Expecting 3", n) } n, err = ctx.WriteString("привет") if err != nil { t.Fatalf("unexpected error: %s", err) } if n != 12 { t.Fatalf("unexpected n=%d. Expecting 12", n) } s := ctx.Response.Body() if string(s) != "fooпривет" { t.Fatalf("unexpected response body %q. Expecting %q", s, "fooпривет") } } func TestServeConnNonHTTP11KeepAlive(t *testing.T) { rw := &readWriter{} rw.r.WriteString("GET /foo HTTP/1.0\r\nConnection: keep-alive\r\nHost: google.com\r\n\r\n") rw.r.WriteString("GET /bar HTTP/1.0\r\nHost: google.com\r\n\r\n") rw.r.WriteString("GET /must/be/ignored HTTP/1.0\r\nHost: google.com\r\n\r\n") requestsServed := 0 ch := make(chan struct{}) go func() { err := ServeConn(rw, func(ctx *RequestCtx) { requestsServed++ ctx.SuccessString("aaa/bbb", "foobar") }) if err != nil { t.Fatalf("unexpected error in ServeConn: %s", err) } close(ch) }() select { case <-ch: case <-time.After(time.Second): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response // verify the first response if err := resp.Read(br); err != nil { t.Fatalf("Unexpected error when parsing response: %s", err) } if string(resp.Header.Peek("Connection")) != "keep-alive" { t.Fatalf("unexpected Connection header %q. Expecting %q", resp.Header.Peek("Connection"), "keep-alive") } if resp.Header.ConnectionClose() { t.Fatalf("unexpected Connection: close") } // verify the second response if err := resp.Read(br); err != nil { t.Fatalf("Unexpected error when parsing response: %s", err) } if string(resp.Header.Peek("Connection")) != "close" { t.Fatalf("unexpected Connection header %q. 
Expecting %q", resp.Header.Peek("Connection"), "close") } if !resp.Header.ConnectionClose() { t.Fatalf("expecting Connection: close") } data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if len(data) != 0 { t.Fatalf("Unexpected data read after responses %q", data) } if requestsServed != 2 { t.Fatalf("unexpected number of requests served: %d. Expecting 2", requestsServed) } } func TestRequestCtxSetBodyStreamWriter(t *testing.T) { var ctx RequestCtx var req Request ctx.Init(&req, nil, defaultLogger) if ctx.IsBodyStream() { t.Fatalf("IsBodyStream must return false") } ctx.SetBodyStreamWriter(func(w *bufio.Writer) { fmt.Fprintf(w, "body writer line 1\n") if err := w.Flush(); err != nil { t.Fatalf("unexpected error: %s", err) } fmt.Fprintf(w, "body writer line 2\n") }) if !ctx.IsBodyStream() { t.Fatalf("IsBodyStream must return true") } s := ctx.Response.String() br := bufio.NewReader(bytes.NewBufferString(s)) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("Error when reading response: %s", err) } body := string(resp.Body()) expectedBody := "body writer line 1\nbody writer line 2\n" if body != expectedBody { t.Fatalf("unexpected body: %q. 
Expecting %q", body, expectedBody) } } func TestRequestCtxIfModifiedSince(t *testing.T) { var ctx RequestCtx var req Request ctx.Init(&req, nil, defaultLogger) lastModified := time.Now().Add(-time.Hour) if !ctx.IfModifiedSince(lastModified) { t.Fatalf("IfModifiedSince must return true for non-existing If-Modified-Since header") } ctx.Request.Header.Set("If-Modified-Since", string(AppendHTTPDate(nil, lastModified))) if ctx.IfModifiedSince(lastModified) { t.Fatalf("If-Modified-Since current time must return false") } past := lastModified.Add(-time.Hour) if ctx.IfModifiedSince(past) { t.Fatalf("If-Modified-Since past time must return false") } future := lastModified.Add(time.Hour) if !ctx.IfModifiedSince(future) { t.Fatalf("If-Modified-Since future time must return true") } } func TestRequestCtxSendFileNotModified(t *testing.T) { var ctx RequestCtx var req Request ctx.Init(&req, nil, defaultLogger) filePath := "./server_test.go" lastModified, err := FileLastModified(filePath) if err != nil { t.Fatalf("unexpected error: %s", err) } ctx.Request.Header.Set("If-Modified-Since", string(AppendHTTPDate(nil, lastModified))) ctx.SendFile(filePath) s := ctx.Response.String() var resp Response br := bufio.NewReader(bytes.NewBufferString(s)) if err := resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if resp.StatusCode() != StatusNotModified { t.Fatalf("unexpected status code: %d. 
Expecting %d", resp.StatusCode(), StatusNotModified) } if len(resp.Body()) > 0 { t.Fatalf("unexpected non-zero response body: %q", resp.Body()) } } func TestRequestCtxSendFileModified(t *testing.T) { var ctx RequestCtx var req Request ctx.Init(&req, nil, defaultLogger) filePath := "./server_test.go" lastModified, err := FileLastModified(filePath) if err != nil { t.Fatalf("unexpected error: %s", err) } lastModified = lastModified.Add(-time.Hour) ctx.Request.Header.Set("If-Modified-Since", string(AppendHTTPDate(nil, lastModified))) ctx.SendFile(filePath) s := ctx.Response.String() var resp Response br := bufio.NewReader(bytes.NewBufferString(s)) if err := resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if resp.StatusCode() != StatusOK { t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusOK) } f, err := os.Open(filePath) if err != nil { t.Fatalf("cannot open file: %s", err) } body, err := ioutil.ReadAll(f) f.Close() if err != nil { t.Fatalf("error when reading file: %s", err) } if !bytes.Equal(resp.Body(), body) { t.Fatalf("unexpected response body: %q. Expecting %q", resp.Body(), body) } } func TestRequestCtxSendFile(t *testing.T) { var ctx RequestCtx var req Request ctx.Init(&req, nil, defaultLogger) filePath := "./server_test.go" ctx.SendFile(filePath) w := &bytes.Buffer{} bw := bufio.NewWriter(w) if err := ctx.Response.Write(bw); err != nil { t.Fatalf("error when writing response: %s", err) } if err := bw.Flush(); err != nil { t.Fatalf("error when flushing response: %s", err) } var resp Response br := bufio.NewReader(w) if err := resp.Read(br); err != nil { t.Fatalf("error when reading response: %s", err) } if resp.StatusCode() != StatusOK { t.Fatalf("unexpected status code: %d. 
Expecting %d", resp.StatusCode(), StatusOK) } f, err := os.Open(filePath) if err != nil { t.Fatalf("cannot open file: %s", err) } body, err := ioutil.ReadAll(f) f.Close() if err != nil { t.Fatalf("error when reading file: %s", err) } if !bytes.Equal(resp.Body(), body) { t.Fatalf("unexpected response body: %q. Expecting %q", resp.Body(), body) } } func TestRequestCtxHijack(t *testing.T) { hijackStartCh := make(chan struct{}) hijackStopCh := make(chan struct{}) s := &Server{ Handler: func(ctx *RequestCtx) { if ctx.Hijacked() { t.Fatalf("connection mustn't be hijacked") } ctx.Hijack(func(c net.Conn) { <-hijackStartCh b := make([]byte, 1) // ping-pong echo via hijacked conn for { n, err := c.Read(b) if n != 1 { if err == io.EOF { close(hijackStopCh) return } if err != nil { t.Fatalf("unexpected error: %s", err) } t.Fatalf("unexpected number of bytes read: %d. Expecting 1", n) } if _, err = c.Write(b); err != nil { t.Fatalf("unexpected error when writing data: %s", err) } } }) if !ctx.Hijacked() { t.Fatalf("connection must be hijacked") } ctx.Success("foo/bar", []byte("hijack it!")) }, } hijackedString := "foobar baz hijacked!!!" rw := &readWriter{} rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n") rw.r.WriteString(hijackedString) ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, StatusOK, "foo/bar", "hijack it!") close(hijackStartCh) select { case <-hijackStopCh: case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if string(data) != hijackedString { t.Fatalf("Unexpected data read after the first response %q. 
Expecting %q", data, hijackedString) } } func TestRequestCtxInit(t *testing.T) { var ctx RequestCtx var logger customLogger globalConnID = 0x123456 ctx.Init(&ctx.Request, zeroTCPAddr, &logger) ip := ctx.RemoteIP() if !ip.IsUnspecified() { t.Fatalf("unexpected ip for bare RequestCtx: %q. Expected 0.0.0.0", ip) } ctx.Logger().Printf("foo bar %d", 10) expectedLog := "#0012345700000000 - 0.0.0.0:0<->0.0.0.0:0 - GET http:/// - foo bar 10\n" if logger.out != expectedLog { t.Fatalf("Unexpected log output: %q. Expected %q", logger.out, expectedLog) } } func TestTimeoutHandlerSuccess(t *testing.T) { ln := fasthttputil.NewInmemoryListener() h := func(ctx *RequestCtx) { if string(ctx.Path()) == "/" { ctx.Success("aaa/bbb", []byte("real response")) } } s := &Server{ Handler: TimeoutHandler(h, 10*time.Second, "timeout!!!"), } serverCh := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexepcted error: %s", err) } close(serverCh) }() concurrency := 20 clientCh := make(chan struct{}, concurrency) for i := 0; i < concurrency; i++ { go func() { conn, err := ln.Dial() if err != nil { t.Fatalf("unexepcted error: %s", err) } if _, err = conn.Write([]byte("GET / HTTP/1.1\r\nHost: google.com\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(conn) verifyResponse(t, br, StatusOK, "aaa/bbb", "real response") clientCh <- struct{}{} }() } for i := 0; i < concurrency; i++ { select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestTimeoutHandlerTimeout(t *testing.T) { ln := fasthttputil.NewInmemoryListener() readyCh := make(chan struct{}) doneCh := make(chan struct{}) h := func(ctx *RequestCtx) { ctx.Success("aaa/bbb", []byte("real response")) <-readyCh doneCh <- struct{}{} } s := &Server{ Handler: TimeoutHandler(h, 20*time.Millisecond, 
"timeout!!!"), } serverCh := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexepcted error: %s", err) } close(serverCh) }() concurrency := 20 clientCh := make(chan struct{}, concurrency) for i := 0; i < concurrency; i++ { go func() { conn, err := ln.Dial() if err != nil { t.Fatalf("unexepcted error: %s", err) } if _, err = conn.Write([]byte("GET / HTTP/1.1\r\nHost: google.com\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(conn) verifyResponse(t, br, StatusRequestTimeout, string(defaultContentType), "timeout!!!") clientCh <- struct{}{} }() } for i := 0; i < concurrency; i++ { select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } } close(readyCh) for i := 0; i < concurrency; i++ { select { case <-doneCh: case <-time.After(time.Second): t.Fatalf("timeout") } } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } } func TestServerGetOnly(t *testing.T) { h := func(ctx *RequestCtx) { if !ctx.IsGet() { t.Fatalf("non-get request: %q", ctx.Method()) } ctx.Success("foo/bar", []byte("success")) } s := &Server{ Handler: h, GetOnly: true, } rw := &readWriter{} rw.r.WriteString("POST /foo HTTP/1.1\r\nHost: google.com\r\nContent-Length: 5\r\nContent-Type: aaa\r\n\r\n12345") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err == nil { t.Fatalf("expecting error") } if err != errGetOnly { t.Fatalf("Unexpected error from serveConn: %s. Expecting %s", err, errGetOnly) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } statusCode := resp.StatusCode() if statusCode != StatusBadRequest { t.Fatalf("unexpected status code: %d. 
Expecting %d", statusCode, StatusBadRequest) } if !resp.ConnectionClose() { t.Fatalf("missing 'Connection: close' response header") } } func TestServerTimeoutErrorWithResponse(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { go func() { ctx.Success("aaa/bbb", []byte("xxxyyy")) }() var resp Response resp.SetStatusCode(123) resp.SetBodyString("foobar. Should be ignored") ctx.TimeoutErrorWithResponse(&resp) resp.SetStatusCode(456) resp.ResetBody() fmt.Fprintf(resp.BodyWriter(), "path=%s", ctx.Path()) resp.Header.SetContentType("foo/bar") ctx.TimeoutErrorWithResponse(&resp) }, } rw := &readWriter{} rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n") rw.r.WriteString("GET /bar HTTP/1.1\r\nHost: google.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 456, "foo/bar", "path=/foo") data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if len(data) != 0 { t.Fatalf("Unexpected data read after the first response %q. 
Expecting %q", data, "") } }

// TestServerTimeoutErrorWithCode verifies that TimeoutErrorWithCode "steals"
// the RequestCtx: the last call made before the handler returns wins, any
// response written concurrently by the handler goroutine is discarded, and
// the connection is closed after the first (timeout) response, so the second
// pipelined request is never answered.
func TestServerTimeoutErrorWithCode(t *testing.T) {
	s := &Server{
		Handler: func(ctx *RequestCtx) {
			// Response written from another goroutine must be ignored
			// once the ctx has been marked as timed out.
			go func() { ctx.Success("aaa/bbb", []byte("xxxyyy")) }()
			// Only the LAST TimeoutErrorWithCode call takes effect.
			ctx.TimeoutErrorWithCode("should be ignored", 234)
			ctx.TimeoutErrorWithCode("stolen ctx", StatusBadRequest)
		},
	}
	rw := &readWriter{}
	// Two pipelined requests; only the first should produce a response.
	rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n")
	rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n")
	ch := make(chan error)
	go func() { ch <- s.ServeConn(rw) }()
	select {
	case err := <-ch:
		if err != nil {
			t.Fatalf("Unexpected error from serveConn: %s", err)
		}
	case <-time.After(100 * time.Millisecond):
		t.Fatalf("timeout")
	}
	br := bufio.NewReader(&rw.w)
	// Expect the body/status of the last TimeoutErrorWithCode call.
	verifyResponse(t, br, StatusBadRequest, string(defaultContentType), "stolen ctx")
	data, err := ioutil.ReadAll(br)
	if err != nil {
		t.Fatalf("Unexpected error when reading remaining data: %s", err)
	}
	if len(data) != 0 {
		t.Fatalf("Unexpected data read after the first response %q. Expecting %q", data, "")
	}
}

// TestServerTimeoutError is the same scenario as above for the plain
// TimeoutError API: the later call wins and the response carries the default
// StatusRequestTimeout code.
func TestServerTimeoutError(t *testing.T) {
	s := &Server{
		Handler: func(ctx *RequestCtx) {
			// Concurrent handler write must be discarded after TimeoutError.
			go func() { ctx.Success("aaa/bbb", []byte("xxxyyy")) }()
			ctx.TimeoutError("should be ignored")
			ctx.TimeoutError("stolen ctx")
		},
	}
	rw := &readWriter{}
	rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n")
	rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n")
	ch := make(chan error)
	go func() { ch <- s.ServeConn(rw) }()
	select {
	case err := <-ch:
		if err != nil {
			t.Fatalf("Unexpected error from serveConn: %s", err)
		}
	case <-time.After(100 * time.Millisecond):
		t.Fatalf("timeout")
	}
	br := bufio.NewReader(&rw.w)
	verifyResponse(t, br, StatusRequestTimeout, string(defaultContentType), "stolen ctx")
	data, err := ioutil.ReadAll(br)
	if err != nil {
		t.Fatalf("Unexpected error when reading remaining data: %s", err)
	}
	if len(data) != 0 {
		t.Fatalf("Unexpected data read after the first response %q. 
Expecting %q", data, "") } } func TestServerMaxKeepaliveDuration(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { time.Sleep(20 * time.Millisecond) }, MaxKeepaliveDuration: 10 * time.Millisecond, } rw := &readWriter{} rw.r.WriteString("GET /aaa HTTP/1.1\r\nHost: aa.com\r\n\r\n") rw.r.WriteString("GET /bbbb HTTP/1.1\r\nHost: bbb.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("Unexpected error when parsing response: %s", err) } if !resp.ConnectionClose() { t.Fatalf("Response must have 'connection: close' header") } verifyResponseHeader(t, &resp.Header, 200, 0, string(defaultContentType)) data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if len(data) != 0 { t.Fatalf("Unexpected data read after the first response %q. 
Expecting %q", data, "") } }

// TestServerMaxRequestsPerConn verifies that with MaxRequestsPerConn=1 the
// server answers exactly one request, tags the response with
// 'Connection: close', and then drops the connection — the second pipelined
// request must never be served.
func TestServerMaxRequestsPerConn(t *testing.T) {
	s := &Server{
		Handler:            func(ctx *RequestCtx) {},
		MaxRequestsPerConn: 1,
	}
	rw := &readWriter{}
	// Two pipelined requests; only the first may be answered.
	rw.r.WriteString("GET /foo1 HTTP/1.1\r\nHost: google.com\r\n\r\n")
	rw.r.WriteString("GET /bar HTTP/1.1\r\nHost: aaa.com\r\n\r\n")
	ch := make(chan error)
	go func() { ch <- s.ServeConn(rw) }()
	select {
	case err := <-ch:
		if err != nil {
			t.Fatalf("Unexpected error from serveConn: %s", err)
		}
	case <-time.After(100 * time.Millisecond):
		t.Fatalf("timeout")
	}
	br := bufio.NewReader(&rw.w)
	var resp Response
	if err := resp.Read(br); err != nil {
		t.Fatalf("Unexpected error when parsing response: %s", err)
	}
	// The request limit forces the server to signal connection close.
	if !resp.ConnectionClose() {
		t.Fatalf("Response must have 'connection: close' header")
	}
	verifyResponseHeader(t, &resp.Header, 200, 0, string(defaultContentType))
	data, err := ioutil.ReadAll(br)
	if err != nil {
		t.Fatalf("Unexpected error when reading remaining data: %s", err)
	}
	if len(data) != 0 {
		t.Fatalf("Unexpected data read after the first response %q. Expecting %q", data, "")
	}
}

// TestServerConnectionClose verifies that a handler calling
// SetConnectionClose() makes the server emit 'Connection: close' and stop
// serving further pipelined requests on the same connection.
func TestServerConnectionClose(t *testing.T) {
	s := &Server{
		Handler: func(ctx *RequestCtx) {
			ctx.SetConnectionClose()
		},
	}
	rw := &readWriter{}
	rw.r.WriteString("GET /foo1 HTTP/1.1\r\nHost: google.com\r\n\r\n")
	// This second request must be ignored after the forced close.
	rw.r.WriteString("GET /must/be/ignored HTTP/1.1\r\nHost: aaa.com\r\n\r\n")
	ch := make(chan error)
	go func() { ch <- s.ServeConn(rw) }()
	select {
	case err := <-ch:
		if err != nil {
			t.Fatalf("Unexpected error from serveConn: %s", err)
		}
	case <-time.After(100 * time.Millisecond):
		t.Fatalf("timeout")
	}
	br := bufio.NewReader(&rw.w)
	var resp Response
	if err := resp.Read(br); err != nil {
		t.Fatalf("Unexpected error when parsing response: %s", err)
	}
	if !resp.ConnectionClose() {
		t.Fatalf("expecting Connection: close header")
	}
	data, err := ioutil.ReadAll(br)
	if err != nil {
		t.Fatalf("Unexpected error when reading remaining data: %s", err)
	}
	if len(data) != 0 {
		t.Fatalf("Unexpected data read after the first response %q. 
Expecting %q", data, "") } } func TestServerRequestNumAndTime(t *testing.T) { n := uint64(0) var connT time.Time s := &Server{ Handler: func(ctx *RequestCtx) { n++ if ctx.ConnRequestNum() != n { t.Fatalf("unexpected request number: %d. Expecting %d", ctx.ConnRequestNum(), n) } if connT.IsZero() { connT = ctx.ConnTime() } if ctx.ConnTime() != connT { t.Fatalf("unexpected serve conn time: %s. Expecting %s", ctx.ConnTime(), connT) } }, } rw := &readWriter{} rw.r.WriteString("GET /foo1 HTTP/1.1\r\nHost: google.com\r\n\r\n") rw.r.WriteString("GET /bar HTTP/1.1\r\nHost: google.com\r\n\r\n") rw.r.WriteString("GET /baz HTTP/1.1\r\nHost: google.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } if n != 3 { t.Fatalf("unexpected number of requests served: %d. Expecting %d", n, 3) } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 200, string(defaultContentType), "") } func TestServerEmptyResponse(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { // do nothing :) }, } rw := &readWriter{} rw.r.WriteString("GET /foo1 HTTP/1.1\r\nHost: google.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 200, string(defaultContentType), "") } type customLogger struct { lock sync.Mutex out string } func (cl *customLogger) Printf(format string, args ...interface{}) { cl.lock.Lock() cl.out += fmt.Sprintf(format, args...)[6:] + "\n" cl.lock.Unlock() } func TestServerLogger(t *testing.T) { cl := &customLogger{} s := &Server{ Handler: func(ctx *RequestCtx) { logger := ctx.Logger() h := &ctx.Request.Header logger.Printf("begin") ctx.Success("text/html", 
[]byte(fmt.Sprintf("requestURI=%s, body=%q, remoteAddr=%s", h.RequestURI(), ctx.Request.Body(), ctx.RemoteAddr()))) logger.Printf("end") }, Logger: cl, } rw := &readWriter{} rw.r.WriteString("GET /foo1 HTTP/1.1\r\nHost: google.com\r\n\r\n") rw.r.WriteString("POST /foo2 HTTP/1.1\r\nHost: aaa.com\r\nContent-Length: 5\r\nContent-Type: aa\r\n\r\nabcde") rwx := &readWriterRemoteAddr{ rw: rw, addr: &net.TCPAddr{ IP: []byte{1, 2, 3, 4}, Port: 8765, }, } globalConnID = 0 ch := make(chan error) go func() { ch <- s.ServeConn(rwx) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 200, "text/html", "requestURI=/foo1, body=\"\", remoteAddr=1.2.3.4:8765") verifyResponse(t, br, 200, "text/html", "requestURI=/foo2, body=\"abcde\", remoteAddr=1.2.3.4:8765") expectedLogOut := `#0000000100000001 - 1.2.3.4:8765<->1.2.3.4:8765 - GET http://google.com/foo1 - begin #0000000100000001 - 1.2.3.4:8765<->1.2.3.4:8765 - GET http://google.com/foo1 - end #0000000100000002 - 1.2.3.4:8765<->1.2.3.4:8765 - POST http://aaa.com/foo2 - begin #0000000100000002 - 1.2.3.4:8765<->1.2.3.4:8765 - POST http://aaa.com/foo2 - end ` if cl.out != expectedLogOut { t.Fatalf("Unexpected logger output: %q. 
Expected %q", cl.out, expectedLogOut) } } func TestServerRemoteAddr(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { h := &ctx.Request.Header ctx.Success("text/html", []byte(fmt.Sprintf("requestURI=%s, remoteAddr=%s, remoteIP=%s", h.RequestURI(), ctx.RemoteAddr(), ctx.RemoteIP()))) }, } rw := &readWriter{} rw.r.WriteString("GET /foo1 HTTP/1.1\r\nHost: google.com\r\n\r\n") rwx := &readWriterRemoteAddr{ rw: rw, addr: &net.TCPAddr{ IP: []byte{1, 2, 3, 4}, Port: 8765, }, } ch := make(chan error) go func() { ch <- s.ServeConn(rwx) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 200, "text/html", "requestURI=/foo1, remoteAddr=1.2.3.4:8765, remoteIP=1.2.3.4") } type readWriterRemoteAddr struct { net.Conn rw io.ReadWriteCloser addr net.Addr } func (rw *readWriterRemoteAddr) Close() error { return rw.rw.Close() } func (rw *readWriterRemoteAddr) Read(b []byte) (int, error) { return rw.rw.Read(b) } func (rw *readWriterRemoteAddr) Write(b []byte) (int, error) { return rw.rw.Write(b) } func (rw *readWriterRemoteAddr) RemoteAddr() net.Addr { return rw.addr } func (rw *readWriterRemoteAddr) LocalAddr() net.Addr { return rw.addr } func TestServerConnError(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { ctx.Error("foobar", 423) }, } rw := &readWriter{} rw.r.WriteString("GET /foo/bar?baz HTTP/1.1\r\nHost: google.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("Unexpected error when reading response: %s", err) } if resp.Header.StatusCode() != 423 { t.Fatalf("Unexpected status code %d. 
Expected %d", resp.Header.StatusCode(), 423) } if resp.Header.ContentLength() != 6 { t.Fatalf("Unexpected Content-Length %d. Expected %d", resp.Header.ContentLength(), 6) } if !bytes.Equal(resp.Header.Peek("Content-Type"), defaultContentType) { t.Fatalf("Unexpected Content-Type %q. Expected %q", resp.Header.Peek("Content-Type"), defaultContentType) } if !bytes.Equal(resp.Body(), []byte("foobar")) { t.Fatalf("Unexpected body %q. Expected %q", resp.Body(), "foobar") } } func TestServeConnSingleRequest(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { h := &ctx.Request.Header ctx.Success("aaa", []byte(fmt.Sprintf("requestURI=%s, host=%s", h.RequestURI(), h.Peek("Host")))) }, } rw := &readWriter{} rw.r.WriteString("GET /foo/bar?baz HTTP/1.1\r\nHost: google.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 200, "aaa", "requestURI=/foo/bar?baz, host=google.com") } func TestServeConnMultiRequests(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { h := &ctx.Request.Header ctx.Success("aaa", []byte(fmt.Sprintf("requestURI=%s, host=%s", h.RequestURI(), h.Peek("Host")))) }, } rw := &readWriter{} rw.r.WriteString("GET /foo/bar?baz HTTP/1.1\r\nHost: google.com\r\n\r\nGET /abc HTTP/1.1\r\nHost: foobar.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, 200, "aaa", "requestURI=/foo/bar?baz, host=google.com") verifyResponse(t, br, 200, "aaa", "requestURI=/abc, host=foobar.com") } func verifyResponse(t *testing.T, r *bufio.Reader, expectedStatusCode int, expectedContentType, expectedBody 
string) { var resp Response if err := resp.Read(r); err != nil { t.Fatalf("Unexpected error when parsing response: %s", err) } if !bytes.Equal(resp.Body(), []byte(expectedBody)) { t.Fatalf("Unexpected body %q. Expected %q", resp.Body(), []byte(expectedBody)) } verifyResponseHeader(t, &resp.Header, expectedStatusCode, len(resp.Body()), expectedContentType) } type readWriter struct { net.Conn r bytes.Buffer w bytes.Buffer } func (rw *readWriter) Close() error { return nil } func (rw *readWriter) Read(b []byte) (int, error) { return rw.r.Read(b) } func (rw *readWriter) Write(b []byte) (int, error) { return rw.w.Write(b) } func (rw *readWriter) RemoteAddr() net.Addr { return zeroTCPAddr } func (rw *readWriter) LocalAddr() net.Addr { return zeroTCPAddr } func (rw *readWriter) SetReadDeadline(t time.Time) error { return nil } func (rw *readWriter) SetWriteDeadline(t time.Time) error { return nil }<|fim▁end|>
ch := make(chan struct{})
<|file_name|>test_permission.py<|end_file_name|><|fim▁begin|>import unittest from bolt.discord.permissions import Permission class TestPermission(unittest.TestCase): def test_permission_from_list_to_list(self): expected = ['MANAGE_WEBHOOKS', 'USE_EXTERNAL_EMOJIS'] permission = Permission(['MANAGE_WEBHOOKS', 'USE_EXTERNAL_EMOJIS']) actual = permission.to_list() self.assertListEqual(sorted(actual), sorted(expected)) def test_permission_from_int_to_list(self): expected = ['ADMINISTRATOR', 'SEND_MESSAGES'] permission = Permission(2056) actual = permission.to_list() self.assertListEqual(sorted(actual), sorted(expected)) def test_permission_in_permission(self): self.assertTrue("ADMINISTRATOR" in Permission(2056))<|fim▁hole|> def test_permission_not_in_permission(self): self.assertTrue("USE_VAD" not in Permission(2056)) def test_permissions_not_in_permission(self): self.assertTrue(["SPEAK", "MANAGE_EMOJIS"] not in Permission(2056)) def test_permission_add(self): permission = Permission(2056) self.assertTrue(permission.allows("ADMINISTRATOR")) self.assertFalse(permission.allows("MENTION_EVERYONE")) permission.add("MENTION_EVERYONE") self.assertTrue(permission.allows("MENTION_EVERYONE")) def test_permission_remove(self): permission = Permission(2056) self.assertTrue(permission.allows("ADMINISTRATOR")) self.assertTrue(permission.allows("SEND_MESSAGES")) permission.remove("SEND_MESSAGES") self.assertFalse(permission.allows("SEND_MESSAGES"))<|fim▁end|>
def test_permissions_in_permission(self): self.assertTrue(["ADMINISTRATOR", "SEND_MESSAGES"] in Permission(2056))
<|file_name|>rec-align-u64.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Issue #2303 #![feature(intrinsics)] use std::mem; mod rusti { extern "rust-intrinsic" { pub fn pref_align_of<T>() -> uint; pub fn min_align_of<T>() -> uint; } } // This is the type with the questionable alignment #[derive(Debug)] struct Inner { c64: u64 } // This is the type that contains the type with the // questionable alignment, for testing #[derive(Debug)] struct Outer { c8: u8, t: Inner } #[cfg(any(target_os = "linux",<|fim▁hole|> target_os = "freebsd", target_os = "dragonfly", target_os = "openbsd"))] mod m { #[cfg(target_arch = "x86")] pub mod m { pub fn align() -> uint { 4 } pub fn size() -> uint { 12 } } #[cfg(any(target_arch = "x86_64", target_arch = "arm", target_arch = "aarch64"))] pub mod m { pub fn align() -> uint { 8 } pub fn size() -> uint { 16 } } } #[cfg(target_os = "bitrig")] mod m { #[cfg(target_arch = "x86_64")] pub mod m { pub fn align() -> uint { 8 } pub fn size() -> uint { 16 } } } #[cfg(target_os = "windows")] mod m { #[cfg(target_arch = "x86")] pub mod m { pub fn align() -> uint { 8 } pub fn size() -> uint { 16 } } #[cfg(target_arch = "x86_64")] pub mod m { pub fn align() -> uint { 8 } pub fn size() -> uint { 16 } } } #[cfg(target_os = "android")] mod m { #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] pub mod m { pub fn align() -> uint { 8 } pub fn size() -> uint { 16 } } } pub fn main() { unsafe { let x = Outer {c8: 22, t: Inner {c64: 44}}; let y = format!("{:?}", x); println!("align inner = {:?}", 
rusti::min_align_of::<Inner>()); println!("size outer = {:?}", mem::size_of::<Outer>()); println!("y = {:?}", y); // per clang/gcc the alignment of `Inner` is 4 on x86. assert_eq!(rusti::min_align_of::<Inner>(), m::m::align()); // per clang/gcc the size of `Outer` should be 12 // because `Inner`s alignment was 4. assert_eq!(mem::size_of::<Outer>(), m::m::size()); assert_eq!(y, "Outer { c8: 22, t: Inner { c64: 44 } }".to_string()); } }<|fim▁end|>
target_os = "macos",
<|file_name|>container.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding=utf-8 """ The `META-INF/container.xml` file, storing: 1. the Rendition objects 2. the Rendition Mapping Document """ from yael.element import Element from yael.jsonable import JSONAble from yael.mediatype import MediaType from yael.namespace import Namespace from yael.rendition import Rendition from yael.rmdocument import RMDocument import yael.util __author__ = "Alberto Pettarin" __copyright__ = "Copyright 2015, Alberto Pettarin (www.albertopettarin.it)" __license__ = "MIT" __version__ = "0.0.9" __email__ = "[email protected]" __status__ = "Development" class Container(Element): """ Build the `META-INF/container.xml` file or parse it from `string` or `obj`. """ A_ACCESSMODE = "accessMode" A_FULL_PATH = "full-path" A_HREF = "href" A_LABEL = "label" A_LANGUAGE = "language" A_LAYOUT = "layout" A_MEDIA = "media" A_MEDIA_TYPE = "media-type" A_MEDIA_TYPE = "media-type" A_REL = "rel" A_NS_ACCESSMODE = "{{{0}}}{1}".format(Namespace.RENDITION, A_ACCESSMODE) A_NS_LABEL = "{{{0}}}{1}".format(Namespace.RENDITION, A_LABEL) A_NS_LANGUAGE = "{{{0}}}{1}".format(Namespace.RENDITION, A_LANGUAGE) A_NS_LAYOUT = "{{{0}}}{1}".format(Namespace.RENDITION, A_LAYOUT) A_NS_MEDIA = "{{{0}}}{1}".format(Namespace.RENDITION, A_MEDIA) E_CONTAINER = "container" E_LINK = "link" E_ROOTFILE = "rootfile" E_ROOTFILES = "rootfiles" V_ACCESSMODE_AUDITORY = "auditory" V_ACCESSMODE_TACTILE = "tactile" V_ACCESSMODE_TEXTUAL = "textual" V_ACCESSMODE_VISUAL = "visual" V_LAYOUT_PRE_PAGINATED = "pre-paginated" V_LAYOUT_REFLOWABLE = "reflowable" V_REL_MAPPING = "mapping" def __init__(self, internal_path=None, obj=None, string=None): self.renditions = [] self.rm_document = None Element.__init__( self, internal_path=internal_path, obj=obj, string=string) def json_object(self, recursive=True): obj = { "internal_path": self.internal_path, "renditions": len(self.renditions), "rm_document": (self.rm_document == None), } if recursive: 
obj["renditions"] = JSONAble.safe(self.renditions) obj["rm_document"] = JSONAble.safe(self.rm_document) return obj def parse_object(self, obj): try: # locate `<container>` element container_arr = yael.util.query_xpath( obj=obj, query="/{0}:{1}", args=['c', Container.E_CONTAINER], nsp={'c': Namespace.CONTAINER}, required=Container.E_CONTAINER) container = container_arr[0] # locate `<rootfile>` elements rootfile_arr = yael.util.query_xpath( obj=container, query="{0}:{1}/{0}:{2}", args=['c', Container.E_ROOTFILES, Container.E_ROOTFILE], nsp={'c': Namespace.CONTAINER}, required=None) for rootfile in rootfile_arr: self._parse_rootfile(rootfile) # locate `<link>` optional element link_arr = yael.util.query_xpath( obj=container, query="{0}:{1}", args=['c', Container.E_LINK], nsp={'c': Namespace.CONTAINER}, required=None) for link in link_arr: self._parse_link(link) except: raise Exception("Error while parsing the given object") def add_rendition(self, rendition): """ Add a Rendition to this Container. :param rendition: Rendition to be added :type rendition: :class:`yael.rendition.Rendition` """ self.renditions.append(rendition) @property def renditions(self): """ The list of Rendition objects in this Container. :rtype: list of :class:`yael.rendition.Rendition` """ return self.__renditions @renditions.setter def renditions(self, renditions): self.__renditions = renditions @property def rm_document(self): """ The Rendition Mapping Document object in this Container, or None if it is not present. :rtype: :class:`yael.rmdocument.RMDocument` """ return self.__rm_document @rm_document.setter def rm_document(self, rm_document): self.__rm_document = rm_document @property def default_rendition(self): """ The Default Rendition object in this Container, or None if there are no Renditions. 
:rtype: :class:`yael.rendition.Rendition` """ return yael.util.safe_first(self.renditions) def _parse_rootfile(self, obj): """ Parse the given `<rootfile>` node object, and append the parsed Rendition to this Container. """ # required attributes full_path = obj.get(Container.A_FULL_PATH) media_type = obj.get(Container.A_MEDIA_TYPE) if (full_path != None) and (media_type != None): r_obj = Rendition(internal_path=full_path) r_obj.v_full_path = full_path r_obj.v_media_type = media_type <|fim▁hole|> r_obj.v_rendition_accessmode = obj.get(Container.A_NS_ACCESSMODE) r_obj.v_rendition_label = obj.get(Container.A_NS_LABEL) r_obj.v_rendition_language = obj.get(Container.A_NS_LANGUAGE) r_obj.v_rendition_layout = obj.get(Container.A_NS_LAYOUT) r_obj.v_rendition_media = obj.get(Container.A_NS_MEDIA) self.renditions.append(r_obj) def _parse_link(self, obj): """ Parse the given `<link>` node object, and append the parsed RMDocument to this Container. """ # required attributes for rendition mapping document rel = obj.get(Container.A_REL) href = obj.get(Container.A_HREF) media_type = obj.get(Container.A_MEDIA_TYPE) if ((rel == Container.V_REL_MAPPING) and (media_type == MediaType.XHTML) and (href != None)): self.rm_document = RMDocument(internal_path=href) return None<|fim▁end|>
# multiple renditions
<|file_name|>peliculasyseries.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # -*- Channel PeliculasySeries -*- # -*- Created for Alfa-addon -*- # -*- By the Alfa Develop Group -*- import re import urllib import base64 from channelselector import get_thumb from core import httptools from core import jsontools from core import scrapertools from core import servertools from core import tmdb from lib import jsunpack from core.item import Item from channels import filtertools from channels import autoplay from platformcode import config, logger IDIOMAS = {'la': 'Latino', 'lat':'Latino', 'cas':'Castellano','es': 'Castellano', 'vs': 'VOSE', 'vos':'VOSE', 'vo':'VO', 'ori':'VO', 'so':'VOS', 'sor':'VOS'} list_language = IDIOMAS.values() list_quality = ['TS','Screener','DVDRip','HDRip', 'HDTV', 'micro720', 'micro1080'] list_servers = ['openload', 'rapidvideo', 'powvideo', 'gamovideo', 'streamplay', 'flashx', 'clipwatching', 'vidoza', 'thevideome'] __comprueba_enlaces__ = config.get_setting('comprueba_enlaces', 'peliculasyseries') __comprueba_enlaces_num__ = config.get_setting('comprueba_enlaces_num', 'peliculasyseries') host = 'https://peliculasyseries.org/' def mainlist(item): logger.info() autoplay.init(item.channel, list_servers, list_quality) itemlist = [] itemlist.append(Item(channel=item.channel, title='Peliculas', action='menu_movies', thumbnail= get_thumb('movies', auto=True))) itemlist.append(Item(channel=item.channel, title='Series', url=host+'series', action='list_all', type='tvshows', thumbnail= get_thumb('tvshows', auto=True))) itemlist.append( item.clone(title="Buscar", action="search", url=host + 'buscar/q/', thumbnail=get_thumb("search", auto=True), extra='movie')) itemlist = filtertools.show_option(itemlist, item.channel, list_language, list_quality) autoplay.show_option(item.channel, itemlist) return itemlist def menu_movies(item): logger.info() itemlist=[] itemlist.append(Item(channel=item.channel, title='Todas', url=host + 'movies', 
action='list_all', thumbnail=get_thumb('all', auto=True), type='movies')) itemlist.append(Item(channel=item.channel, title='Genero', action='section', thumbnail=get_thumb('genres', auto=True), type='movies')) return itemlist def get_source(url): logger.info() data = httptools.downloadpage(url).data data = re.sub(r'\n|\r|\t|&nbsp;|<br>|\s{2,}', "", data) return data def get_language(lang_data): logger.info() language = [] lang_data = lang_data.replace('language-ES', '').replace('medium', '').replace('serie', '').replace('-','') if 'class' in lang_data: lang_list = scrapertools.find_multiple_matches(lang_data, 'class=" ([^"]+)"') else: return lang_data.strip() for lang in lang_list: if lang not in IDIOMAS: lang = 'VOS' if lang not in language: language.append(IDIOMAS[lang]) return language def section(item): logger.info() itemlist=[] duplicados=[] data = get_source(host) data = scrapertools.find_single_match(data, 'data-toggle="dropdown">Géneros.*?multi-column-dropdown">.*?"clearfix"') if 'Genero' in item.title: patron = '<li><a href="([^"]+)">([^<]+)</a>' matches = re.compile(patron, re.DOTALL).findall(data) for scrapedurl, scrapedtitle in matches: title = scrapedtitle if title not in duplicados: itemlist.append(Item(channel=item.channel, url=scrapedurl, title=title, action='list_all', type=item.type)) duplicados.append(title) return itemlist def list_all(item): logger.info() itemlist = [] data = get_source(item.url) if item.type == 'movies': patron = '<div class="col-md-2 w3l-movie-gride-agile"><a href="([^"]+)" class=".*?">' patron += '<img src="([^"]+)" title="([^"]+)" class="img-responsive".*?' 
patron += '<div class="calidad" >([^<]+)</div> <div class="audio-info">' patron += '(.*?)<div class="w3l-action-icon">.*?<p>([^<]+)</p>' matches = re.compile(patron, re.DOTALL).findall(data) for scrapedurl, scrapedthumbnail, scrapedtitle, quality, lang_data, year in matches: title = '%s [%s] [%s]' % (scrapedtitle, year, quality) if 'screener' in quality.lower(): quality = 'Screener' contentTitle = scrapedtitle thumbnail = scrapedthumbnail url = scrapedurl language = get_language(lang_data) itemlist.append(item.clone(action='findvideos', title=title, url=url, thumbnail=thumbnail, contentTitle=contentTitle, language=language, quality=quality, infoLabels={'year':year})) elif item.type == 'tvshows': patron = '<div class="col-md-2 w3l-movie-gride-agile"><a href="([^"]+)" class=".*?">' patron += '<img src="([^"]+)" title="([^"]+)" class="img-responsive".*?<p>([^<]+)</p>' matches = re.compile(patron, re.DOTALL).findall(data) for scrapedurl, scrapedthumbnail, scrapedtitle, year in matches: title = scrapedtitle contentSerieName = scrapedtitle thumbnail = scrapedthumbnail url = scrapedurl itemlist.append(item.clone(action='seasons', title=title, url=url, thumbnail=thumbnail, contentSerieName=contentSerieName, context=filtertools.context(item, list_language, list_quality), infoLabels={'year':year})) tmdb.set_infoLabels(itemlist, seekTmdb=True) # Paginación url_next_page = scrapertools.find_single_match(data,"<a class='last' href='([^']+)'>»</a>") if url_next_page: itemlist.append(item.clone(title="Siguiente >>", url=url_next_page, action='list_all')) return itemlist def seasons(item): logger.info() itemlist=[] data=get_source(item.url) patron='<a href="([^"]+)"><img class="thumb-item" src="([^"]+)" alt="[^"]+" >' patron += '<div class="season-item">Temporada (\d+)</div>' matches = re.compile(patron, re.DOTALL).findall(data) infoLabels = item.infoLabels for scrapedurl, scrapedthumbnail, season in matches: infoLabels['season']=season title = 'Temporada %s' % season 
itemlist.append(Item(channel=item.channel, title=title, url=scrapedurl, action='episodesxseasons', thumbnail=scrapedthumbnail, infoLabels=infoLabels)) tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True) if config.get_videolibrary_support() and len(itemlist) > 0: itemlist.append( Item(channel=item.channel, title='[COLOR yellow]Añadir esta serie a la videoteca[/COLOR]', url=item.url, action="add_serie_to_library", extra="episodios", contentSerieName=item.contentSerieName)) return itemlist def episodios(item): logger.info() itemlist = [] templist = seasons(item) for tempitem in templist: itemlist += episodesxseasons(tempitem) return itemlist def episodesxseasons(item): logger.info() itemlist = [] data=get_source(item.url) patron ='class="row-serie-item"><a href="([^"]+)">.*?<img class="episode-thumb-item" src="([^"]+)" alt="([^"]+)" >' patron += '<divclass="audio-info-series">(.*?)<div class="episode-item">%s+x(\d+)</div>' % item.infoLabels['season'] matches = re.compile(patron, re.DOTALL).findall(data) infoLabels = item.infoLabels for scrapedurl, scrapedthumbnail, scrapedtitle, lang_data, scrapedepisode in matches: infoLabels['episode'] = scrapedepisode url = scrapedurl language = get_language(lang_data) title = '%sx%s - %s %s' % (infoLabels['season'], infoLabels['episode'], scrapedtitle, language) <|fim▁hole|> itemlist = filtertools.get_links(itemlist, item, list_language) tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True) return itemlist def findvideos(item): logger.info() from lib import generictools itemlist = [] data = get_source(item.url) patron = '<div class="available-source" ><div class="([^"]+)">.*?' 
patron += 'data-data="([^"]+)".*?<span class="quality-text">([^<]+)<' matches = re.compile(patron, re.DOTALL).findall(data) for lang_data, scrapedurl, quality in matches: lang = get_language(lang_data) if 'screener' in quality.lower(): quality = 'Screener' quality = quality title = '%s [%s] [%s]' url = base64.b64decode(scrapedurl[1:]) itemlist.append( Item(channel=item.channel, url=url, title=title, action='play', quality=quality, language=IDIOMAS[lang], infoLabels=item.infoLabels)) itemlist = servertools.get_servers_itemlist(itemlist, lambda x: x.title % (x.server.capitalize(), x.quality, x.language)) # Requerido para Filtrar enlaces if __comprueba_enlaces__: itemlist = servertools.check_list_links(itemlist, __comprueba_enlaces_num__) # Requerido para FilterTools itemlist = filtertools.get_links(itemlist, item, list_language, list_quality) # Requerido para AutoPlay autoplay.start(itemlist, item) itemlist = sorted(itemlist, key=lambda it: it.language) if item.contentType != 'episode': if config.get_videolibrary_support() and len(itemlist) > 0 and item.extra != 'findvideos': itemlist.append( Item(channel=item.channel, title='[COLOR yellow]Añadir esta pelicula a la videoteca[/COLOR]', url=item.url, action="add_pelicula_to_library", extra="findvideos", contentTitle=item.contentTitle)) return itemlist def search(item, texto): logger.info() texto = texto.replace(" ", "+") item.url = item.url + texto if texto != '': return search_results(item) else: return [] def search_results(item): logger.info() itemlist=[] data=get_source(item.url) patron = '<li class="search-results-item media-item" .*?<a href="([^"]+)" title="([^"]+)">.*?' 
patron += '<img class="content" src="([^"]+)" .*?>(Pelicula|Serie) del año([^<]+)</p>' matches = re.compile(patron, re.DOTALL).findall(data) for scrapedurl, scrapedtitle, scrapedthumb, content_type, year in matches: title = scrapedtitle if len(year)==0: year = '-' url = scrapedurl thumbnail = scrapedthumb if not '/serie' in url: action = 'findvideos' else: action = 'seasons' new_item=Item(channel=item.channel, title=title, url=url, thumbnail=thumbnail, action=action, infoLabels={'year':year}) if new_item.action == 'findvideos': new_item.contentTitle = new_item.title else: new_item.contentSerieName = new_item.title itemlist.append(new_item) tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True) return itemlist def newest(categoria): logger.info() itemlist = [] item = Item() try: if categoria in ['peliculas']: item.url = host + 'movies' elif categoria == 'infantiles': item.url = host + 'genero/animation/' item.type='movies' itemlist = list_all(item) if itemlist[-1].title == 'Siguiente >>': itemlist.pop() except: import sys for line in sys.exc_info(): logger.error("{0}".format(line)) return [] return itemlist<|fim▁end|>
itemlist.append(Item(channel=item.channel, title= title, url=url, action='findvideos', thumbnail=scrapedthumbnail, language=language, infoLabels=infoLabels))
<|file_name|>IOService.cpp<|end_file_name|><|fim▁begin|>// // IOService.cpp // Firedrake // // Created by Sidney Just // Copyright (c) 2015 by Sidney Just // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated // documentation files (the "Software"), to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, // and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, // INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR // PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE // FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, // ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// #include "IOService.h" namespace IO { IODefineMeta(Service, RegistryEntry) String *kServiceProviderMatchKey; String *kServiceClassMatchKey; String *kServicePropertiesMatchKey; extern void RegisterClass(MetaClass *meta, Dictionary *properties); extern void RegisterProvider(Service *provider); extern void EnumerateClassesForProvider(Service *provider, const Function<bool (MetaClass *meta, Dictionary *properties)> &callback); void Service::InitialWakeUp(MetaClass *meta) { if(meta == Service::GetMetaClass()) { kServiceProviderMatchKey = String::Alloc()->InitWithCString("kServiceProviderMatchKey"); kServiceClassMatchKey = String::Alloc()->InitWithCString("kServiceClassMatchKey"); kServicePropertiesMatchKey = String::Alloc()->InitWithCString("kServicePropertiesMatchKey"); } RegistryEntry::InitialWakeUp(meta); } void Service::RegisterService(MetaClass *meta, Dictionary *properties) { RegisterClass(meta, properties); } Service *Service::InitWithProperties(Dictionary *properties) { if(!RegistryEntry::Init()) return nullptr; _started = false; _properties = properties->Retain(); return this; } Dictionary *Service::GetProperties() const { return _properties; } void Service::Start() { _started = true; }<|fim▁hole|> // Matching void Service::RegisterProvider() { IO::RegisterProvider(this); } bool Service::MatchProperties(__unused Dictionary *properties) { return true; } void Service::StartMatching() { DoMatch(); } void Service::DoMatch() { EnumerateClassesForProvider(this, [this](MetaClass *meta, Dictionary *properties) { if(MatchProperties(properties)) { Service *service = static_cast<Service *>(meta->Alloc()); service = service->InitWithProperties(properties); if(service) { PublishService(service); return true; } } return false; }); } void Service::PublishService(Service *service) { AttachChild(service); } void Service::AttachToParent(RegistryEntry *parent) { RegistryEntry::AttachToParent(parent); Start(); } }<|fim▁end|>
void Service::Stop() {}
<|file_name|>usrp_transmit_path.py<|end_file_name|><|fim▁begin|># # Copyright 2009 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr import usrp_options import transmit_path from pick_bitrate import pick_tx_bitrate<|fim▁hole|> Hackery that has the -f / --freq option set both tx_freq and rx_freq """ def freq_callback(option, opt_str, value, parser): parser.values.rx_freq = value parser.values.tx_freq = value if not parser.has_option('--freq'): parser.add_option('-f', '--freq', type="eng_float", action="callback", callback=freq_callback, help="set Tx and/or Rx frequency to FREQ [default=%default]", metavar="FREQ") def add_options(parser, expert): add_freq_option(parser) usrp_options.add_tx_options(parser) transmit_path.transmit_path.add_options(parser, expert) expert.add_option("", "--tx-freq", type="eng_float", default=None, help="set transmit frequency to FREQ [default=%default]", metavar="FREQ") parser.add_option("-v", "--verbose", action="store_true", default=False) class usrp_transmit_path(gr.hier_block2): def __init__(self, modulator_class, options): ''' See below for what options should hold ''' gr.hier_block2.__init__(self, "usrp_transmit_path", gr.io_signature(0, 0, 0), # Input signature gr.io_signature(0, 0, 0)) # Output signature if 
options.tx_freq is None: sys.stderr.write("-f FREQ or --freq FREQ or --tx-freq FREQ must be specified\n") raise SystemExit tx_path = transmit_path.transmit_path(modulator_class, options) for attr in dir(tx_path): #forward the methods if not attr.startswith('_') and not hasattr(self, attr): setattr(self, attr, getattr(tx_path, attr)) #setup usrp self._modulator_class = modulator_class self._setup_usrp_sink(options) #connect self.connect(tx_path, self.u) def _setup_usrp_sink(self, options): """ Creates a USRP sink, determines the settings for best bitrate, and attaches to the transmitter's subdevice. """ self.u = usrp_options.create_usrp_sink(options) dac_rate = self.u.dac_rate() if options.verbose: print 'USRP Sink:', self.u (self._bitrate, self._samples_per_symbol, self._interp) = \ pick_tx_bitrate(options.bitrate, self._modulator_class.bits_per_symbol(), \ options.samples_per_symbol, options.interp, dac_rate, \ self.u.get_interp_rates()) self.u.set_interp(self._interp) self.u.set_auto_tr(True) if not self.u.set_center_freq(options.tx_freq): print "Failed to set Rx frequency to %s" % (eng_notation.num_to_str(options.tx_freq)) raise ValueError, eng_notation.num_to_str(options.tx_freq)<|fim▁end|>
from gnuradio import eng_notation def add_freq_option(parser): """
<|file_name|>about.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from "@angular/core"; import { Title } from "@angular/platform-browser"; import AppConfig from "../core/app.config";<|fim▁hole|> // styleUrls: ["about.component.scss"], templateUrl: "about.component.html", }) export default class AboutComponent implements OnInit { constructor(private config: AppConfig, private titleService: Title) {} public ngOnInit(): void { this.titleService.setTitle(this.config.PAGE_TITLES_ABOUT); } }<|fim▁end|>
import LazyLoadService from "../core/lazy-load.service"; @Component({ selector: "ac-about",
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// pages/list/index.js Page({ /** * 页面的初始数据 */ data: { }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () {<|fim▁hole|> * 生命周期函数--监听页面显示 */ onShow: function () { }, /** * 生命周期函数--监听页面隐藏 */ onHide: function () { }, /** * 生命周期函数--监听页面卸载 */ onUnload: function () { }, /** * 页面相关事件处理函数--监听用户下拉动作 */ onPullDownRefresh: function () { }, /** * 页面上拉触底事件的处理函数 */ onReachBottom: function () { }, /** * 用户点击右上角分享 */ onShareAppMessage: function () { } })<|fim▁end|>
}, /**
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ ------ Urls ------ Arquivo de configuração das urls da aplicação blog Autores: * Alisson Barbosa Ferreira <[email protected]> Data: ============== ================== Criação Atualização ============== ================== 29/11/2014 29/11/2014 ============== ================== """ <|fim▁hole|> urlpatterns = patterns('blog.views', url(r'^cadastro-usuario/$', 'usuario', name='usuario'), url(r'^cadastro-post/$', 'post', name='post'), url(r'^api-all-posts', 'all_posts', name='all_posts'), url(r'^api-get-post/(?P<pk>[0-9]+)/$', 'get_post', name='get_post'), url(r'^api-auth', 'api_auth', name='api_auth'), url(r'^api-token', 'api_token', name='api_token'), url(r'^api-login', 'api_login', name='api_login'), url(r'^enviar-email/$', 'enviar_email', name='enviar_email'), url(r'^autorelacionamento/$', 'autorelacionamento', name='autorelacionamento'), )<|fim▁end|>
from django.conf.urls import patterns, url
<|file_name|>TestParser.java<|end_file_name|><|fim▁begin|><|fim▁hole|> import java.io.IOException; import java.util.Iterator; import com.drogon.core.Drogon; /** Contributors: Nachi */ public class TestParser { public static void main(String[] args) { String str = "Mozilla/5.0 (Linux; Android 4.4.2; SGH-T399N Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36"; try { long start = System.currentTimeMillis(); for(int i=0;i<1;i++){ Drogon parser = new Drogon(); Iterator<String> iterator = parser.getProductList(str).iterator(); while(iterator.hasNext()){ System.out.println(iterator.next()); } } System.out.println("Time taken = "+(System.currentTimeMillis() - start)+"ms"); } catch (IOException e) { e.printStackTrace(); } } }<|fim▁end|>
package com.drogon.test;
<|file_name|>MappingGenerator.java<|end_file_name|><|fim▁begin|>package alien4cloud.tosca.parser.mapping.generator; import java.io.IOException; import java.util.AbstractMap; import java.util.Map; import javax.annotation.PostConstruct; import javax.annotation.Resource; import lombok.extern.slf4j.Slf4j; import org.springframework.context.ApplicationContext; import org.springframework.stereotype.Component; import org.yaml.snakeyaml.nodes.MappingNode; import org.yaml.snakeyaml.nodes.Node; import org.yaml.snakeyaml.nodes.NodeTuple; import org.yaml.snakeyaml.nodes.ScalarNode; import org.yaml.snakeyaml.nodes.SequenceNode; import alien4cloud.tosca.parser.IChecker; import alien4cloud.tosca.parser.INodeParser; import alien4cloud.tosca.parser.KeyValueMappingTarget; import alien4cloud.tosca.parser.MappingTarget; import alien4cloud.tosca.parser.ParserUtils; import alien4cloud.tosca.parser.ParsingContextExecution; import alien4cloud.tosca.parser.ParsingError; import alien4cloud.tosca.parser.ParsingException; import alien4cloud.tosca.parser.ParsingResult; import alien4cloud.tosca.parser.YamlSimpleParser; import alien4cloud.tosca.parser.impl.ErrorCode; import alien4cloud.tosca.parser.impl.base.CheckedTypeNodeParser; import alien4cloud.tosca.parser.impl.base.ScalarParser; import alien4cloud.tosca.parser.impl.base.TypeNodeParser; import alien4cloud.tosca.parser.mapping.DefaultParser; import com.google.common.collect.Maps; /** * Load type mapping definition from yaml and add it to the type mapping registry. 
*/ @Slf4j @Component public class MappingGenerator extends DefaultParser<Map<String, INodeParser>> { @Resource private ApplicationContext applicationContext; private Map<String, INodeParser> parsers = Maps.newHashMap(); private Map<String, IMappingBuilder> mappingBuilders = Maps.newHashMap(); private Map<String, IChecker> checkers = Maps.newHashMap(); @PostConstruct public void initialize() { Map<String, INodeParser> contextParsers = applicationContext.getBeansOfType(INodeParser.class); // register parsers based on their class name. for (INodeParser parser : contextParsers.values()) { parsers.put(parser.getClass().getName(), parser); } Map<String, IMappingBuilder> contextMappingBuilders = applicationContext.getBeansOfType(IMappingBuilder.class); for (IMappingBuilder mappingBuilder : contextMappingBuilders.values()) { mappingBuilders.put(mappingBuilder.getKey(), mappingBuilder); } Map<String, IChecker> contextCheckers = applicationContext.getBeansOfType(IChecker.class); for (IChecker checker : contextCheckers.values()) { checkers.put(checker.getName(), checker); } } public Map<String, INodeParser> process(String resourceLocation) throws ParsingException { org.springframework.core.io.Resource resource = applicationContext.getResource(resourceLocation); YamlSimpleParser<Map<String, INodeParser>> nodeParser = new YamlSimpleParser<>(this); try { ParsingResult<Map<String, INodeParser>> result = nodeParser.parseFile(resource.getURI().toString(), resource.getFilename(), resource.getInputStream(), null); if (result.getContext().getParsingErrors().isEmpty()) { return result.getResult(); } throw new ParsingException(resource.getFilename(), result.getContext().getParsingErrors()); } catch (IOException e) { log.error("Failed to open stream", e); throw new ParsingException(resource.getFilename(), new ParsingError(ErrorCode.MISSING_FILE, "Unable to load file.", null, e.getMessage(), null, resourceLocation)); } } public Map<String, INodeParser> parse(Node node, 
ParsingContextExecution context) { Map<String, INodeParser> parsers = Maps.newHashMap(); if (node instanceof SequenceNode) { SequenceNode types = (SequenceNode) node; for (Node mapping : types.getValue()) { Map.Entry<String, INodeParser<?>> entry = processTypeMapping(mapping, context); if (entry != null) { parsers.put(entry.getKey(), entry.getValue()); } } } else { context.getParsingErrors().add( new ParsingError(ErrorCode.SYNTAX_ERROR, "Mapping should be a sequence of type mappings", node.getStartMark(), "Actually was " + node.getClass().getSimpleName(), node.getEndMark(), "")); } return parsers; } private Map.Entry<String, INodeParser<?>> processTypeMapping(Node node, ParsingContextExecution context) { try { return doProcessTypeMapping(node, context); } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) { log.error("Failed to load class while parsing mapping", e); context.getParsingErrors().add( new ParsingError(ErrorCode.SYNTAX_ERROR, "Unable to load class", node.getStartMark(), e.getMessage(), node.getEndMark(), "")); return null; } } private Map.Entry<String, INodeParser<?>> doProcessTypeMapping(Node node, ParsingContextExecution context) throws ClassNotFoundException, IllegalAccessException, InstantiationException { if (node instanceof MappingNode) { MappingNode mapping = (MappingNode) node; String yamlType = null; INodeParser<?> parser = null; for (NodeTuple tuple : mapping.getValue()) { if (yamlType == null) { yamlType = ParserUtils.getScalar(tuple.getKeyNode(), context); String type = ParserUtils.getScalar(tuple.getValueNode(), context); if (type.startsWith("__")) { parser = getWrapperParser(type, mapping, context); return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser); } parser = this.parsers.get(type); if (parser != null) { log.debug("Mapping yaml type <" + yamlType + "> using parser <" + type + ">"); return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser); } parser = 
buildTypeNodeParser(yamlType, type); // log.debug("Mapping yaml type <" + yamlType + "> to class <" + type + ">"); // Class<?> javaClass = Class.forName(type); // parser = new TypeNodeParser<>(javaClass, yamlType); } else { // process a mapping map(tuple, (TypeNodeParser) parser, context); } } return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser); } else { context.getParsingErrors().add( new ParsingError(ErrorCode.SYNTAX_ERROR, "Unable to process type mapping.", node.getStartMark(), "Mapping must be defined using a mapping node.", node.getEndMark(), "")); } return null; } private TypeNodeParser<?> buildTypeNodeParser(String yamlType, String javaType) throws ClassNotFoundException { String realJavaType = javaType; IChecker checker = null; if (javaType.contains("|")) { realJavaType = javaType.substring(0, javaType.indexOf("|")); String checkerName = javaType.substring(javaType.indexOf("|") + 1); log.debug(String.format("After parsing <%s>, realJavaType is <%s>, checkerName is <%s>", javaType, realJavaType, checkerName)); checker = checkers.get(checkerName); if (checker == null) { log.warn(String.format("Can not find checker <%s>, using a standard TypeNodeParser", checkerName)); } } Class<?> javaClass = Class.forName(realJavaType); if (checker == null) { log.debug("Mapping yaml type <" + yamlType + "> to class <" + realJavaType + ">"); return new TypeNodeParser<>(javaClass, yamlType); } else { // TODO check that the type are compatible log.debug("Mapping yaml type <" + yamlType + "> to class <" + realJavaType + "> using checker " + checker.toString()); return new CheckedTypeNodeParser<>(javaClass, yamlType, checker); } } private INodeParser<?> getWrapperParser(String wrapperKey, MappingNode mapping, ParsingContextExecution context) { IMappingBuilder builder = this.mappingBuilders.get(wrapperKey.substring(2)); return builder.buildMapping(mapping, context).getParser(); } private void map(NodeTuple tuple, TypeNodeParser<?> parser, 
ParsingContextExecution context) { String key = ParserUtils.getScalar(tuple.getKeyNode(), context); int positionMappingIndex = positionMappingIndex(key); if (positionMappingIndex > -1) { mapPositionMapping(positionMappingIndex, tuple.getValueNode(), parser, context); } else { MappingTarget mappingTarget = getMappingTarget(tuple.getValueNode(), context); if (mappingTarget != null) { parser.getYamlToObjectMapping().put(key, mappingTarget); } } } private MappingTarget getMappingTarget(Node mappingNode, ParsingContextExecution context) { if (mappingNode instanceof ScalarNode) { // create a scalar mapping String value = ParserUtils.getScalar(mappingNode, context); return new MappingTarget(value, parsers.get(ScalarParser.class.getName())); } else if (mappingNode instanceof MappingNode) { return mapMappingNode((MappingNode) mappingNode, context); } return null; } private int positionMappingIndex(String key) { if (key.startsWith("__")) { try { int position = Integer.valueOf(key.substring(2)); return position; } catch (NumberFormatException e) { // not a position mapping return -1; } } return -1; } private void mapPositionMapping(Integer index, Node positionMapping, TypeNodeParser<?> parser, ParsingContextExecution context) { if (positionMapping instanceof MappingNode) { MappingNode mappingNode = (MappingNode) positionMapping; String key = null; MappingTarget valueMappingTarget = null; for (NodeTuple tuple : mappingNode.getValue()) { String tupleKey = ParserUtils.getScalar(tuple.getKeyNode(), context); if (tupleKey.equals("key")) {<|fim▁hole|> context.getParsingErrors().add( new ParsingError(ErrorCode.SYNTAX_ERROR, "Unknown key for position mapping.", tuple.getKeyNode().getStartMark(), tupleKey, tuple .getKeyNode().getEndMark(), "")); } } if (valueMappingTarget == null) { return; } if (key == null) { parser.getYamlOrderedToObjectMapping().put(index, valueMappingTarget); } else { parser.getYamlOrderedToObjectMapping().put(index, new KeyValueMappingTarget(key, 
valueMappingTarget.getPath(), valueMappingTarget.getParser())); } } else { context.getParsingErrors().add( new ParsingError(ErrorCode.SYNTAX_ERROR, "Position mapping must be a mapping node with key and value fields.", positionMapping .getStartMark(), "", positionMapping.getEndMark(), "")); } } private MappingTarget mapMappingNode(MappingNode mappingNode, ParsingContextExecution context) { String key = ParserUtils.getScalar(mappingNode.getValue().get(0).getKeyNode(), context); IMappingBuilder mappingBuilder = mappingBuilders.get(key); if (mappingBuilder != null) { log.debug("Mapping yaml key <" + key + "> using mapping builder " + mappingBuilder.getClass().getName()); return mappingBuilder.buildMapping(mappingNode, context); } context.getParsingErrors().add( new ParsingError(ErrorCode.SYNTAX_ERROR, "No mapping target found for key", mappingNode.getValue().get(0).getKeyNode().getStartMark(), key, mappingNode.getValue().get(0).getKeyNode().getEndMark(), "")); return null; } }<|fim▁end|>
key = ParserUtils.getScalar(tuple.getValueNode(), context); } else if (tupleKey.equals("value")) { valueMappingTarget = getMappingTarget(tuple.getValueNode(), context); } else {
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from framework.tasks import app from framework.tasks.handlers import enqueue_task from website import settings from . import piwik @app.task(bind=True, max_retries=5, default_retry_delay=60) def _update_node(self, node_id, updated_fields=None): # Avoid circular imports from framework.transactions.context import TokuTransaction from website import models node = models.Node.load(node_id) try: with TokuTransaction(): piwik._update_node_object(node, updated_fields) except Exception as error:<|fim▁hole|> raise self.retry(exc=error) def update_node(node_id, updated_fields): if settings.USE_CELERY: signature = _update_node.s(node_id, updated_fields) enqueue_task(signature) else: _update_node(node_id, updated_fields)<|fim▁end|>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.conf.urls import url, include from rest_framework.routers import DefaultRouter from rest_framework.urlpatterns import format_suffix_patterns from . import views router = DefaultRouter() router.register(r'minion', views.MinionViewSet, 'minion') router.register(r'data', views.MinionDataViewSet, 'data') urlpatterns = [ url( r'^', include(router.urls)<|fim▁hole|><|fim▁end|>
), ]
<|file_name|>router.js<|end_file_name|><|fim▁begin|>app.router = Backbone.Router.extend({<|fim▁hole|> '!/event-list/': function () { app.preRoute('event-list', this.el); new app.eventListView({ el: this.el }); }, '!/event-detail/:key': function (key) { app.preRoute('event-detail', this.el); new app.eventDetailView({ el: this.el, key: key }); }, '!/event-create/': function () { app.preRoute('event-create', this.el); new app.eventCreateView({ el: this.el }); }, '!/account/': function () { app.preRoute('account', this.el); new app.accountView({ el: this.el }); }, }, initialize: function () { firebase.auth().onAuthStateChanged(function(user) { if (user) { Backbone.history.start(); new app.headerView(); new app.footerView(); // TODO: hook up email verification // if (!user.emailVerified) // firebase.auth().currentUser.sendEmailVerification() var database = firebase.database(); var currentUser = firebase.auth().currentUser; // add user to database of users // TODO: show add photo wizard if no photo database.ref('users/' + currentUser.uid).update({ email: currentUser.email, displayName: currentUser.displayName, }); } else { window.location = '/'; } }, function(error) { console.error(error); }); }, // home: function () { // app.preRoute(this.el); // new app.homeView({ el: this.el }); // }, }); $(function () { app.router = new app.router(); });<|fim▁end|>
el : $('main'), routes: { // '': 'home', // '!/': 'home',
<|file_name|>sparse_feature_stats_generator.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Module that computes statistics used to validate sparse features. Currently, this module generates the following statistics for each sparse feature: - missing_value: Number of examples missing the value_feature. - missing_index: A RankHistogram from index_name to the number of examples missing the corresponding index_feature. - min_length_diff: A RankHistogram from index_name to the minimum of len(index_feature) - len(value_feature). - max_length_diff: A RankHistogram from index_name to the maximum of len(index_feature) - len(value_feature). """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from typing import Dict, Iterable, List, Text, Tuple, Union from tensorflow_data_validation import types from tensorflow_data_validation.statistics.generators import stats_generator from tensorflow_data_validation.statistics.generators.constituents import count_missing_generator from tensorflow_data_validation.statistics.generators.constituents import length_diff_generator from tensorflow_metadata.proto.v0 import schema_pb2 from tensorflow_metadata.proto.v0 import statistics_pb2 # TODO(https://issues.apache.org/jira/browse/SPARK-22674): Switch to # `collections.namedtuple` or `typing.NamedTuple` once the Spark issue is # resolved. 
from tfx_bsl.types import tfx_namedtuple # pylint: disable=g-bad-import-order # LINT.IfChange(custom_stat_names) _MAX_LENGTH_DIFF_NAME = 'max_length_diff' _MIN_LENGTH_DIFF_NAME = 'min_length_diff' _MISSING_INDEX_NAME = 'missing_index' _MISSING_VALUE_NAME = 'missing_value' # LINT.ThenChange(../../anomalies/schema.cc:sparse_feature_custom_stat_names) # Named tuple containing the FeaturePaths for the value and index features # that comprise a given sparse feature. _SparseFeatureComponents = tfx_namedtuple.namedtuple( '_SparseFeatureComponents', ['value_feature', 'index_features']) def _get_all_sparse_features( schema: schema_pb2.Schema ) -> List[Tuple[types.FeaturePath, schema_pb2.SparseFeature]]: """Returns all sparse features in a schema.""" def _recursion_helper( parent_path: types.FeaturePath, container: Union[schema_pb2.Schema, schema_pb2.StructDomain] ) -> List[Tuple[types.FeaturePath, schema_pb2.SparseFeature]]: """Helper function that is used in finding sparse features in a tree.""" result = [] for sf in container.sparse_feature: # Sparse features do not have a struct_domain, so they cannot be parent # features. Thus, once this reaches a sparse feature, add it to the # result. result.append((parent_path.child(sf.name), sf)) for f in container.feature: if f.type == schema_pb2.STRUCT: result.extend( _recursion_helper(parent_path.child(f.name), f.struct_domain)) return result return _recursion_helper(types.FeaturePath([]), schema) def _get_components( sparse_features: Iterable[Tuple[types.FeaturePath, schema_pb2.SparseFeature]] ) -> Dict[types.FeaturePath, _SparseFeatureComponents]: """Returns the index and value feature paths that comprise sparse features.""" # A dict mapping sparse feature paths to their component index and value # feature paths. sparse_feature_components = dict() # The index and value features for a given sparse feature have the same parent # path as the sparse feature. 
for path, feature in sparse_features: parent_path = path.parent() value_feature = parent_path.child(feature.value_feature.name) index_features = set() for index_feature in feature.index_feature: index_features.add(parent_path.child(index_feature.name)) sparse_feature_components[path] = _SparseFeatureComponents( value_feature, index_features) return sparse_feature_components class SparseFeatureStatsGenerator(stats_generator.CompositeStatsGenerator): """Generates statistics for sparse features.""" def __init__(self, schema: schema_pb2.Schema, name: Text = 'SparseFeatureStatsGenerator') -> None: """Initializes a sparse feature statistics generator. Args: schema: A required schema for the dataset. name: An optional unique name associated with the statistics generator. """ self._sparse_feature_components = _get_components( _get_all_sparse_features(schema)) # Create length diff generators for each index / value pair and count # missing generator for all paths. constituents = [] for _, (value, indices) in self._sparse_feature_components.items(): required_paths = [value] + list(indices) constituents.append( count_missing_generator.CountMissingGenerator(value, required_paths)) for index in indices: constituents.append( length_diff_generator.LengthDiffGenerator(index, value, required_paths)) constituents.append( count_missing_generator.CountMissingGenerator( index, required_paths)) super(SparseFeatureStatsGenerator, self).__init__(name, constituents, schema) def extract_composite_output(self, accumulator): stats = statistics_pb2.DatasetFeatureStatistics() for feature_path, (value, indices) in self._sparse_feature_components.items(): required_paths = [value] + list(indices) feature_stats = stats.features.add(path=feature_path.to_proto()) feature_stats.custom_stats.add( name=_MISSING_VALUE_NAME, num=accumulator[count_missing_generator.CountMissingGenerator.key( value, required_paths)]) index_features_num_missing_histogram = statistics_pb2.RankHistogram() 
max_length_diff_histogram = statistics_pb2.RankHistogram() min_length_diff_histogram = statistics_pb2.RankHistogram() for index in sorted(indices): index_label = index.steps()[-1] missing_bucket = index_features_num_missing_histogram.buckets.add()<|fim▁hole|> count_missing_generator.CountMissingGenerator.key( index, required_paths)] min_diff, max_diff = accumulator[ length_diff_generator.LengthDiffGenerator.key( index, value, required_paths)] max_length_bucket = max_length_diff_histogram.buckets.add() max_length_bucket.label = index_label max_length_bucket.sample_count = max_diff min_length_bucket = min_length_diff_histogram.buckets.add() min_length_bucket.label = index_label min_length_bucket.sample_count = min_diff feature_stats.custom_stats.add( name=_MISSING_INDEX_NAME, rank_histogram=index_features_num_missing_histogram) feature_stats.custom_stats.add( name=_MAX_LENGTH_DIFF_NAME, rank_histogram=max_length_diff_histogram) feature_stats.custom_stats.add( name=_MIN_LENGTH_DIFF_NAME, rank_histogram=min_length_diff_histogram) return stats<|fim▁end|>
missing_bucket.label = index_label missing_bucket.sample_count = accumulator[
<|file_name|>zipper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.7 -tt """ Copyright (c) 2013, Adel Qodmani All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import tarfile # For the compression import os # For everything related to path import logging import sys # For the argv and exit import datetime def main(): """ zipper source-dir-full-path dest-dir-full-path Tars and zips the source-dir and put it in the dest-dir with the name: source-dir-name_date_time.tar.gz """ check_args() source_path = sys.argv[1] source_path = source_path.rstrip('/') logging.debug("source_path: %s" % source_path) dest_path = sys.argv[2] dest_path = dest_path.rstrip('/') logging.debug("dest_path: %s" % dest_path) # source name is the name of the dir to be archived source_name = source_path.split("/")[-1] logging.debug("source_name: %s" % source_name) # tar_path tar_path = create_tar_path(source_name, dest_path) logging.debug("tar_path: %s" % tar_path) create_tar_file(tar_path, source_path) def check_args(): """ Checks if the args supplied to the script are what it expects """ if len(sys.argv) > 1 and sys.argv[1] == "--help": help_text = ("zipper creates a zipped tar-ball of the <source> directory" + "and puts it in \nthe <destination> directory ") usage = "e.g: zipper /tmp/ /home/sally/Desktop/" result = "will create a file called tmp_date_time.tar.gz in " result += "/home/sally/Desktop/ which has all the contents of /tmp/" print(help_text) print(usage) print(result) sys.exit(0) elif len(sys.argv) < 3: print("Missing arguments!") print("Usage:") print("\tzipper source destination") print("You can get the help by: zipper --help") logging.error("Missing arguments!") logging.error("Shutting down!") sys.exit(1) elif not os.path.isabs(sys.argv[1]): print("Source directory is not an absolute path") print("You can get the help by: zipper --help") logging.error("Source is not absolute") logging.error("Shutting down") sys.exit(2) elif not os.path.isabs(sys.argv[2]): print("Destination directory is not an absolute path") print("You can get the help by: zipper --help") logging.error("Destination is not absolute") logging.error("Shutting down") 
sys.exit(3) elif not os.path.isdir(sys.argv[1]): print("Path given as a source directory is not a directory") print("You can get the help by: zipper --help") logging.error("Source is not a directory") logging.error("Shutting down") sys.exit(4) elif not os.path.isdir(sys.argv[2]): print("Path given as destination directory is not a directory") print("You can get the help by: zipper --help") logging.error("Destination is not a directory") logging.error("Shutting down") sys.exit(5) def create_tar_path(source_name, dest_path): """ Creates a path for a backup that will be in the desktop of the user and the file name will be the /path/to/desktktop/source_name_date.tar.gz """ # Get the path to the desktop ready path = os.path.expanduser('~') # changes ~ to home dir path logging.debug(path) path = os.path.join(path, dest_path+"/") logging.debug(path) # string from time(strftime): %Year %month %day %Hour %Minute $Second now = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") logging.debug(now) # The dest path is the path + source_name + date + extension path = os.path.join(path, source_name) logging.debug(path) path += '_' + now + ".tar.gz" logging.debug(path) return path def create_tar_file(tar_path, source_path): # "w:gz" is open for writing a gz tarball try: tar = tarfile.open(tar_path, "w:gz") tar.add(source_path) tar.close() logging.debug("Tar ball [%s] created for directory [%s]" % (tar_path, source_path)) except IOError: logging.critical("IOError exception! Aborting ..") sys.exit(6) except TarError: logging.critical("TarError exception! 
Aborting ...")<|fim▁hole|> sys.exit(7) if __name__ == "__main__": # Set up the logging env # Format: (asctime) (filename) (funcname) (linenumber) (level) (msg) # The time can be formated with the datefmt parameter FORMAT = "%(asctime)s %(filename)s::%(funcName)s::%(lineno)d" FORMAT += " [%(levelname)s]: %(msg)s" DATE_FORMAT = "%Y-%m-%d %H:%M:%S" try: STREAM = open("/home/aral/learn/zipper/log", "a+") except IOError: print("Can't create a log file in [%s]" % STREAM) sys.abort() # Setting the log stream to go to stderr and print all log info from debug # and higher levels (debug, info, warning, error, critical) logging.basicConfig(stream=STREAM, level=logging.DEBUG, format=FORMAT, datefmt=DATE_FORMAT) main()<|fim▁end|>
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Viper documentation build configuration file, created by # sphinx-quickstart on Mon May 5 18:24:15 2014. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Viper' copyright = u'2014, Claudio Guarnieri' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.1' # The full version, including alpha/beta/rc tags. release = '1.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Viperdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). 
#'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'Viper.tex', u'Viper Documentation', u'Claudio Guarnieri', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'viper', u'Viper Documentation', [u'Claudio Guarnieri'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False <|fim▁hole|> # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Viper', u'Viper Documentation', u'Claudio Guarnieri', 'Viper', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote'<|fim▁end|>
# -- Options for Texinfo output ------------------------------------------------
<|file_name|>BlendingFactorSrc.d.ts<|end_file_name|><|fim▁begin|>/** * */ export declare enum BlendingFactorSrc { ZERO = 0,<|fim▁hole|> SRC_ALPHA_SATURATE = 776, SRC_ALPHA = 770, ONE_MINUS_SRC_ALPHA = 771, DST_ALPHA = 772, ONE_MINUS_DST_ALPHA = 773 }<|fim▁end|>
ONE = 1, DST_COLOR = 774, ONE_MINUS_DST_COLOR = 775,
<|file_name|>dololi.py<|end_file_name|><|fim▁begin|>import lief import sys import os import traceback import configparser import struct from collections import OrderedDict # Opcodes X86_PUSH_BYTE = 0x6a X86_32_PUSH_DWORD = 0x68 x86_32_CALL = [0xff, 0x15] X86_64_CALL = [0xff, 0xd0] X86_64_MOV_R9 = [0x49, 0xb9] X86_64_MOV_R8 = [0x49, 0xb8] X86_64_MOV_RDX = [0x48, 0xba] X86_64_MOV_RCX = [0x48, 0xb9] X86_64_MOV_RAX = [0x48, 0xc7, 0xc0] def get_config(conf_file="dololi.conf"): assert os.path.isfile(conf_file) conf = configparser.ConfigParser() conf.read(conf_file) return conf def is_dll(pe_file): return pe_file.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.DLL) def get_pe_type(arch): assert arch == "32" or arch == "64" if arch == "32": return lief.PE.PE_TYPE.PE32 else: return lief.PE.PE_TYPE.PE32_PLUS def is_64_bits(pe_type): return pe_type == lief.PE.PE_TYPE.PE32_PLUS def get_reg_by_argn(argn): return { "1": "r9", "2": "r8", "3": "rdx", "4": "rcx", "5": "rax" }[argn] def get_opcodes_by_reg(reg): return { "r9" : X86_64_MOV_R9, "r8" : X86_64_MOV_R8, "rdx": X86_64_MOV_RDX, "rcx": X86_64_MOV_RCX, "rax": X86_64_MOV_RAX }[reg] def dololi(arch, conf, out_file_name): code_rva = int(conf["DEFAULT"].get("CODE_RVA")) data_rva = int(conf["DEFAULT"].get("DATA_RVA")) pe_type = get_pe_type(arch) is_64bits = is_64_bits(pe_type) pe_loader = lief.PE.Binary("dololi", pe_type) code_cnt,\ reg_size,\ pack_fmt = ([], 8, "<Q") if is_64bits else ([], 4, "<I") data_cnt = "" data_off = 0 reg_cnt = 1 func_num = 0 funcs = OrderedDict() # Parse CODE and DATA contents from config file for k, v in conf["CODE"].items(): if k.endswith("_byte"): value = int(v) value = struct.pack("<B", value) code_cnt.extend([X86_PUSH_BYTE, value[0]]) elif k.endswith("_word"): value = int(v) value = struct.pack("<H", value) code_cnt.extend([X86_32_PUSH_DWORD, value[0], value[1], 0x0, 0x0]) elif k.endswith("_dword") or k.endswith("_qword"): reg_size, pack_fmt = {"dword":(4, "<I"), "qword":(8, 
"<Q")}[k.split('_')[-1]] if v.lower().endswith("_data"): data_key = v.lower().rstrip("_data") assert "str" in data_key.lower(), "Data should contain arrays or strings" data_value = conf["DATA"][data_key] + '\0' data_cnt += data_value addr = struct.pack(pack_fmt, pe_loader.optional_header.imagebase + data_rva + data_off) if is_64bits: code_cnt.extend(get_opcodes_by_reg(get_reg_by_argn(str(reg_cnt)))) reg_cnt = (reg_cnt % 4) + 1 if reg_size < 8: addr += bytes("\x00" * (8 - reg_size), 'ascii') code_cnt.extend(list(addr)) else: code_cnt.extend([X86_32_PUSH_DWORD]) code_cnt.extend(list(addr)) data_off += len(data_value) else: value = int(v) value = struct.pack(pack_fmt, value) if is_64bits: code_cnt.extend(get_opcodes_by_reg(get_reg_by_argn(str(reg_cnt)))) reg_cnt = (reg_cnt % 4) + 1 if reg_size < 8: value += [0x0] * (8 - reg_size) code_cnt.extend(list(value)) else: code_cnt.extend([X86_32_PUSH_DWORD]) code_cnt.extend(list(value)) elif k.endswith("_func"): assert len(v.split(';')) == 2, "DLL name;Export function name" dll_name, export_name = v.strip("\r\n").split(';')<|fim▁hole|> func_num_str = "".join(["FUNC_", str(func_num)]) if is_64bits: code_cnt.extend(get_opcodes_by_reg(get_reg_by_argn("5"))) reg_cnt = (reg_cnt % 4) + 1 else: code_cnt.extend(x86_32_CALL) for i in range(4): code_cnt.append(func_num_str) if is_64bits: code_cnt.extend(X86_64_CALL) if dll_name not in funcs: funcs[dll_name] = set() funcs[dll_name].add((export_name, func_num_str)) func_num += 1 else: # code_rva and data_rva from DEFAULT section pass # Add function addresses for k, v in funcs.items(): for f in v: func_addr = pe_loader.predict_function_rva(k, f[0]) offset = code_rva if func_num == 1 else 0 # dirty hack to adjust function address addr = struct.pack(pack_fmt, pe_loader.optional_header.imagebase + data_rva - offset + func_addr) # TO DO, number of bytes should be adjusted automatically for i in range(4): code_cnt[code_cnt.index(f[1])] = addr[i] # set .text section fields text_sect = 
lief.PE.Section(".text") text_sect.virtual_address = code_rva text_sect.content = code_cnt text_sect = pe_loader.add_section(text_sect, lief.PE.SECTION_TYPES.TEXT) # set .data section fields data_sect = lief.PE.Section(".data") data_sect.virtual_address = data_rva data_sect.content = list(map(ord, data_cnt)) data_sect = pe_loader.add_section(data_sect, lief.PE.SECTION_TYPES.DATA) pe_loader.optional_header.addressof_entrypoint = text_sect.virtual_address builder = lief.PE.Builder(pe_loader) builder.build_imports(True) builder.build() builder.write(out_file_name) print("{0} was successfully created!".format(out_file_name)) if __name__ == "__main__": assert len(sys.argv) > 1, "Usage: {0} <32|64> [Output file name]".format(sys.argv[0]) if sys.argv[1] not in ("32", "64"): print("Use 32 to build x86_32 bit or 64 for x86_64 bit loader") sys.exit(1) dololi(sys.argv[1], get_config(), "dololi.exe" if len(sys.argv) < 3 else sys.argv[2])<|fim▁end|>
dll = pe_loader.add_library(dll_name) dll.add_entry(export_name)
<|file_name|>HttpBody.java<|end_file_name|><|fim▁begin|>package com.survivorserver.Dasfaust.WebMarket.mojang.http; public class HttpBody { <|fim▁hole|> private String bodyString; public HttpBody(String bodyString) { this.bodyString = bodyString; } public byte[] getBytes() { return bodyString != null ? bodyString.getBytes() : new byte[0]; } }<|fim▁end|>
<|file_name|>hello_runfiles.rs<|end_file_name|><|fim▁begin|>use std::fs::File; use std::io::prelude::*; use runfiles::Runfiles; fn main() {<|fim▁hole|> let mut buffer = String::new(); f.read_to_string(&mut buffer).unwrap(); println!("This program's source is {} characters long.", buffer.len()); }<|fim▁end|>
let r = Runfiles::create().unwrap(); let mut f = File::open(r.rlocation("examples/hello_runfiles/hello_runfiles.rs")).unwrap();
<|file_name|>lexical-scope-in-for-loop.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-android: FIXME(#10381) // min-lldb-version: 310 // compile-flags:-g // === GDB TESTS =================================================================================== // gdb-command:run // FIRST ITERATION // gdb-command:print x // gdb-check:$1 = 1 // gdb-command:continue // gdb-command:print x // gdb-check:$2 = -1 // gdb-command:continue // SECOND ITERATION // gdb-command:print x // gdb-check:$3 = 2 // gdb-command:continue // gdb-command:print x // gdb-check:$4 = -2 // gdb-command:continue // THIRD ITERATION // gdb-command:print x // gdb-check:$5 = 3 // gdb-command:continue // gdb-command:print x // gdb-check:$6 = -3 // gdb-command:continue // AFTER LOOP // gdb-command:print x // gdb-check:$7 = 1000000 // gdb-command:continue // === LLDB TESTS ================================================================================== // lldb-command:run // FIRST ITERATION // lldb-command:print x // lldb-check:[...]$0 = 1 // lldb-command:continue // lldb-command:print x // lldb-check:[...]$1 = -1 // lldb-command:continue // SECOND ITERATION // lldb-command:print x // lldb-check:[...]$2 = 2 // lldb-command:continue // lldb-command:print x // lldb-check:[...]$3 = -2 // lldb-command:continue // THIRD ITERATION // lldb-command:print x // lldb-check:[...]$4 = 3 // lldb-command:continue // lldb-command:print x // lldb-check:[...]$5 = -3 // lldb-command:continue // AFTER LOOP // lldb-command:print x // lldb-check:[...]$6 = 1000000 // 
lldb-command:continue #![omit_gdb_pretty_printer_section] fn main() { let range = [1i, 2, 3]; <|fim▁hole|> zzz(); // #break sentinel(); let x = -1i * x; zzz(); // #break sentinel(); } zzz(); // #break sentinel(); } fn zzz() {()} fn sentinel() {()}<|fim▁end|>
let x = 1000000i; // wan meeeljen doollaars! for &x in range.iter() {
<|file_name|>autocodeparameterrender.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Input } from '@angular/core'; @Component({ selector: 'app-autocodeparameterrender', templateUrl: './autocodeparameterrender.component.html', styleUrls: ['./autocodeparameterrender.component.css', './_autocodeparameterrender.component.scss', '../_app.general.scss'] })<|fim▁hole|>export class AutocodeparameterrenderComponent implements OnInit { @Input() data: any; constructor() { } ngOnInit() { } }<|fim▁end|>
<|file_name|>hosted_docker.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # ----------------------------------------------------------------------------------- # Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ----------------------------------------------------------------------------------- import sys sys.path.append("..") from hackathon import ( RequiredFeature, Component, Context, ) from hackathon.database.models import ( Experiment, DockerContainer, HackathonAzureKey, PortBinding, DockerHostServer, ) from hackathon.constants import ( EStatus, PortBindingType, VEStatus, HEALTH, ) from compiler.ast import ( flatten, ) from threading import ( Lock, ) from hackathon.template.docker_template_unit import ( DockerTemplateUnit, ) from hackathon.azureformation.endpoint import ( Endpoint ) from docker_formation_base import ( DockerFormationBase, ) from hackathon.azureformation.service import ( Service, ) from hackathon.hackathon_response import ( internal_server_error ) from hackathon.constants import ( HEALTH_STATUS, ) import json import requests from datetime import timedelta class HostedDockerFormation(DockerFormationBase, Component): template_manager = RequiredFeature("template_manager") hackathon_manager = RequiredFeature("hackathon_manager") scheduler = RequiredFeature("scheduler") """ Docker resource management based on docker remote api v1.18 Host resource are required. Azure key required in case of azure. """ application_json = {'content-type': 'application/json'} host_ports = [] host_port_max_num = 30 docker_host_manager = RequiredFeature("docker_host_manager") def __init__(self): self.lock = Lock() def report_health(self): """Report health of DockerHostServers :rtype: dict :return health status item of docker. 
OK when all servers running, Warning if some of them working, Error if no server running """ try: hosts = self.db.find_all_objects(DockerHostServer) alive = 0 for host in hosts: if self.ping(host): alive += 1 if alive == len(hosts): return { HEALTH.STATUS: HEALTH_STATUS.OK } elif alive > 0: return { HEALTH.STATUS: HEALTH_STATUS.WARNING, HEALTH.DESCRIPTION: 'at least one docker host servers are down' } else: return { HEALTH.STATUS: HEALTH_STATUS.ERROR, HEALTH.DESCRIPTION: 'all docker host servers are down' } except Exception as e: return { HEALTH.STATUS: HEALTH_STATUS.ERROR, HEALTH.DESCRIPTION: e.message } def get_available_host_port(self, docker_host, private_port): """ We use double operation to ensure ports not conflicted, first we get ports from host machine, but in multiple threads situation, the interval between two requests is too short, maybe the first thread do not get port ended, so the host machine don't update ports in time, thus the second thread may get the same port. To avoid this condition, we use static variable host_ports to cache the latest host_port_max_num ports. Every thread visit variable host_ports is synchronized. To save space, we will release the ports if the number over host_port_max_num. 
:param docker_host: :param private_port: :return: """ self.log.debug("try to assign docker port %d on server %r" % (private_port, docker_host)) containers = self.__containers_info(docker_host) host_ports = flatten(map(lambda p: p['Ports'], containers)) # todo if azure return -1 def sub(port): return port["PublicPort"] if "PublicPort" in port else -1 host_public_ports = map(lambda x: sub(x), host_ports) return self.__get_available_host_port(host_public_ports, private_port) def stop(self, name, **kwargs): """ stop a container :param name: container's name :param docker_host: host machine where container running :return: """ container = kwargs["container"] expr_id = kwargs["expr_id"] docker_host = self.docker_host_manager.get_host_server_by_id(container.host_server_id) if self.__get_container(name, docker_host) is not None: containers_url = '%s/containers/%s/stop' % (self.get_vm_url(docker_host), name) req = requests.post(containers_url) self.log.debug(req.content) self.__stop_container(expr_id, container, docker_host) def delete(self, name, **kwargs): """ delete a container :param name: :param docker_host: :return: """ container = kwargs["container"] expr_id = kwargs["expr_id"] docker_host = self.docker_host_manager.get_host_server_by_id(container.host_server_id) containers_url = '%s/containers/%s?force=1' % (self.get_vm_url(docker_host), name) req = requests.delete(containers_url) self.log.debug(req.content) self.__stop_container(expr_id, container, docker_host) def start(self, unit, **kwargs): """ In this function, we create a container and then start a container :param unit: docker template unit :param docker_host: :return: """ virtual_environment = kwargs["virtual_environment"] hackathon = kwargs["hackathon"] experiment = kwargs["experiment"] container_name = unit.get_name() host_server = self.docker_host_manager.get_available_docker_host(1, hackathon) container = DockerContainer(experiment, name=container_name, host_server_id=host_server.id, 
virtual_environment=virtual_environment, image=unit.get_image_with_tag()) self.db.add_object(container) self.db.commit() # port binding ps = map(lambda p: [p.port_from, p.port_to], self.__assign_ports(experiment, host_server, virtual_environment, unit.get_ports())) # guacamole config guacamole = unit.get_remote() port_cfg = filter(lambda p: p[DockerTemplateUnit.PORTS_PORT] == guacamole[DockerTemplateUnit.REMOTE_PORT], unit.get_ports()) if len(port_cfg) > 0: gc = { "displayname": container_name, "name": container_name, "protocol": guacamole[DockerTemplateUnit.REMOTE_PROTOCOL], "hostname": host_server.public_ip, "port": port_cfg[0]["public_port"] } if DockerTemplateUnit.REMOTE_USERNAME in guacamole: gc["username"] = guacamole[DockerTemplateUnit.REMOTE_USERNAME] if DockerTemplateUnit.REMOTE_PASSWORD in guacamole: gc["password"] = guacamole[DockerTemplateUnit.REMOTE_PASSWORD] # save guacamole config into DB virtual_environment.remote_paras = json.dumps(gc) exist = self.__get_container(container_name, host_server) if exist is not None: container.container_id = exist["Id"] host_server.container_count += 1 self.db.commit() else: container_config = unit.get_container_config() # create container try: container_create_result = self.__create(host_server, container_config, container_name) except Exception as e: self.log.error(e) self.log.error("container %s fail to create" % container_name) return None container.container_id = container_create_result["Id"] # start container<|fim▁hole|> self.db.commit() except Exception as e: self.log.error(e) self.log.error("container %s fail to start" % container["Id"]) return None # check if self.__get_container(container_name, host_server) is None: self.log.error( "container %s has started, but can not find it in containers' info, maybe it exited again." % container_name) return None self.log.debug("starting container %s is ended ... 
" % container_name) virtual_environment.status = VEStatus.RUNNING self.db.commit() return container def get_vm_url(self, docker_host): return 'http://%s:%d' % (docker_host.public_dns, docker_host.public_docker_api_port) def pull_image(self, context): docker_host, image_name, tag = context.docker_host, context.image_name, context.tag pull_image_url = self.get_vm_url(docker_host) + "/images/create?fromImage=" + image_name + '&tag=' + tag self.log.debug(" send request to pull image:" + pull_image_url) return requests.post(pull_image_url) def get_pulled_images(self, docker_host): get_images_url = self.get_vm_url(docker_host) + "/images/json?all=0" current_images_info = json.loads(self.util.get_remote(get_images_url)) # [{},{},{}] current_images_tags = map(lambda x: x['RepoTags'], current_images_info) # [[],[],[]] return flatten(current_images_tags) # [ imange:tag, image:tag ] def ensure_images(self): hackathons = self.hackathon_manager.get_online_hackathons() map(lambda h: self.__ensure_images_for_hackathon(h), hackathons) def check_container_status_is_normal(self, docker_container): """check container's running status on docker host if status is Running or Restarting returns True , else returns False :type docker_container: DockerContainer :param docker_container: the container that you want to check :type boolean :return True: the container running status is running or restarting , else returns False """ docker_host = self.db.find_first_object_by(DockerHostServer, id=docker_container.host_server_id) if docker_host is not None: container_info = self.__get_container_info_by_container_id(docker_host, docker_container.container_id) if container_info is None: return False return container_info['State']['Running'] or container_info['State']['Restarting'] else: return False def ping(self, docker_host): """Ping docker host to check running status :type docker_host : DockerHostServer :param docker_host: the hots that you want to check docker service running status :type 
boolean :return: True: running status is OK, else return False """ try: ping_url = '%s/_ping' % self.__get_vm_url(docker_host) req = requests.get(ping_url) self.log.debug(req.content) return req.status_code == 200 and req.content == 'OK' except Exception as e: self.log.error(e) return False # --------------------------------------------- helper function ---------------------------------------------# def __name_match(self, id, lists): for list in lists: if id in list: return True return False def __get_schedule_job_id(self, hackathon): return "pull_images_for_hackathon_%s" % hackathon.id def __ensure_images_for_hackathon(self, hackathon): # only ensure those alauda is disabled if hackathon.is_alauda_enabled(): self.log.debug("schedule job of hackathon '%s(%d)' removed for alauda enabled" % (hackathon.name, hackathon.id)) self.scheduler.remove_job(self.__get_schedule_job_id(hackathon)) return self.log.debug("adding schedule job to ensure images for hackathon [%d]%s" % (hackathon.id, hackathon.name)) next_run_time = self.util.get_now() + timedelta(seconds=3) context = Context(hackathon_id=hackathon.id) self.scheduler.add_interval(feature="template_manager", method="pull_images_for_hackathon", id=self.__get_schedule_job_id(hackathon), context=context, next_run_time=next_run_time, minutes=60) def __get_vm_url(self, docker_host): return 'http://%s:%d' % (docker_host.public_dns, docker_host.public_docker_api_port) def __clear_ports_cache(self): """ cache ports, if ports' number more than host_port_max_num, release the ports. But if there is a thread apply new ports, we will do this operation in the next loop. Because the host machine do not update the ports information, if we release ports now, the new ports will be lost. 
:return: """ num = self.db.count(Experiment, Experiment.status == EStatus.STARTING) if num > 0: self.log.debug("there are %d experiment is starting, host ports will updated in next loop" % num) return self.log.debug("-----release ports cache successfully------") self.host_ports = [] def __stop_container(self, expr_id, container, docker_host): self.__release_ports(expr_id, docker_host) docker_host.container_count -= 1 if docker_host.container_count < 0: docker_host.container_count = 0 self.db.commit() def __containers_info(self, docker_host): containers_url = '%s/containers/json' % self.get_vm_url(docker_host) req = requests.get(containers_url) self.log.debug(req.content) return self.util.convert(json.loads(req.content)) def __get_available_host_port(self, port_bindings, port): """ simple lock mechanism, visit static variable ports synchronize, because port_bindings is not in real-time, so we should cache the latest ports, when the cache ports number is more than host_port_max_num, we will release it to save space. :param port_bindings: :param port: :return: """ self.lock.acquire() try: host_port = port + 10000 while host_port in port_bindings or host_port in self.host_ports: host_port += 1 if host_port >= 65535: self.log.error("port used up on this host server") raise Exception("no port available") if len(self.host_ports) >= self.host_port_max_num: self.__clear_ports_cache() self.host_ports.append(host_port) self.log.debug("host_port is %d " % host_port) return host_port finally: self.lock.release() def __get_container(self, name, docker_host): containers = self.__containers_info(docker_host) return next((c for c in containers if name in c["Names"] or '/' + name in c["Names"]), None) def __create(self, docker_host, container_config, container_name): """ only create a container, in this step, we cannot start a container. 
:param docker_host: :param container_config: :param container_name: :return: """ containers_url = '%s/containers/create?name=%s' % (self.get_vm_url(docker_host), container_name) req = requests.post(containers_url, data=json.dumps(container_config), headers=self.application_json) self.log.debug(req.content) container = json.loads(req.content) if container is None: raise AssertionError("container is none") return container def __start(self, docker_host, container_id): """ start a container :param docker_host: :param container_id: :return: """ url = '%s/containers/%s/start' % (self.get_vm_url(docker_host), container_id) req = requests.post(url, headers=self.application_json) self.log.debug(req.content) def __get_available_public_ports(self, expr_id, host_server, host_ports): self.log.debug("starting to get azure ports") ep = Endpoint(Service(self.load_azure_key_id(expr_id))) host_server_name = host_server.vm_name host_server_dns = host_server.public_dns.split('.')[0] public_endpoints = ep.assign_public_endpoints(host_server_dns, 'Production', host_server_name, host_ports) if not isinstance(public_endpoints, list): self.log.debug("failed to get public ports") return internal_server_error('cannot get public ports') self.log.debug("public ports : %s" % public_endpoints) return public_endpoints def load_azure_key_id(self, expr_id): expr = self.db.get_object(Experiment, expr_id) hak = self.db.find_first_object_by(HackathonAzureKey, hackathon_id=expr.hackathon_id) return hak.azure_key_id def __assign_ports(self, expr, host_server, ve, port_cfg): """ assign ports from host server :param expr: :param host_server: :param ve: :param port_cfg: :return: """ # get 'host_port' map(lambda p: p.update( {DockerTemplateUnit.PORTS_HOST_PORT: self.get_available_host_port(host_server, p[ DockerTemplateUnit.PORTS_PORT])} ), port_cfg) # get 'public' cfg public_ports_cfg = filter(lambda p: DockerTemplateUnit.PORTS_PUBLIC in p, port_cfg) host_ports = [u[DockerTemplateUnit.PORTS_HOST_PORT] for 
u in public_ports_cfg] if self.util.safe_get_config("environment", "prod") == "local": map(lambda cfg: cfg.update({DockerTemplateUnit.PORTS_PUBLIC_PORT: cfg[DockerTemplateUnit.PORTS_HOST_PORT]}), public_ports_cfg) else: public_ports = self.__get_available_public_ports(expr.id, host_server, host_ports) for i in range(len(public_ports_cfg)): public_ports_cfg[i][DockerTemplateUnit.PORTS_PUBLIC_PORT] = public_ports[i] binding_dockers = [] # update port binding for public_cfg in public_ports_cfg: binding_cloud_service = PortBinding(name=public_cfg[DockerTemplateUnit.PORTS_NAME], port_from=public_cfg[DockerTemplateUnit.PORTS_PUBLIC_PORT], port_to=public_cfg[DockerTemplateUnit.PORTS_HOST_PORT], binding_type=PortBindingType.CLOUD_SERVICE, binding_resource_id=host_server.id, virtual_environment=ve, experiment=expr, url=public_cfg[DockerTemplateUnit.PORTS_URL] if DockerTemplateUnit.PORTS_URL in public_cfg else None) binding_docker = PortBinding(name=public_cfg[DockerTemplateUnit.PORTS_NAME], port_from=public_cfg[DockerTemplateUnit.PORTS_HOST_PORT], port_to=public_cfg[DockerTemplateUnit.PORTS_PORT], binding_type=PortBindingType.DOCKER, binding_resource_id=host_server.id, virtual_environment=ve, experiment=expr) binding_dockers.append(binding_docker) self.db.add_object(binding_cloud_service) self.db.add_object(binding_docker) self.db.commit() local_ports_cfg = filter(lambda p: DockerTemplateUnit.PORTS_PUBLIC not in p, port_cfg) for local_cfg in local_ports_cfg: port_binding = PortBinding(name=local_cfg[DockerTemplateUnit.PORTS_NAME], port_from=local_cfg[DockerTemplateUnit.PORTS_HOST_PORT], port_to=local_cfg[DockerTemplateUnit.PORTS_PORT], binding_type=PortBindingType.DOCKER, binding_resource_id=host_server.id, virtual_environment=ve, experiment=expr) binding_dockers.append(port_binding) self.db.add_object(port_binding) self.db.commit() return binding_dockers def __release_ports(self, expr_id, host_server): """ release the specified experiment's ports """ self.log.debug("Begin 
to release ports: expr_id: %d, host_server: %r" % (expr_id, host_server)) ports_binding = self.db.find_all_objects_by(PortBinding, experiment_id=expr_id) if ports_binding is not None: docker_binding = filter( lambda u: self.util.safe_get_config("environment", "prod") != "local" and u.binding_type == 1, ports_binding) ports_to = [d.port_to for d in docker_binding] if len(ports_to) != 0: self.__release_public_ports(expr_id, host_server, ports_to) for port in ports_binding: self.db.delete_object(port) self.db.commit() self.log.debug("End to release ports: expr_id: %d, host_server: %r" % (expr_id, host_server)) def __release_public_ports(self, expr_id, host_server, host_ports): ep = Endpoint(Service(self.load_azure_key_id(expr_id))) host_server_name = host_server.vm_name host_server_dns = host_server.public_dns.split('.')[0] self.log.debug("starting to release ports ... ") ep.release_public_endpoints(host_server_dns, 'Production', host_server_name, host_ports) def __get_container_info_by_container_id(self, docker_host, container_id): """get a container info by container_id from a docker host :type docker_host: str|unicode :param: the docker host which you want to search container from :type container_id: str|unicode :param as a parameter that you want to search container though docker remote API :return dic object of the container info if not None """ try: get_container_url = self.get_vm_url(docker_host) + "/container/%s/json?all=0" % container_id req = requests.get(get_container_url) if req.status_code >= 200 and req.status_code < 300 : container_info = json.loads(req.content) return container_info return None except Exception as ex: self.log.error(ex) return None<|fim▁end|>
try: self.__start(host_server, container_create_result["Id"]) host_server.container_count += 1
<|file_name|>image-viewer.js<|end_file_name|><|fim▁begin|>window.ImageViewer = function(url, alt, title){ var img = $('<img />').attr('src', url).attr('alt', title).css({ display: 'inline-block', 'max-width': '90vw', 'max-height': '90vh' }); var a = $('<a></a>').attr('target', '_blank') .attr('title', title) .attr('href', url) .css({ display: 'inline-block', height: '100%' }) .append(img); <|fim▁hole|> }; var closeBtn = $('<a class="icon-remove-sign"></a>').css({ color: 'red', 'font-size': 'x-large', 'margin-left': '-0.1em' }).bind('click', close_it); var closeWrapper = $('<div></div>').css({ height: '100%', width: '2em', 'text-align': 'left', 'display': 'inline-block', 'vertical-algin': 'top', 'margin-top': '-0.6em', 'float': 'right' }).append(closeBtn); var container = $('<div></div>').append( $('<div></div>').css({ margin: '5vh 1vw', display: 'inline-block', 'vertical-align': 'top' }).append(a).append(closeWrapper)) .css({ 'z-index': 30000000, 'position': 'fixed', 'padding': 0, 'margin': 0, 'width': '100vw', 'height': '100vh', 'top': 0, 'left': 0, 'text-align': 'center', 'cursor': 'default', 'vertical-align': 'middle' }) .bind('click',close_it) .appendTo('body'); var overlay = $('<div class="blockUI blockMsg blockPage">').css({ 'z-index': 9999, 'position': 'fixed', padding: 0, margin: 0, width: '100vw', height: '100vh', top: '0vh', left: '0vw', 'text-align': 'center', 'cursor': 'default', 'vertical-align': 'middle', 'background-color': 'gray', 'opacity': '0.4' }).bind('click', close_it).appendTo('body'); this.close = close_it; return this; }<|fim▁end|>
var close_it = function(){ overlay.remove(); container.remove();
<|file_name|>lista-schede-fake-json.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import { Observable } from "rxjs/Observable"; import 'rxjs/add/observable/of'; import { Http } from "@angular/http"; import { SchedaContatto } from "../scheda-contatto/scheda-contatto.model"; @Injectable() export class ListaSchedeService_FakeJson { private schede: SchedaContatto[] = JSON.parse(` [ { "idScheda": 798495, "dataOrainserimento": "2017-06-25T01:02:10.4551733+02:00", "idOperatore": 205, "idPostazione":"121156A", "nomeUtente": "MARIO ROSSI", "numTelefono": 3351234567, "indirizzo": "Via Cavour, 5, Roma, RM", "infoAddizionali": "Portone Uffici Centrali VVF", "classificazioneNUE": "Incendio", "attributiClassificazione":"Attributi di classificazione", "note":"Note", "priorita": 5, "numPersoneCoinvolte":"2", "competenza":"VVF" }, { "idScheda": 587469, "dataOrainserimento": "2015-06-15T01:02:10.4551733+02:00", "idOperatore": 549, "idPostazione":"121165B", "nomeUtente": "ALDO BALDO", "numTelefono": 3334587993, "indirizzo": "Via Roma, 7, Milano, MI", "infoAddizionali": "Angolo via della Spiga",<|fim▁hole|> "note":"Note2", "priorita": 1, "numPersoneCoinvolte":"1", "competenza":"VVF" } ]`); constructor(private http: Http) { } public getSchede(): Observable<SchedaContatto[]> { return Observable.of(this.schede); } public getScheda(id: number | string): Observable<SchedaContatto> { return Observable.of(this.schede // (+) before `id` turns the string into a number .find(scheda => scheda.idScheda === +id)); } }<|fim▁end|>
"classificazioneNUE": "Apertura porta", "attributiClassificazione":"Attributi di classificazione",
<|file_name|>legality.cpp<|end_file_name|><|fim▁begin|>// QueenMaxima, a chess playing program. // Copyright (C) 1996-2013 Erik van het Hof and Hermen Reitsma // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. // #include <string.h> #include <stdio.h> #include "hashcodes.h" #include "legality.h" #include "fast.h" #include "attack.h" bool _fast_moveokw (TFastNode* node, int move) // returns "move would be generated" { int ssq = SOURCESQ (move); int piece = PIECE (move); if ((!piece) || node -> matrix [ssq] != piece) { return false; } int tsq = TARGETSQ (move); if (! 
(move & _LL(4294934528))) { //a normal noncapture if (node->matrix[tsq]) { return false; } if (piece==PAWN) { return !node->matrix [ssq+8]; } return t.pieceattacks [piece] [ssq] [tsq] && (piece == KNIGHT || piece == KING || nooccs(node,ssq,tsq)); } else if (!SPECIAL(move)) { //a normal capture if (node->matrix [tsq] != CAPTURED (move) + KING) { return false; } return t.pieceattacks [piece][ssq][tsq] && (piece == PAWN || piece==KNIGHT || piece==KING || nooccs(node,ssq,tsq)); } else switch (SPECIALCASE (move)) { case _SHORT_CASTLE: return _SCW (node) && node -> matrix [g1] == 0 && node -> matrix [f1] == 0; case _LONG_CASTLE: return _LCW (node) && node -> matrix [b1] == 0 && node -> matrix [c1] == 0 && node -> matrix [d1] == 0; case _EN_PASSANT: return _EPSQ (node) == tsq && node -> matrix [ssq] == PAWN; default: // promotion if (_CAPTURE (move)) { return node -> matrix [ssq] == PAWN && node -> matrix [tsq] == CAPTURED (move)+KING; } else { return node -> matrix [ssq] == PAWN && node -> matrix [tsq] == 0; } } BOOST_ASSERT (false); return false; } bool _fast_moveokb(TFastNode* node, int move) { // returns "move would be generated" int ssq = SOURCESQ (move), piece = PIECE(move); if (!piece || node -> matrix [ssq] != piece+KING) { return false; } int tsq = TARGETSQ(move); if (! 
(move & _LL(4294934528))) { //not a special move or a capture if (node->matrix[tsq]) { return false; } if (piece==PAWN) { return !node->matrix[ssq-8]; } return (t.pieceattacks [piece] [tsq] [ssq] && (piece == KNIGHT || piece == KING || nooccs (node, ssq, tsq))); } else if (!SPECIAL (move)) { //a normal capture if (node->matrix [tsq] != CAPTURED (move)) { return false; } return t.pieceattacks [piece][tsq][ssq] && (piece == PAWN || piece==KNIGHT || piece==KING || nooccs (node, ssq, tsq)); } else switch (SPECIALCASE (move)) { case _SHORT_CASTLE: return _SCB (node) && (node -> matrix [g8] == 0) && (node -> matrix [f8] == 0); case _LONG_CASTLE: return _LCB (node) && (node -> matrix [b8] == 0) && (node -> matrix [c8] == 0) && (node -> matrix [d8] == 0); case _EN_PASSANT: return (_EPSQ (node) == tsq) && (node -> matrix [ssq] == BPAWN); default: // promotion if (_CAPTURE (move)) { return (node -> matrix [ssq] == BPAWN) && (node -> matrix [tsq] == CAPTURED (move)); } else { return (node -> matrix [ssq] == BPAWN) && (node -> matrix [tsq] == 0); } } BOOST_ASSERT (false); return false; } bool inspect_move_legality_b (TFastNode* node, int move) { bool result; int flags = node -> flags, fifty = node -> fifty; if (move == ENCODESCB) { return ! attacked_by_PNBRQK (node, e8) && ! attacked_by_PNBRQK (node, f8) && ! attacked_by_PNBRQK (node, g8); } if (move == ENCODELCB) { return ! attacked_by_PNBRQK (node, e8) && ! attacked_by_PNBRQK (node, d8) && ! attacked_by_PNBRQK (node, c8); } _fast_dobmove (node, move); result = ! attacked_by_PNBRQK (node, node -> bkpos); _fast_undobmove (node, move, flags, fifty); return result; } bool inspect_move_legality_w (TFastNode* node, int move) { bool result; int flags = node -> flags, fifty = node -> fifty; if (move == ENCODESCW) { return ! attacked_by_pnbrqk (node, e1) && ! attacked_by_pnbrqk (node, f1) && ! attacked_by_pnbrqk (node, g1); } if (move == ENCODELCW) { return ! attacked_by_pnbrqk (node, e1) && ! attacked_by_pnbrqk (node, d1) && ! 
attacked_by_pnbrqk (node, c1); } _fast_dowmove (node, move); result = ! attacked_by_pnbrqk (node, node -> wkpos); _fast_undowmove (node, move, flags, fifty); return result; } bool legal_move (TFastNode* node, int move) { if (node -> flags & _WTM) { return legal_move_w(node, move); } else { return legal_move_b(node, move); } } bool legal_move_w (TFastNode* node, int move) // returns "white move is legal in node" { int sq = SOURCESQ (move), offset_ssq_wk, offset_ssq_tsq, piece = PIECE (move); if (piece < KING) { // voor promoties, en_passant en normale zetten geldt: offset_ssq_wk = t.direction [sq] [node->wkpos]; if (! offset_ssq_wk) { return true; // niet op een lijn met de koning. vaak knalt ie er hier meteen weer uit. } offset_ssq_tsq = t.direction [sq] [TARGETSQ (move)]; if (offset_ssq_wk == offset_ssq_tsq || offset_ssq_wk == - offset_ssq_tsq) { return true; // stuk blijft op dezelfde lijn } if (! SPECIAL (move) || SPECIALCASE (move) != _EN_PASSANT || (offset_ssq_wk != 1 && offset_ssq_wk != -1)) { // uitzonderingen met en_passant komen alleen voor als koning en pion op hor. lijn staan (toch?) // omdat er bij enpassant slaan twee stukken van de horizontale lijn verdwijnen. sq += offset_ssq_wk; while (sq != node -> wkpos) { if (node -> matrix [sq]) { return true; // niet gepind want er staat een stuk (w/z) tussen } sq += offset_ssq_wk; } sq = t.sqonline [node -> wkpos] [SOURCESQ (move)]; while (sq != node -> wkpos) { BOOST_ASSERT (sq >= 0 && sq <= 63); piece = node -> matrix [sq]; if (piece) { return piece < BBISHOP || ! 
t.pieceattacks [piece - KING] [sq] [node->wkpos]; } sq = t.sqonline [node -> wkpos] [sq]; } return true; } // en passant en koning, pion en geslagen pion op 1 horizontale lijn int csq = TARGETSQ (move) - 8; sq += offset_ssq_wk; while (sq != node->wkpos) { if (node -> matrix [sq] && (sq != csq)) { return true; // niet gepind want er staat een stuk (w/z) tussen } sq += offset_ssq_wk; } sq = t.sqonline [node -> wkpos] [SOURCESQ (move)]; while (sq != node -> wkpos) { BOOST_ASSERT (sq >= 0 && sq <= 63); piece = node -> matrix [sq]; if (piece && (sq != csq)) { return piece < BROOK || ! t.pieceattacks [piece - KING] [sq] [node -> wkpos]; } sq = t.sqonline [node -> wkpos] [sq]; } return true; } // de koning zet if (attacked_by_pnbrqk (node, TARGETSQ (move))) { return false; } if (! SPECIAL (move)) { return true; } if (attacked_by_pnbrqk (node, e1)) { return false; } if (move == ENCODESCW) { return ! attacked_by_pnbrqk (node, f1); } return ! attacked_by_pnbrqk (node, d1); } bool legal_move_b (TFastNode* node, int move) // returns "black move is legal in node" { int sq = SOURCESQ (move), offset_ssq_bk, offset_ssq_tsq, piece = PIECE (move); if (piece < KING) { // voor promoties, en_passant en normale zetten geldt: offset_ssq_bk = t.direction [sq] [node->bkpos]; if (! offset_ssq_bk) { return true; // niet op een lijn met de koning. vaak knalt ie er hier meteen weer uit. } offset_ssq_tsq = t.direction [sq] [TARGETSQ (move)]; if (offset_ssq_bk == offset_ssq_tsq || offset_ssq_bk == - offset_ssq_tsq) { return true; // stuk blijft op dezelfde lijn } sq += offset_ssq_bk; if (! SPECIAL (move) || SPECIALCASE (move) != _EN_PASSANT || (offset_ssq_bk != 1 && offset_ssq_bk != -1)) { // uitzonderingen met en_passant komen alleen voor als koning en pion op hor. lijn staan (toch?) // omdat er bij enpassant slaan twee stukken van de horizontale lijn verdwijnen. 
while (sq != node -> bkpos) { if (node -> matrix [sq]) { return true; // niet gepind want er staat een stuk (w/z) tussen } sq += offset_ssq_bk; } sq = t.sqonline [node -> bkpos] [SOURCESQ (move)]; while (sq != node -> bkpos) { BOOST_ASSERT (sq >= 0 && sq <= 63); piece = node -> matrix [sq]; if (piece) { return piece > QUEEN || piece < BISHOP || ! t.pieceattacks [piece] [sq] [node->bkpos]; } sq = t.sqonline [node -> bkpos] [sq]; } return true; } // en passant en koning, pion en geslagen pion op 1 horizontale lijn int csq = TARGETSQ (move) + 8; while (sq != node -> bkpos) { if (node -> matrix [sq] && sq != csq) { return true; // niet gepind want er staat een stuk (w/z) tussen } sq += offset_ssq_bk; } sq = t.sqonline [node -> bkpos] [ SOURCESQ (move)]; while (sq != node -> bkpos) { BOOST_ASSERT (sq >= 0 && sq <= 63); piece = node -> matrix [sq]; if (piece && sq != csq) { return piece > QUEEN || piece < ROOK || ! t.pieceattacks [piece] [sq] [node -> bkpos]; } sq = t.sqonline [node -> bkpos] [sq]; } return true; } // de koning zet if (attacked_by_PNBRQK (node, TARGETSQ (move))) { return false; } if (! SPECIAL (move)) { return true; } if (attacked_by_PNBRQK (node, e8)) { return false; } if (move == ENCODESCB) {<|fim▁hole|> } return ! 
attacked_by_PNBRQK (node, d8); } int _fast_inspectnode (TFastNode* node) { _int64 hashcode = 0, pawncode = 0; int i; if ((bool)((node -> hashcode & 1) > 0) != (bool)((node -> flags & _WTM) >0)) { return 500; } if (node->wpawns > 8 || node->wpawns < 0) { return 1; } if (node->wknights + node->wpawns > 10 || node->wknights < 0) { return 2; } if (node->wbishops + node->wpawns > 10 || node->wbishops < 0) { return 3; } if (node->wrooks + node->wpawns > 10 || node->wrooks < 0) { return 4; } if (node->wqueens + node->wpawns > 10 || node->wqueens < 0) { return 5; } if (node->wkpos < 0 || node->wkpos > 63) { return 6; } if (node->bpawns > 8 || node->bpawns < 0) { return 11; } if (node->bknights + node->bpawns > 10 || node->bknights < 0) { return 12; } if (node->bbishops + node->bpawns > 10 || node->bbishops < 0) { return 13; } if (node->brooks + node->bpawns > 10 || node->brooks < 0) { return 14; } if (node->bqueens + node->bpawns > 10 || node->bqueens < 0) { return 15; } if (node->bkpos < 0 || node->bkpos > 63) { return 16; } int sq; char matrix[64]; memset (matrix, 0, sizeof (matrix)); for (i = 0; i < node->wpawns; i++) { sq = node->wpawnlist [i]; hashcode ^= hashnumbers [PAWN - 1] [sq]; pawncode ^= hashnumbers [PAWN - 1] [sq]; if (node->index [sq] != i) { std::cout << boost::format("node->index[%d] = %d ; i = %d\n") % sq % node->index[sq] % i; return 21; } matrix [sq] = PAWN; if (node->matrix [sq] != PAWN) { return 41; } } for (i = 0; i < node->wknights; i++) { sq = node->wknightlist [i]; hashcode ^= hashnumbers [KNIGHT - 1] [sq]; if (node->index [sq] != i) { return 22; } matrix [sq] = KNIGHT; if (node->matrix [sq] != KNIGHT) { return 42; } } for (i = 0; i < node->wbishops; i++) { sq = node->wbishoplist [i]; hashcode ^= hashnumbers [BISHOP - 1] [sq]; if (node->index [sq] != i) { return 23; } matrix [sq] = BISHOP; if (node->matrix [sq] != BISHOP) { return 43; } } for (i = 0; i < node->wrooks; i++) { sq = node->wrooklist [i]; hashcode ^= hashnumbers [ROOK - 1] [sq]; if 
(node->index [sq] != i) { return 24; } matrix [sq] = ROOK; if (node->matrix [sq] != ROOK) { return 44; } } for (i = 0; i < node->wqueens; i++) { sq = node->wqueenlist [i]; hashcode ^= hashnumbers [QUEEN - 1] [sq]; if (node->index [sq] != i) { return 25; } matrix [sq] = QUEEN; if (node->matrix [sq] != QUEEN) { return 45; } } matrix [node->wkpos] = KING; hashcode ^= hashnumbers [KING - 1] [node -> wkpos]; if (node->matrix [node->wkpos] != KING) { return 46; } for (i = 0; i < node->bpawns; i++) { sq = node->bpawnlist [i]; hashcode ^= hashnumbers [BPAWN - 1] [sq]; pawncode ^= hashnumbers [BPAWN - 1] [sq]; if (node->index [sq] != i) { return 31; } matrix [sq] = BPAWN; if (node->matrix [sq] != BPAWN) { return 51; } } for (i = 0; i < node->bknights; i++) { sq = node->bknightlist [i]; hashcode ^= hashnumbers [BKNIGHT - 1] [sq]; if (node->index [sq] != i) { return 32; } matrix [sq] = BKNIGHT; if (node->matrix [sq] != BKNIGHT) { return 52; } } for (i = 0; i < node->bbishops; i++) { sq = node->bbishoplist [i]; hashcode ^= hashnumbers [BBISHOP - 1] [sq]; if (node->index [sq] != i) { return 33; } matrix [sq] = BBISHOP; if (node->matrix [sq] != BBISHOP) { return 53; } } for (i = 0; i < node->brooks; i++) { sq = node->brooklist [i]; hashcode ^= hashnumbers [BROOK - 1] [sq]; if (node->index [sq] != i) { return 34; } matrix [sq] = BROOK; if (node->matrix [sq] != BROOK) { return 54; } } for (i = 0; i < node->bqueens; i++) { sq = node->bqueenlist [i]; hashcode ^= hashnumbers [BQUEEN - 1] [sq]; if (node->index [sq] != i) { return 35; } matrix [sq] = BQUEEN; if (node->matrix [sq] != BQUEEN) { return 55; } } matrix [node->bkpos] = BKING; hashcode ^= hashnumbers [BKING - 1] [node -> bkpos]; if (node->matrix [node->bkpos] != BKING) { return 56; } if (memcmp (matrix, node->matrix, sizeof (matrix))) { return 100; } if (_SCW(node)) { hashcode ^= _LL(0x47bc71a493da706e); } if (_SCB(node)) { hashcode ^= _LL(0x6fed622e98f98b7e); } if (_LCW(node)) { hashcode ^= _LL(0x6338be439fd357dc); } if 
(_LCB(node)) { hashcode ^= _LL(0xce107ca2947d2d58); } if (_EPSQ(node)) { hashcode ^= ephash [_EPSQ (node)]; } if (node-> flags & _WTM) { hashcode |= 1; } else { hashcode &= _LL(0xFFFFFFFFFFFFFFFE); } if (hashcode != node -> hashcode) { std::cout << boost::format("hashcode = %Ld, node -> hashcode = %Ld\n") % hashcode % node -> hashcode; return 200; } if (pawncode != node -> pawncode) { std::cout << boost::format("pawncode = %Ld, node -> pawncode = %Ld\n") % pawncode % node -> pawncode; return 201; } // if (_result_value < -CHESS_INF || _result_value > CHESS_INF) { // return 300; // } return 0; }<|fim▁end|>
return ! attacked_by_PNBRQK (node, f8);
<|file_name|>response.go<|end_file_name|><|fim▁begin|>package echo import ( "bufio" "net" "net/http" ) type ( // Response wraps an http.ResponseWriter and implements its interface to be used // by an HTTP handler to construct an HTTP response. // See: https://golang.org/pkg/net/http/#ResponseWriter Response struct { echo *Echo beforeFuncs []func() afterFuncs []func() Writer http.ResponseWriter Status int Size int64 Committed bool } ) // NewResponse creates a new instance of Response. func NewResponse(w http.ResponseWriter, e *Echo) (r *Response) { return &Response{Writer: w, echo: e} } // Header returns the header map for the writer that will be sent by // WriteHeader. Changing the header after a call to WriteHeader (or Write) has // no effect unless the modified headers were declared as trailers by setting // the "Trailer" header before the call to WriteHeader (see example) // To suppress implicit response headers, set their value to nil. // Example: https://golang.org/pkg/net/http/#example_ResponseWriter_trailers func (r *Response) Header() http.Header { return r.Writer.Header() } // Before registers a function which is called just before the response is written. func (r *Response) Before(fn func()) { r.beforeFuncs = append(r.beforeFuncs, fn) } // After registers a function which is called just after the response is written. // If the `Content-Length` is unknown, none of the after function is executed. func (r *Response) After(fn func()) { r.afterFuncs = append(r.afterFuncs, fn) } // WriteHeader sends an HTTP response header with status code. If WriteHeader is // not called explicitly, the first call to Write will trigger an implicit // WriteHeader(http.StatusOK). Thus explicit calls to WriteHeader are mainly // used to send error codes. 
func (r *Response) WriteHeader(code int) { if r.Committed { r.echo.Logger.Warn("response already committed") return } for _, fn := range r.beforeFuncs { fn() } r.Status = code r.Writer.WriteHeader(code) r.Committed = true } // Write writes the data to the connection as part of an HTTP reply. func (r *Response) Write(b []byte) (n int, err error) { if !r.Committed { if r.Status == 0 { r.Status = http.StatusOK } r.WriteHeader(r.Status) } n, err = r.Writer.Write(b) r.Size += int64(n) for _, fn := range r.afterFuncs { fn() } return } // Flush implements the http.Flusher interface to allow an HTTP handler to flush // buffered data to the client.<|fim▁hole|>func (r *Response) Flush() { r.Writer.(http.Flusher).Flush() } // Hijack implements the http.Hijacker interface to allow an HTTP handler to // take over the connection. // See [http.Hijacker](https://golang.org/pkg/net/http/#Hijacker) func (r *Response) Hijack() (net.Conn, *bufio.ReadWriter, error) { return r.Writer.(http.Hijacker).Hijack() } func (r *Response) reset(w http.ResponseWriter) { r.beforeFuncs = nil r.afterFuncs = nil r.Writer = w r.Size = 0 r.Status = http.StatusOK r.Committed = false }<|fim▁end|>
// See [http.Flusher](https://golang.org/pkg/net/http/#Flusher)
<|file_name|>ITC06010401_UpdateNetwork.py<|end_file_name|><|fim▁begin|>#encoding:utf-8 __authors__ = ['wei keke'] __version__ = "V0.1" ''' # ChangeLog: #--------------------------------------------------------------------------------- # Version Date Desc Author #--------------------------------------------------------------------------------- # V0.1 2014/10/17 初始版本 #--------------------------------------------------------------------------------- ''' import TestData.Network.ITC06_Setup as ModuleData from TestAPIs.DataCenterAPIs import DataCenterAPIs ''' @note: PreData ''' dc_name = ModuleData.dc_name dc_id = DataCenterAPIs().getDataCenterIdByName(ModuleData.dc_name) nw_name = 'network001'<|fim▁hole|><network> <name>%s</name> <data_center id= "%s"/> </network> ''' %(nw_name,dc_id) ''' @note:TestData ''' new_nw_name = 'network002' update_info = ''' <network> <name>%s</name> <description>lalala</description> <mtu>2000</mtu> </network> '''%new_nw_name ''' @note: ExpectedData ''' expected_status_code = 200<|fim▁end|>
nw_info = '''
<|file_name|>ListenAddress.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. #ifndef OPENTXS_PROTO_LISTENADDRESS_HPP #define OPENTXS_PROTO_LISTENADDRESS_HPP #include "VerifyContracts.hpp" namespace opentxs { namespace proto { OPENTXS_PROTO_EXPORT bool CheckProto_1( const ListenAddress& address, const bool silent); OPENTXS_PROTO_EXPORT bool CheckProto_2(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_3(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_4(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_5(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_6(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_7(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_8(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_9(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_10(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_11(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_12(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_13(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_14(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_15(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_16(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_17(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_18(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_19(const ListenAddress&, const bool); OPENTXS_PROTO_EXPORT bool CheckProto_20(const ListenAddress&, const bool); } // namespace proto } // namespace opentxs #endif // 
OPENTXS_PROTO_LISTENADDRESS_HPP<|fim▁end|>
// Copyright (c) 2020 The Open-Transactions developers
<|file_name|>index.js<|end_file_name|><|fim▁begin|>function validateUser(req){ return (req.session.hasOwnProperty('user')&&req.session.user.length!=0); } module.exports = { index:function(req,res,next){ if(validateUser(req)){ next() }else{<|fim▁hole|>};<|fim▁end|>
res.render('login',{params:req.params,session:req.session}); } }
<|file_name|>NPCHandler.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2010-2012 Project SkyFire <http://www.projectskyfire.org/> * Copyright (C) 2010-2012 Oregon <http://www.oregoncore.com/> * Copyright (C) 2008-2012 TrinityCore <http://www.trinitycore.org/> * Copyright (C) 2005-2012 MaNGOS <http://getmangos.com/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "Common.h" #include "Language.h" #include "Database/DatabaseEnv.h" #include "WorldPacket.h" #include "WorldSession.h" #include "Opcodes.h" #include "Log.h" #include "World.h" #include "ObjectMgr.h" #include "SpellMgr.h" #include "Player.h" #include "GossipDef.h" #include "SpellAuras.h" #include "UpdateMask.h" #include "ObjectAccessor.h" #include "Creature.h" #include "MapManager.h" #include "Pet.h" #include "BattlegroundMgr.h" #include "Battleground.h" #include "Guild.h" #include "ScriptMgr.h" void WorldSession::HandleTabardVendorActivateOpcode(WorldPacket & recv_data) { uint64 guid; recv_data >> guid; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_TABARDDESIGNER); if (!unit) { sLog->outDebug("WORLD: HandleTabardVendorActivateOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(guid))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); 
SendTabardVendorActivate(guid); }<|fim▁hole|> data << guid; SendPacket(&data); } void WorldSession::HandleBankerActivateOpcode(WorldPacket & recv_data) { sLog->outDebug( "WORLD: Received CMSG_BANKER_ACTIVATE"); uint64 guid; recv_data >> guid; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_BANKER); if (!unit) { sLog->outDebug("WORLD: HandleBankerActivateOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(guid))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); SendShowBank(guid); } void WorldSession::SendShowBank(uint64 guid) { WorldPacket data(SMSG_SHOW_BANK, 8); data << guid; SendPacket(&data); } void WorldSession::HandleTrainerListOpcode(WorldPacket & recv_data) { uint64 guid; recv_data >> guid; SendTrainerList(guid); } void WorldSession::SendTrainerList(uint64 guid) { std::string str = GetSkyFireString(LANG_NPC_TAINER_HELLO); SendTrainerList(guid, str); } void WorldSession::SendTrainerList(uint64 guid, const std::string& strTitle) { sLog->outDebug("WORLD: SendTrainerList"); Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_TRAINER); if (!unit) { sLog->outDebug("WORLD: SendTrainerList - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(guid))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); // trainer list loaded at check; if (!unit->isCanTrainingOf(_player, true)) return; CreatureTemplate const *ci = unit->GetCreatureTemplate(); if (!ci) { sLog->outDebug("WORLD: SendTrainerList - (GUID: %u) NO CREATUREINFO!", GUID_LOPART(guid)); return; } TrainerSpellData const* trainer_spells = unit->GetTrainerSpells(); if (!trainer_spells) { sLog->outDebug("WORLD: SendTrainerList - Training spells not found for creature (GUID: %u Entry: %u)", GUID_LOPART(guid), 
unit->GetEntry()); return; } WorldPacket data(SMSG_TRAINER_LIST, 8+4+4+trainer_spells->spellList.size()*38 + strTitle.size()+1); data << guid; data << uint32(trainer_spells->trainerType); size_t count_pos = data.wpos(); data << uint32(trainer_spells->spellList.size()); // reputation discount float fDiscountMod = _player->GetReputationPriceDiscount(unit); uint32 count = 0; for (TrainerSpellList::const_iterator itr = trainer_spells->spellList.begin(); itr != trainer_spells->spellList.end(); ++itr) { TrainerSpell const* tSpell = *itr; if (!_player->IsSpellFitByClassAndRace(tSpell->spell)) continue; ++count; bool primary_prof_first_rank = sSpellMgr->IsPrimaryProfessionFirstRankSpell(tSpell->spell); SpellChainNode const* chain_node = sSpellMgr->GetSpellChainNode(tSpell->spell); uint32 req_spell = sSpellMgr->GetSpellRequired(tSpell->spell); data << uint32(tSpell->spell); data << uint8(_player->GetTrainerSpellState(tSpell)); data << uint32(floor(tSpell->spellcost * fDiscountMod)); data << uint32(primary_prof_first_rank ? 1 : 0); // primary prof. learn confirmation dialog data << uint32(primary_prof_first_rank ? 1 : 0); // must be equal prev. field to have learn button in enabled state data << uint8(tSpell->reqlevel); data << uint32(tSpell->reqskill); data << uint32(tSpell->reqskillvalue); data << uint32(chain_node && chain_node->prev ? chain_node->prev : req_spell); data << uint32(chain_node && chain_node->prev ? 
req_spell : 0); data << uint32(0); } data << strTitle; data.put<uint32>(count_pos, count); SendPacket(&data); } void WorldSession::HandleTrainerBuySpellOpcode(WorldPacket & recv_data) { uint64 guid; uint32 spellId = 0; recv_data >> guid >> spellId; sLog->outDebug("WORLD: Received CMSG_TRAINER_BUY_SPELL NpcGUID=%u, learn spell id is: %u", uint32(GUID_LOPART(guid)), spellId); Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_TRAINER); if (!unit) { sLog->outDebug("WORLD: HandleTrainerBuySpellOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(guid))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); if (!unit->isCanTrainingOf(_player, true)) return; // check present spell in trainer spell list TrainerSpellData const* trainer_spells = unit->GetTrainerSpells(); if (!trainer_spells) return; // not found, cheat? TrainerSpell const* trainer_spell = trainer_spells->Find(spellId); if (!trainer_spell) return; // can't be learn, cheat? Or double learn with lags... 
if (_player->GetTrainerSpellState(trainer_spell) != TRAINER_SPELL_GREEN) return; // apply reputation discount uint32 nSpellCost = uint32(floor(trainer_spell->spellcost * _player->GetReputationPriceDiscount(unit))); // check money requirement if (_player->GetMoney() < nSpellCost) return; WorldPacket data(SMSG_PLAY_SPELL_VISUAL, 12); // visual effect on trainer data << uint64(guid) << uint32(0xB3); SendPacket(&data); data.Initialize(SMSG_PLAY_SPELL_IMPACT, 12); // visual effect on player data << uint64(_player->GetGUID()) << uint32(0x016A); SendPacket(&data); _player->ModifyMoney(-int32(nSpellCost)); // learn explicitly to prevent lost money at lags, learning spell will be only show spell animation _player->learnSpell(trainer_spell->spell); data.Initialize(SMSG_TRAINER_BUY_SUCCEEDED, 12); data << uint64(guid) << uint32(spellId); SendPacket(&data); } void WorldSession::HandleGossipHelloOpcode(WorldPacket & recv_data) { sLog->outDebug("WORLD: Received CMSG_GOSSIP_HELLO"); uint64 guid; recv_data >> guid; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_NONE); if (!unit) { sLog->outDebug("WORLD: HandleGossipHelloOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(guid))); return; } GetPlayer()->RemoveAurasWithInterruptFlags(AURA_INTERRUPT_FLAG_TALK); // remove fake death //if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) // GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); if (unit->isArmorer() || unit->isCivilian() || unit->isQuestGiver() || unit->isServiceProvider()) { unit->StopMoving(); } // If spiritguide, no need for gossip menu, just put player into resurrect queue if (unit->isSpiritGuide()) { BattleGround *bg = _player->GetBattleGround(); if (bg) { bg->AddPlayerToResurrectQueue(unit->GetGUID(), _player->GetGUID()); sBattleGroundMgr->SendAreaSpiritHealerQueryOpcode(_player, bg, unit->GetGUID()); return; } } if (!sScriptMgr->GossipHello(_player, unit)) { 
_player->TalkedToCreature(unit->GetEntry(), unit->GetGUID()); _player->PrepareGossipMenu(unit, unit->GetCreatureTemplate()->GossipMenuId); _player->SendPreparedGossip(unit); } } /*void WorldSession::HandleGossipSelectOptionOpcode(WorldPacket & recv_data) { sLog->outDebug("WORLD: CMSG_GOSSIP_SELECT_OPTION"); uint32 option; uint32 unk; uint64 guid; std::string code = ""; recv_data >> guid >> unk >> option; if (_player->PlayerTalkClass->GossipOptionCoded(option)) { sLog->outDebug("reading string"); recv_data >> code; sLog->outDebug("string read: %s", code.c_str()); } Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_NONE); if (!unit) { sLog->outDebug("WORLD: HandleGossipSelectOptionOpcode - Unit (GUID: %u) not found or you can't interact with him.", uint32(GUID_LOPART(guid))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); if (!code.empty()) { if (!sScriptMgr->GossipSelectWithCode(_player, unit, _player->PlayerTalkClass->GossipOptionSender (option), _player->PlayerTalkClass->GossipOptionAction(option), code.c_str())) unit->OnGossipSelect (_player, option); } else { if (!sScriptMgr->GossipSelect (_player, unit, _player->PlayerTalkClass->GossipOptionSender (option), _player->PlayerTalkClass->GossipOptionAction (option))) unit->OnGossipSelect (_player, option); } }*/ void WorldSession::HandleSpiritHealerActivateOpcode(WorldPacket & recv_data) { sLog->outDebug("WORLD: CMSG_SPIRIT_HEALER_ACTIVATE"); uint64 guid; recv_data >> guid; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_SPIRITHEALER); if (!unit) { sLog->outDebug("WORLD: HandleSpiritHealerActivateOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(guid))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); SendSpiritResurrect(); } void 
WorldSession::SendSpiritResurrect() { _player->ResurrectPlayer(0.5f, true); _player->DurabilityLossAll(0.25f, true); // get corpse nearest graveyard WorldSafeLocsEntry const *corpseGrave = NULL; Corpse *corpse = _player->GetCorpse(); if (corpse) corpseGrave = sObjectMgr->GetClosestGraveYard( corpse->GetPositionX(), corpse->GetPositionY(), corpse->GetPositionZ(), corpse->GetMapId(), _player->GetTeam()); // now can spawn bones _player->SpawnCorpseBones(); // teleport to nearest from corpse graveyard, if different from nearest to player ghost if (corpseGrave) { WorldSafeLocsEntry const *ghostGrave = sObjectMgr->GetClosestGraveYard( _player->GetPositionX(), _player->GetPositionY(), _player->GetPositionZ(), _player->GetMapId(), _player->GetTeam()); if (corpseGrave != ghostGrave) _player->TeleportTo(corpseGrave->map_id, corpseGrave->x, corpseGrave->y, corpseGrave->z, _player->GetOrientation()); // or update at original position else _player->UpdateObjectVisibility(); } // or update at original position else _player->UpdateObjectVisibility(); } void WorldSession::HandleBinderActivateOpcode(WorldPacket & recv_data) { uint64 npcGUID; recv_data >> npcGUID; if (!GetPlayer()->IsInWorld() || !GetPlayer()->isAlive()) return; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_INNKEEPER); if (!unit) { sLog->outDebug("WORLD: HandleBinderActivateOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); SendBindPoint(unit); } void WorldSession::SendBindPoint(Creature *npc) { // prevent set homebind to instances in any case if (GetPlayer()->GetMap()->Instanceable()) return; // send spell for bind 3286 bind magic npc->CastSpell(_player, 3286, true); // Bind WorldPacket data(SMSG_TRAINER_BUY_SUCCEEDED, (8+4)); data << npc->GetGUID(); data << uint32(3286); // Bind 
SendPacket(&data); _player->PlayerTalkClass->CloseGossip(); } //Need fix void WorldSession::HandleListStabledPetsOpcode(WorldPacket & recv_data) { sLog->outDebug("WORLD: Recv MSG_LIST_STABLED_PETS"); uint64 npcGUID; recv_data >> npcGUID; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_STABLEMASTER); if (!unit) { sLog->outDebug("WORLD: HandleListStabledPetsOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); // remove mounts this fix bug where getting pet from stable while mounted deletes pet. if (GetPlayer()->IsMounted()) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_MOUNTED); SendStablePet(npcGUID); } void WorldSession::SendStablePet(uint64 guid) { sLog->outDebug("WORLD: Recv MSG_LIST_STABLED_PETS Send."); WorldPacket data(MSG_LIST_STABLED_PETS, 200); // guess size data << uint64 (guid); Pet *pet = _player->GetPet(); data << uint8(0); // place holder for slot show number data << uint8(GetPlayer()->m_stableSlots); uint8 num = 0; // counter for place holder // not let move dead pet in slot if (pet && pet->isAlive() && pet->getPetType() == HUNTER_PET) { data << uint32(pet->GetCharmInfo()->GetPetNumber()); data << uint32(pet->GetEntry()); data << uint32(pet->getLevel()); data << pet->GetName(); // petname data << uint32(pet->GetLoyaltyLevel()); // loyalty data << uint8(0x01); // client slot 1 == current pet (0) ++num; } // 0 1 2 3 4 5 6 QueryResult_AutoPtr result = CharacterDatabase.PQuery("SELECT owner, slot, id, entry, level, loyalty, name FROM character_pet WHERE owner = '%u' AND slot > 0 AND slot < 3", _player->GetGUIDLow()); if (result) { do { Field *fields = result->Fetch(); data << uint32(fields[2].GetUInt32()); // petnumber data << uint32(fields[3].GetUInt32()); // creature entry data << uint32(fields[4].GetUInt32()); // level data << 
fields[6].GetString(); // name data << uint32(fields[5].GetUInt32()); // loyalty data << uint8(fields[1].GetUInt32()+1); // slot ++num; }while (result->NextRow()); } data.put<uint8>(8, num); // set real data to placeholder SendPacket(&data); } void WorldSession::HandleStablePet(WorldPacket & recv_data) { sLog->outDebug("WORLD: Recv CMSG_STABLE_PET"); uint64 npcGUID; recv_data >> npcGUID; if (!GetPlayer()->isAlive()) return; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_STABLEMASTER); if (!unit) { sLog->outDebug("WORLD: HandleStablePet - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); Pet *pet = _player->GetPet(); WorldPacket data(SMSG_STABLE_RESULT, 200); // guess size // can't place in stable dead pet if (!pet||!pet->isAlive()||pet->getPetType() != HUNTER_PET) { data << uint8(0x06); SendPacket(&data); return; } uint32 free_slot = 1; QueryResult_AutoPtr result = CharacterDatabase.PQuery("SELECT owner, slot, id FROM character_pet WHERE owner = '%u' AND slot > 0 AND slot < 3 ORDER BY slot ", _player->GetGUIDLow()); if (result) { do { Field *fields = result->Fetch(); uint32 slot = fields[1].GetUInt32(); if (slot == free_slot) // this slot not free ++free_slot; }while (result->NextRow()); } if (free_slot > 0 && free_slot <= GetPlayer()->m_stableSlots) { _player->RemovePet(pet, PetSaveMode(free_slot)); data << uint8(0x08); } else data << uint8(0x06); SendPacket(&data); } void WorldSession::HandleUnstablePet(WorldPacket & recv_data) { sLog->outDebug("WORLD: Recv CMSG_UNSTABLE_PET."); uint64 npcGUID; uint32 petnumber; recv_data >> npcGUID >> petnumber; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_STABLEMASTER); if (!unit) { sLog->outDebug("WORLD: HandleUnstablePet - Unit (GUID: %u) not found or you can not interact with him.", 
uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); WorldPacket data(SMSG_STABLE_RESULT, 200); // guess size Pet* pet = _player->GetPet(); if (pet && pet->isAlive()) { uint8 i = 0x06; data << uint8(i); SendPacket(&data); return; } // delete dead pet if (pet) _player->RemovePet(pet, PET_SAVE_AS_DELETED); Pet *newpet = NULL; QueryResult_AutoPtr result = CharacterDatabase.PQuery("SELECT entry FROM character_pet WHERE owner = '%u' AND id = '%u' AND slot > 0 AND slot < 3", _player->GetGUIDLow(), petnumber); if (result) { Field *fields = result->Fetch(); uint32 petentry = fields[0].GetUInt32(); newpet = new Pet(_player, HUNTER_PET); if (!newpet->LoadPetFromDB(_player, petentry, petnumber)) { delete newpet; newpet = NULL; } } if (newpet) data << uint8(0x09); else data << uint8(0x06); SendPacket(&data); } void WorldSession::HandleBuyStableSlot(WorldPacket & recv_data) { sLog->outDebug("WORLD: Recv CMSG_BUY_STABLE_SLOT."); uint64 npcGUID; recv_data >> npcGUID; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_STABLEMASTER); if (!unit) { sLog->outDebug("WORLD: HandleBuyStableSlot - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); WorldPacket data(SMSG_STABLE_RESULT, 200); if (GetPlayer()->m_stableSlots < 2) // max slots amount = 2 { StableSlotPricesEntry const *SlotPrice = sStableSlotPricesStore.LookupEntry(GetPlayer()->m_stableSlots+1); if (_player->GetMoney() >= SlotPrice->Price) { ++GetPlayer()->m_stableSlots; _player->ModifyMoney(-int32(SlotPrice->Price)); data << uint8(0x0A); // success buy } else data << uint8(0x06); } else data << uint8(0x06); SendPacket(&data); } void WorldSession::HandleStableRevivePet(WorldPacket &/* recv_data */) { 
sLog->outDebug("HandleStableRevivePet: Not implemented"); } void WorldSession::HandleStableSwapPet(WorldPacket & recv_data) { sLog->outDebug("WORLD: Recv CMSG_STABLE_SWAP_PET."); uint64 npcGUID; uint32 pet_number; recv_data >> npcGUID >> pet_number; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_STABLEMASTER); if (!unit) { sLog->outDebug("WORLD: HandleStableSwapPet - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); WorldPacket data(SMSG_STABLE_RESULT, 200); // guess size Pet* pet = _player->GetPet(); if (!pet || pet->getPetType() != HUNTER_PET) return; // find swapped pet slot in stable QueryResult_AutoPtr result = CharacterDatabase.PQuery("SELECT slot, entry FROM character_pet WHERE owner = '%u' AND id = '%u'", _player->GetGUIDLow(), pet_number); if (!result) return; Field *fields = result->Fetch(); uint32 slot = fields[0].GetUInt32(); uint32 petentry = fields[1].GetUInt32(); // move alive pet to slot or delele dead pet _player->RemovePet(pet, pet->isAlive() ? 
PetSaveMode(slot) : PET_SAVE_AS_DELETED); // summon unstabled pet Pet *newpet = new Pet(_player); if (!newpet->LoadPetFromDB(_player, petentry, pet_number)) { delete newpet; data << uint8(0x06); } else data << uint8(0x09); SendPacket(&data); } void WorldSession::HandleRepairItemOpcode(WorldPacket & recv_data) { sLog->outDebug("WORLD: CMSG_REPAIR_ITEM"); uint64 npcGUID, itemGUID; uint8 guildBank; // new in 2.3.2, bool that means from guild bank money recv_data >> npcGUID >> itemGUID >> guildBank; Creature *unit = GetPlayer()->GetNPCIfCanInteractWith(npcGUID, UNIT_NPC_FLAG_REPAIR); if (!unit) { sLog->outDebug("WORLD: HandleRepairItemOpcode - Unit (GUID: %u) not found or you can not interact with him.", uint32(GUID_LOPART(npcGUID))); return; } // remove fake death if (GetPlayer()->hasUnitState(UNIT_STAT_DIED)) GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH); // reputation discount float discountMod = _player->GetReputationPriceDiscount(unit); uint32 TotalCost = 0; if (itemGUID) { sLog->outDebug("ITEM: Repair item, itemGUID = %u, npcGUID = %u", GUID_LOPART(itemGUID), GUID_LOPART(npcGUID)); Item* item = _player->GetItemByGuid(itemGUID); if (item) TotalCost= _player->DurabilityRepair(item->GetPos(), true, discountMod, guildBank>0?true:false); } else { sLog->outDebug("ITEM: Repair all items, npcGUID = %u", GUID_LOPART(npcGUID)); TotalCost = _player->DurabilityRepairAll(true, discountMod, guildBank>0?true:false); } if (guildBank) { uint32 GuildId = _player->GetGuildId(); if (!GuildId) return; Guild *pGuild = sObjectMgr->GetGuildById(GuildId); if (!pGuild) return; pGuild->LogBankEvent(GUILD_BANK_LOG_REPAIR_MONEY, 0, _player->GetGUIDLow(), TotalCost); pGuild->SendMoneyInfo(this, _player->GetGUIDLow()); } }<|fim▁end|>
void WorldSession::SendTabardVendorActivate(uint64 guid) { WorldPacket data(MSG_TABARDVENDOR_ACTIVATE, 8);
<|file_name|>types.go<|end_file_name|><|fim▁begin|>package api import ( kapi "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/api/unversioned" ) const ( ClusterNetworkDefault = "default" EgressNetworkPolicyMaxRules = 50 ) // +genclient=true type ClusterNetwork struct { unversioned.TypeMeta kapi.ObjectMeta <|fim▁hole|> HostSubnetLength uint32 ServiceNetwork string PluginName string } type ClusterNetworkList struct { unversioned.TypeMeta unversioned.ListMeta Items []ClusterNetwork } // HostSubnet encapsulates the inputs needed to define the container subnet network on a node type HostSubnet struct { unversioned.TypeMeta kapi.ObjectMeta // host may just be an IP address, resolvable hostname or a complete DNS Host string HostIP string Subnet string } // HostSubnetList is a collection of HostSubnets type HostSubnetList struct { unversioned.TypeMeta unversioned.ListMeta Items []HostSubnet } // NetNamespace holds the network id against its name type NetNamespace struct { unversioned.TypeMeta kapi.ObjectMeta NetName string NetID uint32 } // NetNamespaceList is a collection of NetNamespaces type NetNamespaceList struct { unversioned.TypeMeta unversioned.ListMeta Items []NetNamespace } // EgressNetworkPolicyRuleType gives the type of an EgressNetworkPolicyRule type EgressNetworkPolicyRuleType string const ( EgressNetworkPolicyRuleAllow EgressNetworkPolicyRuleType = "Allow" EgressNetworkPolicyRuleDeny EgressNetworkPolicyRuleType = "Deny" ) // EgressNetworkPolicyPeer specifies a target to apply egress policy to type EgressNetworkPolicyPeer struct { CIDRSelector string } // EgressNetworkPolicyRule contains a single egress network policy rule type EgressNetworkPolicyRule struct { Type EgressNetworkPolicyRuleType To EgressNetworkPolicyPeer } // EgressNetworkPolicySpec provides a list of policies on outgoing traffic type EgressNetworkPolicySpec struct { Egress []EgressNetworkPolicyRule } // EgressNetworkPolicy describes the current egress network policy type EgressNetworkPolicy 
struct { unversioned.TypeMeta kapi.ObjectMeta Spec EgressNetworkPolicySpec } // EgressNetworkPolicyList is a collection of EgressNetworkPolicy type EgressNetworkPolicyList struct { unversioned.TypeMeta unversioned.ListMeta Items []EgressNetworkPolicy }<|fim▁end|>
Network string
<|file_name|>Barra.java<|end_file_name|><|fim▁begin|>package selector; import java.awt.event.AdjustmentEvent; import java.awt.event.AdjustmentListener; import java.util.EventObject; class Barra implements AdjustmentListener { SelectorApplet applet; public Barra(SelectorApplet applet) { this.applet = applet; <|fim▁hole|> } public void adjustmentValueChanged(AdjustmentEvent e) { Object obj = e.getSource(); if (obj == this.applet.sbElectrico) this.applet.sbElectrico_adjustmentValueChanged(e); else if (obj == this.applet.sbMagnetico) this.applet.sbMagnetico_adjustmentValueChanged(e); else this.applet.sbVelocidad_adjustmentValueChanged(e); } }<|fim▁end|>
<|file_name|>http_api_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """Tests for HTTP API.""" import json from grr.gui import api_aff4_object_renderers from grr.gui import api_call_renderers from grr.gui import http_api from grr.lib import flags from grr.lib import registry from grr.lib import test_lib from grr.lib import utils from grr.lib.rdfvalues import structs as rdf_structs from grr.proto import tests_pb2 class SampleGetRendererArgs(rdf_structs.RDFProtoStruct): protobuf = tests_pb2.SampleGetRendererArgs class SampleGetRenderer(api_call_renderers.ApiCallRenderer): args_type = SampleGetRendererArgs def Render(self, args, token=None): return { "method": "GET", "path": args.path, "foo": args.foo } class SampleGetRendererWithAdditionalArgsArgs(rdf_structs.RDFProtoStruct): protobuf = tests_pb2.SampleGetRendererWithAdditionalArgsArgs class SampleGetRendererWithAdditionalArgs(api_call_renderers.ApiCallRenderer): args_type = SampleGetRendererWithAdditionalArgsArgs additional_args_types = { "AFF4Object": api_aff4_object_renderers.ApiAFF4ObjectRendererArgs, "RDFValueCollection": (api_aff4_object_renderers. 
ApiRDFValueCollectionRendererArgs) } def Render(self, args, token=None): result = { "method": "GET", "path": args.path, "foo": args.foo } if args.additional_args: rendered_additional_args = [] for arg in args.additional_args: rendered_additional_args.append(str(arg)) result["additional_args"] = rendered_additional_args return result class TestHttpRoutingInit(registry.InitHook): def RunOnce(self): http_api.RegisterHttpRouteHandler( "GET", "/test_sample/<path:path>", SampleGetRenderer) http_api.RegisterHttpRouteHandler( "GET", "/test_sample_with_additional_args/<path:path>", SampleGetRendererWithAdditionalArgs) class RenderHttpResponseTest(test_lib.GRRBaseTest): """Test for api_call_renderers.RenderHttpResponse logic.""" def _CreateRequest(self, method, path, query_parameters=None): if not query_parameters: query_parameters = {} request = utils.DataObject() request.method = method request.path = path request.scheme = "http" request.environ = { "SERVER_NAME": "foo.bar", "SERVER_PORT": 1234 } request.user = "test" if method == "GET": request.GET = query_parameters request.META = {} return request def _RenderResponse(self, request): response = http_api.RenderHttpResponse(request) if response.content.startswith(")]}'\n"): response.content = response.content[5:] return response def testReturnsRendererMatchingUrlAndMethod(self): renderer, _ = http_api.GetRendererForHttpRequest( self._CreateRequest("GET", "/test_sample/some/path")) self.assertTrue(isinstance(renderer, SampleGetRenderer)) def testPathParamsAreReturnedWithMatchingRenderer(self): _, path_params = http_api.GetRendererForHttpRequest( self._CreateRequest("GET", "/test_sample/some/path")) self.assertEqual(path_params, {"path": "some/path"}) def testRaisesIfNoRendererMatchesUrl(self): self.assertRaises(api_call_renderers.ApiCallRendererNotFoundError, http_api.GetRendererForHttpRequest, self._CreateRequest("GET", "/some/missing/path")) def testRendersGetRendererCorrectly(self): response = self._RenderResponse( 
self._CreateRequest("GET", "/test_sample/some/path")) self.assertEqual( json.loads(response.content), {"method": "GET", "path": "some/path", "foo": ""}) self.assertEqual(response.status_code, 200) def testQueryParamsArePassedIntoRendererArgs(self): response = self._RenderResponse( self._CreateRequest("GET", "/test_sample/some/path", query_parameters={"foo": "bar"})) self.assertEqual( json.loads(response.content), {"method": "GET", "path": "some/path", "foo": "bar"}) def testRouteArgumentTakesPrecedenceOverQueryParams(self): response = self._RenderResponse( self._CreateRequest("GET", "/test_sample/some/path", query_parameters={"path": "foobar"})) self.assertEqual( json.loads(response.content), {"method": "GET", "path": "some/path", "foo": ""}) def testAdditionalArgumentsAreParsedCorrectly(self): additional_args = http_api.FillAdditionalArgsFromRequest( { "AFF4Object.limit_lists": "10", "RDFValueCollection.with_total_count": "1" }, { "AFF4Object": api_aff4_object_renderers.ApiAFF4ObjectRendererArgs, "RDFValueCollection": api_aff4_object_renderers.ApiRDFValueCollectionRendererArgs }) additional_args = sorted(additional_args, key=lambda x: x.name) self.assertListEqual( [x.name for x in additional_args], ["AFF4Object", "RDFValueCollection"]) self.assertListEqual( [x.type for x in additional_args], ["ApiAFF4ObjectRendererArgs", "ApiRDFValueCollectionRendererArgs"]) self.assertListEqual( [x.args for x in additional_args], [api_aff4_object_renderers.ApiAFF4ObjectRendererArgs(limit_lists=10), api_aff4_object_renderers.ApiRDFValueCollectionRendererArgs( with_total_count=True)]) def testAdditionalArgumentsAreFoundAndPassedToTheRenderer(self): response = self._RenderResponse( self._CreateRequest("GET", "/test_sample_with_additional_args/some/path", query_parameters={"foo": "42"}))<|fim▁hole|> "path": "some/path", "foo": "42"}) def main(argv): test_lib.main(argv) if __name__ == "__main__": flags.StartMain(main)<|fim▁end|>
self.assertEqual( json.loads(response.content), {"method": "GET",
<|file_name|>issues_test.rs<|end_file_name|><|fim▁begin|>extern crate goji; extern crate serde_json; <|fim▁hole|>#[test] fn deserialise_issue_results() { let issue_results_str = r#"{ "expand": "names,schema", "startAt": 0, "maxResults": 50, "total": 0, "issues": [] }"#; let results: IssueResults = serde_json::from_str(issue_results_str).unwrap(); assert_eq!(results.expand, "names,schema"); assert_eq!(results.start_at, 0); assert_eq!(results.max_results, 50); assert_eq!(results.total, 0); assert_eq!(results.issues.len(), 0); }<|fim▁end|>
use goji::issues::*;
<|file_name|>request.go<|end_file_name|><|fim▁begin|>package commands import ( "errors" "fmt" "io" "os" "reflect" "strconv" "time" "github.com/ipfs/go-ipfs/commands/files" "github.com/ipfs/go-ipfs/core" "github.com/ipfs/go-ipfs/repo/config" u "github.com/ipfs/go-ipfs/util" context "gx/ipfs/QmZy2y8t9zQH2a1b8q2ZSLKp17ATuJoCNxxyMFG5qFExpt/go-net/context" ) type OptMap map[string]interface{} type Context struct { Online bool ConfigRoot string ReqLog *ReqLog config *config.Config LoadConfig func(path string) (*config.Config, error) node *core.IpfsNode ConstructNode func() (*core.IpfsNode, error) } // GetConfig returns the config of the current Command exection // context. It may load it with the providied function. func (c *Context) GetConfig() (*config.Config, error) { var err error if c.config == nil { if c.LoadConfig == nil { return nil, errors.New("nil LoadConfig function") } c.config, err = c.LoadConfig(c.ConfigRoot) } return c.config, err } // GetNode returns the node of the current Command exection // context. It may construct it with the provided function. func (c *Context) GetNode() (*core.IpfsNode, error) { var err error if c.node == nil { if c.ConstructNode == nil { return nil, errors.New("nil ConstructNode function") } c.node, err = c.ConstructNode() } return c.node, err } // NodeWithoutConstructing returns the underlying node variable // so that clients may close it. 
func (c *Context) NodeWithoutConstructing() *core.IpfsNode { return c.node } // Request represents a call to a command from a consumer type Request interface { Path() []string Option(name string) *OptionValue Options() OptMap SetOption(name string, val interface{}) SetOptions(opts OptMap) error Arguments() []string SetArguments([]string) Files() files.File SetFiles(files.File) Context() context.Context SetRootContext(context.Context) error InvocContext() *Context SetInvocContext(Context) Command() *Command Values() map[string]interface{} Stdin() io.Reader ConvertOptions() error } type request struct { path []string options OptMap arguments []string files files.File cmd *Command ctx Context rctx context.Context optionDefs map[string]Option values map[string]interface{} stdin io.Reader } // Path returns the command path of this request func (r *request) Path() []string { return r.path } // Option returns the value of the option for given name. func (r *request) Option(name string) *OptionValue { // find the option with the specified name option, found := r.optionDefs[name] if !found { return nil } // try all the possible names, break if we find a value for _, n := range option.Names() { val, found := r.options[n] if found { return &OptionValue{val, found, option} } } // MAYBE_TODO: use default value instead of nil return &OptionValue{nil, false, option} } // Options returns a copy of the option map func (r *request) Options() OptMap { output := make(OptMap) for k, v := range r.options { output[k] = v } return output } func (r *request) SetRootContext(ctx context.Context) error { ctx, err := getContext(ctx, r) if err != nil { return err } r.rctx = ctx return nil } // SetOption sets the value of the option for given name. 
func (r *request) SetOption(name string, val interface{}) { // find the option with the specified name option, found := r.optionDefs[name] if !found { return } // try all the possible names, if we already have a value then set over it for _, n := range option.Names() { _, found := r.options[n] if found { r.options[n] = val return } } r.options[name] = val } // SetOptions sets the option values, unsetting any values that were previously set func (r *request) SetOptions(opts OptMap) error { r.options = opts return r.ConvertOptions() } // Arguments returns the arguments slice func (r *request) Arguments() []string { return r.arguments } func (r *request) SetArguments(args []string) { r.arguments = args } func (r *request) Files() files.File { return r.files } func (r *request) SetFiles(f files.File) { r.files = f } func (r *request) Context() context.Context { return r.rctx } func getContext(base context.Context, req Request) (context.Context, error) { tout, found, err := req.Option("timeout").String() if err != nil { return nil, fmt.Errorf("error parsing timeout option: %s", err) } var ctx context.Context if found { duration, err := time.ParseDuration(tout) if err != nil { return nil, fmt.Errorf("error parsing timeout option: %s", err) } tctx, _ := context.WithTimeout(base, duration) ctx = tctx } else { cctx, _ := context.WithCancel(base) ctx = cctx } return ctx, nil } func (r *request) InvocContext() *Context { return &r.ctx } func (r *request) SetInvocContext(ctx Context) { r.ctx = ctx } func (r *request) Command() *Command { return r.cmd } type converter func(string) (interface{}, error) var converters = map[reflect.Kind]converter{ Bool: func(v string) (interface{}, error) { if v == "" { return true, nil } return strconv.ParseBool(v) }, Int: func(v string) (interface{}, error) { val, err := strconv.ParseInt(v, 0, 32) if err != nil { return nil, err } return int(val), err }, Uint: func(v string) (interface{}, error) { val, err := strconv.ParseUint(v, 0, 32) if err 
!= nil { return nil, err } return int(val), err }, Float: func(v string) (interface{}, error) { return strconv.ParseFloat(v, 64) }, } func (r *request) Values() map[string]interface{} { return r.values } func (r *request) Stdin() io.Reader { return r.stdin } func (r *request) ConvertOptions() error { for k, v := range r.options { opt, ok := r.optionDefs[k] if !ok { continue } kind := reflect.TypeOf(v).Kind() if kind != opt.Type() { if kind == String { convert := converters[opt.Type()] str, ok := v.(string) if !ok { return u.ErrCast() } val, err := convert(str) if err != nil { value := fmt.Sprintf("value '%v'", v) if len(str) == 0 { value = "empty value" } return fmt.Errorf("Could not convert %s to type '%s' (for option '-%s')", value, opt.Type().String(), k) } r.options[k] = val } else { return fmt.Errorf("Option '%s' should be type '%s', but got type '%s'", k, opt.Type().String(), kind.String()) } } else { r.options[k] = v }<|fim▁hole|> k, name) } } } return nil } // NewEmptyRequest initializes an empty request func NewEmptyRequest() (Request, error) { return NewRequest(nil, nil, nil, nil, nil, nil) } // NewRequest returns a request initialized with given arguments // An non-nil error will be returned if the provided option values are invalid func NewRequest(path []string, opts OptMap, args []string, file files.File, cmd *Command, optDefs map[string]Option) (Request, error) { if opts == nil { opts = make(OptMap) } if optDefs == nil { optDefs = make(map[string]Option) } ctx := Context{} values := make(map[string]interface{}) req := &request{ path: path, options: opts, arguments: args, files: file, cmd: cmd, ctx: ctx, optionDefs: optDefs, values: values, stdin: os.Stdin, } err := req.ConvertOptions() if err != nil { return nil, err } return req, nil }<|fim▁end|>
for _, name := range opt.Names() { if _, ok := r.options[name]; name != k && ok { return fmt.Errorf("Duplicate command options were provided ('%s' and '%s')",
<|file_name|>browser.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="browser/ambient/angular-protractor/index.d.ts" /> /// <reference path="browser/ambient/angular/index.d.ts" /> /// <reference path="browser/ambient/assertion-error/index.d.ts" /><|fim▁hole|>/// <reference path="browser/ambient/chai/index.d.ts" /> /// <reference path="browser/ambient/jquery/index.d.ts" /> /// <reference path="browser/ambient/lodash/index.d.ts" /> /// <reference path="browser/ambient/mocha/index.d.ts" /> /// <reference path="browser/ambient/selenium-webdriver/index.d.ts" /> /// <reference path="browser/ambient/sinon-chai/index.d.ts" /> /// <reference path="browser/ambient/sinon/index.d.ts" /> /// <reference path="browser/ambient/socket.io-client/index.d.ts" /><|fim▁end|>
<|file_name|>improve.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 by Ecreall under licence AGPL terms # available on http://www.gnu.org/licenses/agpl.html # licence: AGPL # author: Amen Souissi import deform import colander from pyramid.view import view_config from dace.objectofcollaboration.principal.util import get_current from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS from pontus.form import FormView from pontus.schema import Schema, select from pontus.widget import RadioChoiceWidget from pontus.view import BasicView from pontus.view_operation import MultipleView from lac.views.widget import EmailInputWidget from lac.content.processes.lac_view_manager.behaviors import ( Improve) from lac.content.lac_application import CreationCulturelleApplication from lac import _ class ImproveStudyReport(BasicView): title = 'Alert improve' name = 'alertimprove' template = 'lac:views/lac_view_manager/questionnaire/templates/improve_info.pt'<|fim▁hole|> def update(self): result = {} values = {'context': self.context} body = self.content(args=values, template=self.template)['body'] item = self.adapt_item(body, self.viewid) result['coordinates'] = {self.coordinates: [item]} return result class Improve1Schema(Schema): id = colander.SchemaNode( colander.String(), widget=deform.widget.HiddenWidget(), title="ID", missing="improve" ) url = colander.SchemaNode( colander.String(), widget=deform.widget.HiddenWidget(), title="URL", missing="None" ) improvement = colander.SchemaNode( colander.String(), widget=deform.widget.TextAreaWidget(rows=3, cols=30), title=_('Vos suggestions') ) email = colander.SchemaNode( colander.String(), widget=EmailInputWidget(), validator=colander.All( colander.Email(), colander.Length(max=100) ), title=_('Email') ) class ImproveFormView(FormView): title = _('Votre avis') schema = select(Improve1Schema(), ['id', 'url', 'improvement', 'email']) behaviors = [Improve] formid = 'formimprove' name = 'improveform' def before_update(self): 
user = get_current() if getattr(user, 'email', ''): self.schema.get('email').widget = deform.widget.HiddenWidget() def default_data(self): user = get_current() return {'email': getattr(user, 'email', '')} @view_config( name='improve', context=CreationCulturelleApplication, renderer='pontus:templates/views_templates/grid.pt', ) class ImproveView(MultipleView): title = _('Votre avis') name = 'improve' viewid = 'improve' template = 'daceui:templates/simple_mergedmultipleview.pt' views = (ImproveStudyReport, ImproveFormView) validators = [Improve.get_validator()] requirements = {'css_links': [], 'js_links': ['lac:static/js/questionnaire.js']} DEFAULTMAPPING_ACTIONS_VIEWS.update( {Improve: ImproveView})<|fim▁end|>
<|file_name|>sg-ski.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys def parse_map_file(path): map_grid = [] # Create a two-dimensional list based on the input data with open(path, 'r') as f: width, height = map(int, f.readline().split()) for line in f: row = map(int, line.split()) map_grid.append(row) # Input checking if height < 1 or width < 1: raise ValueError('grid height and width should be >= 1') elif height != len(map_grid) or width != len(map_grid[0]): raise ValueError('actual map does not match declared map dimensions') return width, height, map_grid def make_grid(width, height, initial_value): return [width*[initial_value] for i in range(height)] def get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations): path_length = path_lengths[y][x] if path_length != -1: return path_length, final_elevations[y][x] current_elevation = map_grid[y][x] longest_path = 0 lowest_elevation = current_elevation neighbors = [ (x, y - 1), # up (x, y + 1), # down (x - 1, y), # left (x + 1, y), # right ] for xn, yn in neighbors: try: neighbor = map_grid[yn][xn] except IndexError: continue if neighbor < current_elevation: path_length, final_elevation = get_length_and_elevation(xn, yn, map_grid, path_lengths, final_elevations) if path_length > longest_path or (path_length == longest_path and final_elevation < lowest_elevation): longest_path = path_length lowest_elevation = final_elevation path_length = longest_path + 1 path_lengths[y][x] = path_length final_elevations[y][x] = lowest_elevation return path_length, lowest_elevation def main(): if len(sys.argv) != 2: sys.exit('Usage: {} <map file>'.format(sys.argv[0])) print 'Parsing map data...' 
try: width, height, map_grid = parse_map_file(sys.argv[1]) except IOError as e: sys.exit('Unable to read map file: {}'.format(e)) except ValueError as e: sys.exit('Invalid map file: {}: {}'.format(sys.argv[1], e))<|fim▁hole|> # Initialize corresponding grids for path lengths and final elevations path_lengths = make_grid(width, height, -1) final_elevations = make_grid(width, height, -1) print 'Finding the best path...' longest_path = -1 steepest_drop = -1 for y, row in enumerate(map_grid): for x, initial_elevation in enumerate(row): path_length, final_elevation = get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations) drop = initial_elevation - final_elevation if path_length > longest_path or (path_length == longest_path and drop > steepest_drop): longest_path = path_length steepest_drop = drop print '\nlength = {}, drop = {}\n'.format(longest_path, steepest_drop) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>HttpGet.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2008-2016 Computer Network Information Center (CNIC), Chinese Academy of Sciences. * * This file is part of Duckling project.<|fim▁hole|> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package net.duckling.vmt.domain; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URL; import org.apache.log4j.Logger; public class HttpGet { private static final Logger LOG=Logger.getLogger(HttpGet.class); private String path; private String encode="UTF-8"; public HttpGet(String url,String encode){ this.path=url; this.encode=encode; } public HttpGet(String url){ this.path=url; } public String connect(){ URL url = null; try { url = new URL(path); } catch (MalformedURLException e) { LOG.error(e.getMessage()+",can't touch this url="+path, e); return null; } try (InputStream ins = url.openConnection().getInputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(ins, encode));) { String line; StringBuffer sb = new StringBuffer(); while ((line = reader.readLine()) != null) { sb.append(line).append("\n"); } return sb.toString(); } catch (IOException e) { LOG.error(e.getMessage(), e); return null; } } }<|fim▁end|>
*
<|file_name|>issue-2935.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[feature(managed_boxes)]; //type t = { a: int }; // type t = { a: bool }; type t = bool; trait it { fn f(&self); } impl it for t { fn f(&self) { } } <|fim▁hole|> // let z = @({a: 4i} as it); // let z = @({a: true} as it); let z = @(@true as @it); // x.f(); // y.f(); // (*z).f(); error!("ok so far..."); z.f(); //segfault }<|fim▁end|>
pub fn main() { // let x = ({a: 4i} as it); // let y = @({a: 4i});
<|file_name|>zmq.py<|end_file_name|><|fim▁begin|>""" kombu.transport.zmq =================== ZeroMQ transport. """ from __future__ import absolute_import, unicode_literals import errno import os import socket try: import zmq from zmq import ZMQError except ImportError: zmq = ZMQError = None # noqa from kombu.five import Empty from kombu.log import get_logger from kombu.serialization import pickle from kombu.utils import cached_property from kombu.utils.eventio import poll, READ from . import virtual logger = get_logger('kombu.transport.zmq') DEFAULT_PORT = 5555 DEFAULT_HWM = 128 DEFAULT_INCR = 1 dumps, loads = pickle.dumps, pickle.loads class MultiChannelPoller(object): eventflags = READ def __init__(self): # active channels self._channels = set() # file descriptor -> channel map self._fd_to_chan = {} # poll implementation (epoll/kqueue/select) self.poller = poll() def close(self): for fd in self._fd_to_chan: try: self.poller.unregister(fd) except KeyError: pass self._channels.clear() self._fd_to_chan.clear() self.poller = None def add(self, channel): self._channels.add(channel) def discard(self, channel): self._channels.discard(channel) self._fd_to_chan.pop(channel.client.connection.fd, None) def _register(self, channel): conn = channel.client.connection self._fd_to_chan[conn.fd] = channel self.poller.register(conn.fd, self.eventflags) def on_poll_start(self): for channel in self._channels: self._register(channel) def on_readable(self, fileno): chan = self._fd_to_chan[fileno] return chan.drain_events(), chan def get(self, timeout=None): self.on_poll_start() events = self.poller.poll(timeout) for fileno, _ in events or []: return self.on_readable(fileno) raise Empty() @property def fds(self): return self._fd_to_chan class Client(object): def __init__(self, uri='tcp://127.0.0.1', port=DEFAULT_PORT, hwm=DEFAULT_HWM, swap_size=None, enable_sink=True, context=None): try: scheme, parts = uri.split('://') except ValueError: scheme = 'tcp' parts = uri endpoints = 
parts.split(';') self.port = port if scheme != 'tcp': raise NotImplementedError('Currently only TCP can be used') self.context = context or zmq.Context.instance() if enable_sink: self.sink = self.context.socket(zmq.PULL) self.sink.bind('tcp://*:{0.port}'.format(self)) else: self.sink = None self.vent = self.context.socket(zmq.PUSH) if hasattr(zmq, 'SNDHWM'): self.vent.setsockopt(zmq.SNDHWM, hwm) else: self.vent.setsockopt(zmq.HWM, hwm) if swap_size: self.vent.setsockopt(zmq.SWAP, swap_size) for endpoint in endpoints: if scheme == 'tcp' and ':' not in endpoint: endpoint += ':' + str(DEFAULT_PORT) endpoint = ''.join([scheme, '://', endpoint]) self.connect(endpoint) def connect(self, endpoint): self.vent.connect(endpoint) def get(self, queue=None, timeout=None): sink = self.sink try: if timeout is not None: prev_timeout, sink.RCVTIMEO = sink.RCVTIMEO, timeout try: return sink.recv() finally: sink.RCVTIMEO = prev_timeout else: return sink.recv() except ZMQError as exc: if exc.errno == zmq.EAGAIN: raise socket.error(errno.EAGAIN, exc.strerror) else: raise def put(self, queue, message, **kwargs): return self.vent.send(message) def close(self): if self.sink and not self.sink.closed: self.sink.close() if not self.vent.closed: self.vent.close() @property def connection(self): if self.sink: return self.sink return self.vent class Channel(virtual.Channel): Client = Client hwm = DEFAULT_HWM swap_size = None enable_sink = True port_incr = DEFAULT_INCR from_transport_options = ( virtual.Channel.from_transport_options + ('hwm', 'swap_size', 'enable_sink', 'port_incr') ) def __init__(self, *args, **kwargs): super_ = super(Channel, self) super_.__init__(*args, **kwargs) # Evaluate socket self.client.connection.closed self.connection.cycle.add(self) self.connection_errors = self.connection.connection_errors def _get(self, queue, timeout=None): try: return loads(self.client.get(queue, timeout)) except socket.error as exc: if exc.errno == errno.EAGAIN and timeout != 0: raise Empty() 
else: raise def _put(self, queue, message, **kwargs): self.client.put(queue, dumps(message, -1), **kwargs) def _purge(self, queue): return 0 def _poll(self, cycle, timeout=None): return cycle.get(timeout=timeout) def close(self): if not self.closed: self.connection.cycle.discard(self) try: self.__dict__['client'].close() except KeyError: pass super(Channel, self).close() def _prepare_port(self, port): return (port + self.channel_id - 1) * self.port_incr def _create_client(self): conninfo = self.connection.client port = self._prepare_port(conninfo.port or DEFAULT_PORT) return self.Client(uri=conninfo.hostname or 'tcp://127.0.0.1', port=port, hwm=self.hwm, swap_size=self.swap_size, enable_sink=self.enable_sink, context=self.connection.context) @cached_property def client(self): return self._create_client() class Transport(virtual.Transport): Channel = Channel can_parse_url = True default_port = DEFAULT_PORT driver_type = 'zeromq' driver_name = 'zmq' connection_errors = virtual.Transport.connection_errors + (ZMQError,) implements = virtual.Transport.implements.extend( async=True, ) polling_interval = None def __init__(self, *args, **kwargs): if zmq is None: raise ImportError('The zmq library is not installed') super(Transport, self).__init__(*args, **kwargs) self.cycle = MultiChannelPoller() def driver_version(self): return zmq.__version__ def register_with_event_loop(self, connection, loop): cycle = self.cycle cycle.poller = loop.poller add_reader = loop.add_reader on_readable = self.on_readable cycle_poll_start = cycle.on_poll_start def on_poll_start(): cycle_poll_start() [add_reader(fd, on_readable, fd) for fd in cycle.fds] loop.on_tick.add(on_poll_start) def on_readable(self, fileno): self._handle_event(self.cycle.on_readable(fileno)) def drain_events(self, connection, timeout=None): more_to_read = False for channel in connection.channels: try: evt = channel.cycle.get(timeout=timeout) except socket.error as exc: if exc.errno == errno.EAGAIN: continue raise else: 
connection._handle_event((evt, channel)) more_to_read = True if not more_to_read: raise socket.error(errno.EAGAIN, os.strerror(errno.EAGAIN)) def _handle_event(self, evt): item, channel = evt self._deliver(*item) def establish_connection(self): self.context.closed return super(Transport, self).establish_connection() def close_connection(self, connection): super(Transport, self).close_connection(connection) try:<|fim▁hole|> except KeyError: pass @cached_property def context(self): return zmq.Context(1)<|fim▁end|>
connection.__dict__['context'].term()
<|file_name|>database.py<|end_file_name|><|fim▁begin|>import os from sqlalchemy import create_engine, MetaData from sqlalchemy.orm import scoped_session, sessionmaker metadata = MetaData() def get_sa_db_uri(driver='', username='', password='', host='', port='', database=''): """get SQLAlchemy DB URI: driver://username:password@host:port/database""" assert driver if driver == 'sqlite': # get absolute file path<|fim▁hole|> db_file = database db_uri = '%s:///%s' % (driver, db_file) else: db_uri = ('%s://%s:%s@%s:%s/%s' % (driver, username, password, host, port, database)) return db_uri class DB(object): def __init__(self, db_uri): self.engine = create_engine(db_uri, convert_unicode=True) self.session = scoped_session( sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) def init_database(self): metadata.create_all(bind=self.engine)<|fim▁end|>
if not database.startswith('/'): db_file = os.path.abspath(database) else:
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. <|fim▁hole|>// Package crane holds libraries used to implement the crane CLI. package crane<|fim▁end|>
<|file_name|>attribute.py<|end_file_name|><|fim▁begin|># This module has moved to zope.annotation.attribute # and will go away in Zope 3.5 import zope.deprecation<|fim▁hole|><|fim▁end|>
zope.deprecation.moved( 'zope.annotation.attribute', "Zope 3.5", )
<|file_name|>TextAlignJustify.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export { TextAlignJustify as default } from "./";
<|file_name|>size_hint.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::option::Iter; // #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] // #[stable(feature = "rust1", since = "1.0.0")] // pub enum Option<T> { // /// No value // #[stable(feature = "rust1", since = "1.0.0")] // None, // /// Some value `T` // #[stable(feature = "rust1", since = "1.0.0")] // Some(T) // } // pub struct Iter<'a, A: 'a> { inner: Item<&'a A> } // impl<'a, A> Iterator for Iter<'a, A> { // type Item = &'a A; // // #[inline] // fn next(&mut self) -> Option<&'a A> { self.inner.next() } // #[inline] // fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } // } type T = u32; #[test] fn size_hint_test1() { let x: Option<T> = Some::<T>(7); let iter: Iter<T> = x.iter(); let (lower, upper): (usize, Option<usize>) = iter.size_hint(); assert_eq!(lower, 1); assert_eq!(upper, Some::<usize>(1)); } #[test] fn size_hint_test2() { let x: Option<T> = None::<T>; let iter: Iter<T> = x.iter();<|fim▁hole|> assert_eq!(upper, Some::<usize>(0)); } }<|fim▁end|>
let (lower, upper): (usize, Option<usize>) = iter.size_hint(); assert_eq!(lower, 0);
<|file_name|>materialize-css.js<|end_file_name|><|fim▁begin|>// // https://github.com/Dogfalo/materialize/issues/634#issuecomment-113213629 // and // https://github.com/noodny/materializecss-amd/blob/master/config.js //<|fim▁hole|> 'global', 'initial', 'animation', 'buttons', 'cards', 'carousel', 'character_counter', 'chips', 'collapsible', 'dropdown', 'forms', 'hammerjs', 'jquery.easing', 'jquery.hammer', 'jquery.timeago', 'leanModal', 'materialbox', 'parallax', 'picker', 'picker.date', 'prism', 'pushpin', 'scrollFire', 'scrollspy', 'sideNav', 'slider', 'tabs', 'toasts', 'tooltip', 'transitions', 'velocity' ], function(Materialize) { return Materialize; } );<|fim▁end|>
// require([
<|file_name|>test.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the Google name nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import base64 import copy import sys import time from webkitpy.layout_tests.port import DeviceFailure, Driver, DriverOutput, Port from webkitpy.layout_tests.port.base import VirtualTestSuite from webkitpy.layout_tests.models.test_configuration import TestConfiguration from webkitpy.layout_tests.models import test_run_results from webkitpy.common.system.filesystem_mock import MockFileSystem from webkitpy.common.system.crashlogs import CrashLogs # This sets basic expectations for a test. Each individual expectation # can be overridden by a keyword argument in TestList.add(). class TestInstance(object): def __init__(self, name): self.name = name self.base = name[(name.rfind("/") + 1):name.rfind(".")] self.crash = False self.web_process_crash = False self.exception = False self.keyboard = False self.error = '' self.timeout = False self.is_reftest = False self.device_failure = False # The values of each field are treated as raw byte strings. They<|fim▁hole|> # will be converted to unicode strings where appropriate using # FileSystem.read_text_file(). self.actual_text = self.base + '-txt' self.actual_checksum = self.base + '-checksum' # We add the '\x8a' for the image file to prevent the value from # being treated as UTF-8 (the character is invalid) self.actual_image = self.base + '\x8a' + '-png' + 'tEXtchecksum\x00' + self.actual_checksum self.expected_text = self.actual_text self.expected_image = self.actual_image self.actual_audio = None self.expected_audio = None # This is an in-memory list of tests, what we want them to produce, and # what we want to claim are the expected results. 
class TestList(object): def __init__(self): self.tests = {} def add(self, name, **kwargs): test = TestInstance(name) for key, value in kwargs.items(): test.__dict__[key] = value self.tests[name] = test def add_reftest(self, name, reference_name, same_image, crash=False): self.add(name, actual_checksum='xxx', actual_image='XXX', is_reftest=True, crash=crash) if same_image: self.add(reference_name, actual_checksum='xxx', actual_image='XXX', is_reftest=True) else: self.add(reference_name, actual_checksum='yyy', actual_image='YYY', is_reftest=True) def keys(self): return self.tests.keys() def __contains__(self, item): return item in self.tests def __getitem__(self, item): return self.tests[item] # # These numbers may need to be updated whenever we add or delete tests. This includes virtual tests. # TOTAL_TESTS = 114 TOTAL_SKIPS = 29 UNEXPECTED_PASSES = 1 UNEXPECTED_FAILURES = 25 def unit_test_list(): tests = TestList() tests.add('failures/expected/crash.html', crash=True) tests.add('failures/expected/exception.html', exception=True) tests.add('failures/expected/device_failure.html', device_failure=True) tests.add('failures/expected/timeout.html', timeout=True) tests.add('failures/expected/missing_text.html', expected_text=None) tests.add('failures/expected/needsrebaseline.html', actual_text='needsrebaseline text') tests.add('failures/expected/needsmanualrebaseline.html', actual_text='needsmanualrebaseline text') tests.add('failures/expected/image.html', actual_image='image_fail-pngtEXtchecksum\x00checksum_fail', expected_image='image-pngtEXtchecksum\x00checksum-png') tests.add('failures/expected/image_checksum.html', actual_checksum='image_checksum_fail-checksum', actual_image='image_checksum_fail-png') tests.add('failures/expected/audio.html', actual_audio=base64.b64encode('audio_fail-wav'), expected_audio='audio-wav', actual_text=None, expected_text=None, actual_image=None, expected_image=None, actual_checksum=None) tests.add('failures/expected/keyboard.html', 
keyboard=True) tests.add('failures/expected/missing_check.html', expected_image='missing_check-png') tests.add('failures/expected/missing_image.html', expected_image=None) tests.add('failures/expected/missing_audio.html', expected_audio=None, actual_text=None, expected_text=None, actual_image=None, expected_image=None, actual_checksum=None) tests.add('failures/expected/missing_text.html', expected_text=None) tests.add('failures/expected/newlines_leading.html', expected_text="\nfoo\n", actual_text="foo\n") tests.add('failures/expected/newlines_trailing.html', expected_text="foo\n\n", actual_text="foo\n") tests.add('failures/expected/newlines_with_excess_CR.html', expected_text="foo\r\r\r\n", actual_text="foo\n") tests.add('failures/expected/testharness.html', actual_text='This is a testharness.js-based test.\nFAIL: assert fired\n.Harness: the test ran to completion.\n\n', expected_text=None, actual_image=None, expected_image=None, actual_checksum=None) tests.add('failures/expected/text.html', actual_text='text_fail-png') tests.add('failures/expected/crash_then_text.html') tests.add('failures/expected/skip_text.html', actual_text='text diff') tests.add('failures/flaky/text.html') tests.add('failures/unexpected/missing_text.html', expected_text=None) tests.add('failures/unexpected/missing_check.html', expected_image='missing-check-png') tests.add('failures/unexpected/missing_image.html', expected_image=None) tests.add('failures/unexpected/missing_render_tree_dump.html', actual_text="""layer at (0,0) size 800x600 RenderView at (0,0) size 800x600 layer at (0,0) size 800x34 RenderBlock {HTML} at (0,0) size 800x34 RenderBody {BODY} at (8,8) size 784x18 RenderText {#text} at (0,0) size 133x18 text run at (0,0) width 133: "This is an image test!" 
""", expected_text=None) tests.add('failures/unexpected/crash.html', crash=True) tests.add('failures/unexpected/crash-with-stderr.html', crash=True, error="mock-std-error-output") tests.add('failures/unexpected/web-process-crash-with-stderr.html', web_process_crash=True, error="mock-std-error-output") tests.add('failures/unexpected/pass.html') tests.add('failures/unexpected/text-checksum.html', actual_text='text-checksum_fail-txt', actual_checksum='text-checksum_fail-checksum') tests.add('failures/unexpected/text-image-checksum.html', actual_text='text-image-checksum_fail-txt', actual_image='text-image-checksum_fail-pngtEXtchecksum\x00checksum_fail', actual_checksum='text-image-checksum_fail-checksum') tests.add('failures/unexpected/checksum-with-matching-image.html', actual_checksum='text-image-checksum_fail-checksum') tests.add('failures/unexpected/skip_pass.html') tests.add('failures/unexpected/text.html', actual_text='text_fail-txt') tests.add('failures/unexpected/text_then_crash.html') tests.add('failures/unexpected/timeout.html', timeout=True) tests.add('http/tests/passes/text.html') tests.add('http/tests/passes/image.html') tests.add('http/tests/ssl/text.html') tests.add('passes/args.html') tests.add('passes/error.html', error='stuff going to stderr') tests.add('passes/image.html') tests.add('passes/audio.html', actual_audio=base64.b64encode('audio-wav'), expected_audio='audio-wav', actual_text=None, expected_text=None, actual_image=None, expected_image=None, actual_checksum=None) tests.add('passes/platform_image.html') tests.add('passes/checksum_in_image.html', expected_image='tEXtchecksum\x00checksum_in_image-checksum') tests.add('passes/skipped/skip.html') tests.add('passes/testharness.html', actual_text='This is a testharness.js-based test.\nPASS: assert is fine\nHarness: the test ran to completion.\n\n', expected_text=None, actual_image=None, expected_image=None, actual_checksum=None) # Note that here the checksums don't match but the images do, so this 
test passes "unexpectedly". # See https://bugs.webkit.org/show_bug.cgi?id=69444 . tests.add('failures/unexpected/checksum.html', actual_checksum='checksum_fail-checksum') # Text output files contain "\r\n" on Windows. This may be # helpfully filtered to "\r\r\n" by our Python/Cygwin tooling. tests.add('passes/text.html', expected_text='\nfoo\n\n', actual_text='\nfoo\r\n\r\r\n') # For reftests. tests.add_reftest('passes/reftest.html', 'passes/reftest-expected.html', same_image=True) # This adds a different virtual reference to ensure that that also works. tests.add('virtual/passes/reftest-expected.html', actual_checksum='xxx', actual_image='XXX', is_reftest=True) tests.add_reftest('passes/mismatch.html', 'passes/mismatch-expected-mismatch.html', same_image=False) tests.add_reftest('passes/svgreftest.svg', 'passes/svgreftest-expected.svg', same_image=True) tests.add_reftest('passes/xhtreftest.xht', 'passes/xhtreftest-expected.html', same_image=True) tests.add_reftest('passes/phpreftest.php', 'passes/phpreftest-expected-mismatch.svg', same_image=False) tests.add_reftest('failures/expected/reftest.html', 'failures/expected/reftest-expected.html', same_image=False) tests.add_reftest('failures/expected/mismatch.html', 'failures/expected/mismatch-expected-mismatch.html', same_image=True) tests.add_reftest('failures/unexpected/crash-reftest.html', 'failures/unexpected/crash-reftest-expected.html', same_image=True, crash=True) tests.add_reftest('failures/unexpected/reftest.html', 'failures/unexpected/reftest-expected.html', same_image=False) tests.add_reftest('failures/unexpected/mismatch.html', 'failures/unexpected/mismatch-expected-mismatch.html', same_image=True) tests.add('failures/unexpected/reftest-nopixel.html', actual_checksum=None, actual_image=None, is_reftest=True) tests.add('failures/unexpected/reftest-nopixel-expected.html', actual_checksum=None, actual_image=None, is_reftest=True) tests.add('reftests/foo/test.html') tests.add('reftests/foo/test-ref.html') 
tests.add('reftests/foo/multiple-match-success.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/multiple-match-failure.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/multiple-mismatch-success.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/multiple-mismatch-failure.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/multiple-both-success.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/multiple-both-failure.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/matching-ref.html', actual_checksum='abc', actual_image='abc') tests.add('reftests/foo/mismatching-ref.html', actual_checksum='def', actual_image='def') tests.add('reftests/foo/second-mismatching-ref.html', actual_checksum='ghi', actual_image='ghi') # The following files shouldn't be treated as reftests tests.add_reftest('reftests/foo/unlistedtest.html', 'reftests/foo/unlistedtest-expected.html', same_image=True) tests.add('reftests/foo/reference/bar/common.html') tests.add('reftests/foo/reftest/bar/shared.html') tests.add('websocket/tests/passes/text.html') # For testing that we don't run tests under platform/. Note that these don't contribute to TOTAL_TESTS. tests.add('platform/test-mac-leopard/http/test.html') tests.add('platform/test-win-win7/http/test.html') # For testing if perf tests are running in a locked shard. tests.add('perf/foo/test.html') tests.add('perf/foo/test-ref.html') # For testing --pixel-test-directories. 
tests.add('failures/unexpected/pixeldir/image_in_pixeldir.html', actual_image='image_in_pixeldir-pngtEXtchecksum\x00checksum_fail', expected_image='image_in_pixeldir-pngtEXtchecksum\x00checksum-png') tests.add('failures/unexpected/image_not_in_pixeldir.html', actual_image='image_not_in_pixeldir-pngtEXtchecksum\x00checksum_fail', expected_image='image_not_in_pixeldir-pngtEXtchecksum\x00checksum-png') # For testing that virtual test suites don't expand names containing themselves # See webkit.org/b/97925 and base_unittest.PortTest.test_tests(). tests.add('passes/test-virtual-passes.html') tests.add('passes/passes/test-virtual-passes.html') return tests # Here we use a non-standard location for the layout tests, to ensure that # this works. The path contains a '.' in the name because we've seen bugs # related to this before. LAYOUT_TEST_DIR = '/test.checkout/LayoutTests' PERF_TEST_DIR = '/test.checkout/PerformanceTests' # Here we synthesize an in-memory filesystem from the test list # in order to fully control the test output and to demonstrate that # we don't need a real filesystem to run the tests. def add_unit_tests_to_mock_filesystem(filesystem): # Add the test_expectations file. 
filesystem.maybe_make_directory('/mock-checkout/LayoutTests') if not filesystem.exists('/mock-checkout/LayoutTests/TestExpectations'): filesystem.write_text_file('/mock-checkout/LayoutTests/TestExpectations', """ Bug(test) failures/expected/crash.html [ Crash ] Bug(test) failures/expected/crash_then_text.html [ Failure ] Bug(test) failures/expected/image.html [ ImageOnlyFailure ] Bug(test) failures/expected/needsrebaseline.html [ NeedsRebaseline ] Bug(test) failures/expected/needsmanualrebaseline.html [ NeedsManualRebaseline ] Bug(test) failures/expected/audio.html [ Failure ] Bug(test) failures/expected/image_checksum.html [ ImageOnlyFailure ] Bug(test) failures/expected/mismatch.html [ ImageOnlyFailure ] Bug(test) failures/expected/missing_check.html [ Missing Pass ] Bug(test) failures/expected/missing_image.html [ Missing Pass ] Bug(test) failures/expected/missing_audio.html [ Missing Pass ] Bug(test) failures/expected/missing_text.html [ Missing Pass ] Bug(test) failures/expected/newlines_leading.html [ Failure ] Bug(test) failures/expected/newlines_trailing.html [ Failure ] Bug(test) failures/expected/newlines_with_excess_CR.html [ Failure ] Bug(test) failures/expected/reftest.html [ ImageOnlyFailure ] Bug(test) failures/expected/text.html [ Failure ] Bug(test) failures/expected/testharness.html [ Failure ] Bug(test) failures/expected/timeout.html [ Timeout ] Bug(test) failures/expected/keyboard.html [ WontFix ] Bug(test) failures/expected/exception.html [ WontFix ] Bug(test) failures/expected/device_failure.html [ WontFix ] Bug(test) failures/unexpected/pass.html [ Failure ] Bug(test) passes/skipped/skip.html [ Skip ] Bug(test) passes/text.html [ Pass ] """) filesystem.maybe_make_directory(LAYOUT_TEST_DIR + '/reftests/foo') filesystem.write_text_file(LAYOUT_TEST_DIR + '/reftests/foo/reftest.list', """ == test.html test-ref.html == multiple-match-success.html mismatching-ref.html == multiple-match-success.html matching-ref.html == multiple-match-failure.html 
mismatching-ref.html == multiple-match-failure.html second-mismatching-ref.html != multiple-mismatch-success.html mismatching-ref.html != multiple-mismatch-success.html second-mismatching-ref.html != multiple-mismatch-failure.html mismatching-ref.html != multiple-mismatch-failure.html matching-ref.html == multiple-both-success.html matching-ref.html == multiple-both-success.html mismatching-ref.html != multiple-both-success.html second-mismatching-ref.html == multiple-both-failure.html matching-ref.html != multiple-both-failure.html second-mismatching-ref.html != multiple-both-failure.html matching-ref.html """) # FIXME: This test was only being ignored because of missing a leading '/'. # Fixing the typo causes several tests to assert, so disabling the test entirely. # Add in a file should be ignored by port.find_test_files(). #files[LAYOUT_TEST_DIR + '/userscripts/resources/iframe.html'] = 'iframe' def add_file(test, suffix, contents): dirname = filesystem.join(LAYOUT_TEST_DIR, test.name[0:test.name.rfind('/')]) base = test.base filesystem.maybe_make_directory(dirname) filesystem.write_binary_file(filesystem.join(dirname, base + suffix), contents) # Add each test and the expected output, if any. test_list = unit_test_list() for test in test_list.tests.values(): add_file(test, test.name[test.name.rfind('.'):], '') if test.is_reftest: continue if test.actual_audio: add_file(test, '-expected.wav', test.expected_audio) continue add_file(test, '-expected.txt', test.expected_text) add_file(test, '-expected.png', test.expected_image) filesystem.write_text_file(filesystem.join(LAYOUT_TEST_DIR, 'virtual', 'passes', 'args-expected.txt'), 'args-txt --virtual-arg') # Clear the list of written files so that we can watch what happens during testing. 
filesystem.clear_written_files() class TestPort(Port): port_name = 'test' default_port_name = 'test-mac-leopard' """Test implementation of the Port interface.""" ALL_BASELINE_VARIANTS = ( 'test-linux-x86_64', 'test-mac-snowleopard', 'test-mac-leopard', 'test-win-win7', 'test-win-xp', ) FALLBACK_PATHS = { 'xp': ['test-win-win7', 'test-win-xp'], 'win7': ['test-win-win7'], 'leopard': ['test-mac-leopard', 'test-mac-snowleopard'], 'snowleopard': ['test-mac-snowleopard'], 'lucid': ['test-linux-x86_64', 'test-win-win7'], } @classmethod def determine_full_port_name(cls, host, options, port_name): if port_name == 'test': return TestPort.default_port_name return port_name def __init__(self, host, port_name=None, **kwargs): Port.__init__(self, host, port_name or TestPort.default_port_name, **kwargs) self._tests = unit_test_list() self._flakes = set() # FIXME: crbug.com/279494. This needs to be in the "real layout tests # dir" in a mock filesystem, rather than outside of the checkout, so # that tests that want to write to a TestExpectations file can share # this between "test" ports and "real" ports. This is the result of # rebaseline_unittest.py having tests that refer to "real" port names # and real builders instead of fake builders that point back to the # test ports. 
rebaseline_unittest.py needs to not mix both "real" ports # and "test" ports self._generic_expectations_path = '/mock-checkout/LayoutTests/TestExpectations' self._results_directory = None self._operating_system = 'mac' if self._name.startswith('test-win'): self._operating_system = 'win' elif self._name.startswith('test-linux'): self._operating_system = 'linux' version_map = { 'test-win-xp': 'xp', 'test-win-win7': 'win7', 'test-mac-leopard': 'leopard', 'test-mac-snowleopard': 'snowleopard', 'test-linux-x86_64': 'lucid', } self._version = version_map[self._name] def repository_paths(self): """Returns a list of (repository_name, repository_path) tuples of its depending code base.""" # FIXME: We override this just to keep the perf tests happy. return [('blink', self.layout_tests_dir())] def buildbot_archives_baselines(self): return self._name != 'test-win-xp' def default_pixel_tests(self): return True def _path_to_driver(self): # This routine shouldn't normally be called, but it is called by # the mock_drt Driver. We return something, but make sure it's useless. 
return 'MOCK _path_to_driver' def default_child_processes(self): return 1 def check_build(self, needs_http, printer): return test_run_results.OK_EXIT_STATUS def check_sys_deps(self, needs_http): return test_run_results.OK_EXIT_STATUS def default_configuration(self): return 'Release' def diff_image(self, expected_contents, actual_contents): diffed = actual_contents != expected_contents if not actual_contents and not expected_contents: return (None, None) if not actual_contents or not expected_contents: return (True, None) if diffed: return ("< %s\n---\n> %s\n" % (expected_contents, actual_contents), None) return (None, None) def layout_tests_dir(self): return LAYOUT_TEST_DIR def perf_tests_dir(self): return PERF_TEST_DIR def webkit_base(self): return '/test.checkout' def _skipped_tests_for_unsupported_features(self, test_list): return set(['failures/expected/skip_text.html', 'failures/unexpected/skip_pass.html', 'virtual/skipped']) def name(self): return self._name def operating_system(self): return self._operating_system def _path_to_wdiff(self): return None def default_results_directory(self): return '/tmp/layout-test-results' def setup_test_run(self): pass def _driver_class(self): return TestDriver def start_http_server(self, additional_dirs=None, number_of_servers=None): pass def start_websocket_server(self): pass def acquire_http_lock(self): pass def stop_http_server(self): pass def stop_websocket_server(self): pass def release_http_lock(self): pass def _path_to_lighttpd(self): return "/usr/sbin/lighttpd" def _path_to_lighttpd_modules(self): return "/usr/lib/lighttpd" def _path_to_lighttpd_php(self): return "/usr/bin/php-cgi" def _path_to_apache(self): return "/usr/sbin/httpd" def _path_to_apache_config_file(self): return self._filesystem.join(self.layout_tests_dir(), 'http', 'conf', 'httpd.conf') def path_to_generic_test_expectations_file(self): return self._generic_expectations_path def _port_specific_expectations_files(self): return 
[self._filesystem.join(self._webkit_baseline_path(d), 'TestExpectations') for d in ['test', 'test-win-xp']] def all_test_configurations(self): """Returns a sequence of the TestConfigurations the port supports.""" # By default, we assume we want to test every graphics type in # every configuration on every system. test_configurations = [] for version, architecture in self._all_systems(): for build_type in self._all_build_types(): test_configurations.append(TestConfiguration( version=version, architecture=architecture, build_type=build_type)) return test_configurations def _all_systems(self): return (('leopard', 'x86'), ('snowleopard', 'x86'), ('xp', 'x86'), ('win7', 'x86'), ('lucid', 'x86'), ('lucid', 'x86_64')) def _all_build_types(self): return ('debug', 'release') def configuration_specifier_macros(self): """To avoid surprises when introducing new macros, these are intentionally fixed in time.""" return {'mac': ['leopard', 'snowleopard'], 'win': ['xp', 'win7'], 'linux': ['lucid']} def all_baseline_variants(self): return self.ALL_BASELINE_VARIANTS def virtual_test_suites(self): return [ VirtualTestSuite('passes', 'passes', ['--virtual-arg'], use_legacy_naming=True), VirtualTestSuite('skipped', 'failures/expected', ['--virtual-arg2'], use_legacy_naming=True), ] class TestDriver(Driver): """Test/Dummy implementation of the driver interface.""" next_pid = 1 def __init__(self, *args, **kwargs): super(TestDriver, self).__init__(*args, **kwargs) self.started = False self.pid = 0 def cmd_line(self, pixel_tests, per_test_args): pixel_tests_flag = '-p' if pixel_tests else '' return [self._port._path_to_driver()] + [pixel_tests_flag] + self._port.get_option('additional_drt_flag', []) + per_test_args def run_test(self, driver_input, stop_when_done): if not self.started: self.started = True self.pid = TestDriver.next_pid TestDriver.next_pid += 1 start_time = time.time() test_name = driver_input.test_name test_args = driver_input.args or [] test = self._port._tests[test_name] 
if test.keyboard: raise KeyboardInterrupt if test.exception: raise ValueError('exception from ' + test_name) if test.device_failure: raise DeviceFailure('device failure in ' + test_name) audio = None actual_text = test.actual_text crash = test.crash web_process_crash = test.web_process_crash if 'flaky/text.html' in test_name and not test_name in self._port._flakes: self._port._flakes.add(test_name) actual_text = 'flaky text failure' if 'crash_then_text.html' in test_name: if test_name in self._port._flakes: actual_text = 'text failure' else: self._port._flakes.add(test_name) crashed_process_name = self._port.driver_name() crashed_pid = 1 crash = True if 'text_then_crash.html' in test_name: if test_name in self._port._flakes: crashed_process_name = self._port.driver_name() crashed_pid = 1 crash = True else: self._port._flakes.add(test_name) actual_text = 'text failure' if actual_text and test_args and test_name == 'passes/args.html': actual_text = actual_text + ' ' + ' '.join(test_args) if test.actual_audio: audio = base64.b64decode(test.actual_audio) crashed_process_name = None crashed_pid = None if crash: crashed_process_name = self._port.driver_name() crashed_pid = 1 elif web_process_crash: crashed_process_name = 'WebProcess' crashed_pid = 2 crash_log = '' if crashed_process_name: crash_logs = CrashLogs(self._port.host) crash_log = crash_logs.find_newest_log(crashed_process_name, None) or '' if stop_when_done: self.stop() if test.actual_checksum == driver_input.image_hash: image = None else: image = test.actual_image return DriverOutput(actual_text, image, test.actual_checksum, audio, crash=(crash or web_process_crash), crashed_process_name=crashed_process_name, crashed_pid=crashed_pid, crash_log=crash_log, test_time=time.time() - start_time, timeout=test.timeout, error=test.error, pid=self.pid) def stop(self): self.started = False<|fim▁end|>
<|file_name|>print_progressbar.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|> def print_progress (iteration, total, prefix = '', suffix = '', decimals = 1, barLength = 100): """ Call in a loop to create terminal progress bar @params: iteration - Required : current iteration (Int) total - Required : total iterations (Int) prefix - Optional : prefix string (Str) suffix - Optional : suffix string (Str) decimals - Optional : positive number of decimals in percent complete (Int) barLength - Optional : character length of bar (Int) copied from: http://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console With slight adjustment so that it allows just one iteration (total = 0) """ formatStr = "{0:." + str(decimals) + "f}" percent = formatStr.format(100 * (iteration / float(total))) if not total == 0 else formatStr.format(100) filledLength = int(round(barLength * iteration / float(total))) if not total == 0 else int(round(barLength)) bar = '█' * filledLength + '-' * (barLength - filledLength) sys.stdout.write('\r%s |%s| %s%s %s' % (prefix, bar, percent, '%', suffix)), if iteration == total: sys.stdout.write('\n') sys.stdout.flush()<|fim▁end|>
import sys
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>""" Flask-Validictory ------------- Simple integration between Flask and Validictory. """ import os from setuptools import setup module_path = os.path.join(os.path.dirname(__file__), 'flask_validictory.py') version_line = [line for line in open(module_path) if line.startswith('__version_info__')][0] __version__ = '.'.join(eval(version_line.split('__version_info__ = ')[-1])) setup( name='Flask-Validictory', version=__version__, url='https://github.com/inner-loop/flask-validictory/', license='MIT', author='Mark Angrish', author_email='[email protected]', description='Simple integration between Flask and Validictory.', long_description=__doc__, py_modules=['flask_validictory'], zip_safe=False,<|fim▁hole|> ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )<|fim▁end|>
include_package_data=True, platforms='any', install_requires=[ 'Flask>=0.10.1', 'validictory>=0.9.1'
<|file_name|>PetrolStationGenerator.java<|end_file_name|><|fim▁begin|>package simulation.generators; import simulation.data.PetrolStation; import simulation.data.Road; /** * Created by user on 03.06.2017. */ public class PetrolStationGenerator { private Road road; private int minimalDistanceBetweenStations = 50; private int maximumDistanceBetweenStations = 200; private float minimalFuelPrice = 3.5f; private float maximumFuelPrice = 4f; public PetrolStationGenerator(Road road) { this.road = road; } public void generateStationsOnTheRoad(){ RandomIntegerGenerator generator = new RandomIntegerGenerator(); int lastStationPosition = 0; road.addPetrolStation(generateStation(lastStationPosition)); while (lastStationPosition < road.getDistance()){ int nextStationDistance = generator.generateNumberFromRange(minimalDistanceBetweenStations,maximumDistanceBetweenStations); if(lastStationPosition+nextStationDistance <= road.getDistance()){ road.addPetrolStation(generateStation(lastStationPosition+nextStationDistance)); lastStationPosition += nextStationDistance; }else{ break; } } } private PetrolStation generateStation(int positionOnRoad){ float fuelPrice = new RandomFloatGenerator().generateNumberFromRange(minimalFuelPrice,maximumFuelPrice); return new PetrolStation(positionOnRoad,fuelPrice); }<|fim▁hole|> return road; } public void setRoad(Road road) { this.road = road; } public int getMinimalDistanceBetweenStations() { return minimalDistanceBetweenStations; } public void setMinimalDistanceBetweenStations(int minimalDistanceBetweenStations) { this.minimalDistanceBetweenStations = minimalDistanceBetweenStations; } public int getMaximumDistanceBetweenStations() { return maximumDistanceBetweenStations; } public void setMaximumDistanceBetweenStations(int maximumDistanceBetweenStations) { this.maximumDistanceBetweenStations = maximumDistanceBetweenStations; } public float getMinimalFuelPrice() { return minimalFuelPrice; } public void setMinimalFuelPrice(float minimalFuelPrice) { 
this.minimalFuelPrice = minimalFuelPrice; } public float getMaximumFuelPrice() { return maximumFuelPrice; } public void setMaximumFuelPrice(float maximumFuelPrice) { this.maximumFuelPrice = maximumFuelPrice; } }<|fim▁end|>
public Road getRoad() {
<|file_name|>fulljslint.js<|end_file_name|><|fim▁begin|>// jslint.js // 2009-03-28 // TO DO: In ADsafe, make lib run only. /* Copyright (c) 2002 Douglas Crockford (www.JSLint.com) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software shall be used for Good, not Evil. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* JSLINT is a global function. It takes two parameters. var myResult = JSLINT(source, option); The first parameter is either a string or an array of strings. If it is a string, it will be split on '\n' or '\r'. If it is an array of strings, it is assumed that each string represents one line. The source can be a JavaScript text, or HTML text, or a Konfabulator text. The second parameter is an optional object of options which control the operation of JSLINT. Most of the options are booleans: They are all are optional and have a default value of false. If it checks out, JSLINT returns true. Otherwise, it returns false. If false, you can inspect JSLINT.errors to find out the problems. 
JSLINT.errors is an array of objects containing these members: { line : The line (relative to 0) at which the lint was found character : The character (relative to 0) at which the lint was found reason : The problem evidence : The text line in which the problem occurred raw : The raw message before the details were inserted a : The first detail b : The second detail c : The third detail d : The fourth detail } If a fatal error was found, a null will be the last element of the JSLINT.errors array. You can request a Function Report, which shows all of the functions and the parameters and vars that they use. This can be used to find implied global variables and other problems. The report is in HTML and can be inserted in an HTML <body>. var myReport = JSLINT.report(limited); If limited is true, then the report will be limited to only errors. */ /*jslint evil: true, nomen: false, onevar: false, regexp: false , strict: true */ /*global JSLINT*/ /*members "\b", "\t", "\n", "\f", "\r", "\"", "%", "(begin)", "(breakage)", "(context)", "(error)", "(global)", "(identifier)", "(line)", "(loopage)", "(name)", "(onevar)", "(params)", "(scope)", "(verb)", ")", "++", "--", "\/", ADSAFE, Array, Boolean, COM, Canvas, CustomAnimation, Date, Debug, E, Error, EvalError, FadeAnimation, Flash, FormField, Frame, Function, HotKey, Image, JSON, LN10, LN2, LOG10E, LOG2E, MAX_VALUE, MIN_VALUE, Math, MenuItem, MoveAnimation, NEGATIVE_INFINITY, Number, Object, Option, PI, POSITIVE_INFINITY, Point, RangeError, Rectangle, ReferenceError, RegExp, ResizeAnimation, RotateAnimation, SQRT1_2, SQRT2, ScrollBar, String, Style, SyntaxError, System, Text, TextArea, Timer, TypeError, URIError, URL, Web, Window, XMLDOM, XMLHttpRequest, "\\", "]", a, abbr, acronym, active, address, adsafe, after, alert, aliceblue, animator, antiquewhite, appleScript, applet, apply, approved, aqua, aquamarine, area, arguments, arity, autocomplete, azure, b, background, "background-attachment", "background-color", 
"background-image", "background-position", "background-repeat", base, bdo, beep, before, beige, big, bisque, bitwise, black, blanchedalmond, block, blockquote, blue, blueviolet, blur, body, border, "border-bottom", "border-bottom-color", "border-bottom-style", "border-bottom-width", "border-collapse", "border-color", "border-left", "border-left-color", "border-left-style", "border-left-width", "border-right", "border-right-color", "border-right-style", "border-right-width", "border-spacing", "border-style", "border-top", "border-top-color", "border-top-style", "border-top-width", "border-width", bottom, br, brown, browser, burlywood, button, bytesToUIString, c, cadetblue, call, callee, caller, canvas, cap, caption, "caption-side", cases, center, charAt, charCodeAt, character, chartreuse, chocolate, chooseColor, chooseFile, chooseFolder, cite, clear, clearInterval, clearTimeout, clip, close, closeWidget, closed, cm, code, col, colgroup, color, comment, condition, confirm, console, constructor, content, convertPathToHFS, convertPathToPlatform, coral, cornflowerblue, cornsilk, "counter-increment", "counter-reset", create, crimson, css, cursor, cyan, d, darkblue, darkcyan, darkgoldenrod, darkgray, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkturquoise, darkviolet, dd, debug, decodeURI, decodeURIComponent, deeppink, deepskyblue, defaultStatus, defineClass, del, deserialize, dfn, dimgray, dir, direction, display, div, dl, document, dodgerblue, dt, else, em, embed, empty, "empty-cells", encodeURI, encodeURIComponent, entityify, eqeqeq, errors, escape, eval, event, evidence, evil, ex, exec, exps, fieldset, filesystem, firebrick, first, "first-child", "first-letter", "first-line", float, floor, floralwhite, focus, focusWidget, font, "font-face", "font-family", "font-size", "font-size-adjust", "font-stretch", "font-style", "font-variant", "font-weight", forestgreen, forin, 
form, fragment, frame, frames, frameset, from, fromCharCode, fuchsia, fud, funct, function, g, gainsboro, gc, getComputedStyle, ghostwhite, gold, goldenrod, gray, green, greenyellow, h1, h2, h3, h4, h5, h6, hasOwnProperty, head, height, help, history, honeydew, hotpink, hover, hr, html, i, iTunes, id, identifier, iframe, img, immed, import, in, include, indent, indexOf, indianred, indigo, init, input, ins, isAlpha, isApplicationRunning, isDigit, isFinite, isNaN, ivory, join, kbd, khaki, konfabulatorVersion, label, labelled, lang, lavender, lavenderblush, lawngreen, laxbreak, lbp, led, left, legend, lemonchiffon, length, "letter-spacing", li, lib, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightsteelblue, lightyellow, lime, limegreen, line, "line-height", linen, link, "list-style", "list-style-image", "list-style-position", "list-style-type", load, loadClass, location, log, m, magenta, map, margin, "margin-bottom", "margin-left", "margin-right", "margin-top", "marker-offset", maroon, match, "max-height", "max-width", md5, media, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, menu, message, meta, midnightblue, "min-height", "min-width", mintcream, mistyrose, mm, moccasin, moveBy, moveTo, name, navajowhite, navigator, navy, new, newcap, noframes, nomen, noscript, nud, object, ol, oldlace, olive, olivedrab, on, onblur, onerror, onevar, onfocus, onload, onresize, onunload, opacity, open, openURL, opener, opera, optgroup, option, orange, orangered, orchid, outer, outline, "outline-color", "outline-style", "outline-width", overflow, p, padding, "padding-bottom", "padding-left", "padding-right", "padding-top", page, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, param, parent, parseFloat, parseInt, passfail, pc, peachpuff, peru, pink, play, plum, plusplus, pop, 
popupMenu, position, powderblue, pre, predef, preferenceGroups, preferences, print, prompt, prototype, pt, purple, push, px, q, quit, quotes, random, range, raw, reach, readFile, readUrl, reason, red, regexp, reloadWidget, replace, report, reserved, resizeBy, resizeTo, resolvePath, resumeUpdates, rhino, right, rosybrown, royalblue, runCommand, runCommandInBg, saddlebrown, safe, salmon, samp, sandybrown, saveAs, savePreferences, screen, script, scroll, scrollBy, scrollTo, seagreen, seal, search, seashell, select, self, serialize, setInterval, setTimeout, shift, showWidgetPreferences, sidebar, sienna, silver, skyblue, slateblue, slategray, sleep, slice, small, snow, sort, span, spawn, speak, split, springgreen, src, status, steelblue, strict, strong, style, styleproperty, sub, substr, sup, supplant, suppressUpdates, sync, system, table, "table-layout", tan, tbody, td, teal, tellWidget, test, "text-align", "text-decoration", "text-indent", "text-shadow", "text-transform", textarea, tfoot, th, thead, thistle, title, toLowerCase, toString, toUpperCase, toint32, token, tomato, top, tr, tt, turquoise, type, u, ul, undef, unescape, "unicode-bidi", unwatch, updateNow, value, valueOf, var, version, "vertical-align", violet, visibility, visited, watch, wheat, white, "white-space", whitesmoke, widget, width, window, "word-spacing", yahooCheckLogin, yahooLogin, yahooLogout, yellow, yellowgreen, "z-index" */ // We build the application inside a function so that we produce only a single // global variable. The function will be invoked, its return value is the JSLINT // application itself. "use strict"; JSLINT = (function () { var adsafe_id, // The widget's ADsafe id. adsafe_may, // The widget may load approved scripts. adsafe_went, // ADSAFE.go has been called. anonname, // The guessed name for anonymous functions. approved, // ADsafe approved urls. 
atrule = { 'import' : true, media : true, 'font-face': true, page : true }, badbreak = { ')': true, ']': true, '++': true, '--': true }, // These are members that should not be permitted in third party ads. banned = { // the member names that ADsafe prohibits. apply : true, 'arguments' : true, call : true, callee : true, caller : true, constructor : true, 'eval' : true, prototype : true, unwatch : true, valueOf : true, watch : true }, // These are the JSLint boolean options. boolOptions = { adsafe : true, // if ADsafe should be enforced bitwise : true, // if bitwise operators should not be allowed browser : true, // if the standard browser globals should be predefined cap : true, // if upper case HTML should be allowed css : true, // if CSS workarounds should be tolerated debug : true, // if debugger statements should be allowed eqeqeq : true, // if === should be required evil : true, // if eval should be allowed forin : true, // if for in statements must filter fragment : true, // if HTML fragments should be allowed immed : true, // if immediate invocations must be wrapped in parens laxbreak : true, // if line breaks should not be checked newcap : true, // if constructor names must be capitalized nomen : true, // if names should be checked on : true, // if HTML event handlers should be allowed onevar : true, // if only one var statement per function should be allowed passfail : true, // if the scan should stop on first error plusplus : true, // if increment/decrement should not be allowed regexp : true, // if the . 
should not be allowed in regexp literals rhino : true, // if the Rhino environment globals should be predefined undef : true, // if variables should be declared before used safe : true, // if use of some browser features should be restricted sidebar : true, // if the System object should be predefined strict : true, // require the "use strict"; pragma sub : true, // if all forms of subscript notation are tolerated white : true, // if strict whitespace rules apply widget : true // if the Yahoo Widgets globals should be predefined }, // browser contains a set of global names which are commonly provided by a // web browser environment. browser = { alert : true, blur : true, clearInterval : true, clearTimeout : true, close : true, closed : true, confirm : true, console : true, Debug : true, defaultStatus : true, document : true, event : true, focus : true, frames : true, getComputedStyle: true, history : true, Image : true, length : true, location : true, moveBy : true, moveTo : true, name : true, navigator : true, onblur : true, onerror : true, onfocus : true, onload : true, onresize : true, onunload : true, open : true, opener : true, opera : true, Option : true, parent : true, print : true, prompt : true, resizeBy : true, resizeTo : true, screen : true, scroll : true, scrollBy : true, scrollTo : true, self : true, setInterval : true, setTimeout : true, status : true, top : true, window : true, XMLHttpRequest : true }, cssAttributeData, cssAny, cssColorData = { "aliceblue": true, "antiquewhite": true, "aqua": true, "aquamarine": true, "azure": true, "beige": true, "bisque": true, "black": true, "blanchedalmond": true, "blue": true, "blueviolet": true, "brown": true, "burlywood": true, "cadetblue": true, "chartreuse": true, "chocolate": true, "coral": true, "cornflowerblue": true, "cornsilk": true, "crimson": true, "cyan": true, "darkblue": true, "darkcyan": true, "darkgoldenrod": true, "darkgray": true, "darkgreen": true, "darkkhaki": true, "darkmagenta": true, 
"darkolivegreen": true, "darkorange": true, "darkorchid": true, "darkred": true, "darksalmon": true, "darkseagreen": true, "darkslateblue": true, "darkslategray": true, "darkturquoise": true, "darkviolet": true, "deeppink": true, "deepskyblue": true, "dimgray": true, "dodgerblue": true, "firebrick": true, "floralwhite": true, "forestgreen": true, "fuchsia": true, "gainsboro": true, "ghostwhite": true, "gold": true, "goldenrod": true, "gray": true, "green": true, "greenyellow": true, "honeydew": true, "hotpink": true, "indianred": true, "indigo": true, "ivory": true, "khaki": true, "lavender": true, "lavenderblush": true, "lawngreen": true, "lemonchiffon": true, "lightblue": true, "lightcoral": true, "lightcyan": true, "lightgoldenrodyellow": true, "lightgreen": true, "lightpink": true, "lightsalmon": true, "lightseagreen": true, "lightskyblue": true, "lightslategray": true, "lightsteelblue": true, "lightyellow": true, "lime": true, "limegreen": true, "linen": true, "magenta": true, "maroon": true, "mediumaquamarine": true, "mediumblue": true, "mediumorchid": true, "mediumpurple": true, "mediumseagreen": true, "mediumslateblue": true, "mediumspringgreen": true, "mediumturquoise": true, "mediumvioletred": true, "midnightblue": true, "mintcream": true, "mistyrose": true, "moccasin": true, "navajowhite": true, "navy": true, "oldlace": true, "olive": true, "olivedrab": true, "orange": true, "orangered": true, "orchid": true, "palegoldenrod": true, "palegreen": true, "paleturquoise": true, "palevioletred": true, "papayawhip": true, "peachpuff": true, "peru": true, "pink": true, "plum": true, "powderblue": true, "purple": true, "red": true, "rosybrown": true, "royalblue": true, "saddlebrown": true, "salmon": true, "sandybrown": true, "seagreen": true, "seashell": true, "sienna": true, "silver": true, "skyblue": true, "slateblue": true, "slategray": true, "snow": true, "springgreen": true, "steelblue": true, "tan": true, "teal": true, "thistle": true, "tomato": true, 
"turquoise": true, "violet": true, "wheat": true, "white": true, "whitesmoke": true, "yellow": true, "yellowgreen": true }, cssBorderStyle, cssLengthData = { '%': true, 'cm': true, 'em': true, 'ex': true, 'in': true, 'mm': true, 'pc': true, 'pt': true, 'px': true }, escapes = { '\b': '\\b', '\t': '\\t', '\n': '\\n', '\f': '\\f', '\r': '\\r', '"' : '\\"', '/' : '\\/', '\\': '\\\\' }, funct, // The current function functions, // All of the functions global, // The global scope htmltag = { a: {}, abbr: {}, acronym: {}, address: {}, applet: {}, area: {empty: true, parent: ' map '}, b: {}, base: {empty: true, parent: ' head '}, bdo: {}, big: {}, blockquote: {}, body: {parent: ' html noframes '}, br: {empty: true}, button: {}, canvas: {parent: ' body p div th td '}, caption: {parent: ' table '}, center: {}, cite: {}, code: {}, col: {empty: true, parent: ' table colgroup '}, colgroup: {parent: ' table '}, dd: {parent: ' dl '}, del: {}, dfn: {}, dir: {}, div: {}, dl: {}, dt: {parent: ' dl '}, em: {}, embed: {}, fieldset: {}, font: {}, form: {}, frame: {empty: true, parent: ' frameset '}, frameset: {parent: ' html frameset '}, h1: {}, h2: {}, h3: {}, h4: {}, h5: {}, h6: {}, head: {parent: ' html '}, html: {parent: '*'}, hr: {empty: true}, i: {}, iframe: {}, img: {empty: true}, input: {empty: true}, ins: {}, kbd: {}, label: {}, legend: {parent: ' fieldset '}, li: {parent: ' dir menu ol ul '}, link: {empty: true, parent: ' head '}, map: {}, menu: {}, meta: {empty: true, parent: ' head noframes noscript '}, noframes: {parent: ' html body '}, noscript: {parent: ' body head noframes '}, object: {}, ol: {}, optgroup: {parent: ' select '}, option: {parent: ' optgroup select '}, p: {}, param: {empty: true, parent: ' applet object '}, pre: {}, q: {}, samp: {}, script: {empty: true, parent: ' body div frame head iframe p pre span '}, select: {}, small: {}, span: {}, strong: {}, style: {parent: ' head ', empty: true}, sub: {}, sup: {}, table: {}, tbody: {parent: ' table '}, td: 
{parent: ' tr '}, textarea: {}, tfoot: {parent: ' table '}, th: {parent: ' tr '}, thead: {parent: ' table '}, title: {parent: ' head '}, tr: {parent: ' table tbody thead tfoot '}, tt: {}, u: {}, ul: {}, 'var': {} }, ids, // HTML ids implied, // Implied globals inblock, indent, jsonmode, lines, lookahead, member, membersOnly, nexttoken, noreach, option, predefined, // Global variables defined by option prereg, prevtoken, pseudorule = { 'first-child': true, link : true, visited : true, hover : true, active : true, focus : true, lang : true, 'first-letter' : true, 'first-line' : true, before : true, after : true }, rhino = { defineClass : true, deserialize : true, gc : true, help : true, load : true, loadClass : true, print : true, quit : true, readFile : true, readUrl : true, runCommand : true, seal : true, serialize : true, spawn : true, sync : true, toint32 : true, version : true }, scope, // The current scope sidebar = { System : true }, src, stack, // standard contains the global names that are provided by the // ECMAScript standard. standard = { Array : true, Boolean : true, Date : true, decodeURI : true, decodeURIComponent : true, encodeURI : true, encodeURIComponent : true, Error : true, 'eval' : true, EvalError : true, Function : true, isFinite : true, isNaN : true, JSON : true, Math : true, Number : true, Object : true, parseInt : true, parseFloat : true, RangeError : true, ReferenceError : true, RegExp : true, String : true, SyntaxError : true, TypeError : true, URIError : true }, standard_member = { E : true, LN2 : true, LN10 : true, LOG2E : true, LOG10E : true, PI : true, SQRT1_2 : true, SQRT2 : true, MAX_VALUE : true, MIN_VALUE : true, NEGATIVE_INFINITY : true, POSITIVE_INFINITY : true }, syntax = {}, tab, token, urls, warnings, // widget contains the global names which are provided to a Yahoo // (fna Konfabulator) widget. 
widget = { alert : true, animator : true, appleScript : true, beep : true, bytesToUIString : true, Canvas : true, chooseColor : true, chooseFile : true, chooseFolder : true, closeWidget : true, COM : true, convertPathToHFS : true, convertPathToPlatform : true, CustomAnimation : true, escape : true, FadeAnimation : true, filesystem : true, Flash : true, focusWidget : true, form : true, FormField : true, Frame : true, HotKey : true, Image : true, include : true, isApplicationRunning : true, iTunes : true, konfabulatorVersion : true, log : true, md5 : true, MenuItem : true, MoveAnimation : true, openURL : true, play : true, Point : true, popupMenu : true, preferenceGroups : true, preferences : true, print : true, prompt : true, random : true, Rectangle : true, reloadWidget : true, ResizeAnimation : true, resolvePath : true, resumeUpdates : true, RotateAnimation : true, runCommand : true, runCommandInBg : true, saveAs : true, savePreferences : true, screen : true, ScrollBar : true, showWidgetPreferences : true, sleep : true, speak : true, Style : true, suppressUpdates : true, system : true, tellWidget : true, Text : true, TextArea : true, Timer : true, unescape : true, updateNow : true, URL : true, Web : true, widget : true, Window : true, XMLDOM : true, XMLHttpRequest : true, yahooCheckLogin : true, yahooLogin : true, yahooLogout : true }, // xmode is used to adapt to the exceptions in html parsing. 
// It can have these states: // false .js script file // html // outer // script // style // scriptstring // styleproperty xmode, xquote, // unsafe comment or string ax = /@cc|<\/?|script|\]*s\]|<\s*!|&lt/i, // unsafe characters that are silently deleted by one or more browsers cx = /[\u0000-\u001f\u007f-\u009f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/, // token tx = /^\s*([(){}\[.,:;'"~\?\]#@]|==?=?|\/(\*(global|extern|jslint|member|members)?|=|\/)?|\*[\/=]?|\+[+=]?|-[\-=]?|%=?|&[&=]?|\|[|=]?|>>?>?=?|<([\/=!]|\!(\[|--)?|<=?)?|\^=?|\!=?=?|[a-zA-Z_$][a-zA-Z0-9_$]*|[0-9]+([xX][0-9a-fA-F]+|\.[0-9]*)?([eE][+\-]?[0-9]+)?)/, // html token hx = /^\s*(['"=>\/&#]|<(?:\/|\!(?:--)?)?|[a-zA-Z][a-zA-Z0-9_\-]*|[0-9]+|--|.)/, // outer html token ox = /[>&]|<[\/!]?|--/, // star slash lx = /\*\/|\/\*/, // identifier ix = /^([a-zA-Z_$][a-zA-Z0-9_$]*)$/, // javascript url jx = /^(?:javascript|jscript|ecmascript|vbscript|mocha|livescript)\s*:/i, // url badness ux = /&|\+|\u00AD|\.\.|\/\*|%[^;]|base64|url|expression|data|mailto/i, // style sx = /^\s*([{:#*%.=,>+\[\]@()"';*]|[a-zA-Z0-9_][a-zA-Z0-9_\-]*|<\/|\/\*)/, ssx = /^\s*([@#!"'};:\-\/%.=,+\[\]()*_]|[a-zA-Z][a-zA-Z0-9._\-]*|\d+(?:\.\d+)?|<\/)/, // query characters qx = /[\[\]\/\\"'*<>.&:(){}+=#_]/, // query characters for ids dx = /[\[\]\/\\"'*<>.&:(){}+=#]/, rx = { outer: hx, html: hx, style: sx, styleproperty: ssx }; function F() {} if (typeof Object.create !== 'function') { Object.create = function (o) { F.prototype = o; return new F(); }; } function combine(t, o) { var n; for (n in o) { if (o.hasOwnProperty(n)) { t[n] = o[n]; } } } String.prototype.entityify = function () { return this. replace(/&/g, '&amp;'). replace(/</g, '&lt;'). 
replace(/>/g, '&gt;'); }; String.prototype.isAlpha = function () { return (this >= 'a' && this <= 'z\uffff') || (this >= 'A' && this <= 'Z\uffff'); }; String.prototype.isDigit = function () { return (this >= '0' && this <= '9'); }; String.prototype.supplant = function (o) { return this.replace(/\{([^{}]*)\}/g, function (a, b) { var r = o[b]; return typeof r === 'string' || typeof r === 'number' ? r : a; }); }; String.prototype.name = function () { // If the string looks like an identifier, then we can return it as is. // If the string contains no control characters, no quote characters, and no // backslash characters, then we can simply slap some quotes around it. // Otherwise we must also replace the offending characters with safe // sequences. if (ix.test(this)) { return this; } if (/[&<"\/\\\x00-\x1f]/.test(this)) { return '"' + this.replace(/[&<"\/\\\x00-\x1f]/g, function (a) { var c = escapes[a]; if (c) { return c; } c = a.charCodeAt(); return '\\u00' + Math.floor(c / 16).toString(16) + (c % 16).toString(16); }) + '"'; } return '"' + this + '"'; }; function assume() { if (!option.safe) { if (option.rhino) { combine(predefined, rhino); } if (option.browser || option.sidebar) { combine(predefined, browser); } if (option.sidebar) { combine(predefined, sidebar); } if (option.widget) { combine(predefined, widget); } } } // Produce an error warning. function quit(m, l, ch) { throw { name: 'JSLintError', line: l, character: ch, message: m + " (" + Math.floor((l / lines.length) * 100) + "% scanned)." }; } function warning(m, t, a, b, c, d) { var ch, l, w; t = t || nexttoken; if (t.id === '(end)') { // `~ t = token; } l = t.line || 0; ch = t.from || 0; w = { id: '(error)', raw: m, evidence: lines[l] || '', line: l, character: ch, a: a, b: b, c: c, d: d }; w.reason = m.supplant(w); JSLINT.errors.push(w); if (option.passfail) { quit('Stopping. 
', l, ch); } warnings += 1; if (warnings === 50) { quit("Too many errors.", l, ch); } return w; } function warningAt(m, l, ch, a, b, c, d) { return warning(m, { line: l, from: ch }, a, b, c, d); } function error(m, t, a, b, c, d) { var w = warning(m, t, a, b, c, d); quit("Stopping, unable to continue.", w.line, w.character); } function errorAt(m, l, ch, a, b, c, d) { return error(m, { line: l, from: ch }, a, b, c, d); } // lexical analysis var lex = (function lex() { var character, from, line, s; // Private lex methods function nextLine() { var at; line += 1; if (line >= lines.length) { return false; } character = 0; s = lines[line].replace(/\t/g, tab); at = s.search(cx); if (at >= 0) { warningAt("Unsafe character.", line, at); } return true; } // Produce a token object. The token inherits from a syntax symbol. function it(type, value) { var i, t; if (type === '(color)') { t = {type: type}; } else if (type === '(punctuator)' || (type === '(identifier)' && syntax.hasOwnProperty(value))) { t = syntax[value] || syntax['(error)']; // Mozilla bug workaround. if (!t.id) { t = syntax[type]; } } else { t = syntax[type]; } t = Object.create(t); if (type === '(string)' || type === '(range)') { if (jx.test(value)) { warningAt("Script URL.", line, from); } } if (type === '(identifier)') { t.identifier = true; if (option.nomen && value.charAt(0) === '_') { warningAt("Unexpected '_' in '{a}'.", line, from, value); } } t.value = value; t.line = line; t.character = character; t.from = from; i = t.id; if (i !== '(endline)') { prereg = i && (('(,=:[!&|?{};'.indexOf(i.charAt(i.length - 1)) >= 0) || i === 'return'); } return t; } // Public lex methods return { init: function (source) { if (typeof source === 'string') { lines = source. replace(/\r\n/g, '\n'). replace(/\r/g, '\n'). 
split('\n'); } else { lines = source; } line = -1; nextLine(); from = 0; }, range: function (begin, end) { var c, value = ''; from = character; if (s.charAt(0) !== begin) { errorAt("Expected '{a}' and instead saw '{b}'.", line, character, begin, s.charAt(0)); } for (;;) { s = s.slice(1); character += 1; c = s.charAt(0); switch (c) { case '': errorAt("Missing '{a}'.", line, character, c); break; case end: s = s.slice(1); character += 1; return it('(range)', value); case xquote: case '\\': case '\'': case '"': warningAt("Unexpected '{a}'.", line, character, c); } value += c; } }, // token -- this is called by advance to get the next token. token: function () { var b, c, captures, d, depth, high, i, l, low, q, t; function match(x) { var r = x.exec(s), r1; if (r) { l = r[0].length; r1 = r[1]; c = r1.charAt(0); s = s.substr(l); character += l; from = character - r1.length; return r1; } } function string(x) { var c, j, r = ''; if (jsonmode && x !== '"') { warningAt("Strings must use doublequote.", line, character); } if (xquote === x || (xmode === 'scriptstring' && !xquote)) { return it('(punctuator)', x); } function esc(n) { var i = parseInt(s.substr(j + 1, n), 16); j += n; if (i >= 32 && i <= 126 && i !== 34 && i !== 92 && i !== 39) { warningAt("Unnecessary escapement.", line, character); } character += n; c = String.fromCharCode(i); } j = 0; for (;;) { while (j >= s.length) { j = 0; if (xmode !== 'html' || !nextLine()) { errorAt("Unclosed string.", line, from); } } c = s.charAt(j); if (c === x) { character += 1; s = s.substr(j + 1); return it('(string)', r, x); } if (c < ' ') { if (c === '\n' || c === '\r') { break; } warningAt("Control character in string: {a}.", line, character + j, s.slice(0, j)); } else if (c === xquote) { warningAt("Bad HTML string", line, character + j); } else if (c === '<') { if (option.safe && xmode === 'html') { warningAt("ADsafe string violation.", line, character + j); } else if (s.charAt(j + 1) === '/' && (xmode || option.safe)) { 
warningAt("Expected '<\\/' and instead saw '</'.", line, character); } else if (s.charAt(j + 1) === '!' && (xmode || option.safe)) { warningAt("Unexpected '<!' in a string.", line, character); } } else if (c === '\\') { if (xmode === 'html') { if (option.safe) { warningAt("ADsafe string violation.", line, character + j); } } else if (xmode === 'styleproperty') { j += 1; character += 1; c = s.charAt(j); if (c !== x) { warningAt("Escapement in style string.", line, character + j); } } else { j += 1; character += 1; c = s.charAt(j); switch (c) { case xquote: warningAt("Bad HTML string", line, character + j); break; case '\\': case '\'': case '"': case '/': break; case 'b': c = '\b'; break; case 'f': c = '\f'; break; case 'n': c = '\n'; break; case 'r': c = '\r'; break; case 't': c = '\t'; break; case 'u': esc(4); break; case 'v': c = '\v'; break; case 'x': if (jsonmode) { warningAt("Avoid \\x-.", line, character); } esc(2); break; default: warningAt("Bad escapement.", line, character); } } } r += c; character += 1; j += 1; } } for (;;) { if (!s) { return it(nextLine() ? '(endline)' : '(end)', ''); } while (xmode === 'outer') { i = s.search(ox); if (i === 0) { break; } else if (i > 0) { character += 1; s = s.slice(i); break; } else { if (!nextLine()) { return it('(end)', ''); } } } t = match(rx[xmode] || tx); if (!t) { if (xmode === 'html') { return it('(error)', s.charAt(0)); } else { t = ''; c = ''; while (s && s < '!') { s = s.substr(1); } if (s) { errorAt("Unexpected '{a}'.", line, character, s.substr(0, 1)); } } } else { // identifier if (c.isAlpha() || c === '_' || c === '$') { return it('(identifier)', t); } // number if (c.isDigit()) { if (xmode !== 'style' && !isFinite(Number(t))) { warningAt("Bad number '{a}'.", line, character, t); } if (xmode !== 'styleproperty' && s.substr(0, 1).isAlpha()) { warningAt("Missing space after '{a}'.", line, character, t); } if (c === '0') { d = t.substr(1, 1); if (d.isDigit()) { if (token.id !== '.' 
&& xmode !== 'styleproperty') { warningAt("Don't use extra leading zeros '{a}'.", line, character, t); } } else if (jsonmode && (d === 'x' || d === 'X')) { warningAt("Avoid 0x-. '{a}'.", line, character, t); } } if (t.substr(t.length - 1) === '.') { warningAt( "A trailing decimal point can be confused with a dot '{a}'.", line, character, t); } return it('(number)', t); } switch (t) { // string case '"': case "'": return string(t); // // comment case '//': if (src || (xmode && xmode !== 'script')) { warningAt("Unexpected comment.", line, character); } else if (xmode === 'script' && /<\s*\//i.test(s)) { warningAt("Unexpected <\/ in comment.", line, character); } else if ((option.safe || xmode === 'script') && ax.test(s)) { warningAt("Dangerous comment.", line, character); } s = ''; token.comment = true; break; // /* comment case '/*': if (src || (xmode && xmode !== 'script' && xmode !== 'style' && xmode !== 'styleproperty')) { warningAt("Unexpected comment.", line, character); } if (option.safe && ax.test(s)) { warningAt("ADsafe comment violation.", line, character); } for (;;) { i = s.search(lx); if (i >= 0) { break; } if (!nextLine()) { errorAt("Unclosed comment.", line, character); } else { if (option.safe && ax.test(s)) { warningAt("ADsafe comment violation.", line, character); } } } character += i + 2; if (s.substr(i, 1) === '/') { errorAt("Nested comment.", line, character); } s = s.substr(i + 2); token.comment = true; break; // /*global /*extern /*members /*jslint */ case '/*global': case '/*extern': case '/*members': case '/*member': case '/*jslint': case '*/': return { value: t, type: 'special', line: line, character: character, from: from }; case '': break; // / case '/': if (prereg) { depth = 0; captures = 0; l = 0; for (;;) { b = true; c = s.charAt(l); l += 1; switch (c) { case '': errorAt("Unclosed regular expression.", line, from); return; case '/': if (depth > 0) { warningAt("Unescaped '{a}'.", line, from + l, '/'); } c = s.substr(0, l - 1); q = { g: 
true, i: true, m: true }; while (q[s.charAt(l)] === true) { q[s.charAt(l)] = false; l += 1; } character += l; s = s.substr(l); return it('(regexp)', c); case '\\': c = s.charAt(l); if (c < ' ') { warningAt("Unexpected control character in regular expression.", line, from + l); } else if (c === '<') { warningAt("Unexpected escaped character '{a}' in regular expression.", line, from + l, c); } l += 1; break; case '(': depth += 1; b = false; if (s.charAt(l) === '?') { l += 1; switch (s.charAt(l)) { case ':': case '=': case '!': l += 1; break; default: warningAt("Expected '{a}' and instead saw '{b}'.", line, from + l, ':', s.charAt(l)); } } else { captures += 1; } break; case ')': if (depth === 0) { warningAt("Unescaped '{a}'.", line, from + l, ')'); } else { depth -= 1; } break; case ' ': q = 1; while (s.charAt(l) === ' ') { l += 1; q += 1; } if (q > 1) { warningAt("Spaces are hard to count. Use {{a}}.", line, from + l, q); } break; case '[': if (s.charAt(l) === '^') { l += 1; } q = false; klass: do { c = s.charAt(l); l += 1; switch (c) { case '[': case '^': warningAt("Unescaped '{a}'.", line, from + l, c); q = true; break; case '-': if (q) { q = false; } else { warningAt("Unescaped '{a}'.", line, from + l, '-'); q = true; } break; case ']': if (!q) { warningAt("Unescaped '{a}'.", line, from + l - 1, '-'); } break klass; case '\\': c = s.charAt(l); if (c < ' ') { warningAt("Unexpected control character in regular expression.", line, from + l); } else if (c === '<') { warningAt("Unexpected escaped character '{a}' in regular expression.", line, from + l, c); } l += 1; q = true; break; case '/': warningAt("Unescaped '{a}'.", line, from + l - 1, '/'); q = true; break; case '<': if (xmode === 'script') { c = s.charAt(l); if (c === '!' 
|| c === '/') { warningAt("HTML confusion in regular expression '<{a}'.", line, from + l, c); } } q = true; break; default: q = true; } } while (c); break; case '.': if (option.regexp) { warningAt("Unexpected '{a}'.", line, from + l, c); } break; case ']': case '?': case '{': case '}': case '+': case '*': warningAt("Unescaped '{a}'.", line, from + l, c); break; case '<': if (xmode === 'script') { c = s.charAt(l); if (c === '!' || c === '/') { warningAt("HTML confusion in regular expression '<{a}'.", line, from + l, c); } } } if (b) { switch (s.charAt(l)) { case '?': case '+': case '*': l += 1; if (s.charAt(l) === '?') { l += 1; } break; case '{': l += 1; c = s.charAt(l); if (c < '0' || c > '9') { warningAt("Expected a number and instead saw '{a}'.", line, from + l, c); } l += 1; low = +c; for (;;) { c = s.charAt(l); if (c < '0' || c > '9') { break; } l += 1; low = +c + (low * 10); } high = low; if (c === ',') { l += 1; high = Infinity; c = s.charAt(l); if (c >= '0' && c <= '9') { l += 1; high = +c; for (;;) { c = s.charAt(l); if (c < '0' || c > '9') { break; } l += 1; high = +c + (high * 10); } } } if (s.charAt(l) !== '}') { warningAt("Expected '{a}' and instead saw '{b}'.", line, from + l, '}', c); } else { l += 1; } if (s.charAt(l) === '?') { l += 1; } if (low > high) { warningAt("'{a}' should not be greater than '{b}'.", line, from + l, low, high); } } } } c = s.substr(0, l - 1); character += l; s = s.substr(l); return it('(regexp)', c); } return it('(punctuator)', t); // punctuator case '#': if (xmode === 'html' || xmode === 'styleproperty') { for (;;) { c = s.charAt(0); if ((c < '0' || c > '9') && (c < 'a' || c > 'f') && (c < 'A' || c > 'F')) { break; } character += 1; s = s.substr(1); t += c; } if (t.length !== 4 && t.length !== 7) { warningAt("Bad hex color '{a}'.", line, from + l, t); } return it('(color)', t); } return it('(punctuator)', t); default: if (xmode === 'outer' && c === '&') { character += 1; s = s.substr(1); for (;;) { c = s.charAt(0); 
character += 1; s = s.substr(1); if (c === ';') { break; } if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') || c === '#')) { errorAt("Bad entity", line, from + l, character); } } break; } return it('(punctuator)', t); } } } } }; }()); function addlabel(t, type) { if (t === 'hasOwnProperty') { error("'hasOwnProperty' is a really bad name."); } if (option.safe && funct['(global)']) { warning('ADsafe global: ' + t + '.', token); } // Define t in the current function in the current scope. if (funct.hasOwnProperty(t)) { warning(funct[t] === true ? "'{a}' was used before it was defined." : "'{a}' is already defined.", nexttoken, t); } funct[t] = type; if (type === 'label') { scope[t] = funct; } else if (funct['(global)']) { global[t] = funct; if (implied.hasOwnProperty(t)) { warning("'{a}' was used before it was defined.", nexttoken, t); delete implied[t]; } } else { funct['(scope)'][t] = funct; } } function doOption() { var b, obj, filter, o = nexttoken.value, t, v; switch (o) { case '*/': error("Unbegun comment."); break; case '/*global': case '/*extern': if (option.safe) { warning("ADsafe restriction."); } obj = predefined; break; case '/*members': case '/*member': o = '/*members'; if (!membersOnly) { membersOnly = {}; } obj = membersOnly; break; case '/*jslint': if (option.safe) { warning("ADsafe restriction."); } obj = option; filter = boolOptions; } for (;;) { t = lex.token(); if (t.id === ',') { t = lex.token(); } while (t.id === '(endline)') { t = lex.token(); } if (t.type === 'special' && t.value === '*/') { break; } if (t.type !== '(string)' && t.type !== '(identifier)' && o !== '/*members') { error("Bad option.", t); } if (filter) { if (filter[t.value] !== true) { error("Bad option.", t); } v = lex.token(); if (v.id !== ':') { error("Expected '{a}' and instead saw '{b}'.", t, ':', t.value); } v = lex.token(); if (v.value === 'true') { b = true; } else if (v.value === 'false') { b = false; } else { error("Expected '{a}' and instead saw '{b}'.", t, 'true', 
t.value); } } else { b = true; } obj[t.value] = b; } if (filter) { assume(); } } // We need a peek function. If it has an argument, it peeks that much farther // ahead. It is used to distinguish // for ( var i in ... // from // for ( var i = ... function peek(p) { var i = p || 0, j = 0, t; while (j <= i) { t = lookahead[j]; if (!t) { t = lookahead[j] = lex.token(); } j += 1; } return t; } // Produce the next token. It looks for programming errors. function advance(id, t) { var l; switch (token.id) { case '(number)': if (nexttoken.id === '.') { warning( "A dot following a number can be confused with a decimal point.", token); } break; case '-': if (nexttoken.id === '-' || nexttoken.id === '--') { warning("Confusing minusses."); } break; case '+': if (nexttoken.id === '+' || nexttoken.id === '++') { warning("Confusing plusses."); } break; } if (token.type === '(string)' || token.identifier) { anonname = token.value; } if (id && nexttoken.id !== id) { if (t) { if (nexttoken.id === '(end)') { warning("Unmatched '{a}'.", t, t.id); } else { warning("Expected '{a}' to match '{b}' from line {c} and instead saw '{d}'.", nexttoken, id, t.id, t.line + 1, nexttoken.value); } } else if (nexttoken.type !== '(identifier)' || nexttoken.value !== id) { warning("Expected '{a}' and instead saw '{b}'.", nexttoken, id, nexttoken.value); } } prevtoken = token; token = nexttoken; for (;;) { nexttoken = lookahead.shift() || lex.token(); if (nexttoken.id === '(end)' || nexttoken.id === '(error)') { return; } if (nexttoken.type === 'special') { doOption(); } else { if (nexttoken.id !== '(endline)') { break; } l = !xmode && !option.laxbreak && (token.type === '(string)' || token.type === '(number)' || token.type === '(identifier)' || badbreak[token.id]); } } if (!option.evil && nexttoken.value === 'eval') { warning("eval is evil.", nexttoken); } if (l) { switch (nexttoken.id) { case '{': case '}': case ']': case '.': break; case ')': switch (token.id) { case ')': case '}': case ']': break; 
default: warning("Line breaking error '{a}'.", token, ')'); } break; default: warning("Line breaking error '{a}'.", token, token.value); } } } // This is the heart of JSLINT, the Pratt parser. In addition to parsing, it // is looking for ad hoc lint patterns. We add to Pratt's model .fud, which is // like nud except that it is only used on the first token of a statement. // Having .fud makes it much easier to define JavaScript. I retained Pratt's // nomenclature. // .nud Null denotation // .fud First null denotation // .led Left denotation // lbp Left binding power // rbp Right binding power // They are key to the parsing method called Top Down Operator Precedence. function parse(rbp, initial) { var left, o; if (nexttoken.id === '(end)') { error("Unexpected early end of program.", token); } advance(); if (option.safe && predefined[token.value] === true && (nexttoken.id !== '(' && nexttoken.id !== '.')) { warning('ADsafe violation.', token); } if (initial) { anonname = 'anonymous'; funct['(verb)'] = token.value; } if (initial === true && token.fud) { left = token.fud(); } else { if (token.nud) { o = token.exps; left = token.nud(); } else { if (nexttoken.type === '(number)' && token.id === '.') { warning( "A leading decimal point can be confused with a dot: '.{a}'.", token, nexttoken.value); advance(); return token; } else { error("Expected an identifier and instead saw '{a}'.", token, token.id); } } while (rbp < nexttoken.lbp) { o = nexttoken.exps; advance(); if (token.led) { left = token.led(left); } else { error("Expected an operator and instead saw '{a}'.", token, token.id); } } if (initial && !o) { warning( "Expected an assignment or function call and instead saw an expression.", token); } } return left; } // Functions for conformance of style. 
// Functions for conformance of style. Each checker inspects the whitespace
// between a pair of tokens (defaulting to the current token and the lookahead
// token) and reports violations through warning(). They rely on the
// file-level globals token, nexttoken, prevtoken, option, xmode, and indent.

// Warn if there is any whitespace (or a line break) between left and right.
function abut(left, right) {
    left = left || token;
    right = right || nexttoken;
    if (left.line !== right.line || left.character !== right.from) {
        warning("Unexpected space after '{a}'.", right, left.value);
    }
}

// Like abut, but only enforced when the white option is set or when parsing
// CSS ('style' / 'styleproperty' modes), and only for same-line tokens.
function adjacent(left, right) {
    left = left || token;
    right = right || nexttoken;
    if (option.white || xmode === 'styleproperty' || xmode === 'style') {
        if (left.character !== right.from && left.line === right.line) {
            warning("Unexpected space after '{a}'.", right, left.value);
        }
    }
}

// Warn about space between same-line tokens, but never after a comment.
function nospace(left, right) {
    left = left || token;
    right = right || nexttoken;
    if (option.white && !left.comment) {
        if (left.line === right.line) {
            adjacent(left, right);
        }
    }
}

// Warn when two tokens touch that should be separated by a space.
// NOTE(review): the warning is attached to nexttoken rather than right, so
// the reported position may be off when callers pass explicit tokens
// (e.g. nonadjacent(prevtoken, token)) — presumably harmless; confirm.
function nonadjacent(left, right) {
    left = left || token;
    right = right || nexttoken;
    if (option.white) {
        if (left.character === right.from) {
            warning("Missing space after '{a}'.", nexttoken, left.value);
        }
    }
}

// Check that the next token starts at the expected indentation column
// (the current indent plus an optional bias). Enforced only with
// option.white set and when input remains.
function indentation(bias) {
    var i;
    if (option.white && nexttoken.id !== '(end)') {
        i = indent + (bias || 0);
        if (nexttoken.from !== i) {
            warning("Expected '{a}' to have an indentation of {b} instead of {c}.",
                nexttoken, nexttoken.value, i, nexttoken.from);
        }
    }
}

// Warn if a line break separates t from the next token.
function nolinebreak(t) {
    if (t.line !== nexttoken.line) {
        warning("Line breaking error '{a}'.", t, t.id);
    }
}

// Parasitic constructors for making the symbols that will be inherited by
// tokens. (Pratt parser vocabulary: lbp is the left binding power; nud, led,
// and fud are the null, left, and first denotations that the helpers below
// attach to each symbol in the shared syntax table.)

// Get or create the syntax symbol for s with left binding power p.
function symbol(s, p) {
    var x = syntax[s];
    if (!x || typeof x !== 'object') {
        syntax[s] = x = {
            id: s,
            lbp: p,
            value: s
        };
    }
    return x;
}

// A delimiter: a symbol with no binding power.
function delim(s) {
    return symbol(s, 0);
}

// A statement keyword: reserved, with a first-null-denotation parser f.
function stmt(s, f) {
    var x = delim(s);
    x.identifier = x.reserved = true;
    x.fud = f;
    return x;
}

// A statement keyword that is marked as a block statement (may carry a label).
function blockstmt(s, f) {
    var x = stmt(s, f);
    x.block = true;
    return x;
}

// Mark x as a reserved identifier when its id begins with a letter.
function reserveName(x) {
    var c = x.id.charAt(0);
    if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')) {
        x.identifier = x.reserved = true;
    }
    return x;
}

// A prefix operator with binding power 150. If no nud f is supplied, the
// default parses a unary expression and flags ++/-- under option.plusplus.
function prefix(s, f) {
    var x = symbol(s, 150);
    reserveName(x);
    x.nud = (typeof f === 'function') ? f : function () {
        if (option.plusplus && (this.id === '++' || this.id === '--')) {
            warning("Unexpected use of '{a}'.", this, this.id);
        }
        this.right = parse(150);
        this.arity = 'unary';
        return this;
    };
    return x;
}

// A type token (e.g. '(number)') whose nud is f.
function type(s, f) {
    var x = delim(s);
    x.type = s;
    x.nud = f;
    return x;
}

// A reserved type token.
function reserve(s, f) {
    var x = type(s, f);
    x.identifier = x.reserved = true;
    return x;
}

// A reserved variable-like word (this, null, true, ...). Using 'this' is an
// ADsafe violation under option.safe. The v parameter is unused here.
function reservevar(s, v) {
    return reserve(s, function () {
        if (this.id === 'this') {
            if (option.safe) {
                warning("ADsafe violation.", this);
            }
        }
        return this;
    });
}

// An infix operator with binding power p. The default led checks spacing
// around the operator and parses the right operand at power p.
function infix(s, f, p) {
    var x = symbol(s, p);
    reserveName(x);
    x.led = (typeof f === 'function') ? f : function (left) {
        nonadjacent(prevtoken, token);
        nonadjacent(token, nexttoken);
        this.left = left;
        this.right = parse(p);
        return this;
    };
    return x;
}

// A relational operator (binding power 100). Warns on comparisons with NaN,
// and otherwise lets f perform extra checks on the operand pair.
function relation(s, f) {
    var x = symbol(s, 100);
    x.led = function (left) {
        nonadjacent(prevtoken, token);
        nonadjacent(token, nexttoken);
        var right = parse(100);
        if ((left && left.id === 'NaN') || (right && right.id === 'NaN')) {
            warning("Use the isNaN function to compare with NaN.", this);
        } else if (f) {
            f.apply(this, [left, right]);
        }
        this.left = left;
        this.right = right;
        return this;
    };
    return x;
}

// True for operands that make loose comparison suspect: falsy number or
// string literals, or the words true/false/undefined/null.
function isPoorRelation(node) {
    return (node.type === '(number)' && !+node.value) ||
        (node.type === '(string)' && !node.value) ||
        node.type === 'true' ||
        node.type === 'false' ||
        node.type === 'undefined' ||
        node.type === 'null';
}

// An assignment operator (=, +=, ...) with binding power 20. Validates the
// left-hand side: member ('.') and subscript ('[') expressions and writable
// identifiers are accepted; assignment into 'arguments', to an exception
// parameter, to an ADsafe-predefined global (under option.safe), or to a
// function invocation is reported.
function assignop(s, f) {
    symbol(s, 20).exps = true;
    return infix(s, function (left) {
        var l;
        this.left = left;
        nonadjacent(prevtoken, token);
        nonadjacent(token, nexttoken);
        if (option.safe) {

// Walk down the chain of .left nodes looking for a predefined base object.

            l = left;
            do {
                if (predefined[l.value] === true) {
                    warning('ADsafe violation.', l);
                }
                l = l.left;
            } while (l);
        }
        if (left) {
            if (left.id === '.' || left.id === '[') {
                if (left.left.value === 'arguments') {
                    warning('Bad assignment.', this);
                }
                this.right = parse(19);
                return this;
            } else if (left.identifier && !left.reserved) {
                if (funct[left.value] === 'exception') {
                    warning("Do not assign to the exception parameter.", left);
                }
                this.right = parse(19);
                return this;
            }
            if (left === syntax['function']) {
                warning(
"Expected an identifier in an assignment and instead saw a function invocation.",
                    token);
            }
        }
        error("Bad assignment.", this);
    }, 20);
}

// A bitwise operator. Like infix, but flags any use under option.bitwise.
function bitwise(s, f, p) {
    var x = symbol(s, p);
    reserveName(x);
    x.led = (typeof f === 'function') ? f : function (left) {
        if (option.bitwise) {
            warning("Unexpected use of '{a}'.", this, this.id);
        }
        nonadjacent(prevtoken, token);
        nonadjacent(token, nexttoken);
        this.left = left;
        this.right = parse(p);
        return this;
    };
    return x;
}

// A bitwise assignment operator (&=, |=, ^=, ...). Flags use under
// option.bitwise and validates the left-hand side like assignop.
function bitwiseassignop(s) {
    symbol(s, 20).exps = true;
    return infix(s, function (left) {
        if (option.bitwise) {
            warning("Unexpected use of '{a}'.", this, this.id);
        }
        nonadjacent(prevtoken, token);
        nonadjacent(token, nexttoken);
        if (left) {
            if (left.id === '.' || left.id === '[' ||
                    (left.identifier && !left.reserved)) {
                parse(19);
                return left;
            }
            if (left === syntax['function']) {
                warning(
"Expected an identifier in an assignment, and instead saw a function invocation.",
                    token);
            }
        }
        error("Bad assignment.", this);
    }, 20);
}

// A suffix operator (++ / --) with binding power 150, flagged under
// option.plusplus.
function suffix(s, f) {
    var x = symbol(s, 150);
    x.led = function (left) {
        if (option.plusplus) {
            warning("Unexpected use of '{a}'.", this, this.id);
        }
        this.left = left;
        return this;
    };
    return x;
}

// If the next token is an identifier, consume it and return its name;
// otherwise return undefined. Reserved words (except 'arguments') draw a
// warning but are still accepted when marked as identifiers.
function optionalidentifier() {
    if (nexttoken.reserved && nexttoken.value !== 'arguments') {
        warning("Expected an identifier and instead saw '{a}' (a reserved word).",
            nexttoken, nexttoken.id);
    }
    if (nexttoken.identifier) {
        advance();
        return token.value;
    }
}

// Require an identifier. A missing name after 'function' gets a specific
// message; anything else is a fatal error.
function identifier() {
    var i = optionalidentifier();
    if (i) {
        return i;
    }
    if (token.id === 'function' && nexttoken.id === '(') {
        warning("Missing name in function statement.");
    } else {
        error("Expected an identifier and instead saw '{a}'.",
            nexttoken, nexttoken.value);
    }
}

// After a disruptive statement s (the caller names it), peek past the
// semicolon for statements that can never execute and warn about the first
// one found. Inner function declarations get a gentler message.
function reachable(s) {
    var i = 0, t;
    if (nexttoken.id !== ';' || noreach) {
        return;
    }
    for (;;) {
        t = peek(i);
        if (t.reach) {
            return;
        }
        if (t.id !== '(endline)') {
            if (t.id === 'function') {
                warning(
"Inner functions should be listed at the top of the outer function.", t);
                break;
            }
            warning("Unreachable '{a}' after '{b}'.", t, t.value, s);
            break;
        }
        i += 1;
    }
}

// Parse a single statement: handles the empty statement, statement labels,
// indentation checking, and the terminating semicolon. Saves and restores
// the indent level and scope; returns the parsed statement tree.
function statement(noindent) {
    var i = indent, r, s = scope, t = nexttoken;

// We don't like the empty statement.

    if (t.id === ';') {
        warning("Unnecessary semicolon.", t);
        advance(';');
        return;
    }

// Is this a labelled statement?

    if (t.identifier && !t.reserved && peek().id === ':') {
        advance();
        advance(':');
        scope = Object.create(s);
        addlabel(t.value, 'label');
        if (!nexttoken.labelled) {
            warning("Label '{a}' on {b} statement.",
                nexttoken, t.value, nexttoken.value);
        }
        if (jx.test(t.value + ':')) {
            warning("Label '{a}' looks like a javascript url.", t, t.value);
        }
        nexttoken.label = t.value;
        t = nexttoken;
    }

// Parse the statement.

    if (!noindent) {
        indentation();
    }
    r = parse(0, true);

// Look for the final semicolon.

    if (!t.block) {
        if (nexttoken.id !== ';') {
            warningAt("Missing semicolon.", token.line,
                token.from + token.value.length);
        } else {
            adjacent(token, nexttoken);
            advance(';');
            nonadjacent(token, nexttoken);
        }
    }

// Restore the indentation.

    indent = i;
    scope = s;
    return r;
}

// Consume a "use strict" directive if one is next. Returns true when found.
function use_strict() {
    if (nexttoken.type === '(string)' &&
            /^use +strict(?:,.+)?$/.test(nexttoken.value)) {
        advance();
        advance(';');
        return true;
    } else {
        return false;
    }
}

// Parse a sequence of statements until an unreachable point or end of input.
// begin identifies the context ('script' or 'lib') for ADsafe checking.
function statements(begin) {
    var a = [], f, p;
    if (begin && !use_strict() && option.strict) {
        warning('Missing "use strict" statement.', nexttoken);
    }
    if (option.adsafe) {
        switch (begin) {
        case 'script':

// An ADsafe script must begin with ADSAFE.id or ADSAFE.go. adsafe_may is
// set to true once an ADSAFE.id(...) call has been validated below.

            if (!adsafe_may) {
                if (nexttoken.value !== 'ADSAFE' || peek(0).id !== '.' ||
                        (peek(1).value !== 'id' && peek(1).value !== 'go')) {
                    error('ADsafe violation: Missing ADSAFE.id or ADSAFE.go.',
                        nexttoken);
                }
            }
            if (nexttoken.value === 'ADSAFE' && peek(0).id === '.' &&
                    peek(1).value === 'id') {
                if (adsafe_may) {
                    error('ADsafe violation.', nexttoken);
                }
                advance('ADSAFE');
                advance('.');
                advance('id');
                advance('(');
                if (nexttoken.value !== adsafe_id) {
                    error('ADsafe violation: id does not match.', nexttoken);
                }
                advance('(string)');
                advance(')');
                advance(';');
                adsafe_may = true;
            }
            break;
        case 'lib':

// An ADsafe library must be a single ADSAFE.lib(name, function) call whose
// function takes a parameter named 'lib'.

            if (nexttoken.value === 'ADSAFE') {
                advance('ADSAFE');
                advance('.');
                advance('lib');
                advance('(');
                advance('(string)');
                advance(',');
                f = parse(0);
                if (f.id !== 'function') {
                    error('The second argument to lib must be a function.', f);
                }
                p = f.funct['(params)'];
                if (p && p !== 'lib') {
                    error("Expected '{a}' and instead saw '{b}'.",
                        f, 'lib', p);
                }
                advance(')');
                advance(';');
                return a;
            } else {
                error("ADsafe lib violation.");
            }
        }
    }
    while (!nexttoken.reach && nexttoken.id !== '(end)') {
        if (nexttoken.id === ';') {
            warning("Unnecessary semicolon.");
            advance(';');
        } else {
            a.push(statement());
        }
    }
    return a;
}

// Parse a block: either a braced statement list or (with a warning) a single
// statement. f marks a function body, which gets a fresh scope. Saves and
// restores inblock, scope, and the indentation level.
function block(f) {
    var a, b = inblock, s = scope, t;
    inblock = f;
    if (f) {
        scope = Object.create(scope);
    }
    nonadjacent(token, nexttoken);
    t = nexttoken;
    if (nexttoken.id === '{') {
        advance('{');
        if (nexttoken.id !== '}' || token.line !== nexttoken.line) {
            indent += option.indent;

// A non-function block is allowed one extra level of indentation.

            if (!f && nexttoken.from === indent + option.indent) {
                indent += option.indent;
            }
            if (!f) {
                use_strict();
            }
            a = statements();
            indent -= option.indent;
            indentation();
        }
        advance('}', t);
    } else {
        warning("Expected '{a}' and instead saw '{b}'.",
            nexttoken, '{', nexttoken.value);
        noreach = true;
        a = [statement()];
        noreach = false;
    }
    funct['(verb)'] = null;
    scope = s;
    inblock = b;
    return a;
}

// An identity function, used by string and number tokens.
function idValue() {
    return this;
}

// Record a use of member name m in the member tally, warning when a
// /*members*/ whitelist is active and m is not on it.
function countMember(m) {
    if (membersOnly && membersOnly[m] !== true) {
        warning("Unexpected /*member '{a}'.", nexttoken, m);
    }
    if (typeof member[m] === 'number') {
        member[m] += 1;
    } else {
        member[m] = 1;
    }
}

// Record the (1-based) line on which an undeclared (implied) global named by
// token was used, avoiding duplicate consecutive line entries.
function note_implied(token) {
    var name = token.value, line = token.line + 1, a = implied[name];
    if (typeof a === 'function') {
        a = false;
    }
    if (!a) {
        a = [line];
        implied[name] = a;
    } else if (a[a.length - 1] !== line) {
        a.push(line);
    }
}

// CSS parsing.
// CSS value parsers. Each attempts to consume one value of its kind from
// the token stream and returns true on success (several return undefined
// rather than false when nothing matched).

    function cssName() {
        if (nexttoken.identifier) {
            advance();
            return true;
        }
    }

    function cssNumber() {
        if (nexttoken.id === '-') {
            advance('-');
            advance('(number)');
        }
        if (nexttoken.type === '(number)') {
            advance();
            return true;
        }
    }

    function cssString() {
        if (nexttoken.type === '(string)') {
            advance();
            return true;
        }
    }

// A color: rgb(n, n, n) with range checks (0-100 for percentages,
// 0-255 for plain components), a named color, or a #hex color token.

    function cssColor() {
        var i, number;
        if (nexttoken.identifier) {
            if (nexttoken.value === 'rgb') {
                advance();
                advance('(');
                for (i = 0; i < 3; i += 1) {
                    number = nexttoken.value;
                    if (nexttoken.type !== '(number)' || number < 0) {
                        warning("Expected a positive number and instead saw '{a}'",
                            nexttoken, number);
                        advance();
                    } else {
                        advance();
                        if (nexttoken.id === '%') {
                            advance('%');
                            if (number > 100) {
                                warning("Expected a percentage and instead saw '{a}'",
                                    token, number);
                            }
                        } else {
                            if (number > 255) {
                                warning("Expected a small number and instead saw '{a}'",
                                    token, number);
                            }
                        }
                    }
                }
                advance(')');
                return true;
            } else if (cssColorData[nexttoken.value] === true) {
                advance();
                return true;
            }
        } else if (nexttoken.type === '(color)') {
            advance();
            return true;
        }
        return false;
    }

// A length: optionally signed number followed by a unit; a bare number
// is only acceptable when it is zero.

    function cssLength() {
        if (nexttoken.id === '-') {
            advance('-');
            adjacent();
        }
        if (nexttoken.type === '(number)') {
            advance();
            if (nexttoken.type !== '(string)' &&
                    cssLengthData[nexttoken.value] === true) {
                adjacent();
                advance();
            } else if (+token.value !== 0) {
                warning("Expected a linear unit and instead saw '{a}'.",
                    nexttoken, nexttoken.value);
            }
            return true;
        }
        return false;
    }

// Like cssLength, but a unitless number is allowed (line-height multiplier).

    function cssLineHeight() {
        if (nexttoken.id === '-') {
            advance('-');
            adjacent();
        }
        if (nexttoken.type === '(number)') {
            advance();
            if (nexttoken.type !== '(string)' &&
                    cssLengthData[nexttoken.value] === true) {
                adjacent();
                advance();
            }
            return true;
        }
        return false;
    }

    function cssWidth() {
        if (nexttoken.identifier) {
            switch (nexttoken.value) {
            case 'thin':
            case 'medium':
            case 'thick':
                advance();
                return true;
            }
        } else {
            return cssLength();
        }
    }

    function cssMargin() {
        if (nexttoken.identifier) {
            if (nexttoken.value === 'auto') {
                advance();
                return true;
            }
        } else {
            return cssLength();
        }
    }

// attr(name)

    function cssAttr() {
        if (nexttoken.identifier && nexttoken.value === 'attr') {
            advance();
            advance('(');
            if (!nexttoken.identifier) {
                warning("Expected a name and instead saw '{a}'.",
                    nexttoken, nexttoken.value);
            }
            advance();
            advance(')');
            return true;
        }
        return false;
    }

// A comma-separated list of names or strings (e.g. font-family).

    function cssCommaList() {
        while (nexttoken.id !== ';') {
            if (!cssName() && !cssString()) {
                warning("Expected a name and instead saw '{a}'.",
                    nexttoken, nexttoken.value);
            }
            if (nexttoken.id !== ',') {
                return true;
            }
            advance(',');
        }
    }

// counter(name[, string]) or counters(name, string[, string]).

    function cssCounter() {
        if (nexttoken.identifier && nexttoken.value === 'counter') {
            advance();
            advance('(');

// NOTE(review): empty guard body — the 'counters' branch below emits a
// warning here; this one appears to have lost it. Confirm against upstream.

            if (!nexttoken.identifier) {
            }
            advance();
            if (nexttoken.id === ',') {
                advance(',');
                if (nexttoken.type !== '(string)') {
                    warning("Expected a string and instead saw '{a}'.",
                        nexttoken, nexttoken.value);
                }
                advance();
            }
            advance(')');
            return true;
        }
        if (nexttoken.identifier && nexttoken.value === 'counters') {
            advance();
            advance('(');
            if (!nexttoken.identifier) {
                warning("Expected a name and instead saw '{a}'.",
                    nexttoken, nexttoken.value);
            }
            advance();
            if (nexttoken.id === ',') {
                advance(',');
                if (nexttoken.type !== '(string)') {
                    warning("Expected a string and instead saw '{a}'.",
                        nexttoken, nexttoken.value);
                }
                advance();
            }
            if (nexttoken.id === ',') {
                advance(',');
                if (nexttoken.type !== '(string)') {
                    warning("Expected a string and instead saw '{a}'.",
                        nexttoken, nexttoken.value);
                }
                advance();
            }
            advance(')');
            return true;
        }
        return false;
    }

// rect(l, l, l, l)

    function cssShape() {
        var i;
        if (nexttoken.identifier && nexttoken.value === 'rect') {
            advance();
            advance('(');
            for (i = 0; i < 4; i += 1) {
                if (!cssLength()) {
                    warning("Expected a number and instead saw '{a}'.",
                        nexttoken, nexttoken.value);
                    break;
                }
            }
            advance(')');
            return true;
        }
        return false;
    }

// url(...) — the lexer captures the raw range between the parens; the URL
// is recorded and, under option.safe, checked against the ADsafe URL filter.

    function cssUrl() {
        var url;
        if (nexttoken.identifier && nexttoken.value === 'url') {
            nexttoken = lex.range('(', ')');
            url = nexttoken.value;
            advance();
            if (option.safe
                    && ux.test(url)) {
                error("ADsafe URL violation.");
            }
            urls.push(url);
            return true;
        }
        return false;
    }


// Fallback matcher used for unrecognized or vendor style attributes:
// accept anything up to the value terminator, but still vet url(...) and
// reject expression(...).

    cssAny = [cssUrl, function () {
        for (;;) {
            if (nexttoken.identifier) {
                switch (nexttoken.value.toLowerCase()) {
                case 'url':
                    cssUrl();
                    break;
                case 'expression':
                    warning("Unexpected expression '{a}'.",
                        nexttoken, nexttoken.value);
                    advance();
                    break;
                default:
                    advance();
                }
            } else {
                if (nexttoken.id === ';' || nexttoken.id === '!' ||
                        nexttoken.id === '(end)' || nexttoken.id === '}') {
                    return true;
                }
                advance();
            }
        }
    }];


    cssBorderStyle = [
        'none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'ridge',
        'inset', 'outset'
    ];


// The grammar of CSS property values. Each entry is either a matcher
// function, an array of alternatives (strings match keywords, functions
// match token shapes), a leading number n meaning "up to n repetitions
// of the following alternative", or a leading 'true' meaning the value
// is a shorthand whose parts are the named sub-properties that follow.

    cssAttributeData = {
        background: [
            true, 'background-attachment', 'background-color',
            'background-image', 'background-position', 'background-repeat'
        ],
        'background-attachment': ['scroll', 'fixed'],
        'background-color': ['transparent', cssColor],
        'background-image': ['none', cssUrl],
        'background-position': [
            2, [cssLength, 'top', 'bottom', 'left', 'right', 'center']
        ],
        'background-repeat': [
            'repeat', 'repeat-x', 'repeat-y', 'no-repeat'
        ],
        'border': [true, 'border-color', 'border-style', 'border-width'],
        'border-bottom': [true, 'border-bottom-color', 'border-bottom-style', 'border-bottom-width'],
        'border-bottom-color': cssColor,
        'border-bottom-style': cssBorderStyle,
        'border-bottom-width': cssWidth,
        'border-collapse': ['collapse', 'separate'],
        'border-color': ['transparent', 4, cssColor],
        'border-left': [
            true, 'border-left-color', 'border-left-style', 'border-left-width'
        ],
        'border-left-color': cssColor,
        'border-left-style': cssBorderStyle,
        'border-left-width': cssWidth,
        'border-right': [
            true, 'border-right-color', 'border-right-style', 'border-right-width'
        ],
        'border-right-color': cssColor,
        'border-right-style': cssBorderStyle,
        'border-right-width': cssWidth,
        'border-spacing': [2, cssLength],
        'border-style': [4, cssBorderStyle],
        'border-top': [
            true, 'border-top-color', 'border-top-style', 'border-top-width'
        ],
        'border-top-color': cssColor,
        'border-top-style': cssBorderStyle,
        'border-top-width': cssWidth,
        'border-width': [4, cssWidth],
        bottom: [cssLength, 'auto'],
        'caption-side' : ['bottom', 'left', 'right', 'top'],
        clear: ['both', 'left', 'none', 'right'],
        clip: [cssShape, 'auto'],
        color: cssColor,
        content: [
            'open-quote', 'close-quote', 'no-open-quote', 'no-close-quote',
            cssString, cssUrl, cssCounter, cssAttr
        ],
        'counter-increment': [
            cssName, 'none'
        ],
        'counter-reset': [
            cssName, 'none'
        ],
        cursor: [
            cssUrl, 'auto', 'crosshair', 'default', 'e-resize', 'help', 'move',
            'n-resize', 'ne-resize', 'nw-resize', 'pointer', 's-resize',
            'se-resize', 'sw-resize', 'w-resize', 'text', 'wait'
        ],
        direction: ['ltr', 'rtl'],
        display: [
            'block', 'compact', 'inline', 'inline-block', 'inline-table',
            'list-item', 'marker', 'none', 'run-in', 'table', 'table-caption',
            'table-column', 'table-column-group', 'table-footer-group',
            'table-header-group', 'table-row', 'table-row-group'
        ],
        'empty-cells': ['show', 'hide'],
        'float': ['left', 'none', 'right'],
        font: [
            'caption', 'icon', 'menu', 'message-box', 'small-caption',
            'status-bar', true, 'font-size', 'font-style', 'font-weight',
            'font-family'
        ],
        'font-family': cssCommaList,
        'font-size': [
            'xx-small', 'x-small', 'small', 'medium', 'large', 'x-large',
            'xx-large', 'larger', 'smaller', cssLength
        ],
        'font-size-adjust': ['none', cssNumber],
        'font-stretch': [
            'normal', 'wider', 'narrower', 'ultra-condensed',
            'extra-condensed', 'condensed', 'semi-condensed',
            'semi-expanded', 'expanded', 'extra-expanded'
        ],
        'font-style': [
            'normal', 'italic', 'oblique'
        ],
        'font-variant': [
            'normal', 'small-caps'
        ],
        'font-weight': [
            'normal', 'bold', 'bolder', 'lighter', cssNumber
        ],
        height: [cssLength, 'auto'],
        left: [cssLength, 'auto'],
        'letter-spacing': ['normal', cssLength],
        'line-height': ['normal', cssLineHeight],
        'list-style': [
            true, 'list-style-image', 'list-style-position', 'list-style-type'
        ],
        'list-style-image': ['none', cssUrl],
        'list-style-position': ['inside', 'outside'],
        'list-style-type': [
            'circle', 'disc', 'square', 'decimal', 'decimal-leading-zero',
            'lower-roman', 'upper-roman', 'lower-greek', 'lower-alpha',
            'lower-latin', 'upper-alpha', 'upper-latin', 'hebrew', 'katakana',
            'hiragana-iroha', 'katakana-oroha', 'none'
        ],
        margin: [4, cssMargin],
        'margin-bottom': cssMargin,
        'margin-left': cssMargin,
        'margin-right': cssMargin,
        'margin-top': cssMargin,
        'marker-offset': [cssLength, 'auto'],
        'max-height': [cssLength, 'none'],
        'max-width': [cssLength, 'none'],
        'min-height': cssLength,
        'min-width': cssLength,
        opacity: cssNumber,
        outline: [true, 'outline-color', 'outline-style', 'outline-width'],
        'outline-color': ['invert', cssColor],
        'outline-style': [
            'dashed', 'dotted', 'double', 'groove', 'inset', 'none',
            'outset', 'ridge', 'solid'
        ],
        'outline-width': cssWidth,
        overflow: ['auto', 'hidden', 'scroll', 'visible'],
        padding: [4, cssLength],
        'padding-bottom': cssLength,
        'padding-left': cssLength,
        'padding-right': cssLength,
        'padding-top': cssLength,
        position: ['absolute', 'fixed', 'relative', 'static'],
        quotes: [8, cssString],
        right: [cssLength, 'auto'],
        'table-layout': ['auto', 'fixed'],
        'text-align': ['center', 'justify', 'left', 'right'],
        'text-decoration': ['none', 'underline', 'overline', 'line-through', 'blink'],
        'text-indent': cssLength,
        'text-shadow': ['none', 4, [cssColor, cssLength]],
        'text-transform': ['capitalize', 'uppercase', 'lowercase', 'none'],
        top: [cssLength, 'auto'],
        'unicode-bidi': ['normal', 'embed', 'bidi-override'],
        'vertical-align': [
            'baseline', 'bottom', 'sub', 'super', 'top', 'text-top', 'middle',
            'text-bottom', cssLength
        ],
        visibility: ['visible', 'hidden', 'collapse'],
        'white-space': ['normal', 'pre', 'nowrap'],
        width: [cssLength, 'auto'],
        'word-spacing': ['normal', cssLength],
        'z-index': ['auto', cssNumber]
    };


// Parse a style attribute name, returning its value grammar from
// cssAttributeData (or cssAny for vendor/unknown attributes). Leading
// '*', '#', and '_' hacks are tolerated only under option.css.

    function styleAttribute() {
        var v;
        while (nexttoken.id === '*' || nexttoken.id === '#' ||
                nexttoken.value === '_') {
            if (!option.css) {
                warning("Unexpected '{a}'.", nexttoken, nexttoken.value);
            }
advance(); } if (nexttoken.id === '-') { if (!option.css) { warning("Unexpected '{a}'.", nexttoken, nexttoken.value); } advance('-'); if (!nexttoken.identifier) { warning("Expected a non-standard style attribute and instead saw '{a}'.", nexttoken, nexttoken.value); } advance(); return cssAny; } else { if (!nexttoken.identifier) { warning("Excepted a style attribute, and instead saw '{a}'.", nexttoken, nexttoken.value); } else { if (cssAttributeData.hasOwnProperty(nexttoken.value)) { v = cssAttributeData[nexttoken.value]; } else { v = cssAny; if (!option.css) { warning("Unrecognized style attribute '{a}'.", nexttoken, nexttoken.value); } } } advance(); return v; } } function styleValue(v) { var i = 0, n, once, match, round, start = 0, vi; switch (typeof v) { case 'function': return v(); case 'string': if (nexttoken.identifier && nexttoken.value === v) { advance(); return true; } return false; } for (;;) { if (i >= v.length) { return false; } vi = v[i]; i += 1; if (vi === true) { break; } else if (typeof vi === 'number') { n = vi; vi = v[i]; i += 1; } else { n = 1; } match = false; while (n > 0) { if (styleValue(vi)) { match = true; n -= 1; } else { break; } } if (match) { return true; } } start = i; once = []; for (;;) { round = false; for (i = start; i < v.length; i += 1) { if (!once[i]) { if (styleValue(cssAttributeData[v[i]])) { match = true; round = true; once[i] = true; break; } } } if (!round) { return match; } } } function substyle() { var v; for (;;) { if (nexttoken.id === '}' || nexttoken.id === '(end)' || xquote && nexttoken.id === xquote) { return; } while (nexttoken.id === ';') { warning("Misplaced ';'."); advance(';'); } v = styleAttribute(); advance(':'); if (nexttoken.identifier && nexttoken.value === 'inherit') { advance(); } else { styleValue(v); } while (nexttoken.id !== ';' && nexttoken.id !== '!' 
&& nexttoken.id !== '}' && nexttoken.id !== '(end)' && nexttoken.id !== xquote) { warning("Unexpected token '{a}'.", nexttoken, nexttoken.value); advance(); } if (nexttoken.id === '!') { advance('!'); adjacent(); if (nexttoken.identifier && nexttoken.value === 'important') { advance(); } else { warning("Expected '{a}' and instead saw '{b}'.", nexttoken, 'important', nexttoken.value); } } if (nexttoken.id === '}' || nexttoken.id === xquote) { warning("Missing '{a}'.", nexttoken, ';'); } else { advance(';'); } } } function stylePattern() { var name; if (nexttoken.id === '{') { warning("Expected a style pattern, and instead saw '{a}'.", nexttoken, nexttoken.id); } else if (nexttoken.id === '@') { advance('@'); name = nexttoken.value; if (nexttoken.identifier && atrule[name] === true) { advance(); return name; } warning("Expected an at-rule, and instead saw @{a}.", nexttoken, name); } for (;;) { if (nexttoken.identifier) { if (!htmltag.hasOwnProperty(nexttoken.value)) { warning("Expected a tagName, and instead saw {a}.", nexttoken, nexttoken.value); } advance(); } else { switch (nexttoken.id) { case '>': case '+': advance(); if (!nexttoken.identifier || !htmltag.hasOwnProperty(nexttoken.value)) { warning("Expected a tagName, and instead saw {a}.", nexttoken, nexttoken.value); } advance(); break; case ':': advance(':'); if (pseudorule[nexttoken.value] !== true) { warning("Expected a pseudo, and instead saw :{a}.", nexttoken, nexttoken.value); } advance(); if (nexttoken.value === 'lang') { advance('('); if (!nexttoken.identifier) { warning("Expected a lang code, and instead saw :{a}.", nexttoken, nexttoken.value); } advance(')'); } break; case '#': advance('#'); if (!nexttoken.identifier) { warning("Expected an id, and instead saw #{a}.", nexttoken, nexttoken.value); } advance(); break; case '*': advance('*'); break; case '.': advance('.'); if (!nexttoken.identifier) { warning("Expected a class, and instead saw #.{a}.", nexttoken, nexttoken.value); } advance(); break; 
                case '[':
                    advance('[');
                    if (!nexttoken.identifier) {
                        warning("Expected an attribute, and instead saw [{a}].",
                            nexttoken, nexttoken.value);
                    }
                    advance();
                    if (nexttoken.id === '=' || nexttoken.id === '~=' ||
                            nexttoken.id === '|=') {
                        advance();
                        if (nexttoken.type !== '(string)') {
                            warning("Expected a string, and instead saw {a}.",
                                nexttoken, nexttoken.value);
                        }
                        advance();
                    }
                    advance(']');
                    break;
                default:
                    error("Expected a CSS selector, and instead saw {a}.",
                        nexttoken, nexttoken.value);
                }
            }
            if (nexttoken.id === '</' || nexttoken.id === '{' ||
                    nexttoken.id === '(end)') {
                return '';
            }
            if (nexttoken.id === ',') {
                advance(',');
            }
        }
    }


// Parse a style sheet: selector { body } rules (or bare @rules ended by
// a semicolon) until the closing </style> tag or end of input.

    function styles() {
        while (nexttoken.id !== '</' && nexttoken.id !== '(end)') {
            stylePattern();
            xmode = 'styleproperty';
            if (nexttoken.id === ';') {
                advance(';');
            } else {
                advance('{');
                substyle();
                xmode = 'style';
                advance('}');
            }
        }
    }


// HTML parsing.

// Validate the document's first tag and set up browser assumptions.
// Under ADsafe, only a <div> fragment is accepted.

    function doBegin(n) {
        if (n !== 'html' && !option.fragment) {
            if (n === 'div' && option.adsafe) {
                error("ADSAFE: Use the fragment option.");
            } else {
                error("Expected '{a}' and instead saw '{b}'.",
                    token, 'html', n);
            }
        }
        if (option.adsafe) {
            if (n === 'html') {
                error("Currently, ADsafe does not operate on whole HTML documents. It operates on <div> fragments and .js files.",
                    token);
            }
            if (option.fragment) {
                if (n !== 'div') {
                    error("ADsafe violation: Wrap the widget in a div.", token);
                }
            } else {
                error("Use the fragment option.", token);
            }
        }
        option.browser = true;
        assume();
    }


// Validate one HTML attribute 'a' with value 'v' on tag 'n': id uniqueness
// and ADsafe id discipline, character screening, and URL collection.

    function doAttribute(n, a, v) {
        var u, x;
        if (a === 'id') {
            u = typeof v === 'string' ?
                    v.toUpperCase() : '';

// ids are tracked case-insensitively (upper-cased) for duplicate detection.

            if (ids[u] === true) {
                warning("Duplicate id='{a}'.", nexttoken, v);
            }
            if (option.adsafe) {
                if (adsafe_id) {
                    if (v.slice(0, adsafe_id.length) !== adsafe_id) {
                        warning("ADsafe violation: An id must have a '{a}' prefix",
                            nexttoken, adsafe_id);
                    } else if (!/^[A-Z]+_[A-Z]+$/.test(v)) {
                        warning("ADSAFE violation: bad id.");
                    }
                } else {

// The first id seen becomes the widget's required id prefix.

                    adsafe_id = v;
                    if (!/^[A-Z]+_$/.test(v)) {
                        warning("ADSAFE violation: bad id.");
                    }
                }
            }
            x = v.search(dx);
            if (x >= 0) {
                warning("Unexpected character '{a}' in {b}.", token, v.charAt(x), a);
            }
            ids[u] = true;
        } else if (a === 'class' || a === 'type' || a === 'name') {
            x = v.search(qx);
            if (x >= 0) {
                warning("Unexpected character '{a}' in {b}.", token, v.charAt(x), a);
            }

// NOTE(review): 'u' is only assigned in the 'id' branch above, so this
// records ids[undefined] — looks like a copy/paste slip; confirm intent.

            ids[u] = true;
        } else if (a === 'href' || a === 'background' ||
                a === 'content' || a === 'data' ||
                a.indexOf('src') >= 0 || a.indexOf('url') >= 0) {
            if (option.safe && ux.test(v)) {
                error("ADsafe URL violation.");
            }
            urls.push(v);
        } else if (a === 'for') {
            if (option.adsafe) {
                if (adsafe_id) {
                    if (v.slice(0, adsafe_id.length) !== adsafe_id) {
                        warning("ADsafe violation: An id must have a '{a}' prefix",
                            nexttoken, adsafe_id);
                    } else if (!/^[A-Z]+_[A-Z]+$/.test(v)) {
                        warning("ADSAFE violation: bad id.");
                    }
                } else {
                    warning("ADSAFE violation: bad id.");
                }
            }
        } else if (a === 'name') {

// NOTE(review): unreachable — 'name' is already consumed by the
// class/type/name branch above; this ADsafe underscore check never runs.

            if (option.adsafe && v.indexOf('_') >= 0) {
                warning("ADsafe name violation.");
            }
        }
    }


// Validate tag 'n' with attribute map 'a': nesting rules from htmltag,
// plus special handling for script, style, input, and ADsafe-banned tags.

    function doTag(n, a) {
        var i, t = htmltag[n], x;
        src = false;
        if (!t) {
            error("Unrecognized tag '<{a}>'.", nexttoken,
                n === n.toLowerCase() ?
                    n : n + ' (capitalization error)');
        }
        if (stack.length > 0) {
            if (n === 'html') {
                error("Too many <html> tags.", token);
            }
            x = t.parent;
            if (x) {

// A tag with a declared parent list must sit directly inside one of them.

                if (x.indexOf(' ' + stack[stack.length - 1].name + ' ') < 0) {
                    error("A '<{a}>' must be within '<{b}>'.", token, n, x);
                }
            } else if (!option.adsafe && !option.fragment) {

// Otherwise the tag must appear somewhere inside <body>.

                i = stack.length;
                do {
                    if (i <= 0) {
                        error("A '<{a}>' must be within '<{b}>'.", token, n,
                            'body');
                    }
                    i -= 1;
                } while (stack[i].name !== 'body');
            }
        }
        switch (n) {
        case 'div':
            if (option.adsafe && stack.length === 1 && !adsafe_id) {
                warning("ADSAFE violation: missing ID_.");
            }
            break;
        case 'script':

// Switch the lexer into script mode and parse the inline script body
// (or vet the src under ADsafe).

            xmode = 'script';
            advance('>');
            indent = nexttoken.from;
            if (a.lang) {
                warning("lang is deprecated.", token);
            }
            if (option.adsafe && stack.length !== 1) {
                warning("ADsafe script placement violation.", token);
            }
            if (a.src) {
                if (option.adsafe && (!adsafe_may || !approved[a.src])) {
                    warning("ADsafe unapproved script source.", token);
                }
                if (a.type) {
                    warning("type is unnecessary.", token);
                }
            } else {
                if (adsafe_went) {
                    error("ADsafe script violation.", token);
                }
                statements('script');
            }
            xmode = 'html';
            advance('</');
            if (!nexttoken.identifier && nexttoken.value !== 'script') {
                warning("Expected '{a}' and instead saw '{b}'.",
                    nexttoken, 'script', nexttoken.value);
            }
            advance();
            xmode = 'outer';
            break;
        case 'style':
            xmode = 'style';
            advance('>');
            styles();
            xmode = 'html';
            advance('</');
            if (!nexttoken.identifier && nexttoken.value !== 'style') {
                warning("Expected '{a}' and instead saw '{b}'.",
                    nexttoken, 'style', nexttoken.value);
            }
            advance();
            xmode = 'outer';
            break;
        case 'input':
            switch (a.type) {
            case 'radio':
            case 'checkbox':
            case 'text':
            case 'button':
            case 'file':
            case 'reset':
            case 'submit':
            case 'password':

// NOTE(review): 'file' appears twice in this case list — redundant
// (harmless) duplicate label.

            case 'file':
            case 'hidden':
            case 'image':
                break;
            default:
                warning("Bad input type.");
            }
            if (option.adsafe && a.autocomplete !== 'off') {
                warning("ADsafe autocomplete violation.");
            }
            break;
        case 'applet':
        case 'body':
        case 'embed':
        case 'frame':
        case 'frameset':
        case 'head':
        case 'iframe':
        case 'img':
        case 'noembed':
        case 'noframes':
        case 'object':
        case 'param':
            if (option.adsafe) {
                warning("ADsafe violation: Disallowed tag: " + n);
            }
            break;
        }
    }


// Render a closing tag string for error messages.

    function closetag(n) {
        return '</' + n + '>';
    }


// Parse an HTML document (or fragment): open tags with their attributes,
// close tags (matched against the open-tag stack), <! declarations, and
// <!-- comments. Inline event handlers and style="" values recurse into
// the JavaScript and CSS parsers respectively.

    function html() {
        var a, attributes, e, n, q, t, v, w = option.white, wmode;
        xmode = 'html';
        xquote = '';
        stack = null;
        for (;;) {
            switch (nexttoken.value) {
            case '<':
                xmode = 'html';
                advance('<');
                attributes = {};
                t = nexttoken;
                if (!t.identifier) {
                    warning("Bad identifier {a}.", t, t.value);
                }
                n = t.value;
                if (option.cap) {
                    n = n.toLowerCase();
                }
                t.name = n;
                advance();
                if (!stack) {
                    stack = [];
                    doBegin(n);
                }
                v = htmltag[n];
                if (typeof v !== 'object') {
                    error("Unrecognized tag '<{a}>'.", t, n);
                }
                e = v.empty;
                t.type = n;

// Attribute loop: runs until '/>' or '>'.

                for (;;) {
                    if (nexttoken.id === '/') {
                        advance('/');
                        if (nexttoken.id !== '>') {
                            warning("Expected '{a}' and instead saw '{b}'.",
                                nexttoken, '>', nexttoken.value);
                        }
                        break;
                    }
                    if (nexttoken.id && nexttoken.id.substr(0, 1) === '>') {
                        break;
                    }
                    if (!nexttoken.identifier) {
                        if (nexttoken.id === '(end)' ||
                                nexttoken.id === '(error)') {
                            error("Missing '>'.", nexttoken);
                        }
                        warning("Bad identifier.");
                    }
                    option.white = true;
                    nonadjacent(token, nexttoken);
                    a = nexttoken.value;
                    option.white = w;
                    advance();
                    if (!option.cap && a !== a.toLowerCase()) {
                        warning("Attribute '{a}' not all lower case.",
                            nexttoken, a);
                    }
                    a = a.toLowerCase();
                    xquote = '';
                    if (attributes.hasOwnProperty(a)) {
                        warning("Attribute '{a}' repeated.", nexttoken, a);
                    }
                    if (a.slice(0, 2) === 'on') {

// on* handlers: lex the quoted value as JavaScript statements.

                        if (!option.on) {
                            warning("Avoid HTML event handlers.");
                        }
                        xmode = 'scriptstring';
                        advance('=');
                        q = nexttoken.id;
                        if (q !== '"' && q !== "'") {
                            error("Missing quote.");
                        }
                        xquote = q;
                        wmode = option.white;
                        option.white = false;
                        advance(q);
                        statements('on');
                        option.white = wmode;
                        if (nexttoken.id !== q) {
                            error("Missing close quote on script attribute.");
                        }
                        xmode = 'html';
                        xquote = '';
                        advance(q);
                        v = false;
                    } else if (a === 'style') {

// style="": lex the quoted value as CSS declarations.

                        xmode = 'scriptstring';
                        advance('=');
                        q = nexttoken.id;
                        if (q !== '"' && q !== "'") {
                            error("Missing quote.");
                        }
                        xmode = 'styleproperty';
                        xquote = q;
                        advance(q);
                        substyle();
                        xmode = 'html';
                        xquote = '';
                        advance(q);
                        v = false;
                    } else {
                        if (nexttoken.id === '=') {
                            advance('=');
                            v = nexttoken.value;
                            if (!nexttoken.identifier &&
                                    nexttoken.id !== '"' &&
                                    nexttoken.id !== '\'' &&
                                    nexttoken.type !== '(string)' &&
                                    nexttoken.type !== '(number)' &&
                                    nexttoken.type !== '(color)') {
                                warning("Expected an attribute value and instead saw '{a}'.", token, a);
                            }
                            advance();
                        } else {

// A bare attribute with no '=' is recorded as boolean true.

                            v = true;
                        }
                    }
                    attributes[a] = v;
                    doAttribute(n, a, v);
                }
                doTag(n, attributes);
                if (!e) {
                    stack.push(t);
                }
                xmode = 'outer';
                advance('>');
                break;
            case '</':
                xmode = 'html';
                advance('</');
                if (!nexttoken.identifier) {
                    warning("Bad identifier.");
                }
                n = nexttoken.value;
                if (option.cap) {
                    n = n.toLowerCase();
                }
                advance();
                if (!stack) {
                    error("Unexpected '{a}'.", nexttoken, closetag(n));
                }
                t = stack.pop();
                if (!t) {
                    error("Unexpected '{a}'.", nexttoken, closetag(n));
                }
                if (t.name !== n) {
                    error("Expected '{a}' and instead saw '{b}'.",
                        nexttoken, closetag(t.name), closetag(n));
                }
                if (nexttoken.id !== '>') {
                    error("Missing '{a}'.", nexttoken, '>');
                }
                xmode = 'outer';
                advance('>');
                break;
            case '<!':

// <!DOCTYPE and similar declarations: skip to '>'.

                if (option.safe) {
                    warning("ADsafe HTML violation.");
                }
                xmode = 'html';
                for (;;) {
                    advance();
                    if (nexttoken.id === '>' || nexttoken.id === '(end)') {
                        break;
                    }
                    if (nexttoken.id === '--') {
                        warning("Unexpected --.");
                    }
                }
                xmode = 'outer';
                advance('>');
                break;
            case '<!--':
                xmode = 'html';
                if (option.safe) {
                    warning("ADsafe HTML violation.");
                }
                for (;;) {
                    advance();
                    if (nexttoken.id === '(end)') {
                        error("Missing '-->'.");
                    }
                    if (nexttoken.id === '<!' || nexttoken.id === '<!--') {
                        error("Unexpected '<!' in HTML comment.");
                    }
                    if (nexttoken.id === '--') {
                        advance('--');
                        break;
                    }
                }
                abut();
                xmode = 'outer';
                advance('>');
                break;
            case '(end)':
                return;
            default:
                if (nexttoken.id === '(end)') {
                    error("Missing '{a}'.", nexttoken,
                        '</' + stack[stack.length - 1].value + '>');
                } else {
                    advance();
                }
            }

// A fragment is complete when its outermost element has been closed.

            if (stack && stack.length === 0 && (option.adsafe ||
                    !option.fragment || nexttoken.id === '(end)')) {
                break;
            }
        }
        if (nexttoken.id !== '(end)') {
            error("Unexpected material after the end.");
        }
    }


// Build the syntax table by declaring the syntactic elements of the language.

    type('(number)', idValue);
    type('(string)', idValue);

    syntax['(identifier)'] = {
        type: '(identifier)',
        lbp: 0,
        identifier: true,
        nud: function () {
            var v = this.value, s = scope[v];
            if (typeof s === 'function') {
                s = false;
            }

// The name is in scope and defined in the current function.

            if (s && (s === funct || s === funct['(global)'])) {

// If we are not also in the global scope, change 'unused' to 'var',
// and reject labels.

                if (!funct['(global)']) {
                    switch (funct[v]) {
                    case 'unused':
                        funct[v] = 'var';
                        break;
                    case 'label':
                        warning("'{a}' is a statement label.", token, v);
                        break;
                    }
                }

// The name is not defined in the function. If we are in the global scope,
// then we have an undefined variable.

            } else if (funct['(global)']) {
                if (option.undef) {
                    warning("'{a}' is not defined.", token, v);
                }
                note_implied(token);

// If the name is already defined in the current
// function, but not as outer, then there is a scope error.

            } else {
                switch (funct[v]) {
                case 'closure':
                case 'function':
                case 'var':
                case 'unused':
                    warning("'{a}' used out of scope.", token, v);
                    break;
                case 'label':
                    warning("'{a}' is a statement label.", token, v);
                    break;
                case 'outer':
                case true:
                    break;
                default:

// If the name is defined in an outer function, make an outer entry, and if
// it was unused, make it var.
                    if (s === true) {
                        funct[v] = true;
                    } else if (typeof s !== 'object') {

// The name came from somewhere other than an enclosing function record:
// treat it as undefined (or implied global when option.undef is off).

                        if (option.undef) {
                            warning("'{a}' is not defined.", token, v);
                        } else {
                            funct[v] = true;
                        }
                        note_implied(token);
                    } else {
                        switch (s[v]) {
                        case 'function':
                        case 'var':
                        case 'unused':
                            s[v] = 'closure';
                            funct[v] = 'outer';
                            break;
                        case 'closure':
                        case 'parameter':
                            funct[v] = 'outer';
                            break;
                        case 'label':
                            warning("'{a}' is a statement label.", token, v);
                        }
                    }
                }
            }
            return this;
        },
        led: function () {
            error("Expected an operator and instead saw '{a}'.",
                nexttoken, nexttoken.value);
        }
    };

    type('(regexp)', function () {
        return this;
    });

// Delimiters. '.reach = true' marks tokens that a statement list may
// legally stop before.

    delim('(endline)');
    delim('(begin)');
    delim('(end)').reach = true;
    delim('</').reach = true;
    delim('<!');
    delim('<!--');
    delim('-->');
    delim('(error)').reach = true;
    delim('}').reach = true;
    delim(')');
    delim(']');
    delim('"').reach = true;
    delim("'").reach = true;
    delim(';');
    delim(':').reach = true;
    delim(',');
    delim('#');
    delim('@');
    reserve('else');
    reserve('case').reach = true;
    reserve('catch');
    reserve('default').reach = true;
    reserve('finally');
    reservevar('arguments');
    reservevar('eval');
    reservevar('false');
    reservevar('Infinity');
    reservevar('NaN');
    reservevar('null');
    reservevar('this');
    reservevar('true');
    reservevar('undefined');

// Assignment operators, binding power 20.

    assignop('=', 'assign', 20);
    assignop('+=', 'assignadd', 20);
    assignop('-=', 'assignsub', 20);
    assignop('*=', 'assignmult', 20);
    assignop('/=', 'assigndiv', 20).nud = function () {
        error("A regular expression literal can be confused with '/='.");
    };
    assignop('%=', 'assignmod', 20);
    bitwiseassignop('&=', 'assignbitand', 20);
    bitwiseassignop('|=', 'assignbitor', 20);
    bitwiseassignop('^=', 'assignbitxor', 20);
    bitwiseassignop('<<=', 'assignshiftleft', 20);
    bitwiseassignop('>>=', 'assignshiftright', 20);
    bitwiseassignop('>>>=', 'assignshiftrightunsigned', 20);

// The ternary operator.

    infix('?', function (left) {
        this.left = left;
        this.right = parse(10);
        advance(':');
        this['else'] = parse(10);
        return this;
    }, 30);
    infix('||', 'or', 40);
    infix('&&', 'and', 50);
    bitwise('|', 'bitor', 70);
    bitwise('^', 'bitxor', 80);
    bitwise('&', 'bitand', 90);

// Equality operators: under option.eqeqeq always demand ===/!==; otherwise
// only when comparing against values where ==/!= coerce surprisingly.

    relation('==', function (left, right) {
        if (option.eqeqeq) {
            warning("Expected '{a}' and instead saw '{b}'.",
                this, '===', '==');
        } else if (isPoorRelation(left)) {
            warning("Use '{a}' to compare with '{b}'.",
                this, '===', left.value);
        } else if (isPoorRelation(right)) {
            warning("Use '{a}' to compare with '{b}'.",
                this, '===', right.value);
        }
        return this;
    });
    relation('===');
    relation('!=', function (left, right) {
        if (option.eqeqeq) {
            warning("Expected '{a}' and instead saw '{b}'.",
                this, '!==', '!=');
        } else if (isPoorRelation(left)) {
            warning("Use '{a}' to compare with '{b}'.",
                this, '!==', left.value);
        } else if (isPoorRelation(right)) {
            warning("Use '{a}' to compare with '{b}'.",
                this, '!==', right.value);
        }
        return this;
    });
    relation('!==');
    relation('<');
    relation('>');
    relation('<=');
    relation('>=');
    bitwise('<<', 'shiftleft', 120);
    bitwise('>>', 'shiftright', 120);
    bitwise('>>>', 'shiftrightunsigned', 120);
    infix('in', 'in', 120);
    infix('instanceof', 'instanceof', 120);

// '+' folds adjacent string literal operands at lint time so JavaScript
// URLs split across concatenations are still detected.

    infix('+', function (left) {
        nonadjacent(prevtoken, token);
        nonadjacent(token, nexttoken);
        var right = parse(130);
        if (left && right && left.id === '(string)' &&
                right.id === '(string)') {
            left.value += right.value;
            left.character = right.character;
            if (jx.test(left.value)) {
                warning("JavaScript URL.", left);
            }
            return left;
        }
        this.left = left;
        this.right = right;
        return this;
    }, 130);
    prefix('+', 'num');
    infix('-', 'sub', 130);
    prefix('-', 'neg');
    infix('*', 'mult', 140);
    infix('/', 'div', 140);
    infix('%', 'mod', 140);
    suffix('++', 'postinc');
    prefix('++', 'preinc');
    syntax['++'].exps = true;
    suffix('--', 'postdec');
    prefix('--', 'predec');
    syntax['--'].exps = true;

// 'delete' requires a member ('.' or '[') expression as its operand.

    prefix('delete', function () {
        var p = parse(0);
        if (p.id !== '.'
                && p.id !== '[') {
            warning("Expected '{a}' and instead saw '{b}'.",
                nexttoken, '.', nexttoken.value);
        }
    }).exps = true;

    prefix('~', function () {
        if (option.bitwise) {
            warning("Unexpected '{a}'.", this, '~');
        }
        parse(150);
        return this;
    });
    prefix('!', 'not');
    prefix('typeof', 'typeof');

// 'new': flag constructors that have literal-notation or callable
// alternatives, and enforce the capitalization convention.

    prefix('new', function () {
        var c = parse(155), i;
        if (c && c.id !== 'function') {
            if (c.identifier) {
                c['new'] = true;
                switch (c.value) {
                case 'Object':
                    warning("Use the object literal notation {}.", token);
                    break;
                case 'Array':
                    warning("Use the array literal notation [].", token);
                    break;
                case 'Number':
                case 'String':
                case 'Boolean':
                case 'Math':
                    warning("Do not use {a} as a constructor.", token, c.value);
                    break;
                case 'Function':
                    if (!option.evil) {
                        warning("The Function constructor is eval.");
                    }
                    break;
                case 'Date':
                case 'RegExp':
                    break;
                default:
                    if (c.id !== 'function') {
                        i = c.value.substr(0, 1);
                        if (option.newcap && (i < 'A' || i > 'Z')) {
                            warning("A constructor name should start with an uppercase letter.",
                                token);
                        }
                    }
                }
            } else {
                if (c.id !== '.' && c.id !== '[' && c.id !== '(') {
                    warning("Bad constructor.", token);
                }
            }
        } else {
            warning("Weird construction. Delete 'new'.", this);
        }
        adjacent(token, nexttoken);
        if (nexttoken.id !== '(') {
            warning("Missing '()' invoking a constructor.");
        }
        this.first = c;
        return this;
    });
    syntax['new'].exps = true;

// Member access: records member usage, vets document.write, and applies
// the ADsafe restrictions on member chains rooted at predefined names.

    infix('.', function (left) {
        adjacent(prevtoken, token);
        var t = this, m = identifier();
        if (typeof m === 'string') {
            countMember(m);
        }
        t.left = left;
        t.right = m;
        if (!option.evil && left && left.value === 'document' &&
                (m === 'write' || m === 'writeln')) {
            warning("document.write can be a form of eval.", left);
        }
        if (option.adsafe) {
            if (left && left.value === 'ADSAFE') {
                if (m === 'id' || m === 'lib') {
                    warning("ADsafe violation.", this);
                } else if (m === 'go') {

// ADSAFE.go may only appear once, in script position, with the widget id
// as its first argument.

                    if (xmode !== 'script') {
                        warning("ADsafe violation.", this);
                    } else if (adsafe_went || nexttoken.id !== '(' ||
                            peek(0).id !== '(string)' ||
                            peek(0).value !== adsafe_id ||
                            peek(1).id !== ',') {
                        error("ADsafe violation: go.", this);
                    }
                    adsafe_went = true;
                    adsafe_may = false;
                }
            }
        }
        if (option.safe) {
            for (;;) {
                if (banned[m] === true) {
                    warning("ADsafe restricted word '{a}'.", token, m);
                }
                if (predefined[left.value] !== true ||
                        nexttoken.id === '(') {
                    break;
                }
                if (standard_member[m] === true) {
                    if (nexttoken.id === '.') {
                        warning("ADsafe violation.", this);
                    }
                    break;
                }
                if (nexttoken.id !== '.') {
                    warning("ADsafe violation.", this);
                    break;
                }
                advance('.');
                token.left = t;
                token.right = m;
                t = token;
                m = identifier();
                if (typeof m === 'string') {
                    countMember(m);
                }
            }
        }
        return t;
    }, 160);

// Invocation: checks for missing 'new' on capitalized names, eval-alikes,
// parseInt without a radix, and non-callable callees.

    infix('(', function (left) {
        adjacent(prevtoken, token);
        nospace();
        var n = 0, p = [];
        if (left) {
            if (left.type === '(identifier)') {
                if (left.value.match(/^[A-Z]([A-Z0-9_$]*[a-z][A-Za-z0-9_$]*)?$/)) {
                    if (left.value !== 'Number' && left.value !== 'String' &&
                            left.value !== 'Boolean' &&
                            left.value !== 'Date') {
                        if (left.value === 'Math') {
                            warning("Math is not a function.", left);
                        } else if (option.newcap) {
                            warning("Missing 'new' prefix when invoking a constructor.",
                                left);
                        }
                    }
                }
            } else if (left.id === '.') {
                if (option.safe &&
                        left.left.value === 'Math' &&
                        left.right === 'random') {
                    warning("ADsafe violation.", left);
                }
            }
        }
        if (nexttoken.id !== ')') {
            for (;;) {
                p[p.length] = parse(10);
                n += 1;
                if (nexttoken.id !== ',') {
                    break;
                }
                advance(',');
                nonadjacent(token, nexttoken);
            }
        }
        advance(')');
        if (option.immed && left.id === 'function' && nexttoken.id !== ')') {
            warning("Wrap the entire immediate function invocation in parens.",
                this);
        }
        nospace(prevtoken, token);
        if (typeof left === 'object') {
            if (left.value === 'parseInt' && n === 1) {
                warning("Missing radix parameter.", left);
            }
            if (!option.evil) {
                if (left.value === 'eval' || left.value === 'Function' ||
                        left.value === 'execScript') {
                    warning("eval is evil.", left);
                } else if (p[0] && p[0].id === '(string)' &&
                        (left.value === 'setTimeout' ||
                        left.value === 'setInterval')) {
                    warning("Implied eval is evil. Pass a function instead of a string.",
                        left);
                }
            }
            if (!left.identifier && left.id !== '.' && left.id !== '[' &&
                    left.id !== '(' && left.id !== '&&' &&
                    left.id !== '||' && left.id !== '?') {
                warning("Bad invocation.", left);
            }
        }
        this.left = left;
        return this;
    }, 155).exps = true;

// Grouping parens; under option.immed, enforce the preferred placement of
// the invocation parens for immediately-invoked function expressions.

    prefix('(', function () {
        nospace();
        var v = parse(0);
        advance(')', this);
        nospace(prevtoken, token);
        if (option.immed && v.id === 'function') {
            if (nexttoken.id === '(') {
                warning("Move the invocation into the parens that contain the function.",
                    nexttoken);
            } else {
                warning("Do not wrap function literals in parens unless they are to be immediately invoked.",
                    this);
            }
        }
        return v;
    });

// Subscripting: prefer dot notation for identifier-shaped string keys;
// under option.safe, only string/number subscripts are allowed.

    infix('[', function (left) {
        nospace();
        var e = parse(0), s;
        if (e && e.type === '(string)') {
            if (option.safe && banned[e.value] === true) {
                warning("ADsafe restricted word '{a}'.", this, e.value);
            }
            countMember(e.value);
            if (!option.sub && ix.test(e.value)) {
                s = syntax[e.value];
                if (!s || !s.reserved) {
                    warning("['{a}'] is better written in dot notation.",
                        e, e.value);
                }
            }
        } else if (!e || (e.type !== '(number)' &&
                (e.id !== '+' || e.arity !== 'unary'))) {
            if (option.safe) {
                warning('ADsafe subscripting.');
            }
        }
        advance(']', this);
        nospace(prevtoken, token);
        this.left = left;
        this.right = e;
        return this;
    }, 160);

// Array literal: enforces indentation of multi-line literals and flags
// extra/trailing commas.

    prefix('[', function () {
        if (nexttoken.id === ']') {
            advance(']');
            return;
        }
        var b = token.line !== nexttoken.line;
        if (b) {
            indent += option.indent;
            if (nexttoken.from === indent + option.indent) {
                indent += option.indent;
            }
        }
        for (;;) {
            if (b && token.line !== nexttoken.line) {
                indentation();
            }
            parse(10);
            if (nexttoken.id === ',') {
                adjacent(token, nexttoken);
                advance(',');
                if (nexttoken.id === ',') {
                    warning("Extra comma.", token);
                } else if (nexttoken.id === ']') {
                    warning("Extra comma.", token);
                    break;
                }
                nonadjacent(token, nexttoken);
            } else {
                if (b) {
                    indent -= option.indent;
                    indentation();
                }
                break;
            }
        }
        advance(']', this);
        return;
    }, 160);

// Object literal: '{' gets a nud for expression position (keys may be
// identifiers, strings, or numbers; duplicates are flagged) and a fud
// that rejects a block where a statement was expected.

    (function (x) {
        x.nud = function () {
            var b, i, s, seen = {};
            b = token.line !== nexttoken.line;
            if (b) {
                indent += option.indent;
                if (nexttoken.from === indent + option.indent) {
                    indent += option.indent;
                }
            }
            for (;;) {
                if (nexttoken.id === '}') {
                    break;
                }
                if (b) {
                    indentation();
                }
                i = optionalidentifier(true);
                if (!i) {
                    if (nexttoken.id === '(string)') {
                        i = nexttoken.value;
                        if (ix.test(i)) {
                            s = syntax[i];
                        }
                        advance();
                    } else if (nexttoken.id === '(number)') {
                        i = nexttoken.value.toString();
                        advance();
                    } else {
                        error("Expected '{a}' and instead saw '{b}'.",
                            nexttoken, '}', nexttoken.value);
                    }
                }
                if (seen[i] === true) {
                    warning("Duplicate member '{a}'.", nexttoken, i);
                }
                seen[i] = true;
                countMember(i);
                advance(':');
                nonadjacent(token, nexttoken);
                parse(10);
                if (nexttoken.id === ',') {
                    adjacent(token, nexttoken);
                    advance(',');
                    if (nexttoken.id === ',' || nexttoken.id === '}') {
                        warning("Extra comma.", token);
                    }
                    nonadjacent(token, nexttoken);
                } else {
                    break;
                }
            }
            if (b) {
                indent -= option.indent;
                indentation();
            }
            advance('}', this);
            return;
        };
        x.fud = function () {
            error("Expected to see a statement and instead saw a block.",
                token);
        };
    }(delim('{')));

    function varstatement(prefix) { //
JavaScript does not have block scope. It only has function scope. So, // declaring a variable in a block can have unexpected consequences. if (funct['(onevar)'] && option.onevar) { warning("Too many var statements."); } else if (!funct['(global)']) { funct['(onevar)'] = true; } for (;;) { nonadjacent(token, nexttoken); addlabel(identifier(), 'unused'); if (prefix) { return; } if (nexttoken.id === '=') { nonadjacent(token, nexttoken); advance('='); nonadjacent(token, nexttoken); if (peek(0).id === '=') { error("Variable {a} was not declared correctly.", nexttoken, nexttoken.value); } parse(20); } if (nexttoken.id !== ',') { return; } adjacent(token, nexttoken); advance(','); nonadjacent(token, nexttoken); } } stmt('var', varstatement); stmt('new', function () { error("'new' should not be used as a statement."); }); function functionparams() { var i, t = nexttoken, p = []; advance('('); nospace(); if (nexttoken.id === ')') { advance(')'); nospace(prevtoken, token); return; } for (;;) { i = identifier(); p.push(i); addlabel(i, 'parameter'); if (nexttoken.id === ',') { advance(','); nonadjacent(token, nexttoken); } else { advance(')', t); nospace(prevtoken, token); return p.join(', '); } } } function doFunction(i) { var s = scope; scope = Object.create(s); funct = { '(name)' : i || '"' + anonname + '"', '(line)' : nexttoken.line + 1, '(context)' : funct, '(breakage)': 0, '(loopage)' : 0, '(scope)' : scope }; token.funct = funct; functions.push(funct); if (i) { addlabel(i, 'function'); } funct['(params)'] = functionparams(); block(false); scope = s; funct = funct['(context)']; } blockstmt('function', function () { if (inblock) { warning( "Function statements cannot be placed in blocks. Use a function expression or move the statement to the top of the outer function.", token); } var i = identifier(); adjacent(token, nexttoken); addlabel(i, 'unused'); doFunction(i); if (nexttoken.id === '(' && nexttoken.line === token.line) { error( "Function statements are not invocable. 
Wrap the whole function invocation in parens."); } }); prefix('function', function () { var i = optionalidentifier(); if (i) { adjacent(token, nexttoken); } else { nonadjacent(token, nexttoken); } doFunction(i); if (funct['(loopage)'] && nexttoken.id !== '(') { warning("Be careful when making functions within a loop. Consider putting the function in a closure."); } return this; }); blockstmt('if', function () { var t = nexttoken; advance('('); nonadjacent(this, t); nospace(); parse(20); if (nexttoken.id === '=') { warning("Expected a conditional expression and instead saw an assignment."); advance('='); parse(20); } advance(')', t); nospace(prevtoken, token); block(true); if (nexttoken.id === 'else') { nonadjacent(token, nexttoken); advance('else'); if (nexttoken.id === 'if' || nexttoken.id === 'switch') { statement(true); } else { block(true); } } return this; }); blockstmt('try', function () { var b, e, s; if (option.adsafe) { warning("ADsafe try violation.", this); } block(false); if (nexttoken.id === 'catch') { advance('catch'); nonadjacent(token, nexttoken); advance('('); s = scope; scope = Object.create(s); e = nexttoken.value; if (nexttoken.type !== '(identifier)') { warning("Expected an identifier and instead saw '{a}'.", nexttoken, e); } else { addlabel(e, 'exception'); } advance(); advance(')'); block(false); b = true; scope = s; } if (nexttoken.id === 'finally') { advance('finally'); block(false); return; } else if (!b) { error("Expected '{a}' and instead saw '{b}'.", nexttoken, 'catch', nexttoken.value); } }); blockstmt('while', function () { var t = nexttoken; funct['(breakage)'] += 1; funct['(loopage)'] += 1; advance('('); nonadjacent(this, t); nospace(); parse(20); if (nexttoken.id === '=') { warning("Expected a conditional expression and instead saw an assignment."); advance('='); parse(20); } advance(')', t); nospace(prevtoken, token); block(true); funct['(breakage)'] -= 1; funct['(loopage)'] -= 1; }).labelled = true; reserve('with'); 
blockstmt('switch', function () { var t = nexttoken, g = false; funct['(breakage)'] += 1; advance('('); nonadjacent(this, t); nospace(); this.condition = parse(20); advance(')', t); nospace(prevtoken, token); nonadjacent(token, nexttoken); t = nexttoken; advance('{'); nonadjacent(token, nexttoken); indent += option.indent; this.cases = []; for (;;) { switch (nexttoken.id) { case 'case': switch (funct['(verb)']) { case 'break': case 'case': case 'continue': case 'return': case 'switch': case 'throw': break; default: warning( "Expected a 'break' statement before 'case'.", token); } indentation(-option.indent); advance('case'); this.cases.push(parse(20)); g = true; advance(':'); funct['(verb)'] = 'case'; break; case 'default': switch (funct['(verb)']) { case 'break': case 'continue': case 'return': case 'throw': break; default: warning( "Expected a 'break' statement before 'default'.", token); } indentation(-option.indent); advance('default'); g = true; advance(':'); break; case '}': indent -= option.indent; indentation(); advance('}', t); if (this.cases.length === 1 || this.condition.id === 'true' || this.condition.id === 'false') { warning("This 'switch' should be an 'if'.", this); } funct['(breakage)'] -= 1; funct['(verb)'] = undefined; return; case '(end)': error("Missing '{a}'.", nexttoken, '}'); return; default: if (g) { switch (token.id) { case ',': error("Each value should have its own case label."); return; case ':': statements(); break; default: error("Missing ':' on a case clause.", token); } } else { error("Expected '{a}' and instead saw '{b}'.", nexttoken, 'case', nexttoken.value); } } } }).labelled = true; stmt('debugger', function () { if (!option.debug) { warning("All 'debugger' statements should be removed."); } }); stmt('do', function () { funct['(breakage)'] += 1; funct['(loopage)'] += 1; block(true); advance('while'); var t = nexttoken; nonadjacent(token, t); advance('('); nospace(); parse(20); if (nexttoken.id === '=') { warning("Expected a 
conditional expression and instead saw an assignment."); advance('='); parse(20); } advance(')', t); nospace(prevtoken, token); funct['(breakage)'] -= 1; funct['(loopage)'] -= 1; }).labelled = true; blockstmt('for', function () { var s, t = nexttoken; funct['(breakage)'] += 1; funct['(loopage)'] += 1; advance('('); nonadjacent(this, t); nospace(); if (peek(nexttoken.id === 'var' ? 1 : 0).id === 'in') { if (nexttoken.id === 'var') { advance('var'); varstatement(true); } else { advance(); }<|fim▁hole|> if (!option.forin && (s.length > 1 || typeof s[0] !== 'object' || s[0].value !== 'if')) { warning("The body of a for in should be wrapped in an if statement to filter unwanted properties from the prototype.", this); } funct['(breakage)'] -= 1; funct['(loopage)'] -= 1; return this; } else { if (nexttoken.id !== ';') { if (nexttoken.id === 'var') { advance('var'); varstatement(); } else { for (;;) { parse(0, 'for'); if (nexttoken.id !== ',') { break; } advance(','); } } } advance(';'); if (nexttoken.id !== ';') { parse(20); if (nexttoken.id === '=') { warning("Expected a conditional expression and instead saw an assignment."); advance('='); parse(20); } } advance(';'); if (nexttoken.id === ';') { error("Expected '{a}' and instead saw '{b}'.", nexttoken, ')', ';'); } if (nexttoken.id !== ')') { for (;;) { parse(0, 'for'); if (nexttoken.id !== ',') { break; } advance(','); } } advance(')', t); nospace(prevtoken, token); block(true); funct['(breakage)'] -= 1; funct['(loopage)'] -= 1; } }).labelled = true; stmt('break', function () { var v = nexttoken.value; if (funct['(breakage)'] === 0) { warning("Unexpected '{a}'.", nexttoken, this.value); } nolinebreak(this); if (nexttoken.id !== ';') { if (token.line === nexttoken.line) { if (funct[v] !== 'label') { warning("'{a}' is not a statement label.", nexttoken, v); } else if (scope[v] !== funct) { warning("'{a}' is out of scope.", nexttoken, v); } advance(); } } reachable('break'); }); stmt('continue', function () { var v = 
nexttoken.value; if (funct['(breakage)'] === 0) { warning("Unexpected '{a}'.", nexttoken, this.value); } nolinebreak(this); if (nexttoken.id !== ';') { if (token.line === nexttoken.line) { if (funct[v] !== 'label') { warning("'{a}' is not a statement label.", nexttoken, v); } else if (scope[v] !== funct) { warning("'{a}' is out of scope.", nexttoken, v); } advance(); } } reachable('continue'); }); stmt('return', function () { nolinebreak(this); if (nexttoken.id === '(regexp)') { warning("Wrap the /regexp/ literal in parens to disambiguate the slash operator."); } if (nexttoken.id !== ';' && !nexttoken.reach) { nonadjacent(token, nexttoken); parse(20); } reachable('return'); }); stmt('throw', function () { nolinebreak(this); nonadjacent(token, nexttoken); parse(20); reachable('throw'); }); reserve('void'); // Superfluous reserved words reserve('class'); reserve('const'); reserve('enum'); reserve('export'); reserve('extends'); reserve('float'); reserve('goto'); reserve('import'); reserve('let'); reserve('super'); function jsonValue() { function jsonObject() { var t = nexttoken; advance('{'); if (nexttoken.id !== '}') { for (;;) { if (nexttoken.id === '(end)') { error("Missing '}' to match '{' from line {a}.", nexttoken, t.line + 1); } else if (nexttoken.id === '}') { warning("Unexpected comma.", token); break; } else if (nexttoken.id === ',') { error("Unexpected comma.", nexttoken); } else if (nexttoken.id !== '(string)') { warning("Expected a string and instead saw {a}.", nexttoken, nexttoken.value); } advance(); advance(':'); jsonValue(); if (nexttoken.id !== ',') { break; } advance(','); } } advance('}'); } function jsonArray() { var t = nexttoken; advance('['); if (nexttoken.id !== ']') { for (;;) { if (nexttoken.id === '(end)') { error("Missing ']' to match '[' from line {a}.", nexttoken, t.line + 1); } else if (nexttoken.id === ']') { warning("Unexpected comma.", token); break; } else if (nexttoken.id === ',') { error("Unexpected comma.", nexttoken); } 
jsonValue(); if (nexttoken.id !== ',') { break; } advance(','); } } advance(']'); } switch (nexttoken.id) { case '{': jsonObject(); break; case '[': jsonArray(); break; case 'true': case 'false': case 'null': case '(number)': case '(string)': advance(); break; case '-': advance('-'); if (token.character !== nexttoken.from) { warning("Unexpected space after '-'.", token); } adjacent(token, nexttoken); advance('(number)'); break; default: error("Expected a JSON value.", nexttoken); } } // The actual JSLINT function itself. var itself = function (s, o) { var a, i; JSLINT.errors = []; predefined = Object.create(standard); if (o) { a = o.predef; if (a instanceof Array) { for (i = 0; i < a.length; i += 1) { predefined[a[i]] = true; } } if (o.adsafe) { o.safe = true; } if (o.safe) { o.browser = false; o.css = false; o.debug = false; o.eqeqeq = true; o.evil = false; o.forin = false; o.nomen = true; o.on = false; o.rhino = false; o.safe = true; o.sidebar = false; o.strict = true; o.sub = false; o.undef = true; o.widget = false; predefined.Date = false; predefined['eval'] = false; predefined.Function = false; predefined.Object = false; predefined.ADSAFE = true; predefined.lib = true; } option = o; } else { option = {}; } option.indent = option.indent || 4; adsafe_id = ''; adsafe_may = false; adsafe_went = false; approved = {}; if (option.approved) { for (i = 0; i < option.approved.length; i += 1) { approved[option.approved[i]] = option.approved[i]; } } approved.test = 'test'; /////////////////////////////////////// tab = ''; for (i = 0; i < option.indent; i += 1) { tab += ' '; } indent = 0; global = Object.create(predefined); scope = global; funct = { '(global)': true, '(name)': '(global)', '(scope)': scope, '(breakage)': 0, '(loopage)': 0 }; functions = []; ids = {}; urls = []; src = false; xmode = false; stack = null; member = {}; membersOnly = null; implied = {}; inblock = false; lookahead = []; jsonmode = false; warnings = 0; lex.init(s); prereg = true; prevtoken = token 
= nexttoken = syntax['(begin)']; assume(); try { advance(); if (nexttoken.value.charAt(0) === '<') { html(); if (option.adsafe && !adsafe_went) { warning("ADsafe violation: Missing ADSAFE.go.", this); } } else { switch (nexttoken.id) { case '{': case '[': option.laxbreak = true; jsonmode = true; jsonValue(); break; case '@': case '*': case '#': case '.': case ':': xmode = 'style'; advance(); if (token.id !== '@' || !nexttoken.identifier || nexttoken.value !== 'charset') { error('A css file should begin with @charset "UTF-8";'); } advance(); if (nexttoken.type !== '(string)' && nexttoken.value !== 'UTF-8') { error('A css file should begin with @charset "UTF-8";'); } advance(); advance(';'); styles(); break; default: if (option.adsafe && option.fragment) { warning("ADsafe violation.", this); } statements('lib'); } } advance('(end)'); } catch (e) { if (e) { JSLINT.errors.push({ reason : e.message, line : e.line || nexttoken.line, character : e.character || nexttoken.from }, null); } } return JSLINT.errors.length === 0; }; function to_array(o) { var a = [], k; for (k in o) { if (o.hasOwnProperty(k)) { a.push(k); } } return a; } // Report generator. itself.report = function (option, sep) { var a = [], c, e, f, i, k, l, m = '', n, o = [], s, v, cl, ex, va, un, ou, gl, la; function detail(h, s, sep) { if (s.length) { o.push('<div><i>' + h + '</i> ' + s.sort().join(sep || ', ') + '</div>'); } } s = to_array(implied); k = JSLINT.errors.length; if (k || s.length > 0) { o.push('<div id=errors><i>Error:</i>'); if (s.length > 0) { s.sort(); for (i = 0; i < s.length; i += 1) { s[i] = '<code>' + s[i] + '</code>&nbsp;<i>' + implied[s[i]].join(' ') + '</i>'; } o.push('<p><i>Implied global:</i> ' + s.join(', ') + '</p>'); c = true; } for (i = 0; i < k; i += 1) { c = JSLINT.errors[i]; if (c) { e = c.evidence || ''; o.push('<p>Problem' + (isFinite(c.line) ? 
' at line ' + (c.line + 1) + ' character ' + (c.character + 1) : '') + ': ' + c.reason.entityify() + '</p><p class=evidence>' + (e && (e.length > 80 ? e.slice(0, 77) + '...' : e).entityify()) + '</p>'); } } o.push('</div>'); if (!c) { return o.join(''); } } if (!option) { o.push('<br><div id=functions>'); if (urls.length > 0) { detail("URLs<br>", urls, '<br>'); } s = to_array(scope); if (s.length === 0) { if (jsonmode) { if (k === 0) { o.push('<p>JSON: good.</p>'); } else { o.push('<p>JSON: bad.</p>'); } } else { o.push('<div><i>No new global variables introduced.</i></div>'); } } else { o.push('<div><i>Global</i> ' + s.sort().join(', ') + '</div>'); } for (i = 0; i < functions.length; i += 1) { f = functions[i]; cl = []; ex = []; va = []; un = []; ou = []; gl = []; la = []; for (k in f) { if (f.hasOwnProperty(k) && k.charAt(0) !== '(') { v = f[k]; switch (v) { case 'closure': cl.push(k); break; case 'exception': ex.push(k); break; case 'var': va.push(k); break; case 'unused': un.push(k); break; case 'label': la.push(k); break; case 'outer': ou.push(k); break; case true: gl.push(k); break; } } } o.push('<br><div class=function><i>' + f['(line)'] + '</i> ' + (f['(name)'] || '') + '(' + (f['(params)'] || '') + ')</div>'); detail('Closure', cl); detail('Variable', va); detail('Exception', ex); detail('Outer', ou); detail('Global', gl); detail('<big><b>Unused</b></big>', un); detail('Label', la); } a = []; for (k in member) { if (typeof member[k] === 'number') { a.push(k); } } if (a.length) { a = a.sort(); m = '<br><pre>/*members '; l = 10; for (i = 0; i < a.length; i += 1) { k = a[i]; n = k.name(); if (l + n.length > 72) { o.push(m + '<br>'); m = ' '; l = 1; } l += n.length + 2; if (member[k] === 1) { n = '<i>' + n + '</i>'; } if (i < a.length - 1) { n += ', '; } m += n; } o.push(m + '<br>*/</pre>'); } o.push('</div>'); } return o.join(''); }; return itself; }());<|fim▁end|>
advance('in'); parse(20); advance(')', t); s = block(true);
<|file_name|>tk.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) 2006-2013, Alexis Royer, http://alexis.royer.free.fr/CLI All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation <|fim▁hole|> without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "cli/pch.h" #include <string.h> // strcmp #include "cli/tk.h" CLI_NS_BEGIN(cli) CLI_NS_BEGIN(tk) bool operator==(const char* const STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) == 0); } bool operator==(const String& STR_String1, const char* const STR_String2) { return (strcmp(STR_String1, STR_String2) == 0); } bool operator==(const String& STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) == 0); } bool operator!=(const char* const STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) != 0); } bool operator!=(const String& STR_String1, const char* const STR_String2) { return (strcmp(STR_String1, STR_String2) != 0); } bool operator!=(const String& STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) != 0); } bool operator<(const String& STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) < 0); } bool operator>(const String& STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) > 0); } bool operator<=(const String& STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) <= 0); } bool operator>=(const String& STR_String1, const String& STR_String2) { return (strcmp(STR_String1, STR_String2) >= 0); } CLI_NS_END(tk) CLI_NS_END(cli)<|fim▁end|>
and/or other materials provided with the distribution. * Neither the name of the CLI library project nor the names of its contributors may be used to endorse or promote products derived from this software
<|file_name|>machinekitapplicationcontrols_de.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="de_DE"> <context> <name>AppPage</name> <message> <source>QML Error:</source> <translation>QML Fehler:</translation> </message> <message> <source>Loading QML file failed</source> <translation type="vanished">Laden der QML Datei fehlgeschlagen </translation> </message> </context> <context> <name>ApplicationFileDialog</name> <message> <source>Please choose a file</source> <translation type="vanished">Datei auswählen</translation> </message> <message> <source>All machinable files (%1)</source> <translation>Alle verarbeitbaren Dateien (%1)</translation> </message> <message> <source>rs274ngc files (*.ngc)</source> <translation>rs274ngc Dateien (*.ngc)</translation> </message> <message> <source>All files (*)</source> <translation>Alle Dateien (*)</translation> </message> <message> <source>Open File</source> <translation>Datei öffnen</translation> </message> </context> <context> <name>ApplicationProgressBar</name> <message> <source>Uploading file %1</source> <translation>Lade Datei %1 hoch</translation> </message> <message> <source>Downloading file %1</source> <translation>Lade Datei %1 herunter</translation> </message> <message> <source>%1% - %2</source> <translation>%1% - %2</translation> </message> </context> <context> <name>ApplicationRemoteFileDialog</name> <message> <source>Remote files</source> <translation type="vanished">Netzwerkdateien</translation> </message> <message> <source>Name</source> <translation>Name</translation> </message> <message> <source>Size</source> <translation>Größe</translation><|fim▁hole|> <source>Last Modified</source> <translation>Zuletzt Modifiziert</translation> </message> <message> <source>Remove file</source> <translation>Datei entfernen</translation> </message> <message> <source>Open file</source> <translation>Datei öffnen</translation> </message> <message> <source>Upload 
file...</source> <translation>Datei hochladen...</translation> </message> <message> <source>Refresh</source> <translation>Aktualisieren</translation> </message> <message> <source>Upload...</source> <translation>Hochladen...</translation> </message> <message> <source>Remove</source> <translation>Entfernen</translation> </message> <message> <source>Open</source> <translation>Öffnen</translation> </message> <message> <source>Close</source> <translation>Schließen</translation> </message> <message> <source>Remote Files</source> <translation>Netzwerkdateien</translation> </message> <message> <source>Up</source> <translation>Nach oben</translation> </message> <message> <source>Open directory</source> <translation>Ordner öffnen</translation> </message> <message> <source>Remove directory</source> <translation>Ordner löschen</translation> </message> <message> <source>Create directory</source> <translation>Neuer Ordner</translation> </message> <message> <source>Enter Directory Name</source> <translation>Ordnernamen eingeben</translation> </message> </context> <context> <name>BlockDeleteAction</name> <message> <source>Block Delete</source> <translation>Block Löschen</translation> </message> <message> <source>Enable block delete [%1]</source> <translation>Block Löschen aktivieren [%1]</translation> </message> </context> <context> <name>ConfigPage</name> <message> <source>Back</source> <translation>Zurück</translation> </message> <message> <source>Remote UIs</source> <translation>Remote UIs</translation> </message> <message> <source>Local UIs</source> <translation>Lokale UIs</translation> </message> </context> <context> <name>ConnectionWindow</name> <message> <source>Instance Error:</source> <translation>Instanzfehler:</translation> </message> <message> <source>Check uri and uuid</source> <translation>URI und UUID überprüfen</translation> </message> <message> <source>Application Config Error:</source> <translation>Application Config Fehler:</translation> </message> <message> 
<source>Application Launcher Error:</source> <translation>Application Launcher Error:</translation> </message> </context> <context> <name>DecreaseSpindleSpeedAction</name> <message> <source>-</source> <translation>-</translation> </message> <message> <source>Turn spindle slower [%1]</source> <translation>Spindelgeschwindkeit reduzieren [%1]</translation> </message> </context> <context> <name>DigitalReadOut</name> <message> <source>Vel:</source> <translation>Gsw:</translation> </message> <message> <source>DTG:</source> <translation>DTG:</translation> </message> <message> <source>Rad:</source> <translation>Rad:</translation> </message> <message> <source>Dia:</source> <translation>Dm:</translation> </message> <message> <source>DTG</source> <translation>DTG</translation> </message> <message> <source>TLO</source> <translation>TLO</translation> </message> </context> <context> <name>EditToolTableAction</name> <message> <source>Edit &amp;Tool Table...</source> <translation>Werkzeugtabelle &amp;bearbeiten...</translation> </message> <message> <source>Edit Tool Table [%1]</source> <translation>Werkzeugtabelle bearbeiten [%1]</translation> </message> </context> <context> <name>EditWithSystemEditorAction</name> <message> <source>Edit G-Code file with System Editor [%1]</source> <translation>G-Code Datei mit Systemeditor bearbeiten [%1]</translation> </message> <message> <source>&amp;Edit File with System Editor...</source> <translation>Datei mit Systemeditor &amp;bearbeiten...</translation> </message> </context> <context> <name>EstopAction</name> <message> <source>Estop</source> <translation>Notaus</translation> </message> <message> <source>Toggle Emergency Stop [%1]</source> <translation>Notaus umschalten [%1]</translation> </message> </context> <context> <name>EstopPowerAction</name> <message> <source>Power</source> <translation>Strom</translation> </message> <message> <source>Reset Machine [%1]</source> <translation>Maschine zurücksetzen [%1]</translation> </message> 
</context> <context> <name>FeedHoldAction</name> <message> <source>Feed Hold</source> <translation>Vorschub anhalten</translation> </message> <message> <source>Enable feed hold [%1]</source> <translation>Vorschub anhalten aktivieren [%1]</translation> </message> </context> <context> <name>FeedOverrideAction</name> <message> <source>Feed Override</source> <translation>Vorschub überschreiben</translation> </message> <message> <source>Enable feed override [%1]</source> <translation>Vorschub überschreiben aktivieren [%1]</translation> </message> </context> <context> <name>FloodAction</name> <message> <source>Flood</source> <translation>Überspülen</translation> </message> <message> <source>Enable flood [%1]</source> <translation>Überspülen aktivieren [%1]</translation> </message> </context> <context> <name>HomeAxisAction</name> <message> <source>Home</source> <translation>Heimen</translation> </message> <message> <source>Home All</source> <translation>Heime Alle</translation> </message> <message> <source>Home axis %1 [%2]</source> <translation>Heime Achse %1 [%2]</translation> </message> <message> <source>Home all axes [%1]</source> <translation>Heime alle achsen [%1]</translation> </message> </context> <context> <name>IncreaseSpindleSpeedAction</name> <message> <source>+</source> <translation>+</translation> </message> <message> <source>Turn spindle faster [%1]</source> <translation>Spindelgschwindigkeit erhöhen [%1]</translation> </message> </context> <context> <name>InstancePage</name> <message> <source>Multicast</source> <translation>Multicast</translation> </message> <message> <source>Available Instances:</source> <translation>Verfügbare Instanzen:</translation> </message> <message> <source>Unicast</source> <translation>Unicast</translation> </message> <message> <source>Machinekit Instances:</source> <translation>Machinekit Instanzen:</translation> </message> <message> <source>Instance %1:</source> <translation>Instanz %1:</translation> </message> <message> 
<source>IP address or hostname</source> <translation>IP Adresse oder Hostname</translation> </message> <message> <source>Warning!&lt;br&gt;No network connection found, service discovery unavailable. Please check your network connection.</source> <translation>Warnung!&lt;br&gt;Keine Netzwerkverbindung gefunden. Dienstsuche nicht verfügbar. Bitte prüfen Sie Ihre Netzwerkverbindung.</translation> </message> </context> <context> <name>JogAction</name> <message> <source>Jog Axis %1 [%2]</source> <translation>Achse %1 bewegen [%2]</translation> </message> </context> <context> <name>JogDistanceHandler</name> <message> <source>Continuous</source> <translation>Kontinuierlich</translation> </message> </context> <context> <name>LanguageControlButton</name> <message> <source>English</source> <translation>Englisch</translation> </message> <message> <source>German</source> <translation>Deutsch</translation> </message> <message> <source>Russian</source> <translation>Russisch</translation> </message> <message> <source>Restart Application</source> <translation>Applikation neustarten</translation> </message> <message> <source>For the change to take effect, you need to restart the application. Restart now?</source> <translation>Damit die Änderung übernommen wird, muss die Anwendung neugestartet werden. 
Jetzt neustarten?</translation> </message> <message> <source>Spanish</source> <translation>Spanisch</translation> </message> </context> <context> <name>LauncherPage</name> <message> <source>System Shutdown</source> <translation>System Herunterfahren</translation> </message> <message> <source>Do you really want to shutdown the Machinekit system?</source> <translation>Wollen Sie das Machinekit System wirklich beenden?</translation> </message> <message> <source>Available Launchers:</source> <translation>Verfügbare Starter:</translation> </message> <message> <source>List</source> <translation>Liste</translation> </message> <message> <source>Small</source> <translation>Klein</translation> </message> <message> <source>Big</source> <translation>Groß</translation> </message> <message> <source>Shutdown</source> <translation>Herunterfahren</translation> </message> <message> <source>Back</source> <translation>Zurück</translation> </message> <message> <source>Terminating...</source> <translation>Beende...</translation> </message> <message> <source>Running...</source> <translation>Läuft...</translation> </message> <message> <source>Kill</source> <translation>Abschießen</translation> </message> <message> <source>Terminate</source> <translation>Beenden</translation> </message> </context> <context> <name>LoadingPage</name> <message> <source>Back</source> <translation>Zurück</translation> </message> <message> <source>Loading %1...</source> <translation>Lade %1...</translation> </message> <message> <source>user interface</source> <translation>Benutzeroberfläche</translation> </message> </context> <context> <name>MdiCommandAction</name> <message> <source>Go</source> <translation>Ausführen</translation> </message> <message> <source>Execute MDI command [%1]</source> <translation>MDI Kommando ausführen [%1]</translation> </message> </context> <context> <name>MdiCommandEdit</name> <message> <source>MDI</source> <translation>MDI</translation> </message> </context> <context> 
<name>MdiHistoryTable</name> <message> <source>Clear list</source> <translation type="vanished">Liste leeren</translation> </message> <message> <source>Clear History</source> <translation>Verlauf löschen</translation> </message> </context> <context> <name>MistAction</name> <message> <source>Mist</source> <translation>Nebel</translation> </message> <message> <source>Enable mist [%1]</source> <translation>Nebel einschalten [%1]</translation> </message> </context> <context> <name>OpenAction</name> <message> <source>Open file</source> <translation type="vanished">Datei öffnen</translation> </message> <message> <source>Open G-Code file [%1]</source> <translation type="vanished">GCode Datei öffen [%1]</translation> </message> <message> <source>Open File...</source> <translation>Datei öffnen...</translation> </message> <message> <source>Open File from Machine...</source> <translation>Datei von Maschine öffnen...</translation> </message> <message> <source>Open G-Code file stored on machine [%1]</source> <translation>Auf der Maschine gespeicherte G-Code Datei öffnen [%1]</translation> </message> <message> <source>Open G-Code file stored on local computer [%1]</source> <translation>Auf dem lokalen Computer gespeicherte G-Code Datei öffnen [%1]</translation> </message> </context> <context> <name>OptionalStopAction</name> <message> <source>Optional stop (M1)</source> <translation type="vanished">Optionaler Stop (M1)</translation> </message> <message> <source>Optional stop [%1]</source> <translation>Optionaler Stop [%1]</translation> </message> <message> <source>Optional Stop (M1)</source> <translation>Optionaler Stop (M1)</translation> </message> </context> <context> <name>OverrideLimitsAction</name> <message> <source>Override Limits</source> <translation>Maschinenlimits überschreiben</translation> </message> <message> <source>Override the machine limits [%1]</source> <translation>Maschinenlimits überschreiben [%1]</translation> </message> </context> <context> 
<name>PauseResumeProgramAction</name> <message> <source>Resume</source> <translation>Fortsetzen</translation> </message> <message> <source>Pause</source> <translation>Pausieren</translation> </message> <message> <source>Pause execution [%1]</source> <translation>Ausführen pausieren [%1]</translation> </message> <message> <source>Resume execution [%1]</source> <translation>Ausführung fortsetzen [%1]</translation> </message> </context> <context> <name>PowerAction</name> <message> <source>Power</source> <translation>Power</translation> </message> <message> <source>Toggle Machine power [%1]</source> <translation>Maschinen Power schalten [%1]</translation> </message> </context> <context> <name>ReopenAction</name> <message> <source>Reopen file</source> <translation type="vanished">Datei wieder öffnen</translation> </message> <message> <source>Reopen current file [%1]</source> <translation>Aktuelle Datei wieder öffnen [%1]</translation> </message> <message> <source>Reopen File</source> <translation>Datei erneut öffnen</translation> </message> </context> <context> <name>RunProgramAction</name> <message> <source>Run</source> <translation>Starten</translation> </message> <message> <source>Begin executing current file [%1]</source> <translation>Ausführen der aktuellen Datei starten [%1]</translation> </message> </context> <context> <name>SelectedPage</name> <message> <source>Starting %1...</source> <translation>Starte %1...</translation> </message> <message> <source>Error starting %1</source> <translation>Fehler beim starten von %1</translation> </message> <message> <source>%1 exited</source> <translation>%1 geschlossen</translation> </message> <message> <source>Back</source> <translation>Zurück</translation> </message> <message> <source>Process exited with return code %1. See the log for details.</source> <translation>Prozess beendet mit Rückgabewert %1. 
Siehe Log für Details.</translation> </message> <message> <source>Application Output</source> <translation>Ausgabe der Anwendung</translation> </message> <message> <source>Machinekit Log</source> <translation>Machinekit Log</translation> </message> </context> <context> <name>ServiceWindow</name> <message> <source>Back</source> <translation>Zurück</translation> </message> <message> <source>Waiting for services to appear...</source> <translation>Warte auf erscheinen der Dienste...</translation> </message> <message> <source>%1 service</source> <translation>%1 Dienst</translation> </message> </context> <context> <name>ShutdownAction</name> <message> <source>Sh&amp;utdown</source> <translation>&amp;Herunterfahren</translation> </message> <message> <source>Shutdown Machinekit instance [%1]</source> <translation>Machinekit Instanz herunterfahren [%1]</translation> </message> </context> <context> <name>SpindleCcwAction</name> <message> <source>CCW</source> <translation>CCW</translation> </message> <message> <source>Turn spindle counterclockwise [%1]</source> <translation>Spindle gegen Uhrzeigersinn drehen [%1]</translation> </message> </context> <context> <name>SpindleCwAction</name> <message> <source>CW</source> <translation>CW</translation> </message> <message> <source>Turn spindle clockwise [%1]</source> <translation>Spindel im Uhrzeigersinn drehen [%1]</translation> </message> </context> <context> <name>SpindleOverrideAction</name> <message> <source>Spindle Override</source> <translation>Spindelgeschwindigkeit überschreiben</translation> </message> <message> <source>Enable spindle override [%1]</source> <translation>Spindelgeschwindigkeit überschreiben aktivieren [%1]</translation> </message> </context> <context> <name>StepProgramAction</name> <message> <source>Step</source> <translation>Schritt</translation> </message> <message> <source>Execute next line [%1]</source> <translation>Nächste Zeile ausführen [%1]</translation> </message> </context> <context> 
<name>StopProgramAction</name> <message> <source>Stop</source> <translation>Anhalten</translation> </message> <message> <source>Stop program execution [%1]</source> <translation>Programausführung anhalten [%1]</translation> </message> </context> <context> <name>StopSpindleAction</name> <message> <source>Stop</source> <translation>Anhalten</translation> </message> <message> <source>Stop spindle [%1]</source> <translation>Spindel anhalten [%1]</translation> </message> </context> <context> <name>TeleopAction</name> <message> <source>Teleop mode</source> <translation type="vanished">Teleop Modus</translation> </message> <message> <source>Enable teleop mode [%1]</source> <translation>Teleop Modus aktivieren [%1]</translation> </message> <message> <source>Teleop Mode</source> <translation>Teleop Modus</translation> </message> </context> <context> <name>ToolTableEditor</name> <message> <source>Tool ID</source> <translation>Werkzeug ID</translation> </message> <message> <source>Pocket</source> <translation>Position</translation> </message> <message> <source>X</source> <translation>X</translation> </message> <message> <source>Y</source> <translation>Y</translation> </message> <message> <source>Z</source> <translation>Z</translation> </message> <message> <source>A</source> <translation>A</translation> </message> <message> <source>B</source> <translation>B</translation> </message> <message> <source>C</source> <translation>C</translation> </message> <message> <source>U</source> <translation>U</translation> </message> <message> <source>V</source> <translation>V</translation> </message> <message> <source>W</source> <translation>W</translation> </message> <message> <source>Diameter</source> <translation>Durchmesser</translation> </message> <message> <source>Front Angle</source> <translation>Vorderer Winkel</translation> </message> <message> <source>Back Angle</source> <translation>Hinterer Winkel</translation> </message> <message> <source>Orientation</source> 
<translation>Orientierung</translation> </message> <message> <source>Comment</source> <translation>Kommentar</translation> </message> </context> <context> <name>ToolTableEditorDialog</name> <message> <source>Tool Table Editor</source> <translation>Werkzeug Editor</translation> </message> <message> <source>Error in tool table.</source> <translation>Fehler in Werkzeugtabelle.</translation> </message> <message> <source>Tool table has modifications.</source> <translation>Werkzeugtabelle wurde verändert.</translation> </message> <message> <source>Add Row</source> <translation>Reihe hinzufügen</translation> </message> <message> <source>Remove Row</source> <translation>Reihe entfernen</translation> </message> <message> <source>Reset Modifications</source> <translation>Änderungen zurücksetzen</translation> </message> <message> <source>Update Tool Table</source> <translation>Werkzeugtabelle übernehmen</translation> </message> <message> <source>Close</source> <translation>Schließen</translation> </message> </context> <context> <name>TouchOffAction</name> <message> <source>Touch Off</source> <translation>Abweichung setzen</translation> </message> <message> <source>Set G5x offset for active axis [%1]</source> <translation>Setze G5x Abweichung für die aktive Achse [%1]</translation> </message> </context> <context> <name>TouchOffDialog</name> <message> <source>Touch Off</source> <translation>Abweichung setzen</translation> </message> <message> <source>Enter %1 coordinate relative to workpiece:</source> <translation>%1 Koordinate relative zum Arbeitsstück eingeben:</translation> </message> <message> <source>Coordinate system:</source> <translation>Koordinatensystem:</translation> </message> </context> <context> <name>UnhomeAxisAction</name> <message> <source>Unhome</source> <translation>Unheimen</translation> </message> <message> <source>Unhome axis %1 [%2]</source> <translation>Achse unheimen %1 [%2]</translation> </message> </context> </TS><|fim▁end|>
</message> <message>
<|file_name|>account.go<|end_file_name|><|fim▁begin|>package routes import ( "crypto/sha256" "crypto/subtle" "database/sql" "encoding/hex" "html" "log" "math" "net/http" "strconv" "strings" c "github.com/Azareal/Gosora/common" p "github.com/Azareal/Gosora/common/phrases" qgen "github.com/Azareal/Gosora/query_gen" ) // A blank list to fill out that parameter in Page for routes which don't use it var tList []interface{} func AccountLogin(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, r, u) } h.Title = p.GetTitlePhrase("login") return renderTemplate("login", w, r, h, c.Page{h, tList, nil}) } // TODO: Log failed attempted logins? // TODO: Lock IPS out if they have too many failed attempts? // TODO: Log unusual countries in comparison to the country a user usually logs in from? Alert the user about this? func AccountLoginSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, r, u) } name := c.SanitiseSingleLine(r.PostFormValue("username")) uid, e, requiresExtraAuth := c.Auth.Authenticate(name, r.PostFormValue("password")) if e != nil { // TODO: uid is currently set to 0 as authenticate fetches the user by username and password. Get the actual uid, so we can alert the user of attempted logins? What if someone takes advantage of the response times to deduce if an account exists? if !c.Config.DisableLoginLog { li := &c.LoginLogItem{UID: uid, Success: false, IP: u.GetIP()} if _, ie := li.Create(); ie != nil { return c.InternalError(ie, w, r) } } return c.LocalError(e.Error(), w, r, u) } // TODO: Take 2FA into account if !c.Config.DisableLoginLog { li := &c.LoginLogItem{UID: uid, Success: true, IP: u.GetIP()} if _, e = li.Create(); e != nil { return c.InternalError(e, w, r) } } // TODO: Do we want to slacken this by only doing it when the IP changes? 
if requiresExtraAuth { provSession, signedSession, e := c.Auth.CreateProvisionalSession(uid) if e != nil { return c.InternalError(e, w, r) } // TODO: Use the login log ID in the provisional cookie? c.Auth.SetProvisionalCookies(w, uid, provSession, signedSession) http.Redirect(w, r, "/accounts/mfa_verify/", http.StatusSeeOther) return nil } return loginSuccess(uid, w, r, u) } func loginSuccess(uid int, w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { userPtr, err := c.Users.Get(uid) if err != nil { return c.LocalError("Bad account", w, r, u) } *u = *userPtr var session string if u.Session == "" { session, err = c.Auth.CreateSession(uid) if err != nil { return c.InternalError(err, w, r) } } else { session = u.Session } c.Auth.SetCookies(w, uid, session) if u.IsAdmin { // Is this error check redundant? We already check for the error in PreRoute for the same IP // TODO: Should we be logging this? log.Printf("#%d has logged in with IP %s", uid, u.GetIP()) } http.Redirect(w, r, "/", http.StatusSeeOther) return nil } func extractCookie(name string, r *http.Request) (string, error) { cookie, err := r.Cookie(name) if err != nil { return "", err } return cookie.Value, nil } func mfaGetCookies(r *http.Request) (uid int, provSession, signedSession string, err error) { suid, err := extractCookie("uid", r) if err != nil { return 0, "", "", err } uid, err = strconv.Atoi(suid) if err != nil { return 0, "", "", err } provSession, err = extractCookie("provSession", r) if err != nil { return 0, "", "", err } signedSession, err = extractCookie("signedSession", r) return uid, provSession, signedSession, err } func mfaVerifySession(provSession, signedSession string, uid int) bool { bProvSession := []byte(provSession) bSignedSession := []byte(signedSession) bUid := []byte(strconv.Itoa(uid)) h := sha256.New() h.Write([]byte(c.SessionSigningKeyBox.Load().(string))) h.Write(bProvSession) h.Write(bUid) expected := hex.EncodeToString(h.Sum(nil)) if 
subtle.ConstantTimeCompare(bSignedSession, []byte(expected)) == 1 { return true } h = sha256.New() h.Write([]byte(c.OldSessionSigningKeyBox.Load().(string))) h.Write(bProvSession) h.Write(bUid) expected = hex.EncodeToString(h.Sum(nil)) return subtle.ConstantTimeCompare(bSignedSession, []byte(expected)) == 1 } func AccountLoginMFAVerify(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, r, u) } h.Title = p.GetTitlePhrase("login_mfa_verify") uid, provSession, signedSession, err := mfaGetCookies(r) if err != nil { return c.LocalError("Invalid cookie", w, r, u) } if !mfaVerifySession(provSession, signedSession, uid) { return c.LocalError("Invalid session", w, r, u) } return renderTemplate("login_mfa_verify", w, r, h, c.Page{h, tList, nil}) } func AccountLoginMFAVerifySubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { uid, provSession, signedSession, err := mfaGetCookies(r) if err != nil { return c.LocalError("Invalid cookie", w, r, u) } if !mfaVerifySession(provSession, signedSession, uid) { return c.LocalError("Invalid session", w, r, u) } token := r.PostFormValue("mfa_token") err = c.Auth.ValidateMFAToken(token, uid) if err != nil { return c.LocalError(err.Error(), w, r, u) } return loginSuccess(uid, w, r, u) } func AccountLogout(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { c.Auth.Logout(w, u.ID) http.Redirect(w, r, "/", http.StatusSeeOther) return nil } func AccountRegister(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, r, u) } h.Title = p.GetTitlePhrase("register") h.AddScriptAsync("register.js") var token string if c.Config.DisableJSAntispam { h := sha256.New() h.Write([]byte(c.JSTokenBox.Load().(string))) h.Write([]byte(u.GetIP())) token = hex.EncodeToString(h.Sum(nil)) } return renderTemplate("register", w, r, h, 
c.RegisterPage{h, h.Settings["activation_type"] != 2, token, nil}) } func isNumeric(data string) (numeric bool) { for _, ch := range data { if ch < 48 || ch > 57 { return false } } return true } func AccountRegisterSubmit(w http.ResponseWriter, r *http.Request, user *c.User) c.RouteError { headerLite, _ := c.SimpleUserCheck(w, r, user) // TODO: Should we push multiple validation errors to the user instead of just one? regSuccess := true regErrMsg := "" regErrReason := "" regError := func(userMsg, reason string) { regSuccess = false if regErrMsg == "" { regErrMsg = userMsg } regErrReason += reason + "|" } if r.PostFormValue("tos") != "0" { regError(p.GetErrorPhrase("register_might_be_machine"), "trap-question") } { h := sha256.New() h.Write([]byte(c.JSTokenBox.Load().(string))) h.Write([]byte(user.GetIP())) if !c.Config.DisableJSAntispam { if r.PostFormValue("golden-watch") != hex.EncodeToString(h.Sum(nil)) { regError(p.GetErrorPhrase("register_might_be_machine"), "js-antispam") } } else { if r.PostFormValue("areg") != hex.EncodeToString(h.Sum(nil)) { regError(p.GetErrorPhrase("register_might_be_machine"), "token") } } } name := c.SanitiseSingleLine(r.PostFormValue("name")) if name == "" { regError(p.GetErrorPhrase("register_need_username"), "no-username") } // This is so a numeric name won't interfere with mentioning a user by ID, there might be a better way of doing this like perhaps !@ to mean IDs and @ to mean usernames in the pre-parser nameBits := strings.Split(name, " ") if isNumeric(nameBits[0]) { regError(p.GetErrorPhrase("register_first_word_numeric"), "numeric-name") } if strings.Contains(name, "http://") || strings.Contains(name, "https://") || strings.Contains(name, "ftp://") || strings.Contains(name, "ssh://") { regError(p.GetErrorPhrase("register_url_username"), "url-name") } // TODO: Add a dedicated function for validating emails email := c.SanitiseSingleLine(r.PostFormValue("email")) if headerLite.Settings["activation_type"] == 2 && email == "" { 
regError(p.GetErrorPhrase("register_need_email"), "no-email") } if c.HasSuspiciousEmail(email) { regError(p.GetErrorPhrase("register_suspicious_email"), "suspicious-email") } password := r.PostFormValue("password") // ? Move this into Create()? What if we want to programatically set weak passwords for tests? err := c.WeakPassword(password, name, email) if err != nil { regError(err.Error(), "weak-password") } else { // Do the two inputted passwords match..? confirmPassword := r.PostFormValue("confirm_password") if password != confirmPassword { regError(p.GetErrorPhrase("register_password_mismatch"), "password-mismatch") } } regLog := c.RegLogItem{Username: name, Email: email, FailureReason: regErrReason, Success: regSuccess, IP: user.GetIP()} if !c.Config.DisableRegLog && regSuccess { if _, e := regLog.Create(); e != nil { return c.InternalError(e, w, r) } } if !regSuccess { return c.LocalError(regErrMsg, w, r, user) } var active bool var group int switch headerLite.Settings["activation_type"] { case 1: // Activate All active = true group = c.Config.DefaultGroup default: // Anything else. E.g. Admin Activation or Email Activation. 
group = c.Config.ActivationGroup } pushLog := func(reason string) error { if !c.Config.DisableRegLog { regLog.FailureReason += reason + "|" _, e := regLog.Create() return e } return nil } canonEmail := c.CanonEmail(email) uid, err := c.Users.Create(name, password, canonEmail, group, active) if err != nil { regLog.Success = false if err == c.ErrAccountExists { err = pushLog("username-exists") if err != nil { return c.InternalError(err, w, r) } return c.LocalError(p.GetErrorPhrase("register_username_unavailable"), w, r, user) } else if err == c.ErrLongUsername { err = pushLog("username-too-long") if err != nil { return c.InternalError(err, w, r) } return c.LocalError(p.GetErrorPhrase("register_username_too_long_prefix")+strconv.Itoa(c.Config.MaxUsernameLength), w, r, user) } err2 := pushLog("internal-error") if err2 != nil { return c.InternalError(err2, w, r) } return c.InternalError(err, w, r) } u, err := c.Users.Get(uid) if err == sql.ErrNoRows { return c.LocalError("You no longer exist.", w, r, user) } else if err != nil { return c.InternalError(err, w, r) } err = c.GroupPromotions.PromoteIfEligible(u, u.Level, u.Posts, u.CreatedAt) if err != nil { return c.InternalError(err, w, r) } u.CacheRemove() session, err := c.Auth.CreateSession(uid) if err != nil { return c.InternalError(err, w, r) } c.Auth.SetCookies(w, uid, session) // Check if this user actually owns this email, if email activation is on, automatically flip their account to active when the email is validated. Validation is also useful for determining whether this user should receive any alerts, etc. 
via email if c.Site.EnableEmails && canonEmail != "" { token, err := c.GenerateSafeString(80) if err != nil { return c.InternalError(err, w, r) } // TODO: Add an EmailStore and move this there _, err = qgen.NewAcc().Insert("emails").Columns("email,uid,validated,token").Fields("?,?,?,?").Exec(canonEmail, uid, 0, token) if err != nil { return c.InternalError(err, w, r) } err = c.SendActivationEmail(name, canonEmail, token) if err != nil { return c.LocalError(p.GetErrorPhrase("register_email_fail"), w, r, user) } } http.Redirect(w, r, "/", http.StatusSeeOther) return nil } // TODO: Figure a way of making this into middleware? func accountEditHead(titlePhrase string, w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) { h.Title = p.GetTitlePhrase(titlePhrase) h.Path = "/user/edit/" h.AddSheet(h.Theme.Name + "/account.css") h.AddScriptAsync("account.js") } func AccountEdit(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError {<|fim▁hole|> switch { case r.FormValue("avatar_updated") == "1": h.AddNotice("account_avatar_updated") case r.FormValue("name_updated") == "1": h.AddNotice("account_name_updated") case r.FormValue("mfa_setup_success") == "1": h.AddNotice("account_mfa_setup_success") } // TODO: Find a more efficient way of doing this mfaSetup := false _, err := c.MFAstore.Get(u.ID) if err != sql.ErrNoRows && err != nil { return c.InternalError(err, w, r) } else if err != sql.ErrNoRows { mfaSetup = true } // Normalise the score so that the user sees their relative progress to the next level rather than showing them their total score prevScore := c.GetLevelScore(u.Level) score := u.Score //score = 23 currentScore := score - prevScore nextScore := c.GetLevelScore(u.Level+1) - prevScore //perc := int(math.Ceil((float64(nextScore) / float64(currentScore)) * 100)) * 2 perc := int(math.Floor((float64(currentScore) / float64(nextScore)) * 100)) // * 2 pi := c.Account{h, "dashboard", "account_own_edit", c.AccountDashPage{h, mfaSetup, 
currentScore, nextScore, u.Level + 1, perc}} return renderTemplate("account", w, r, h, pi) } //edit_password func AccountEditPassword(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { accountEditHead("account_password", w, r, u, h) return renderTemplate("account_own_edit_password", w, r, h, c.Page{h, tList, nil}) } // TODO: Require re-authentication if the user hasn't logged in in a while func AccountEditPasswordSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { _, ferr := c.SimpleUserCheck(w, r, u) if ferr != nil { return ferr } var realPassword, salt string currentPassword := r.PostFormValue("current-password") newPassword := r.PostFormValue("new-password") confirmPassword := r.PostFormValue("confirm-password") // TODO: Use a reusable statement err := qgen.NewAcc().Select("users").Columns("password,salt").Where("uid=?").QueryRow(u.ID).Scan(&realPassword, &salt) if err == sql.ErrNoRows { return c.LocalError("Your account no longer exists.", w, r, u) } else if err != nil { return c.InternalError(err, w, r) } err = c.CheckPassword(realPassword, currentPassword, salt) if err == c.ErrMismatchedHashAndPassword { return c.LocalError("That's not the correct password.", w, r, u) } else if err != nil { return c.InternalError(err, w, r) } if newPassword != confirmPassword { return c.LocalError("The two passwords don't match.", w, r, u) } c.SetPassword(u.ID, newPassword) // TODO: Limited version of WeakPassword() // Log the user out as a safety precaution c.Auth.ForceLogout(u.ID) http.Redirect(w, r, "/", http.StatusSeeOther) return nil } func AccountEditAvatarSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { _, ferr := c.SimpleUserCheck(w, r, u) if ferr != nil { return ferr } if !u.Perms.UploadAvatars { return c.NoPermissions(w, r, u) } ext, ferr := c.UploadAvatar(w, r, u, u.ID) if ferr != nil { return ferr } ferr = c.ChangeAvatar("."+ext, w, r, u) if ferr != nil { return ferr } // TODO: Only schedule 
a resize if the avatar isn't tiny err := u.ScheduleAvatarResize() if err != nil { return c.InternalError(err, w, r) } http.Redirect(w, r, "/user/edit/?avatar_updated=1", http.StatusSeeOther) return nil } func AccountEditRevokeAvatarSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { _, ferr := c.SimpleUserCheck(w, r, u) if ferr != nil { return ferr } ferr = c.ChangeAvatar("", w, r, u) if ferr != nil { return ferr } http.Redirect(w, r, "/user/edit/?avatar_updated=1", http.StatusSeeOther) return nil } func AccountEditUsernameSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { _, ferr := c.SimpleUserCheck(w, r, u) if ferr != nil { return ferr } newName := c.SanitiseSingleLine(r.PostFormValue("new-name")) if newName == "" { return c.LocalError("You can't leave your username blank", w, r, u) } err := u.ChangeName(newName) if err != nil { return c.LocalError("Unable to change names. Does someone else already have this name?", w, r, u) } http.Redirect(w, r, "/user/edit/?name_updated=1", http.StatusSeeOther) return nil } func AccountEditMFA(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { accountEditHead("account_mfa", w, r, u, h) mfaItem, err := c.MFAstore.Get(u.ID) if err != sql.ErrNoRows && err != nil { return c.InternalError(err, w, r) } else if err == sql.ErrNoRows { return c.LocalError("Two-factor authentication hasn't been setup on your account", w, r, u) } pi := c.Page{h, tList, mfaItem.Scratch} return renderTemplate("account_own_edit_mfa", w, r, h, pi) } // If not setup, generate a string, otherwise give an option to disable mfa given the right code func AccountEditMFASetup(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { accountEditHead("account_mfa_setup", w, r, u, h) // Flash an error if mfa is already setup _, e := c.MFAstore.Get(u.ID) if e != sql.ErrNoRows && e != nil { return c.InternalError(e, w, r) } else if e != sql.ErrNoRows { return c.LocalError("You 
have already setup two-factor authentication", w, r, u) } // TODO: Entitise this? code, e := c.GenerateGAuthSecret() if e != nil { return c.InternalError(e, w, r) } pi := c.Page{h, tList, c.FriendlyGAuthSecret(code)} return renderTemplate("account_own_edit_mfa_setup", w, r, h, pi) } // Form should bounce the random mfa secret back and the otp to be verified server-side to reduce the chances of a bug arising on the JS side which makes every code mismatch func AccountEditMFASetupSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { _, ferr := c.SimpleUserCheck(w, r, u) if ferr != nil { return ferr } // Flash an error if mfa is already setup _, err := c.MFAstore.Get(u.ID) if err != sql.ErrNoRows && err != nil { return c.InternalError(err, w, r) } else if err != sql.ErrNoRows { return c.LocalError("You have already setup two-factor authentication", w, r, u) } code := r.PostFormValue("code") otp := r.PostFormValue("otp") ok, err := c.VerifyGAuthToken(code, otp) if err != nil { //fmt.Println("err: ", err) return c.LocalError("Something weird happened", w, r, u) // TODO: Log this error? } // TODO: Use AJAX for this if !ok { return c.LocalError("The token isn't right", w, r, u) } // TODO: How should we handle races where a mfa key is already setup? Right now, it's a fairly generic error, maybe try parsing the error message? 
err = c.MFAstore.Create(code, u.ID) if err != nil { return c.InternalError(err, w, r) } http.Redirect(w, r, "/user/edit/?mfa_setup_success=1", http.StatusSeeOther) return nil } // TODO: Implement this func AccountEditMFADisableSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { _, ferr := c.SimpleUserCheck(w, r, u) if ferr != nil { return ferr } // Flash an error if mfa is already setup mfaItem, err := c.MFAstore.Get(u.ID) if err != sql.ErrNoRows && err != nil { return c.InternalError(err, w, r) } else if err == sql.ErrNoRows { return c.LocalError("You don't have two-factor enabled on your account", w, r, u) } err = mfaItem.Delete() if err != nil { return c.InternalError(err, w, r) } http.Redirect(w, r, "/user/edit/?mfa_disabled=1", http.StatusSeeOther) return nil } func AccountEditPrivacy(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { accountEditHead("account_privacy", w, r, u, h) profileComments := u.Privacy.ShowComments receiveConvos := u.Privacy.AllowMessage enableEmbeds := !c.DefaultParseSettings.NoEmbed if u.ParseSettings != nil { enableEmbeds = !u.ParseSettings.NoEmbed } pi := c.Account{h, "privacy", "account_own_edit_privacy", c.AccountPrivacyPage{h, profileComments, receiveConvos, enableEmbeds}} return renderTemplate("account", w, r, h, pi) } func AccountEditPrivacySubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { //headerLite, _ := c.SimpleUserCheck(w, r, u) sProfileComments := r.FormValue("profile_comments") sEnableEmbeds := r.FormValue("enable_embeds") oProfileComments := r.FormValue("o_profile_comments") oEnableEmbeds := r.FormValue("o_enable_embeds") if sProfileComments != oProfileComments || sEnableEmbeds != oEnableEmbeds { profileComments, e := strconv.Atoi(sProfileComments) enableEmbeds, e2 := strconv.Atoi(sEnableEmbeds) if e != nil || e2 != nil { return c.LocalError("malformed integer", w, r, u) } e = u.UpdatePrivacy(profileComments, enableEmbeds) if e == 
c.ErrProfileCommentsOutOfBounds || e == c.ErrEnableEmbedsOutOfBounds { return c.LocalError(e.Error(), w, r, u) } else if e != nil { return c.InternalError(e, w, r) } } http.Redirect(w, r, "/user/edit/privacy/?updated=1", http.StatusSeeOther) return nil } func AccountEditEmail(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { accountEditHead("account_email", w, r, u, h) emails, err := c.Emails.GetEmailsByUser(u) if err != nil { return c.InternalError(err, w, r) } // Was this site migrated from another forum software? Most of them don't have multiple emails for a single user. // This also applies when the admin switches site.EnableEmails on after having it off for a while. if len(emails) == 0 && u.Email != "" { emails = append(emails, c.Email{UserID: u.ID, Email: u.Email, Validated: false, Primary: true}) } if !c.Site.EnableEmails { h.AddNotice("account_mail_disabled") } if r.FormValue("verified") == "1" { h.AddNotice("account_mail_verify_success") } pi := c.Account{h, "edit_emails", "account_own_edit_email", c.EmailListPage{h, emails}} return renderTemplate("account", w, r, h, pi) } func AccountEditEmailAddSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { email := c.SanitiseSingleLine(r.PostFormValue("email")) canonEmail := c.CanonEmail(email) _, err := c.Emails.Get(u, canonEmail) if err == nil { return c.LocalError("You have already added this email.", w, r, u) } else if err != sql.ErrNoRows && err != nil { return c.InternalError(err, w, r) } var token string if c.Site.EnableEmails { token, err = c.GenerateSafeString(80) if err != nil { return c.InternalError(err, w, r) } } err = c.Emails.Add(u.ID, canonEmail, token) if err != nil { return c.InternalError(err, w, r) } if c.Site.EnableEmails { err = c.SendValidationEmail(u.Name, canonEmail, token) if err != nil { return c.LocalError(p.GetErrorPhrase("register_email_fail"), w, r, u) } } http.Redirect(w, r, "/user/edit/email/?added=1", http.StatusSeeOther) return nil 
} func AccountEditEmailRemoveSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { headerLite, _ := c.SimpleUserCheck(w, r, u) email := c.SanitiseSingleLine(r.PostFormValue("email")) canonEmail := c.CanonEmail(email) // Quick and dirty check _, err := c.Emails.Get(u, canonEmail) if err == sql.ErrNoRows { return c.LocalError("This email isn't set on this user.", w, r, u) } else if err != nil { return c.InternalError(err, w, r) } if headerLite.Settings["activation_type"] == 2 && u.Email == canonEmail { return c.LocalError("You can't remove your primary email when mandatory email activation is enabled.", w, r, u) } err = c.Emails.Delete(u.ID, canonEmail) if err != nil { return c.InternalError(err, w, r) } http.Redirect(w, r, "/user/edit/email/?removed=1", http.StatusSeeOther) return nil } // TODO: Should we make this an AnonAction so someone can do this without being logged in? func AccountEditEmailTokenSubmit(w http.ResponseWriter, r *http.Request, user *c.User, token string) c.RouteError { header, ferr := c.UserCheck(w, r, user) if ferr != nil { return ferr } if !c.Site.EnableEmails { http.Redirect(w, r, "/user/edit/email/", http.StatusSeeOther) return nil } targetEmail := c.Email{UserID: user.ID} emails, err := c.Emails.GetEmailsByUser(user) if err == sql.ErrNoRows { return c.LocalError("A verification email was never sent for you!", w, r, user) } else if err != nil { // TODO: Better error if we don't have an email or it's not in the emails table for some reason return c.LocalError("You are not logged in", w, r, user) } for _, email := range emails { if subtle.ConstantTimeCompare([]byte(email.Token), []byte(token)) == 1 { targetEmail = email } } if len(emails) == 0 { return c.LocalError("A verification email was never sent for you!", w, r, user) } if targetEmail.Token == "" { return c.LocalError("That's not a valid token!", w, r, user) } err = c.Emails.VerifyEmail(user.Email) if err != nil { return c.InternalError(err, w, r) } // If Email 
Activation is on, then activate the account while we're here if header.Settings["activation_type"] == 2 { if err = user.Activate(); err != nil { return c.InternalError(err, w, r) } u2, err := c.Users.Get(user.ID) if err == sql.ErrNoRows { return c.LocalError("The user no longer exists.", w, r, user) } else if err != nil { return c.InternalError(err, w, r) } err = c.GroupPromotions.PromoteIfEligible(u2, u2.Level, u2.Posts, u2.CreatedAt) if err != nil { return c.InternalError(err, w, r) } u2.CacheRemove() } http.Redirect(w, r, "/user/edit/email/?verified=1", http.StatusSeeOther) return nil } func AccountLogins(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { accountEditHead("account_logins", w, r, u, h) page, _ := strconv.Atoi(r.FormValue("page")) perPage := 12 offset, page, lastPage := c.PageOffset(c.LoginLogs.CountUser(u.ID), page, perPage) logs, err := c.LoginLogs.GetOffset(u.ID, offset, perPage) if err != nil { return c.InternalError(err, w, r) } pageList := c.Paginate(page, lastPage, 5) pi := c.Account{h, "logins", "account_logins", c.AccountLoginsPage{h, logs, c.Paginator{pageList, page, lastPage}}} return renderTemplate("account", w, r, h, pi) } func AccountBlocked(w http.ResponseWriter, r *http.Request, user *c.User, h *c.Header) c.RouteError { accountEditHead("account_blocked", w, r, user, h) page, _ := strconv.Atoi(r.FormValue("page")) perPage := 12 offset, page, lastPage := c.PageOffset(c.UserBlocks.BlockedByCount(user.ID), page, perPage) uids, err := c.UserBlocks.BlockedByOffset(user.ID, offset, perPage) if err != nil { return c.InternalError(err, w, r) } var blocks []*c.User for _, uid := range uids { u, err := c.Users.Get(uid) if err != nil { return c.InternalError(err, w, r) } blocks = append(blocks, u) } pageList := c.Paginate(page, lastPage, 5) pi := c.Account{h, "blocked", "account_blocked", c.AccountBlocksPage{h, blocks, c.Paginator{pageList, page, lastPage}}} return renderTemplate("account", w, r, h, pi) } func 
LevelList(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { h.Title = p.GetTitlePhrase("account_level_list") fScores := c.GetLevels(21) levels := make([]c.LevelListItem, len(fScores)) for i, fScore := range fScores { if i == 0 { continue } var status string if u.Level > (i - 1) { status = "complete" } else if u.Level < (i - 1) { status = "future" } else { status = "inprogress" } iScore := int(math.Ceil(fScore)) //perc := int(math.Ceil((fScore/float64(u.Score))*100)) * 2 perc := int(math.Ceil((float64(u.Score) / fScore) * 100)) levels[i] = c.LevelListItem{i - 1, iScore, status, perc} } return renderTemplate("level_list", w, r, h, c.LevelListPage{h, levels[1:]}) } func Alerts(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { return nil } func AccountPasswordReset(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, r, u) } if !c.Site.EnableEmails { return c.LocalError(p.GetNoticePhrase("account_mail_disabled"), w, r, u) } if r.FormValue("email_sent") == "1" { h.AddNotice("password_reset_email_sent") } h.Title = p.GetTitlePhrase("password_reset") return renderTemplate("password_reset", w, r, h, c.Page{h, tList, nil}) } // TODO: Ratelimit this func AccountPasswordResetSubmit(w http.ResponseWriter, r *http.Request, user *c.User) c.RouteError { if user.Loggedin { return c.LocalError("You're already logged in.", w, r, user) } if !c.Site.EnableEmails { return c.LocalError(p.GetNoticePhrase("account_mail_disabled"), w, r, user) } username := r.PostFormValue("username") tuser, err := c.Users.GetByName(username) if err == sql.ErrNoRows { // Someone trying to stir up trouble? 
http.Redirect(w, r, "/accounts/password-reset/?email_sent=1", http.StatusSeeOther) return nil } else if err != nil { return c.InternalError(err, w, r) } token, err := c.GenerateSafeString(80) if err != nil { return c.InternalError(err, w, r) } // TODO: Move these queries somewhere else var disc string err = qgen.NewAcc().Select("password_resets").Columns("createdAt").DateCutoff("createdAt", 1, "hour").QueryRow().Scan(&disc) if err != nil && err != sql.ErrNoRows { return c.InternalError(err, w, r) } if err == nil { return c.LocalError("You can only send a password reset email for a user once an hour", w, r, user) } count, err := qgen.NewAcc().Count("password_resets").DateCutoff("createdAt", 6, "hour").Total() if err != nil && err != sql.ErrNoRows { return c.InternalError(err, w, r) } if count >= 3 { return c.LocalError("You can only send a password reset email for a user three times every six hours", w, r, user) } count, err = qgen.NewAcc().Count("password_resets").DateCutoff("createdAt", 12, "hour").Total() if err != nil && err != sql.ErrNoRows { return c.InternalError(err, w, r) } if count >= 4 { return c.LocalError("You can only send a password reset email for a user four times every twelve hours", w, r, user) } err = c.PasswordResetter.Create(tuser.Email, tuser.ID, token) if err != nil { return c.InternalError(err, w, r) } var s string if c.Config.SslSchema { s = "s" } err = c.SendEmail(tuser.Email, p.GetTmplPhrase("password_reset_subject"), p.GetTmplPhrasef("password_reset_body", tuser.Name, "http"+s+"://"+c.Site.URL+"/accounts/password-reset/token/?uid="+strconv.Itoa(tuser.ID)+"&token="+token)) if err != nil { return c.LocalError(p.GetErrorPhrase("password_reset_email_fail"), w, r, user) } http.Redirect(w, r, "/accounts/password-reset/?email_sent=1", http.StatusSeeOther) return nil } func AccountPasswordResetToken(w http.ResponseWriter, r *http.Request, u *c.User, h *c.Header) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, 
r, u) } // TODO: Find a way to flash this notice /*if r.FormValue("token_verified") == "1" { h.AddNotice("password_reset_token_token_verified") }*/ uid, err := strconv.Atoi(r.FormValue("uid")) if err != nil { return c.LocalError("Invalid uid", w, r, u) } token := r.FormValue("token") err = c.PasswordResetter.ValidateToken(uid, token) if err == sql.ErrNoRows || err == c.ErrBadResetToken { return c.LocalError("This reset token has expired.", w, r, u) } else if err != nil { return c.InternalError(err, w, r) } _, err = c.MFAstore.Get(uid) if err != sql.ErrNoRows && err != nil { return c.InternalError(err, w, r) } mfa := err != sql.ErrNoRows h.Title = p.GetTitlePhrase("password_reset_token") return renderTemplate("password_reset_token", w, r, h, c.ResetPage{h, uid, html.EscapeString(token), mfa}) } func AccountPasswordResetTokenSubmit(w http.ResponseWriter, r *http.Request, u *c.User) c.RouteError { if u.Loggedin { return c.LocalError("You're already logged in.", w, r, u) } uid, err := strconv.Atoi(r.FormValue("uid")) if err != nil { return c.LocalError("Invalid uid", w, r, u) } if !c.Users.Exists(uid) { return c.LocalError("This reset token has expired.", w, r, u) } err = c.PasswordResetter.ValidateToken(uid, r.FormValue("token")) if err == sql.ErrNoRows || err == c.ErrBadResetToken { return c.LocalError("This reset token has expired.", w, r, u) } else if err != nil { return c.InternalError(err, w, r) } mfaToken := r.PostFormValue("mfa_token") err = c.Auth.ValidateMFAToken(mfaToken, uid) if err != nil && err != c.ErrNoMFAToken { return c.LocalError(err.Error(), w, r, u) } newPassword := r.PostFormValue("password") confirmPassword := r.PostFormValue("confirm_password") if newPassword != confirmPassword { return c.LocalError("The two passwords don't match.", w, r, u) } c.SetPassword(uid, newPassword) // TODO: Limited version of WeakPassword() err = c.PasswordResetter.FlushTokens(uid) if err != nil { return c.InternalError(err, w, r) } // Log the user out as a safety 
precaution c.Auth.ForceLogout(uid) //http.Redirect(w, r, "/accounts/password-reset/token/?token_verified=1", http.StatusSeeOther) http.Redirect(w, r, "/", http.StatusSeeOther) return nil }<|fim▁end|>
accountEditHead("account", w, r, u, h)
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import config from "./webpack.config"; export default { key: "uneven-slashes", label: "Sitemap with uneven slashes in the base URL and paths",<|fim▁hole|> config };<|fim▁end|>
<|file_name|>train.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved. import time import os.path from collections import OrderedDict, namedtuple import gevent import flask from digits import device_query from digits.task import Task from digits.utils import subclass, override # NOTE: Increment this everytime the picked object changes PICKLE_VERSION = 2 # Used to store network outputs NetworkOutput = namedtuple('NetworkOutput', ['kind', 'data']) @subclass class TrainTask(Task): """ Defines required methods for child classes """ def __init__(self, dataset, train_epochs, snapshot_interval, learning_rate, lr_policy, **kwargs): """ Arguments: dataset -- a DatasetJob containing the dataset for this model train_epochs -- how many epochs of training data to train on snapshot_interval -- how many epochs between taking a snapshot learning_rate -- the base learning rate lr_policy -- a hash of options to be used for the learning rate policy Keyword arguments: gpu_count -- how many GPUs to use for training (integer) selected_gpus -- a list of GPU indexes to be used for training batch_size -- if set, override any network specific batch_size with this value val_interval -- how many epochs between validating the model with an epoch of validation data pretrained_model -- filename for a model to use for fine-tuning crop_size -- crop each image down to a square of this size use_mean -- subtract the dataset's mean file or mean pixel random_seed -- optional random seed """ self.gpu_count = kwargs.pop('gpu_count', None) self.selected_gpus = kwargs.pop('selected_gpus', None) self.batch_size = kwargs.pop('batch_size', None) self.val_interval = kwargs.pop('val_interval', None) self.pretrained_model = kwargs.pop('pretrained_model', None) self.crop_size = kwargs.pop('crop_size', None) self.use_mean = kwargs.pop('use_mean', None) self.random_seed = kwargs.pop('random_seed', None) self.solver_type = kwargs.pop('solver_type', None) self.shuffle = 
kwargs.pop('shuffle', None) self.network = kwargs.pop('network', None) self.framework_id = kwargs.pop('framework_id', None) super(TrainTask, self).__init__(**kwargs) self.pickver_task_train = PICKLE_VERSION self.dataset = dataset self.train_epochs = train_epochs self.snapshot_interval = snapshot_interval self.learning_rate = learning_rate self.lr_policy = lr_policy self.current_epoch = 0 self.snapshots = [] # data gets stored as dicts of lists (for graphing) self.train_outputs = OrderedDict() self.val_outputs = OrderedDict() def __getstate__(self): state = super(TrainTask, self).__getstate__() if 'dataset' in state: del state['dataset'] if 'snapshots' in state: del state['snapshots'] if '_labels' in state: del state['_labels'] if '_gpu_socketio_thread' in state: del state['_gpu_socketio_thread'] return state def __setstate__(self, state): if state['pickver_task_train'] < 2: state['train_outputs'] = OrderedDict() state['val_outputs'] = OrderedDict() tl = state.pop('train_loss_updates', None) vl = state.pop('val_loss_updates', None) va = state.pop('val_accuracy_updates', None) lr = state.pop('lr_updates', None) if tl: state['train_outputs']['epoch'] = NetworkOutput('Epoch', [x[0] for x in tl]) state['train_outputs']['loss'] = NetworkOutput('SoftmaxWithLoss', [x[1] for x in tl]) state['train_outputs']['learning_rate'] = NetworkOutput('LearningRate', [x[1] for x in lr]) if vl: state['val_outputs']['epoch'] = NetworkOutput('Epoch', [x[0] for x in vl]) if va: state['val_outputs']['accuracy'] = NetworkOutput('Accuracy', [x[1]/100 for x in va]) state['val_outputs']['loss'] = NetworkOutput('SoftmaxWithLoss', [x[1] for x in vl]) if state['use_mean'] == True: state['use_mean'] = 'pixel' elif state['use_mean'] == False: state['use_mean'] = 'none' state['pickver_task_train'] = PICKLE_VERSION super(TrainTask, self).__setstate__(state) self.snapshots = [] self.dataset = None @override def offer_resources(self, resources): if 'gpus' not in resources: return None if not 
resources['gpus']: return {} # don't use a GPU at all<|fim▁hole|> for resource in resources['gpus']: if resource.remaining() >= 1: identifiers.append(resource.identifier) if len(identifiers) == self.gpu_count: break if len(identifiers) == self.gpu_count: return {'gpus': [(i, 1) for i in identifiers]} else: return None elif self.selected_gpus is not None: all_available = True for i in self.selected_gpus: available = False for gpu in resources['gpus']: if i == gpu.identifier: if gpu.remaining() >= 1: available = True break if not available: all_available = False break if all_available: return {'gpus': [(i, 1) for i in self.selected_gpus]} else: return None return None @override def before_run(self): if 'gpus' in self.current_resources: # start a thread which sends SocketIO updates about GPU utilization self._gpu_socketio_thread = gevent.spawn( self.gpu_socketio_updater, [identifier for (identifier, value) in self.current_resources['gpus']] ) def gpu_socketio_updater(self, gpus): """ This thread sends SocketIO messages about GPU utilization to connected clients Arguments: gpus -- a list of identifiers for the GPUs currently being used """ from digits.webapp import app, socketio devices = [] for index in gpus: device = device_query.get_device(index) if device: devices.append((index, device)) if not devices: raise RuntimeError('Failed to load gpu information for "%s"' % gpus) # this thread continues until killed in after_run() while True: data = [] for index, device in devices: update = {'name': device.name, 'index': index} nvml_info = device_query.get_nvml_info(index) if nvml_info is not None: update.update(nvml_info) data.append(update) with app.app_context(): html = flask.render_template('models/gpu_utilization.html', data = data) socketio.emit('task update', { 'task': self.html_id(), 'update': 'gpu_utilization', 'html': html, }, namespace='/jobs', room=self.job_id, ) gevent.sleep(1) def send_progress_update(self, epoch): """ Sends socketio message about the current 
progress """ if self.current_epoch == epoch: return self.current_epoch = epoch self.progress = epoch/self.train_epochs self.emit_progress_update() def save_train_output(self, *args): """ Save output to self.train_outputs """ from digits.webapp import socketio if not self.save_output(self.train_outputs, *args): return if self.last_train_update and (time.time() - self.last_train_update) < 5: return self.last_train_update = time.time() self.logger.debug('Training %s%% complete.' % round(100 * self.current_epoch/self.train_epochs,2)) # loss graph data data = self.combined_graph_data() if data: socketio.emit('task update', { 'task': self.html_id(), 'update': 'combined_graph', 'data': data, }, namespace='/jobs', room=self.job_id, ) if data['columns']: # isolate the Loss column data for the sparkline graph_data = data['columns'][0][1:] socketio.emit('task update', { 'task': self.html_id(), 'job_id': self.job_id, 'update': 'combined_graph', 'data': graph_data, }, namespace='/jobs', room='job_management', ) # lr graph data data = self.lr_graph_data() if data: socketio.emit('task update', { 'task': self.html_id(), 'update': 'lr_graph', 'data': data, }, namespace='/jobs', room=self.job_id, ) def save_val_output(self, *args): """ Save output to self.val_outputs """ from digits.webapp import socketio if not self.save_output(self.val_outputs, *args): return # loss graph data data = self.combined_graph_data() if data: socketio.emit('task update', { 'task': self.html_id(), 'update': 'combined_graph', 'data': data, }, namespace='/jobs', room=self.job_id, ) def save_output(self, d, name, kind, value): """ Save output to self.train_outputs or self.val_outputs Returns true if all outputs for this epoch have been added Arguments: d -- the dictionary where the output should be stored name -- name of the output (e.g. "accuracy") kind -- the type of outputs (e.g. "Accuracy") value -- value for this output (e.g. 
0.95) """ # don't let them be unicode name = str(name) kind = str(kind) # update d['epoch'] if 'epoch' not in d: d['epoch'] = NetworkOutput('Epoch', [self.current_epoch]) elif d['epoch'].data[-1] != self.current_epoch: d['epoch'].data.append(self.current_epoch) if name not in d: d[name] = NetworkOutput(kind, []) epoch_len = len(d['epoch'].data) name_len = len(d[name].data) # save to back of d[name] if name_len > epoch_len: raise Exception('Received a new output without being told the new epoch') elif name_len == epoch_len: # already exists if isinstance(d[name].data[-1], list): d[name].data[-1].append(value) else: d[name].data[-1] = [d[name].data[-1], value] elif name_len == epoch_len - 1: # expected case d[name].data.append(value) else: # we might have missed one for _ in xrange(epoch_len - name_len - 1): d[name].data.append(None) d[name].data.append(value) for key in d: if key not in ['epoch', 'learning_rate']: if len(d[key].data) != epoch_len: return False return True @override def after_run(self): if hasattr(self, '_gpu_socketio_thread'): self._gpu_socketio_thread.kill() def detect_snapshots(self): """ Populate self.snapshots with snapshots that exist on disk Returns True if at least one usable snapshot is found """ return False def snapshot_list(self): """ Returns an array of arrays for creating an HTML select field """ return [[s[1], 'Epoch #%s' % s[1]] for s in reversed(self.snapshots)] def est_next_snapshot(self): """ Returns the estimated time in seconds until the next snapshot is taken """ return None def can_view_weights(self): """ Returns True if this Task can visualize the weights of each layer for a given model """ raise NotImplementedError() def view_weights(self, model_epoch=None, layers=None): """ View the weights for a specific model and layer[s] """ return None def can_infer_one(self): """ Returns True if this Task can run inference on one input """ raise NotImplementedError() def can_view_activations(self): """ Returns True if this Task can 
visualize the activations of a model after inference """ raise NotImplementedError() def infer_one(self, data, model_epoch=None, layers=None): """ Run inference on one input """ return None def can_infer_many(self): """ Returns True if this Task can run inference on many inputs """ raise NotImplementedError() def infer_many(self, data, model_epoch=None): """ Run inference on many inputs """ return None def get_labels(self): """ Read labels from labels_file and return them in a list """ # The labels might be set already if hasattr(self, '_labels') and self._labels and len(self._labels) > 0: return self._labels assert hasattr(self.dataset, 'labels_file'), 'labels_file not set' assert self.dataset.labels_file, 'labels_file not set' assert os.path.exists(self.dataset.path(self.dataset.labels_file)), 'labels_file does not exist' labels = [] with open(self.dataset.path(self.dataset.labels_file)) as infile: for line in infile: label = line.strip() if label: labels.append(label) assert len(labels) > 0, 'no labels in labels_file' self._labels = labels return self._labels def lr_graph_data(self): """ Returns learning rate data formatted for a C3.js graph Keyword arguments: """ if not self.train_outputs or 'epoch' not in self.train_outputs or 'learning_rate' not in self.train_outputs: return None # return 100-200 values or fewer stride = max(len(self.train_outputs['epoch'].data)/100,1) e = ['epoch'] + self.train_outputs['epoch'].data[::stride] lr = ['lr'] + self.train_outputs['learning_rate'].data[::stride] return { 'columns': [e, lr], 'xs': { 'lr': 'epoch' }, 'names': { 'lr': 'Learning Rate' }, } def combined_graph_data(self, cull=True): """ Returns all train/val outputs in data for one C3.js graph Keyword arguments: cull -- if True, cut down the number of data points returned to a reasonable size """ data = { 'columns': [], 'xs': {}, 'axes': {}, 'names': {}, } added_train_data = False added_val_data = False if self.train_outputs and 'epoch' in self.train_outputs: if cull: # 
max 200 data points stride = max(len(self.train_outputs['epoch'].data)/100,1) else: # return all data stride = 1 for name, output in self.train_outputs.iteritems(): if name not in ['epoch', 'learning_rate']: col_id = '%s-train' % name data['xs'][col_id] = 'train_epochs' data['names'][col_id] = '%s (train)' % name if 'accuracy' in output.kind.lower(): data['columns'].append([col_id] + [100*x for x in output.data[::stride]]) data['axes'][col_id] = 'y2' else: data['columns'].append([col_id] + output.data[::stride]) added_train_data = True if added_train_data: data['columns'].append(['train_epochs'] + self.train_outputs['epoch'].data[::stride]) if self.val_outputs and 'epoch' in self.val_outputs: if cull: # max 200 data points stride = max(len(self.val_outputs['epoch'].data)/100,1) else: # return all data stride = 1 for name, output in self.val_outputs.iteritems(): if name not in ['epoch']: col_id = '%s-val' % name data['xs'][col_id] = 'val_epochs' data['names'][col_id] = '%s (val)' % name if 'accuracy' in output.kind.lower(): data['columns'].append([col_id] + [100*x for x in output.data[::stride]]) data['axes'][col_id] = 'y2' else: data['columns'].append([col_id] + output.data[::stride]) added_val_data = True if added_val_data: data['columns'].append(['val_epochs'] + self.val_outputs['epoch'].data[::stride]) if added_train_data: return data else: # return None if only validation data exists # helps with ordering of columns in graph return None # return id of framework used for training @override def get_framework_id(self): return self.framework_id def get_model_files(self): """ return path to model file """ raise NotImplementedError() def get_network_desc(self): """ return text description of model """ raise NotImplementedError()<|fim▁end|>
if self.gpu_count is not None: identifiers = []
<|file_name|>AllTests.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright (c) 2012 Max Hohenegger. * All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse * Public License v1.0 which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Max Hohenegger - initial implementation ******************************************************************************/ package eu.hohenegger.c0ffee_tips.tests; import org.junit.runner.RunWith;<|fim▁hole|>import eu.hohenegger.c0ffee_tips.TestBasic; @RunWith(Suite.class) @SuiteClasses({TestBasic.class}) public class AllTests { }<|fim▁end|>
import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses;
<|file_name|>Epos_AD.py<|end_file_name|><|fim▁begin|># cell definition # name = 'Epos_AD'<|fim▁hole|>inp = 0 outp = 1 parameters = dict() #parametriseerbare cell properties = {'Device ID': ' 0x01', 'Channel [0/1]': ' 0', 'name': 'epos_areadBlk'} #voor netlisten #view variables: iconSource = 'AD' views = {'icon':iconSource}<|fim▁end|>
# libname = 'can'
<|file_name|>optionSelect.js<|end_file_name|><|fim▁begin|>var instance = null;<|fim▁hole|> this.options = options; instance.instanceData.set(this); } OptionSelect.prototype.open = function() { $(this.containerId).show(); }; Template.optionSelect.onCreated(function() { this.instanceData = new ReactiveVar({}); instance = this; }); Template.optionSelect.helpers({ options: function() { return instance.instanceData.get().options; } }); Template.optionSelect.events({ 'click .option-select__selection': function() { var obj = instance.instanceData.get(); obj.optionSelect(this); $(obj.containerId).hide(); }, 'click #option-select-close': function() { $(instance.instanceData.get().containerId).hide(); } });<|fim▁end|>
OptionSelect = function OptionSelect(optionSelectFunction, id, options) { this.optionSelect = optionSelectFunction; this.containerId = id;
<|file_name|>project.py<|end_file_name|><|fim▁begin|>import random import string from model.project import Project def rand_string(prefix, maxlen): chars = string.ascii_letters + string.digits + string.punctuation + " " * 10 return prefix + "".join([random.choice(chars) for i in range(random.randrange(maxlen))])<|fim▁hole|> def generate_project_data(n): return [Project(name=rand_string("name", 10), status=random.choice(("development", "release", "stable", "obsolete")), inherit=random.choice((True, False)), view=random.choice(("public", "private")), description=rand_string("descr", 30)) for i in range(int(n))] test_data = generate_project_data(2)<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>try: from setuptools import setup<|fim▁hole|> from distutils.core import setup setup( name='lkd', version='2', packages=['lkd', 'tests'], author='Karan Goel', author_email='[email protected]', maintainer='Karan Goel', maintainer_email='[email protected]', url='http://www.goel.im/', license='MIT License', long_description='Python wrapper for lkd.to API.', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Intended Audience :: Developers', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Topic :: Internet', 'Topic :: Internet :: WWW/HTTP :: Browsers', 'Topic :: Internet :: WWW/HTTP :: Indexing/Search', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], )<|fim▁end|>
except ImportError:
<|file_name|>KDirWatch.py<|end_file_name|><|fim▁begin|># encoding: utf-8 # module PyKDE4.kio # from /usr/lib/python2.7/dist-packages/PyKDE4/kio.so # by generator 1.135 # no doc # imports import PyKDE4.kdeui as __PyKDE4_kdeui import PyQt4.QtCore as __PyQt4_QtCore import PyQt4.QtGui as __PyQt4_QtGui class KDirWatch(__PyQt4_QtCore.QObject): # no doc def addDir(self, *args, **kwargs): # real signature unknown pass def addFile(self, *args, **kwargs): # real signature unknown pass def contains(self, *args, **kwargs): # real signature unknown pass def created(self, *args, **kwargs): # real signature unknown pass def ctime(self, *args, **kwargs): # real signature unknown pass def deleted(self, *args, **kwargs): # real signature unknown pass def dirty(self, *args, **kwargs): # real signature unknown pass def exists(self, *args, **kwargs): # real signature unknown pass def internalMethod(self, *args, **kwargs): # real signature unknown pass def isStopped(self, *args, **kwargs): # real signature unknown<|fim▁hole|> pass def removeFile(self, *args, **kwargs): # real signature unknown pass def restartDirScan(self, *args, **kwargs): # real signature unknown pass def self(self, *args, **kwargs): # real signature unknown pass def setCreated(self, *args, **kwargs): # real signature unknown pass def setDeleted(self, *args, **kwargs): # real signature unknown pass def setDirty(self, *args, **kwargs): # real signature unknown pass def startScan(self, *args, **kwargs): # real signature unknown pass def statistics(self, *args, **kwargs): # real signature unknown pass def stopDirScan(self, *args, **kwargs): # real signature unknown pass def stopScan(self, *args, **kwargs): # real signature unknown pass def __init__(self, *args, **kwargs): # real signature unknown pass DNotify = 2 FAM = 0 INotify = 1 Method = None # (!) real value is '' Stat = 3 WatchDirOnly = 0 WatchFiles = 1 WatchMode = None # (!) real value is '' WatchModes = None # (!) real value is '' WatchSubDirs = 2<|fim▁end|>
pass def removeDir(self, *args, **kwargs): # real signature unknown
<|file_name|>rcp_log_linter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import optparse class RcpLogValidator(object): MAX_ERRORS = 25 MAX_INTERVAL_STEP_MS = 3000 MIN_INTERVAL_STEP_MS = 1 def __init__(self): self.interval = 0 self.columns = 0 def _count_commas(self, line): return line.count(',') def _validate_column_count(self, line, line_no): columns = self._count_commas(line) if columns == self.columns: return True # If here we failed. Print msg. print(("Error: Column count mismatch on line {}. Expected {}, " + "found {}").format(line_no, self.columns, columns)) return False def _validate_interval_time(self, line, line_no): try: interval = int(line.split(',', 1)[0]) except ValueError: interval = 0 # Deal with initialization. if self.interval == 0: self.interval = interval return True interval_min = self.interval + self.MIN_INTERVAL_STEP_MS interval_max = self.interval + self.MAX_INTERVAL_STEP_MS if interval_min <= interval and interval <= interval_max: self.interval = interval return True # If here we failed. Print msg. print(("Error: Inconsistent interval value on line {}. Expected " + "{} - {}, found {}").format(line_no, interval_min, \ interval_max, interval)) return False def _validate_line(self, line, line_no='?'): # Check if its a header line. 
if '|' in line: self.columns = self._count_commas(line) return True return \ self._validate_column_count(line, line_no) and \ self._validate_interval_time(line, line_no) def validate_logfile(self, log_path): errors = 0 line_no = 0 with open(log_path, 'r') as fil: for line in fil: line_no += 1 if errors >= self.MAX_ERRORS: break if not self._validate_line(line, line_no): errors += 1 print() print("=== Final Stats ===") print(" Lines Checked: {}".format(line_no)) print(" Errors Found: {}".format(errors)) print() def clean_logfile(self, input_path, output_path): errors = 0 line_no = 0 with open(input_path, 'r') as file_in: with open(output_path, 'w') as file_out: for line in file_in: line_no += 1 if not self._validate_line(line, line_no): errors += 1 else: file_out.write(line) print() print("=== Processing Stats ===") print(" Lines Checked: {}".format(line_no)) print(" Lines Pruned: {}".format(errors)) print() def main(): parser = optparse.OptionParser() parser.add_option('-f', '--filename', dest="log_file", help="Path of log file to process") parser.add_option('-o', '--output', dest="out_file", help="Path to output file of processed data") options, remainder = parser.parse_args() if not options.log_file:<|fim▁hole|> rcplv = RcpLogValidator() if not options.out_file: rcplv.validate_logfile(options.log_file) else: rcplv.clean_logfile(options.log_file, options.out_file) if __name__ == '__main__': main()<|fim▁end|>
parser.error("No log file path given")
<|file_name|>_color.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators class ColorValidator(_plotly_utils.basevalidators.ColorValidator): def __init__( self, plotly_name="color", parent_name="surface.hoverlabel.font", **kwargs ): super(ColorValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, array_ok=kwargs.pop("array_ok", True),<|fim▁hole|> )<|fim▁end|>
edit_type=kwargs.pop("edit_type", "none"), **kwargs
<|file_name|>validate.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Matthew Baird // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|>// limitations under the License. package core import ( "encoding/json" "fmt" "github.com/mattbaird/elastigo/api" ) // Validate allows a user to validate a potentially expensive query without executing it. // see http://www.elasticsearch.org/guide/reference/api/validate.html func Validate(index string, _type string, args map[string]interface{}) (api.BaseResponse, error) { var url string var retval api.BaseResponse if len(_type) > 0 { url = fmt.Sprintf("/%s/%s/_validate/", index, _type) } else { url = fmt.Sprintf("/%s/_validate/", index) } body, err := api.DoCommand("GET", url, args, nil) if err != nil { return retval, err } if err == nil { // marshall into json jsonErr := json.Unmarshal(body, &retval) if jsonErr != nil { return retval, jsonErr } } return retval, err } type Validation struct { Valid bool `json:"valid"` Shards api.Status `json:"_shards"` Explainations []Explaination `json:"explanations,omitempty"` } type Explaination struct { Index string `json:"index"` Valid bool `json:"valid"` Error string `json:"error"` }<|fim▁end|>
// Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and
<|file_name|>plot_evoked_topomap_delayed_ssp.py<|end_file_name|><|fim▁begin|>""" =============================================== Create topographic ERF maps in delayed SSP mode =============================================== This script shows how to apply SSP projectors delayed, that is, at the evoked stage. This is particularly useful to support decisions related to the trade-off between denoising and preserving signal. In this example we demonstrate how to use topographic maps for delayed SSP application. """ # Authors: Denis Engemann <[email protected]> # Christian Brodbeck <[email protected]> # Alexandre Gramfort <[email protected]> # # License: BSD (3-clause) import numpy as np import mne from mne import io from mne.datasets import sample print(__doc__) <|fim▁hole|>data_path = sample.data_path() ############################################################################### # Set parameters raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif' event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif' ecg_fname = data_path + '/MEG/sample/sample_audvis_ecg_proj.fif' event_id, tmin, tmax = 1, -0.2, 0.5 # Setup for reading the raw data raw = io.Raw(raw_fname) events = mne.read_events(event_fname) # delete EEG projections (we know it's the last one) raw.del_proj(-1) # add ECG projs for magnetometers [raw.add_proj(p) for p in mne.read_proj(ecg_fname) if 'axial' in p['desc']] # pick magnetometer channels picks = mne.pick_types(raw.info, meg='mag', stim=False, eog=True, include=[], exclude='bads') # We will make of the proj `delayed` option to # interactively select projections at the evoked stage. # more information can be found in the example/plot_evoked_delayed_ssp.py epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), reject=dict(mag=4e-12), proj='delayed') evoked = epochs.average() # average epochs and get an Evoked dataset. 
############################################################################### # Interactively select / deselect the SSP projection vectors # set time instants in seconds (from 50 to 150ms in a step of 10ms) times = np.arange(0.05, 0.15, 0.01) evoked.plot_topomap(times, proj='interactive') # Hint: the same works for evoked.plot and viz.plot_topo<|fim▁end|>
<|file_name|>orm.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding:utf-8 """ Database operation module. This module is independent with web module. """ import time, logging import db class Field(object): _count = 0 def __init__(self, **kw): self.name = kw.get('name', None) self.ddl = kw.get('ddl', '') self._default = kw.get('default', None) self.comment = kw.get('comment', '') self.nullable = kw.get('nullable', False) self.updatable = kw.get('updatable', True) self.insertable = kw.get('insertable', True) self.unique_key = kw.get('unique_key', False) self.non_unique_key = kw.get('key', False) self.primary_key = kw.get('primary_key', False) self._order = Field._count Field._count += 1 @property def default(self): d = self._default return d() if callable(d) else d def __str__(self): s = ['<%s:%s,%s,default(%s),' % (self.__class__.__name__, self.name, self.ddl, self._default)] self.nullable and s.append('N') self.updatable and s.append('U') self.insertable and s.append('I') s.append('>') return ''.join(s) class StringField(Field): def __init__(self, **kw): if not 'default' in kw: kw['default'] = '' if not 'ddl' in kw: kw['ddl'] = 'varchar(255)' super(StringField, self).__init__(**kw) class IntegerField(Field): def __init__(self, **kw): if not 'default' in kw: kw['default'] = 0 if not 'ddl' in kw: kw['ddl'] = 'bigint' super(IntegerField, self).__init__(**kw) class FloatField(Field): def __init__(self, **kw): if not 'default' in kw: kw['default'] = 0.0 if not 'ddl' in kw: kw['ddl'] = 'real' super(FloatField, self).__init__(**kw) class BooleanField(Field): def __init__(self, **kw): if not 'default' in kw: kw['default'] = False if not 'ddl' in kw: kw['ddl'] = 'bool' super(BooleanField, self).__init__(**kw) class TextField(Field): def __init__(self, **kw): if not 'default' in kw: kw['default'] = '' if not 'ddl' in kw: kw['ddl'] = 'text' super(TextField, self).__init__(**kw) class BlobField(Field): def __init__(self, **kw): if not 'default' in kw: kw['default'] 
= '' if not 'ddl' in kw: kw['ddl'] = 'blob' super(BlobField, self).__init__(**kw) class VersionField(Field): def __init__(self, name=None): super(VersionField, self).__init__(name=name, default=0, ddl='bigint') class DateTimeField(Field): def __init__(self, **kw): if 'ddl' not in kw: kw['ddl'] = 'datetime' super(DateTimeField, self).__init__(**kw) class DateField(Field): def __init__(self, **kw): if 'ddl' not in kw: kw['ddl'] = 'date' super(DateField, self).__init__(**kw) class EnumField(Field): def __init__(self, **kw): if 'ddl' not in kw: kw['ddl'] = 'enum' super(EnumField, self).__init__(**kw) _triggers = frozenset(['pre_insert', 'pre_update', 'pre_delete']) def _gen_sql(table_name, mappings): pk, unique_keys, keys = None, [], [] sql = ['-- generating SQL for %s:' % table_name, 'create table `%s` (' % table_name] for f in sorted(mappings.values(), lambda x, y: cmp(x._order, y._order)): if not hasattr(f, 'ddl'): raise StandardError('no ddl in field "%s".' % f) ddl = f.ddl nullable = f.nullable has_comment = not (f.comment == '') has_default = f._default is not None left = nullable and ' `%s` %s' % (f.name, ddl) or ' `%s` %s not null' % (f.name, ddl) mid = has_default and ' default \'%s\'' % f._default or None right = has_comment and ' comment \'%s\',' % f.comment or ',' line = mid and '%s%s%s' % (left, mid, right) or '%s%s' % (left, right) if f.primary_key: pk = f.name line = ' `%s` %s not null auto_increment,' % (f.name, ddl) elif f.unique_key: unique_keys.append(f.name) elif f.non_unique_key:<|fim▁hole|> keys.append(f.name) sql.append(line) for uk in unique_keys: sql.append(' unique key(`%s`),' % uk) for k in keys: sql.append(' key(`%s`),' % k) sql.append(' primary key(`%s`)' % pk) sql.append(')ENGINE=InnoDB DEFAULT CHARSET=utf8;') return '\n'.join(sql) class ModelMetaclass(type): """ Metaclass for model objects. 
""" def __new__(cls, name, bases, attrs): # skip base Model class: if name == 'Model': return type.__new__(cls, name, bases, attrs) # store all subclasses info: if not hasattr(cls, 'subclasses'): cls.subclasses = {} if not name in cls.subclasses: cls.subclasses[name] = name else: logging.warning('Redefine class: %s', name) logging.info('Scan ORMapping %s...', name) mappings = dict() primary_key = None for k, v in attrs.iteritems(): if isinstance(v, Field): if not v.name: v.name = k logging.debug('Found mapping: %s => %s' % (k, v)) # check duplicate primary key: if v.primary_key: if primary_key: raise TypeError('Cannot define more than 1 primary key in class: %s' % name) if v.updatable: # logging.warning('NOTE: change primary key to non-updatable.') v.updatable = False if v.nullable: # logging.warning('NOTE: change primary key to non-nullable.') v.nullable = False primary_key = v mappings[k] = v # check exist of primary key: if not primary_key: raise TypeError('Primary key not defined in class: %s' % name) for k in mappings.iterkeys(): attrs.pop(k) if '__table__' not in attrs: attrs['__table__'] = name.lower() attrs['__mappings__'] = mappings attrs['__primary_key__'] = primary_key attrs['__sql__'] = lambda self: _gen_sql(attrs['__table__'], mappings) for trigger in _triggers: if trigger not in attrs: attrs[trigger] = None return type.__new__(cls, name, bases, attrs) class Model(dict): """ Base class for ORM. >>> class User(Model): ... id = IntegerField(primary_key=True) ... name = StringField() ... email = StringField(updatable=False) ... passwd = StringField(default=lambda: '******') ... last_modified = FloatField() ... def pre_insert(self): ... 
self.last_modified = time.time() >>> u = User(id=10190, name='Michael', email='[email protected]') >>> r = u.insert() >>> u.email '[email protected]' >>> u.passwd '******' >>> u.last_modified > (time.time() - 2) True >>> f = User.get(10190) >>> f.name u'Michael' >>> f.email u'[email protected]' >>> f.email = '[email protected]' >>> r = f.update() # change email but email is non-updatable! >>> len(User.find_all()) 1 >>> g = User.get(10190) >>> g.email u'[email protected]' >>> r = g.mark_deleted() >>> len(db.select('select * from user where id=10190')) 0 >>> import json >>> print User().__sql__() -- generating SQL for user: create table `user` ( `id` bigint not null, `name` varchar(255) not null, `email` varchar(255) not null, `passwd` varchar(255) not null, `last_modified` real not null, primary key(`id`) ); """ __metaclass__ = ModelMetaclass def __init__(self, **kw): super(Model, self).__init__(**kw) def __getattr__(self, key): try: return self[key] except KeyError: raise AttributeError(r"'Dict' object has no attribute '%s'" % key) def __setattr__(self, key, value): self[key] = value @classmethod def get(cls, key_name, key_value): """ Get by primary/unique key. """ d = db.select_one('select * from %s where %s=?' % (cls.__table__, key_name), key_value) if not d: # TODO: change to logging? raise AttributeError("Can't find in [%s] where %s=[%s]" % (cls.__table__, key_name, key_value)) return cls(**d) if d else None @classmethod def find_first(cls, where, *args): """ Find by where clause and return one result. If multiple results found, only the first one returned. If no result found, return None. """ d = db.select_one('select * from %s %s' % (cls.__table__, where), *args) return cls(**d) if d else None @classmethod def find_all(cls, *args): """ Find all and return list. """ L = db.select('select * from `%s`' % cls.__table__) return [cls(**d) for d in L] @classmethod def find_by(cls, cols, where, *args): """ Find by where clause and return list. 
""" L = db.select('select %s from `%s` %s' % (cols, cls.__table__, where), *args) if cols.find(',') == -1 and cols.strip() != '*': return [d[0] for d in L] return [cls(**d) for d in L] @classmethod def count_all(cls): """ Find by 'select count(pk) from table' and return integer. """ return db.select_int('select count(`%s`) from `%s`' % (cls.__primary_key__.name, cls.__table__)) @classmethod def count_by(cls, where, *args): """ Find by 'select count(pk) from table where ... ' and return int. """ return db.select_int('select count(`%s`) from `%s` %s' % (cls.__primary_key__.name, cls.__table__, where), *args) def update(self): self.pre_update and self.pre_update() L = [] args = [] for k, v in self.__mappings__.iteritems(): if v.updatable: if hasattr(self, k): arg = getattr(self, k) else: arg = v.default setattr(self, k, arg) L.append('`%s`=?' % k) args.append(arg) pk = self.__primary_key__.name args.append(getattr(self, pk)) db.update('update `%s` set %s where %s=?' % (self.__table__, ','.join(L), pk), *args) return self def delete(self): self.pre_delete and self.pre_delete() pk = self.__primary_key__.name args = (getattr(self, pk), ) db.update('delete from `%s` where `%s`=?' % (self.__table__, pk), *args) return self def insert(self): self.pre_insert and self.pre_insert() params = {} for k, v in self.__mappings__.iteritems(): if v.insertable: if not hasattr(self, k): setattr(self, k, v.default) params[v.name] = getattr(self, k) try: db.insert('%s' % self.__table__, **params) except Exception as e: logging.info(e.args) print "MySQL Model.insert() error: args=", e.args # TODO !!! 
generalize ORM return package # return {'status': 'Failure', 'msg': e.args, 'data': self} raise return self if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) db.create_engine('www-data', 'www-data', 'test') db.update('drop table if exists user') db.update('create table user (id int primary key, name text, email text, passwd text, last_modified real)') import doctest doctest.testmod()<|fim▁end|>
<|file_name|>param_route_path.ts<|end_file_name|><|fim▁begin|>import {RegExpWrapper, StringWrapper, isPresent, isBlank} from '../../../src/facade/lang'; import {BaseException} from '../../../src/facade/exceptions'; import {StringMapWrapper} from '../../../src/facade/collection'; import {TouchMap, normalizeString} from '../../utils'; import {Url, RootUrl, convertUrlParamsToArray} from '../../url_parser'; import {RoutePath, GeneratedUrl, MatchedUrl} from './route_path'; /** * `ParamRoutePath`s are made up of `PathSegment`s, each of which can * match a segment of a URL. Different kind of `PathSegment`s match * URL segments in different ways... */ interface PathSegment {<|fim▁hole|> name: string; generate(params: TouchMap): string; match(path: string): boolean; specificity: string; hash: string; } /** * Identified by a `...` URL segment. This indicates that the * Route will continue to be matched by child `Router`s. */ class ContinuationPathSegment implements PathSegment { name: string = ''; specificity = ''; hash = '...'; generate(params: TouchMap): string { return ''; } match(path: string): boolean { return true; } } /** * Identified by a string not starting with a `:` or `*`. * Only matches the URL segments that equal the segment path */ class StaticPathSegment implements PathSegment { name: string = ''; specificity = '2'; hash: string; constructor(public path: string) { this.hash = path; } match(path: string): boolean { return path == this.path; } generate(params: TouchMap): string { return this.path; } } /** * Identified by a string starting with `:`. Indicates a segment * that can contain a value that will be extracted and provided to * a matching `Instruction`. 
*/ class DynamicPathSegment implements PathSegment { static paramMatcher = /^:([^\/]+)$/g; specificity = '1'; hash = ':'; constructor(public name: string) {} match(path: string): boolean { return path.length > 0; } generate(params: TouchMap): string { if (!StringMapWrapper.contains(params.map, this.name)) { throw new BaseException( `Route generator for '${this.name}' was not included in parameters passed.`); } return encodeDynamicSegment(normalizeString(params.get(this.name))); } } /** * Identified by a string starting with `*` Indicates that all the following * segments match this route and that the value of these segments should * be provided to a matching `Instruction`. */ class StarPathSegment implements PathSegment { static wildcardMatcher = /^\*([^\/]+)$/g; specificity = '0'; hash = '*'; constructor(public name: string) {} match(path: string): boolean { return true; } generate(params: TouchMap): string { return normalizeString(params.get(this.name)); } } /** * Parses a URL string using a given matcher DSL, and generates URLs from param maps */ export class ParamRoutePath implements RoutePath { specificity: string; terminal: boolean = true; hash: string; private _segments: PathSegment[]; /** * Takes a string representing the matcher DSL */ constructor(public routePath: string) { this._assertValidPath(routePath); this._parsePathString(routePath); this.specificity = this._calculateSpecificity(); this.hash = this._calculateHash(); var lastSegment = this._segments[this._segments.length - 1]; this.terminal = !(lastSegment instanceof ContinuationPathSegment); } matchUrl(url: Url): MatchedUrl { var nextUrlSegment = url; var currentUrlSegment: Url; var positionalParams = {}; var captured: string[] = []; for (var i = 0; i < this._segments.length; i += 1) { var pathSegment = this._segments[i]; if (pathSegment instanceof ContinuationPathSegment) { break; } currentUrlSegment = nextUrlSegment; if (isPresent(currentUrlSegment)) { // the star segment consumes all of the 
remaining URL, including matrix params if (pathSegment instanceof StarPathSegment) { positionalParams[pathSegment.name] = currentUrlSegment.toString(); captured.push(currentUrlSegment.toString()); nextUrlSegment = null; break; } captured.push(currentUrlSegment.path); if (pathSegment instanceof DynamicPathSegment) { positionalParams[pathSegment.name] = decodeDynamicSegment(currentUrlSegment.path); } else if (!pathSegment.match(currentUrlSegment.path)) { return null; } nextUrlSegment = currentUrlSegment.child; } else if (!pathSegment.match('')) { return null; } } if (this.terminal && isPresent(nextUrlSegment)) { return null; } var urlPath = captured.join('/'); var auxiliary = []; var urlParams = []; var allParams = positionalParams; if (isPresent(currentUrlSegment)) { // If this is the root component, read query params. Otherwise, read matrix params. var paramsSegment = url instanceof RootUrl ? url : currentUrlSegment; if (isPresent(paramsSegment.params)) { allParams = StringMapWrapper.merge(paramsSegment.params, positionalParams); urlParams = convertUrlParamsToArray(paramsSegment.params); } else { allParams = positionalParams; } auxiliary = currentUrlSegment.auxiliary; } return new MatchedUrl(urlPath, urlParams, allParams, auxiliary, nextUrlSegment); } generateUrl(params: {[key: string]: any}): GeneratedUrl { var paramTokens = new TouchMap(params); var path = []; for (var i = 0; i < this._segments.length; i++) { let segment = this._segments[i]; if (!(segment instanceof ContinuationPathSegment)) { path.push(segment.generate(paramTokens)); } } var urlPath = path.join('/'); var nonPositionalParams = paramTokens.getUnused(); var urlParams = nonPositionalParams; return new GeneratedUrl(urlPath, urlParams); } toString(): string { return this.routePath; } private _parsePathString(routePath: string) { // normalize route as not starting with a "/". Recognition will // also normalize. 
if (routePath.startsWith("/")) { routePath = routePath.substring(1); } var segmentStrings = routePath.split('/'); this._segments = []; var limit = segmentStrings.length - 1; for (var i = 0; i <= limit; i++) { var segment = segmentStrings[i], match; if (isPresent(match = RegExpWrapper.firstMatch(DynamicPathSegment.paramMatcher, segment))) { this._segments.push(new DynamicPathSegment(match[1])); } else if (isPresent( match = RegExpWrapper.firstMatch(StarPathSegment.wildcardMatcher, segment))) { this._segments.push(new StarPathSegment(match[1])); } else if (segment == '...') { if (i < limit) { throw new BaseException( `Unexpected "..." before the end of the path for "${routePath}".`); } this._segments.push(new ContinuationPathSegment()); } else { this._segments.push(new StaticPathSegment(segment)); } } } private _calculateSpecificity(): string { // The "specificity" of a path is used to determine which route is used when multiple routes // match // a URL. Static segments (like "/foo") are the most specific, followed by dynamic segments // (like // "/:id"). Star segments add no specificity. Segments at the start of the path are more // specific // than proceeding ones. // // The code below uses place values to combine the different types of segments into a single // string that we can sort later. Each static segment is marked as a specificity of "2," each // dynamic segment is worth "1" specificity, and stars are worth "0" specificity. 
var i, length = this._segments.length, specificity; if (length == 0) { // a single slash (or "empty segment" is as specific as a static segment specificity += '2'; } else { specificity = ''; for (i = 0; i < length; i++) { specificity += this._segments[i].specificity; } } return specificity; } private _calculateHash(): string { // this function is used to determine whether a route config path like `/foo/:id` collides with // `/foo/:name` var i, length = this._segments.length; var hashParts = []; for (i = 0; i < length; i++) { hashParts.push(this._segments[i].hash); } return hashParts.join('/'); } private _assertValidPath(path: string) { if (StringWrapper.contains(path, '#')) { throw new BaseException( `Path "${path}" should not include "#". Use "HashLocationStrategy" instead.`); } var illegalCharacter = RegExpWrapper.firstMatch(ParamRoutePath.RESERVED_CHARS, path); if (isPresent(illegalCharacter)) { throw new BaseException( `Path "${path}" contains "${illegalCharacter[0]}" which is not allowed in a route config.`); } } static RESERVED_CHARS = RegExpWrapper.create('//|\\(|\\)|;|\\?|='); } let REGEXP_PERCENT = /%/g; let REGEXP_SLASH = /\//g; let REGEXP_OPEN_PARENT = /\(/g; let REGEXP_CLOSE_PARENT = /\)/g; let REGEXP_SEMICOLON = /;/g; function encodeDynamicSegment(value: string): string { if (isBlank(value)) { return null; } value = StringWrapper.replaceAll(value, REGEXP_PERCENT, '%25'); value = StringWrapper.replaceAll(value, REGEXP_SLASH, '%2F'); value = StringWrapper.replaceAll(value, REGEXP_OPEN_PARENT, '%28'); value = StringWrapper.replaceAll(value, REGEXP_CLOSE_PARENT, '%29'); value = StringWrapper.replaceAll(value, REGEXP_SEMICOLON, '%3B'); return value; } let REGEXP_ENC_SEMICOLON = /%3B/ig; let REGEXP_ENC_CLOSE_PARENT = /%29/ig; let REGEXP_ENC_OPEN_PARENT = /%28/ig; let REGEXP_ENC_SLASH = /%2F/ig; let REGEXP_ENC_PERCENT = /%25/ig; function decodeDynamicSegment(value: string): string { if (isBlank(value)) { return null; } value = StringWrapper.replaceAll(value, 
REGEXP_ENC_SEMICOLON, ';'); value = StringWrapper.replaceAll(value, REGEXP_ENC_CLOSE_PARENT, ')'); value = StringWrapper.replaceAll(value, REGEXP_ENC_OPEN_PARENT, '('); value = StringWrapper.replaceAll(value, REGEXP_ENC_SLASH, '/'); value = StringWrapper.replaceAll(value, REGEXP_ENC_PERCENT, '%'); return value; }<|fim▁end|>
<|file_name|>python_editor.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------ # Copyright (c) 2007, Riverbank Computing Limited # All rights reserved. # # This software is provided without warranty under the terms of the BSD license. # However, when used with the GPL version of PyQt the additional terms described in the PyQt GPL exception also apply # # Author: Riverbank Computing Limited # Description: <Enthought pyface package component> #------------------------------------------------------------------------------ # Standard library imports. import sys # Major package imports. from pyface.qt import QtCore, QtGui # Enthought library imports. from traits.api import Bool, Event, provides, Unicode # Local imports. from pyface.i_python_editor import IPythonEditor, MPythonEditor from pyface.key_pressed_event import KeyPressedEvent from pyface.widget import Widget from pyface.ui.qt4.code_editor.code_widget import AdvancedCodeWidget @provides(IPythonEditor) class PythonEditor(MPythonEditor, Widget): """ The toolkit specific implementation of a PythonEditor. See the IPythonEditor interface for the API documentation. """ #### 'IPythonEditor' interface ############################################ dirty = Bool(False) path = Unicode show_line_numbers = Bool(True) #### Events #### changed = Event key_pressed = Event(KeyPressedEvent) ########################################################################### # 'object' interface. ########################################################################### def __init__(self, parent, **traits): super(PythonEditor, self).__init__(**traits) self.control = self._create_control(parent) ########################################################################### # 'PythonEditor' interface. ########################################################################### def load(self, path=None): """ Loads the contents of the editor. 
""" if path is None: path = self.path # We will have no path for a new script. if len(path) > 0: f = open(self.path, 'r') text = f.read() f.close() else: text = '' self.control.code.setPlainText(text) self.dirty = False def save(self, path=None): """ Saves the contents of the editor. """ if path is None: path = self.path f = open(path, 'w') f.write(self.control.code.toPlainText()) f.close() self.dirty = False def select_line(self, lineno): """ Selects the specified line. """ self.control.code.set_line_column(lineno, 0) self.control.code.moveCursor(QtGui.QTextCursor.EndOfLine, QtGui.QTextCursor.KeepAnchor) ########################################################################### # Trait handlers. ########################################################################### def _path_changed(self): self._changed_path() def _show_line_numbers_changed(self): if self.control is not None: self.control.code.line_number_widget.setVisible( self.show_line_numbers) self.control.code.update_line_number_width() ########################################################################### # Private interface. ########################################################################### def _create_control(self, parent): """ Creates the toolkit-specific control for the widget. """ self.control = control = AdvancedCodeWidget(parent) self._show_line_numbers_changed() # Install event filter to trap key presses. event_filter = PythonEditorEventFilter(self, self.control) self.control.installEventFilter(event_filter) self.control.code.installEventFilter(event_filter) # Connect signals for text changes.<|fim▁hole|> self.load() return control def _on_dirty_changed(self, dirty): """ Called whenever a change is made to the dirty state of the document. """ self.dirty = dirty def _on_text_changed(self): """ Called whenever a change is made to the text of the document. 
""" self.changed = True class PythonEditorEventFilter(QtCore.QObject): """ A thin wrapper around the advanced code widget to handle the key_pressed Event. """ def __init__(self, editor, parent): super(PythonEditorEventFilter, self).__init__(parent) self.__editor = editor def eventFilter(self, obj, event): """ Reimplemented to trap key presses. """ if self.__editor.control and obj == self.__editor.control and \ event.type() == QtCore.QEvent.FocusOut: # Hack for Traits UI compatibility. self.__editor.control.emit(QtCore.SIGNAL('lostFocus')) elif self.__editor.control and obj == self.__editor.control.code and \ event.type() == QtCore.QEvent.KeyPress: # Pyface doesn't seem to be Unicode aware. Only keep the key code # if it corresponds to a single Latin1 character. kstr = event.text() try: kcode = ord(str(kstr)) except: kcode = 0 mods = event.modifiers() self.key_pressed = KeyPressedEvent( alt_down = ((mods & QtCore.Qt.AltModifier) == QtCore.Qt.AltModifier), control_down = ((mods & QtCore.Qt.ControlModifier) == QtCore.Qt.ControlModifier), shift_down = ((mods & QtCore.Qt.ShiftModifier) == QtCore.Qt.ShiftModifier), key_code = kcode, event = event) return super(PythonEditorEventFilter, self).eventFilter(obj, event)<|fim▁end|>
control.code.modificationChanged.connect(self._on_dirty_changed) control.code.textChanged.connect(self._on_text_changed) # Load the editor's contents.
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use std::error; use std::fmt; use svgdom; /// List of all errors. #[derive(Debug)] pub enum Error { /// Only `svg` and `svgz` suffixes are supported. InvalidFileSuffix, /// Failed to open the provided file. FileOpenFailed, /// Only UTF-8 content are supported. NotAnUtf8Str, /// Compressed SVG must use the GZip algorithm. MalformedGZip, /// SVG doesn't have a valid size. /// /// Occurs when width and/or height are <= 0. /// /// Also occurs if width, height and viewBox are not set. /// This is against the SVG spec, but an automatic size detection is not supported yet.<|fim▁hole|> ParsingFailed(svgdom::ParserError), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Error::InvalidFileSuffix => { write!(f, "invalid file suffix") } Error::FileOpenFailed => { write!(f, "failed to open the provided file") } Error::NotAnUtf8Str => { write!(f, "provided data has not an UTF-8 encoding") } Error::MalformedGZip => { write!(f, "provided data has a malformed GZip content") } Error::InvalidSize => { write!(f, "SVG has an invalid size") } Error::ParsingFailed(ref e) => { write!(f, "SVG data parsing failed cause {}", e) } } } } impl error::Error for Error { fn description(&self) -> &str { "an SVG simplification error" } }<|fim▁end|>
InvalidSize, /// Failed to parse an SVG data.
<|file_name|>InstructorFicha.java<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package co.edu.sena.mavenproject2.model.entities; import java.io.Serializable; import java.util.Collection; import javax.persistence.CascadeType; import javax.persistence.EmbeddedId; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.JoinColumns; import javax.persistence.ManyToOne; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; /** * * @author Enrique */ @Entity @Table(name = "instructor_ficha") @XmlRootElement @NamedQueries({ @NamedQuery(name = "InstructorFicha.findAll", query = "SELECT i FROM InstructorFicha i") , @NamedQuery(name = "InstructorFicha.findByTipoDocumento", query = "SELECT i FROM InstructorFicha i WHERE i.instructorFichaPK.tipoDocumento = :tipoDocumento") , @NamedQuery(name = "InstructorFicha.findByNumeroDocumento", query = "SELECT i FROM InstructorFicha i WHERE i.instructorFichaPK.numeroDocumento = :numeroDocumento") , @NamedQuery(name = "InstructorFicha.findByFicha", query = "SELECT i FROM InstructorFicha i WHERE i.instructorFichaPK.ficha = :ficha")}) public class InstructorFicha implements Serializable { private static final long serialVersionUID = 1L; @EmbeddedId protected InstructorFichaPK instructorFichaPK; @JoinColumns({ @JoinColumn(name = "tipo_documento", referencedColumnName = "tipo_documento", nullable = false, insertable = false, updatable = false) , @JoinColumn(name = "numero_documento", referencedColumnName = "numero_documento", nullable = false, insertable = false, updatable = false)}) @ManyToOne(optional = false, fetch = 
FetchType.LAZY) private Cliente cliente; @JoinColumn(name = "ficha", referencedColumnName = "numero_ficha", nullable = false, insertable = false, updatable = false) @ManyToOne(optional = false, fetch = FetchType.LAZY) private Ficha ficha1; @OneToMany(cascade = CascadeType.ALL, mappedBy = "instructorFicha", fetch = FetchType.LAZY) private Collection<InstructorHasTrimestre> instructorHasTrimestreCollection; public InstructorFicha() { } public InstructorFicha(InstructorFichaPK instructorFichaPK) { this.instructorFichaPK = instructorFichaPK; } public InstructorFicha(String tipoDocumento, String numeroDocumento, String ficha) { this.instructorFichaPK = new InstructorFichaPK(tipoDocumento, numeroDocumento, ficha); } public InstructorFichaPK getInstructorFichaPK() { return instructorFichaPK; } public void setInstructorFichaPK(InstructorFichaPK instructorFichaPK) { this.instructorFichaPK = instructorFichaPK; } public Cliente getCliente() { return cliente; }<|fim▁hole|> public void setCliente(Cliente cliente) { this.cliente = cliente; } public Ficha getFicha1() { return ficha1; } public void setFicha1(Ficha ficha1) { this.ficha1 = ficha1; } @XmlTransient public Collection<InstructorHasTrimestre> getInstructorHasTrimestreCollection() { return instructorHasTrimestreCollection; } public void setInstructorHasTrimestreCollection(Collection<InstructorHasTrimestre> instructorHasTrimestreCollection) { this.instructorHasTrimestreCollection = instructorHasTrimestreCollection; } @Override public int hashCode() { int hash = 0; hash += (instructorFichaPK != null ? 
instructorFichaPK.hashCode() : 0); return hash; } @Override public boolean equals(Object object) { // TODO: Warning - this method won't work in the case the id fields are not set if (!(object instanceof InstructorFicha)) { return false; } InstructorFicha other = (InstructorFicha) object; if ((this.instructorFichaPK == null && other.instructorFichaPK != null) || (this.instructorFichaPK != null && !this.instructorFichaPK.equals(other.instructorFichaPK))) { return false; } return true; } @Override public String toString() { return "co.edu.sena.mavenproject2.model.entities.InstructorFicha[ instructorFichaPK=" + instructorFichaPK + " ]"; } }<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup setup( name='geovalidation.server', version='0.5', long_description="Flask-based server to validate GIS datasets (with prepair and val3dity).", packages=['geovalidation'], include_package_data=True, zip_safe=False, install_requires=[ 'Flask>=1.1' ,'Jinja2>=2.7.2' ,'Werkzeug>=0.9.4' ,'celery>=3.1.11'<|fim▁hole|> ,'subprocess32>=3.2.6' ,'cjio>=0.5' ] author='Hugo Ledoux', author_email='[email protected]' )<|fim▁end|>
,'redis>=2.9.1' ,'lxml>=3.3.3'
<|file_name|>cacconsole.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # CACConsole Copyright (C) 2015 foospidy # https://github.com/foospidy/CACConsole # See LICENSE for details # This software includes/uses the python-cloudatcost library which # is MIT licensed, see https://github.com/adc4392/python-cloudatcost/blob/master/LICENSE import os import sys from twisted.internet import reactor, stdio from twisted.python import log from twisted.python.log import ILogObserver, FileLogObserver from twisted.python.logfile import DailyLogFile from modules.CloudAtCostConsole import CloudAtCostConsole # prevent creation of compiled bytecode files sys.dont_write_bytecode = True # setup log file log_path = os.path.dirname(os.path.abspath(__file__)) + '/log/' log_file_name = 'cacconsole.log' # create log directory if it doesn't exist if not os.path.exists(os.path.dirname(log_path)): os.makedirs(os.path.dirname(log_path)) log_file = DailyLogFile(log_file_name, log_path) file_log_observer = FileLogObserver(log_file) file_log_observer.timeFormat = "%Y-%m-%d %H:%M:%S,%f," # start logging log.startLoggingWithObserver(file_log_observer.emit, False) # setup local database dbfile = os.path.dirname(os.path.abspath(__file__)) + '/data/cacconsole.db' # create data directory if it doesn't exist if not os.path.exists(os.path.dirname(dbfile)): os.makedirs(os.path.dirname(dbfile)) # load console stdio.StandardIO(CloudAtCostConsole(dbfile)) # start reactor<|fim▁hole|>reactor.run()<|fim▁end|>
<|file_name|>localUserActions.js<|end_file_name|><|fim▁begin|>import { emit } from "../../api"; import { setActiveRoom } from "../room/roomSlice.js"; export function presentChanged(present) { return { type: "localUser/present", present }; } export const ping = () => () => emit("/user/current/ping"); export const changeActiveRoom = roomId => dispatch => { dispatch(setActiveRoom(roomId)); return emit("/user/current/activity", { room: roomId }); }; export function changePresent(present) { return dispatch => {<|fim▁hole|> }; }<|fim▁end|>
return emit("/user/current/present", { present }).then(() => dispatch(presentChanged(present)));
<|file_name|>pprint.py<|end_file_name|><|fim▁begin|># Author: Fred L. Drake, Jr. # [email protected] # # This is a simple little module I wrote to make life easier. I didn't # see anything quite like it in the library, though I may have overlooked # something. I wrote this when I was trying to read some heavily nested # tuples with fairly non-descriptive content. This is modeled very much # after Lisp/Scheme - style pretty-printing of lists. If you find it # useful, thank small children who sleep at night. """Support to pretty-print lists, tuples, & dictionaries recursively. Very simple, but useful, especially in debugging data structures. Classes ------- PrettyPrinter() Handle pretty-printing operations onto a stream using a configured set of formatting parameters. Functions --------- pformat() Format a Python object into a pretty-printed representation. pprint() Pretty-print a Python object to a stream [default is sys.stdout]. saferepr() Generate a 'standard' repr()-like value, but protect against recursive data structures. 
""" import sys as _sys import warnings from cStringIO import StringIO as _StringIO __all__ = ["pprint","pformat","isreadable","isrecursive","saferepr", "PrettyPrinter"] # cache these for faster access: _commajoin = ", ".join _id = id _len = len _type = type def pprint(object, stream=None, indent=1, width=80, depth=None): """Pretty-print a Python object to a stream [default is sys.stdout].""" printer = PrettyPrinter( stream=stream, indent=indent, width=width, depth=depth) printer.pprint(object) def pformat(object, indent=1, width=80, depth=None): """Format a Python object into a pretty-printed representation.""" return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(object) def saferepr(object): """Version of repr() which can handle recursive data structures.""" return _safe_repr(object, {}, None, 0)[0] def isreadable(object): """Determine if saferepr(object) is readable by eval().""" return _safe_repr(object, {}, None, 0)[1] def isrecursive(object): """Determine if object requires a recursive representation.""" return _safe_repr(object, {}, None, 0)[2] def _sorted(iterable): with warnings.catch_warnings(): if _sys.py3kwarning: warnings.filterwarnings("ignore", "comparing unequal types " "not supported", DeprecationWarning) return sorted(iterable) class PrettyPrinter: def __init__(self, indent=1, width=80, depth=None, stream=None): """Handle pretty printing operations onto a stream using a set of configured parameters. indent Number of spaces to indent for each level of nesting. width Attempted maximum number of columns in the output. depth The maximum depth to print out nested structures. stream The desired output stream. If omitted (or false), the standard output stream available at construction will be used. 
""" indent = int(indent) width = int(width) assert indent >= 0, "indent must be >= 0" assert depth is None or depth > 0, "depth must be > 0" assert width, "width must be != 0" self._depth = depth self._indent_per_level = indent self._width = width if stream is not None: self._stream = stream else: self._stream = _sys.stdout def pprint(self, object): self._format(object, self._stream, 0, 0, {}, 0) self._stream.write("\n") def pformat(self, object): sio = _StringIO() self._format(object, sio, 0, 0, {}, 0) return sio.getvalue() def isrecursive(self, object): return self.format(object, {}, 0, 0)[2] def isreadable(self, object): s, readable, recursive = self.format(object, {}, 0, 0) return readable and not recursive def _format(self, object, stream, indent, allowance, context, level): level = level + 1 objid = _id(object) if objid in context: stream.write(_recursion(object)) self._recursive = True self._readable = False return rep = self._repr(object, context, level - 1) typ = _type(object) sepLines = _len(rep) > (self._width - 1 - indent - allowance) write = stream.write if self._depth and level > self._depth: write(rep) return r = getattr(typ, "__repr__", None) if issubclass(typ, dict) and r == dict.__repr__: write('{') if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') length = _len(object) if length: context[objid] = 1 indent = indent + self._indent_per_level items = _sorted(object.items())<|fim▁hole|> key, ent = items[0] rep = self._repr(key, context, level) write(rep) write(': ') self._format(ent, stream, indent + _len(rep) + 2, allowance + 1, context, level) if length > 1: for key, ent in items[1:]: rep = self._repr(key, context, level) if sepLines: write(',\n%s%s: ' % (' '*indent, rep)) else: write(', %s: ' % rep) self._format(ent, stream, indent + _len(rep) + 2, allowance + 1, context, level) indent = indent - self._indent_per_level del context[objid] write('}') return if ((issubclass(typ, list) and r == list.__repr__) or (issubclass(typ, 
tuple) and r == tuple.__repr__) or (issubclass(typ, set) and r == set.__repr__) or (issubclass(typ, frozenset) and r == frozenset.__repr__) ): length = _len(object) if issubclass(typ, list): write('[') endchar = ']' elif issubclass(typ, set): if not length: write('set()') return write('set([') endchar = '])' object = _sorted(object) indent += 4 elif issubclass(typ, frozenset): if not length: write('frozenset()') return write('frozenset([') endchar = '])' object = _sorted(object) indent += 10 else: write('(') endchar = ')' if self._indent_per_level > 1 and sepLines: write((self._indent_per_level - 1) * ' ') if length: context[objid] = 1 indent = indent + self._indent_per_level self._format(object[0], stream, indent, allowance + 1, context, level) if length > 1: for ent in object[1:]: if sepLines: write(',\n' + ' '*indent) else: write(', ') self._format(ent, stream, indent, allowance + 1, context, level) indent = indent - self._indent_per_level del context[objid] if issubclass(typ, tuple) and length == 1: write(',') write(endchar) return write(rep) def _repr(self, object, context, level): repr, readable, recursive = self.format(object, context.copy(), self._depth, level) if not readable: self._readable = False if recursive: self._recursive = True return repr def format(self, object, context, maxlevels, level): """Format object for a specific context, returning a string and flags indicating whether the representation is 'readable' and whether the object represents a recursive construct. """ return _safe_repr(object, context, maxlevels, level) # Return triple (repr_string, isreadable, isrecursive). 
def _safe_repr(object, context, maxlevels, level): typ = _type(object) if typ is str: if 'locale' not in _sys.modules: return repr(object), True, False if "'" in object and '"' not in object: closure = '"' quotes = {'"': '\\"'} else: closure = "'" quotes = {"'": "\\'"} qget = quotes.get sio = _StringIO() write = sio.write for char in object: if char.isalpha(): write(char) else: write(qget(char, repr(char)[1:-1])) return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False r = getattr(typ, "__repr__", None) if issubclass(typ, dict) and r == dict.__repr__: if not object: return "{}", True, False objid = _id(object) if maxlevels and level >= maxlevels: return "{...}", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 saferepr = _safe_repr for k, v in _sorted(object.items()): krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) append("%s: %s" % (krepr, vrepr)) readable = readable and kreadable and vreadable if krecur or vrecur: recursive = True del context[objid] return "{%s}" % _commajoin(components), readable, recursive if (issubclass(typ, list) and r == list.__repr__) or \ (issubclass(typ, tuple) and r == tuple.__repr__): if issubclass(typ, list): if not object: return "[]", True, False format = "[%s]" elif _len(object) == 1: format = "(%s,)" else: if not object: return "()", True, False format = "(%s)" objid = _id(object) if maxlevels and level >= maxlevels: return format % "...", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 for o in object: orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) append(orepr) if not oreadable: readable = False if orecur: recursive = True del 
context[objid] return format % _commajoin(components), readable, recursive rep = repr(object) return rep, (rep and not rep.startswith('<')), False def _recursion(object): return ("<Recursion on %s with id=%s>" % (_type(object).__name__, _id(object))) def _perfcheck(object=None): import time if object is None: object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000 p = PrettyPrinter() t1 = time.time() _safe_repr(object, {}, None, 0) t2 = time.time() p.pformat(object) t3 = time.time() print "_safe_repr:", t2 - t1 print "pformat:", t3 - t2 if __name__ == "__main__": _perfcheck()<|fim▁end|>
<|file_name|>class_mock.js<|end_file_name|><|fim▁begin|>var class_mock = [ [ "Mock", "class_mock.html#a2b9528f2e7fcf9738201a5ea667c1998", null ], [ "Mock", "class_mock.html#a2b9528f2e7fcf9738201a5ea667c1998", null ], [ "MOCK_METHOD0", "class_mock.html#ae710f23cafb1a2f17772e8805d6312d2", null ], [ "MOCK_METHOD1", "class_mock.html#ada59eea6991953353f332e3ea1e74444", null ],<|fim▁hole|> [ "MOCK_METHOD1", "class_mock.html#a2cece30a3ea92b34f612f8032fe3a0f9", null ], [ "MOCK_METHOD1", "class_mock.html#ac70c052254fa9816bd759c006062dc47", null ], [ "MOCK_METHOD1", "class_mock.html#ae2379efbc030f1adf8b032be3bdf081d", null ], [ "MOCK_METHOD1", "class_mock.html#a3fd62026610c5d3d3aeaaf2ade3e18aa", null ], [ "MOCK_METHOD1", "class_mock.html#a890668928abcd28d4d39df164e7b6dd8", null ], [ "MOCK_METHOD1", "class_mock.html#a50e2bda4375a59bb89fd5652bd33eb0f", null ] ];<|fim▁end|>
[ "MOCK_METHOD1", "class_mock.html#a2db4d82b6f92b4e462929f651ac4c3b1", null ], [ "MOCK_METHOD1", "class_mock.html#ae73b4ee90bf6d84205d2b1c17f0b8433", null ],
<|file_name|>test_heat_control.py<|end_file_name|><|fim▁begin|>"""The tests for the heat control thermostat.""" import unittest from homeassistant.bootstrap import _setup_component from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, STATE_OFF, TEMP_CELSIUS, ) from homeassistant.components import thermostat from tests.common import get_test_home_assistant ENTITY = 'thermostat.test' ENT_SENSOR = 'sensor.test' ENT_SWITCH = 'switch.test' MIN_TEMP = 3.0 MAX_TEMP = 65.0 TARGET_TEMP = 42.0 class TestSetupThermostatHeatControl(unittest.TestCase): """Test the Heat Control thermostat with custom config.""" def setUp(self): # pylint: disable=invalid-name """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_setup_missing_conf(self): """Test set up heat_control with missing config values.""" config = { 'name': 'test', 'target_sensor': ENT_SENSOR } self.assertFalse(_setup_component(self.hass, 'thermostat', { 'thermostat': config})) def test_valid_conf(self): """Test set up heat_control with valid config values.""" self.assertTrue(_setup_component(self.hass, 'thermostat', {'thermostat': { 'platform': 'heat_control', 'name': 'test', 'heater': ENT_SWITCH, 'target_sensor': ENT_SENSOR}})) def test_setup_with_sensor(self): """Test set up heat_control with sensor to trigger update at init.""" self.hass.states.set(ENT_SENSOR, 22.0, { ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS }) thermostat.setup(self.hass, {'thermostat': { 'platform': 'heat_control', 'name': 'test', 'heater': ENT_SWITCH, 'target_sensor': ENT_SENSOR }}) state = self.hass.states.get(ENTITY) self.assertEqual( TEMP_CELSIUS, state.attributes.get('unit_of_measurement')) self.assertEqual(22.0, state.attributes.get('current_temperature')) class TestThermostatHeatControl(unittest.TestCase): """Test the Heat Control 
thermostat.""" def setUp(self): # pylint: disable=invalid-name """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.hass.config.temperature_unit = TEMP_CELSIUS thermostat.setup(self.hass, {'thermostat': { 'platform': 'heat_control', 'name': 'test', 'heater': ENT_SWITCH,<|fim▁hole|> }}) def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_setup_defaults_to_unknown(self): """Test the setting of defaults to unknown.""" self.assertEqual('unknown', self.hass.states.get(ENTITY).state) def test_default_setup_params(self): """Test the setup with default parameters.""" state = self.hass.states.get(ENTITY) self.assertEqual(7, state.attributes.get('min_temp')) self.assertEqual(35, state.attributes.get('max_temp')) self.assertEqual(None, state.attributes.get('temperature')) def test_custom_setup_params(self): """Test the setup with custom parameters.""" thermostat.setup(self.hass, {'thermostat': { 'platform': 'heat_control', 'name': 'test', 'heater': ENT_SWITCH, 'target_sensor': ENT_SENSOR, 'min_temp': MIN_TEMP, 'max_temp': MAX_TEMP, 'target_temp': TARGET_TEMP }}) state = self.hass.states.get(ENTITY) self.assertEqual(MIN_TEMP, state.attributes.get('min_temp')) self.assertEqual(MAX_TEMP, state.attributes.get('max_temp')) self.assertEqual(TARGET_TEMP, state.attributes.get('temperature')) self.assertEqual(str(TARGET_TEMP), self.hass.states.get(ENTITY).state) def test_set_target_temp(self): """Test the setting of the target temperature.""" thermostat.set_temperature(self.hass, 30) self.hass.pool.block_till_done() self.assertEqual('30.0', self.hass.states.get(ENTITY).state) def test_sensor_bad_unit(self): """Test sensor that have bad unit.""" self._setup_sensor(22.0, unit='bad_unit') self.hass.pool.block_till_done() state = self.hass.states.get(ENTITY) self.assertEqual(None, state.attributes.get('unit_of_measurement')) self.assertEqual(None, 
state.attributes.get('current_temperature')) def test_sensor_bad_value(self): """Test sensor that have None as state.""" self._setup_sensor(None) self.hass.pool.block_till_done() state = self.hass.states.get(ENTITY) self.assertEqual(None, state.attributes.get('unit_of_measurement')) self.assertEqual(None, state.attributes.get('current_temperature')) def test_set_target_temp_heater_on(self): """Test if target temperature turn heater on.""" self._setup_switch(False) self._setup_sensor(25) self.hass.pool.block_till_done() thermostat.set_temperature(self.hass, 30) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) call = self.calls[0] self.assertEqual('switch', call.domain) self.assertEqual(SERVICE_TURN_ON, call.service) self.assertEqual(ENT_SWITCH, call.data['entity_id']) def test_set_target_temp_heater_off(self): """Test if target temperature turn heater off.""" self._setup_switch(True) self._setup_sensor(30) self.hass.pool.block_till_done() thermostat.set_temperature(self.hass, 25) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) call = self.calls[0] self.assertEqual('switch', call.domain) self.assertEqual(SERVICE_TURN_OFF, call.service) self.assertEqual(ENT_SWITCH, call.data['entity_id']) def test_set_temp_change_heater_on(self): """Test if temperature change turn heater on.""" self._setup_switch(False) thermostat.set_temperature(self.hass, 30) self.hass.pool.block_till_done() self._setup_sensor(25) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) call = self.calls[0] self.assertEqual('switch', call.domain) self.assertEqual(SERVICE_TURN_ON, call.service) self.assertEqual(ENT_SWITCH, call.data['entity_id']) def test_temp_change_heater_off(self): """Test if temperature change turn heater off.""" self._setup_switch(True) thermostat.set_temperature(self.hass, 25) self.hass.pool.block_till_done() self._setup_sensor(30) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) call = self.calls[0] 
self.assertEqual('switch', call.domain) self.assertEqual(SERVICE_TURN_OFF, call.service) self.assertEqual(ENT_SWITCH, call.data['entity_id']) def _setup_sensor(self, temp, unit=TEMP_CELSIUS): """Setup the test sensor.""" self.hass.states.set(ENT_SENSOR, temp, { ATTR_UNIT_OF_MEASUREMENT: unit }) def _setup_switch(self, is_on): """Setup the test switch.""" self.hass.states.set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) self.calls = [] def log_call(call): """Log service calls.""" self.calls.append(call) self.hass.services.register('switch', SERVICE_TURN_ON, log_call) self.hass.services.register('switch', SERVICE_TURN_OFF, log_call)<|fim▁end|>
'target_sensor': ENT_SENSOR
<|file_name|>search.js<|end_file_name|><|fim▁begin|>(function () { function getQueryVariable(variable) { var query = window.location.search.substring(1), vars = query.split("&"); for (var i = 0; i < vars.length; i++) { var pair = vars[i].split("="); if (pair[0] === variable) { return pair[1]; } } } function getPreview(query, content, previewLength) { previewLength = previewLength || (content.length * 2); var parts = query.split(" "), match = content.toLowerCase().indexOf(query.toLowerCase()), matchLength = query.length, preview; // Find a relevant location in content for (var i = 0; i < parts.length; i++) { if (match >= 0) { break; } match = content.toLowerCase().indexOf(parts[i].toLowerCase()); matchLength = parts[i].length; } // Create preview if (match >= 0) { var start = match - (previewLength / 2), end = start > 0 ? match + matchLength + (previewLength / 2) : previewLength; preview = content.substring(start, end).trim(); if (start > 0) { preview = "..." + preview; } if (end < content.length) { preview = preview + "..."; } // Highlight query parts preview = preview.replace(new RegExp("(" + parts.join("|") + ")", "gi"), "<strong>$1</strong>"); } else { // Use start of content if no match found preview = content.substring(0, previewLength).trim() + (content.length > previewLength ? "..." 
: ""); } return preview; } function displaySearchResults(results, query) { var searchResultsEl = document.getElementById("search-results"), searchProcessEl = document.getElementById("search-process"); if (results.length) { var resultsHTML = ""; results.forEach(function (result) { var item = window.data[result.ref], contentPreview = getPreview(query, item.content, 170), titlePreview = getPreview(query, item.title); resultsHTML += "<li><h4><a href='" + item.url.trim() + "'>" + titlePreview + "</a></h4><p><small>" + contentPreview + "</small></p></li>";<|fim▁hole|> searchResultsEl.innerHTML = resultsHTML; searchProcessEl.innerText = "Exibindo"; } else { searchResultsEl.style.display = "none"; searchProcessEl.innerText = "No"; } } window.index = lunr(function () { this.field("id"); this.field("title", {boost: 10}); this.field("date"); this.field("url"); this.field("content"); }); var query = decodeURIComponent((getQueryVariable("q") || "").replace(/\+/g, "%20")), searchQueryContainerEl = document.getElementById("search-query-container"), searchQueryEl = document.getElementById("search-query"); searchQueryEl.innerText = query; searchQueryContainerEl.style.display = "inline"; for (var key in window.data) { window.index.add(window.data[key]); } displaySearchResults(window.index.search(query), query); // Hand the results off to be displayed })();<|fim▁end|>
});
<|file_name|>power.cpp<|end_file_name|><|fim▁begin|>// // begin license header // // This file is part of Pixy CMUcam5 or "Pixy" for short // // All Pixy source code is provided under the terms of the // GNU General Public License v2 (http://www.gnu.org/licenses/gpl-2.0.html). // Those wishing to use Pixy source code, software and/or // technologies under different licensing terms should contact us at // [email protected]. Such licensing terms are available for // all portions of the Pixy codebase presented here. // // end license header // #include <math.h><|fim▁hole|>#include "misc.h" #include "power.h" static const ProcModule g_module[] = { { "pwr_getVin", (ProcPtr)pwr_getVin, {END}, "Get Vin (JP1) voltage" "@r voltage in millivolts" }, { "pwr_get5V", (ProcPtr)pwr_get5v, {END}, "Get 5V voltage" "@r voltage in millivolts" }, { "pwr_getVbus", (ProcPtr)pwr_getVbus, {END}, "Get USB VBUS voltage" "@r voltage in millivolts" }, { "pwr_USBpowered", (ProcPtr)pwr_USBpowered, {END}, "Determine if camera power is from USB host." "@r 0 if power is from Vin (JP1), nonzero if power is from USB" }, END }; uint32_t pwr_getVin() { uint32_t vin; vin = adc_get(VIN_ADCCHAN)*10560/1024 + 330; // 10560 = 3.3*3.2*1000, 330 is diode drop return vin; } uint32_t pwr_get5v() { uint32_t v5; v5 = adc_get(V5_ADCCHAN)*5293/1024; // 5293=3.3*1.604*1000 return v5; } uint32_t pwr_getVbus() { uint32_t vbus; vbus = adc_get(VBUS_ADCCHAN)*5293/1024; // 5293=3.3*1.604*1000 return vbus; } uint32_t pwr_USBpowered() { if (LPC_GPIO_PORT->PIN[5]&0x0100) return 1; else return 0; } void pwr_init() { LPC_GPIO_PORT->DIR[5] |= 0x0100; // choose USB power or vin if (pwr_getVin()>6430) // 6430=5000+1100(ldo)+330(diode) LPC_GPIO_PORT->PIN[5] &= ~0x0100; else // switch usb on LPC_GPIO_PORT->PIN[5] |= 0x0100; #if 0 // Undergrad robotics mod, no need for usb shit fuck my ass nigga please g_chirpUsb->registerModule(g_module); #endif }<|fim▁end|>
#include "pixy_init.h"
<|file_name|>compat.cpp<|end_file_name|><|fim▁begin|>#include "Sub.h" <|fim▁hole|>{ hal.scheduler->delay(ms); }<|fim▁end|>
void Sub::delay(uint32_t ms)