| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
| --- | --- |
<|file_name|>App.spec.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { createStore } from 'redux';
import { createBrowserHistory } from 'history';
import { expect } from 'chai';
import { shallow } from 'enzyme';
import rootReducer from '../../src/reducers';
import Routes from '../../src/Routes';
import App from '../../src/containers/App';
const configureStore = initialState => createStore(
rootReducer,
initialState,
);
describe('Container | App', () => {
<|fim▁hole|> expect(wrapper.find(Routes)).to.have.lengthOf(1);
});
});<|fim▁end|> | it('renders Routes component', () => {
const wrapper = shallow(<App history={createBrowserHistory()} store={configureStore()}/>);
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Various code related to computing outlives relations.
pub mod env;<|fim▁hole|>pub mod verify;
use rustc_middle::traits::query::OutlivesBound;
use rustc_middle::ty;
pub fn explicit_outlives_bounds<'tcx>(
param_env: ty::ParamEnv<'tcx>,
) -> impl Iterator<Item = OutlivesBound<'tcx>> + 'tcx {
debug!("explicit_outlives_bounds()");
param_env
.caller_bounds()
.into_iter()
.map(ty::Predicate::kind)
.filter_map(ty::Binder::no_bound_vars)
.filter_map(move |kind| match kind {
ty::PredicateKind::Projection(..)
| ty::PredicateKind::Trait(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::TypeOutlives(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(r_a, r_b)) => {
Some(OutlivesBound::RegionSubRegion(r_b, r_a))
}
})
}<|fim▁end|> | pub mod obligations; |
<|file_name|>vm.rs<|end_file_name|><|fim▁begin|>use parking_lot::{Mutex, RwLock};
use std::collections::hash_map::Iter;
use std::collections::HashMap;
use std::collections::HashSet;
use std::convert::TryInto;
use std::mem;
use std::ops::{Index, IndexMut};
use std::ptr;
use std::sync::Arc;
use crate::baseline;
use crate::baseline::dora_compile;
use crate::baseline::dora_entry;
use crate::baseline::dora_native::{self, InternalFct, InternalFctDescriptor, NativeThunks};
use crate::baseline::dora_throw;
use crate::baseline::fct::{JitFct, JitFctId};
use crate::baseline::map::{CodeDescriptor, CodeMap};
use crate::class::{Class, ClassDef, ClassDefId, ClassId};
use crate::driver::cmd::Args;
use crate::error::diag::Diagnostic;
use crate::exception::DoraToNativeInfo;
use crate::field::FieldId;
use crate::gc::{Address, Gc};
use crate::object::{Ref, Testing};
use crate::safepoint::{PollingPage, Safepoint};
use crate::semck::specialize::{specialize_class_id, specialize_class_id_params};
use crate::stdlib;
use crate::sym::Sym::*;
use crate::sym::*;
use crate::threads::{Threads, STACK_SIZE, THREAD};
use crate::ty::{BuiltinType, LambdaTypes, TypeList, TypeLists, TypeParamId};
use crate::utils::GrowableVec;
use dora_parser::ast;
use dora_parser::interner::*;
use dora_parser::lexer::position::Position;
use dora_parser::lexer::File;
use dora_parser::parser::NodeIdGenerator;
pub static mut EXCEPTION_OBJECT: *const u8 = ptr::null();
pub fn has_exception() -> bool {
THREAD.with(|thread| {
let thread = thread.borrow();
let tld = &thread.tld;
tld.exception_object().is_non_null()
})
}
pub fn exception_get_and_clear() -> Address {
THREAD.with(|thread| {
let thread = thread.borrow();
let tld = &thread.tld;
let object = tld.exception_object();
tld.set_exception_object(Address::null());
object
})
}
pub fn exception_set(val: Address) {
THREAD.with(|thread| {
thread.borrow().tld.set_exception_object(val);
});
}
static mut VM_GLOBAL: *const u8 = ptr::null();
pub fn get_vm() -> &'static VM<'static> {
unsafe { &*(VM_GLOBAL as *const VM) }
}
pub fn set_vm(vm: &VM) {
let ptr = vm as *const _ as *const u8;
unsafe {
VM_GLOBAL = ptr;
}
}
pub fn stack_pointer() -> Address {
let local: i32 = 0;
Address::from_ptr(&local as *const i32)
}
pub struct VM<'ast> {
pub args: Args,
pub interner: Interner,
pub ast: &'ast ast::Ast,
pub id_generator: NodeIdGenerator,
pub files: Vec<File>,
pub diag: Mutex<Diagnostic>,
pub sym: Mutex<SymTable>,
pub vips: KnownElements,
pub consts: GrowableVec<Mutex<ConstData>>, // stores all const definitions
pub structs: GrowableVec<Mutex<StructData>>, // stores all struct source definitions
pub struct_defs: GrowableVec<Mutex<StructDef>>, // stores all struct definitions
pub classes: GrowableVec<RwLock<Class>>, // stores all class source definitions
pub class_defs: GrowableVec<RwLock<ClassDef>>, // stores all class definitions
pub fcts: GrowableVec<RwLock<Fct<'ast>>>, // stores all function definitions
pub jit_fcts: GrowableVec<JitFct>, // stores all function implementations
pub enums: Vec<RwLock<EnumData>>, // stores all enum definitions
pub traits: Vec<RwLock<TraitData>>, // stores all trait definitions
pub impls: Vec<RwLock<ImplData>>, // stores all impl definitions
pub code_map: Mutex<CodeMap>, // stores all compiled functions
pub globals: GrowableVec<Mutex<GlobalData>>, // stores all global variables
pub gc: Gc, // garbage collector
pub native_thunks: Mutex<NativeThunks>,
pub polling_page: PollingPage,
pub lists: Mutex<TypeLists>,
pub lambda_types: Mutex<LambdaTypes>,
pub compiler_thunk: Mutex<Address>,
pub dora_entry: Mutex<Address>,
pub trap_thunk: Mutex<Address>,
pub throw_thunk: Mutex<Address>,
pub threads: Threads,
pub safepoint: Safepoint,
}
impl<'ast> VM<'ast> {
pub fn new(args: Args, ast: &'ast ast::Ast) -> Box<VM<'ast>> {
let empty_class_id: ClassId = 0.into();
let empty_class_def_id: ClassDefId = 0.into();
let empty_trait_id: TraitId = 0.into();
let empty_fct_id: FctId = 0.into();
let gc = Gc::new(&args);
let vm = Box::new(VM {
args,
consts: GrowableVec::new(),
structs: GrowableVec::new(),
struct_defs: GrowableVec::new(),
classes: GrowableVec::new(),
files: Vec::new(),
class_defs: GrowableVec::new(),
enums: Vec::new(),
traits: Vec::new(),
impls: Vec::new(),
globals: GrowableVec::new(),
interner: Interner::new(),
vips: KnownElements {
bool_class: empty_class_id,
byte_class: empty_class_id,
char_class: empty_class_id,
int_class: empty_class_id,
long_class: empty_class_id,
float_class: empty_class_id,
double_class: empty_class_id,
object_class: empty_class_id,
string_class: empty_class_id,
array_class: empty_class_id,
cls: KnownClasses {
string_buffer: empty_class_id,
},
fct: KnownFunctions {
string_buffer_empty: empty_fct_id,
string_buffer_append: empty_fct_id,
string_buffer_to_string: empty_fct_id,
},
testing_class: empty_class_id,
throwable_class: empty_class_id,
error_class: empty_class_id,
exception_class: empty_class_id,
stack_trace_element_class: empty_class_id,
equals_trait: empty_trait_id,
comparable_trait: empty_trait_id,
stringable_trait: empty_trait_id,
iterator_trait: Mutex::new(None),
int_array_def: Mutex::new(None),
str_class_def: Mutex::new(None),
obj_class_def: Mutex::new(None),
ste_class_def: Mutex::new(None),
ex_class_def: Mutex::new(None),
free_object_class_def: empty_class_def_id,
free_array_class_def: empty_class_def_id,
},
gc,
ast,
id_generator: NodeIdGenerator::new(),
diag: Mutex::new(Diagnostic::new()),
sym: Mutex::new(SymTable::new()),
fcts: GrowableVec::new(),
jit_fcts: GrowableVec::new(),
code_map: Mutex::new(CodeMap::new()),
polling_page: PollingPage::new(),
lists: Mutex::new(TypeLists::new()),
lambda_types: Mutex::new(LambdaTypes::new()),
native_thunks: Mutex::new(NativeThunks::new()),
compiler_thunk: Mutex::new(Address::null()),
dora_entry: Mutex::new(Address::null()),
trap_thunk: Mutex::new(Address::null()),
throw_thunk: Mutex::new(Address::null()),
threads: Threads::new(),
safepoint: Safepoint::new(),
});
set_vm(&vm);
vm
}
pub fn run(&self, fct_id: FctId) -> i32 {
let stack_top = stack_pointer();
let stack_limit = stack_top.sub(STACK_SIZE);
THREAD.with(|thread| {
thread.borrow().tld.set_stack_limit(stack_limit);
});
let tld = THREAD.with(|thread| {
let thread = thread.borrow();
let ptr = &thread.tld;
Address::from_ptr(ptr as *const _)
});
let ptr = self.ensure_compiled(fct_id);
let dora_entry_thunk = self.dora_entry_thunk();
let fct: extern "C" fn(Address, Address) -> i32 =
unsafe { mem::transmute(dora_entry_thunk) };
fct(tld, ptr)
}
pub fn run_test(&self, fct_id: FctId, testing: Ref<Testing>) {
let tld = THREAD.with(|thread| {
let thread = thread.borrow();
let ptr = &thread.tld;
Address::from_ptr(ptr as *const _)
});
let ptr = self.ensure_compiled(fct_id);
let dora_entry_thunk = self.dora_entry_thunk();
let fct: extern "C" fn(Address, Address, Ref<Testing>) -> i32 =
unsafe { mem::transmute(dora_entry_thunk) };
fct(tld, ptr, testing);
}
fn ensure_compiled(&self, fct_id: FctId) -> Address {
let mut dtn = DoraToNativeInfo::new();
let type_params = TypeList::empty();
THREAD.with(|thread| {
thread.borrow().use_dtn(&mut dtn, || {
baseline::generate(self, fct_id, &type_params, &type_params)
})
})
}
pub fn dump_gc_summary(&self, runtime: f32) {
self.gc.dump_summary(runtime);
}
pub fn insert_code_map(&self, start: Address, end: Address, desc: CodeDescriptor) {
let mut code_map = self.code_map.lock();
code_map.insert(start, end, desc);
}
pub fn add_fct(&mut self, mut fct: Fct<'ast>) -> FctId {
let mut fcts = self.fcts.lock();
let fctid = FctId(fcts.len());
fct.id = fctid;
fcts.push(Arc::new(RwLock::new(fct)));
fctid
}
pub fn add_fct_to_sym(&mut self, fct: Fct<'ast>) -> Result<FctId, Sym> {
let name = fct.name;
let fctid = self.add_fct(fct);
let mut sym = self.sym.lock();
match sym.get(name) {
Some(sym) => Err(sym),
None => {
assert!(sym.insert(name, SymFct(fctid)).is_none());
Ok(fctid)
}
}
}
#[cfg(test)]
pub fn cls_by_name(&self, name: &'static str) -> ClassId {
let name = self.interner.intern(name);
self.sym.lock().get_class(name).expect("class not found")
}
#[cfg(test)]
pub fn cls_method_by_name(
&self,
class_name: &'static str,
function_name: &'static str,
is_static: bool,
) -> Option<FctId> {
use crate::class::find_methods_in_class;
let class_name = self.interner.intern(class_name);
let function_name = self.interner.intern(function_name);
let cls_id = self
.sym
.lock()
.get_class(class_name)
.expect("class not found");
let cls = self.cls(cls_id);
let candidates = find_methods_in_class(self, cls, function_name, is_static);
if candidates.len() == 1 {
Some(candidates[0].1)
} else {
None
}
}
#[cfg(test)]
pub fn cls_def_by_name(&self, name: &'static str) -> ClassDefId {
let name = self.interner.intern(name);
let cls_id = self.sym.lock().get_class(name).expect("class not found");
specialize_class_id(self, cls_id)
}
#[cfg(test)]
pub fn field_by_name(
&self,
class_name: &'static str,
field_name: &'static str,
) -> (ClassDefId, FieldId) {
use crate::semck::specialize;
let class_name = self.interner.intern(class_name);
let field_name = self.interner.intern(field_name);
let cls_id = self
.sym
.lock()
.get_class(class_name)
.expect("class not found");
let cls = self.classes.idx(cls_id);
let cls = cls.read();
let field_id = cls.field_by_name(field_name);
let cls_id = specialize::specialize_class_ty(self, cls.ty);
(cls_id, field_id)
}
#[cfg(test)]
pub fn fct_by_name(&self, name: &str) -> Option<FctId> {
let name = self.interner.intern(name);
self.sym.lock().get_fct(name)
}
#[cfg(test)]
pub fn ctor_by_name(&self, name: &str) -> FctId {
let name = self.interner.intern(name);
let cls_id = self.sym.lock().get_class(name).expect("class not found");
let cls = self.classes.idx(cls_id);
let cls = cls.read();
cls.constructor.expect("no ctor found")
}
#[cfg(test)]
pub fn global_by_name(&self, name: &str) -> GlobalId {
let name = self.interner.intern(name);
self.sym.lock().get_global(name).expect("global not found")
}
pub fn cls(&self, cls_id: ClassId) -> BuiltinType {
let list_id = self.lists.lock().insert(TypeList::empty());
BuiltinType::Class(cls_id, list_id)
}
pub fn dora_entry_thunk(&self) -> Address {
let mut dora_entry_thunk = self.dora_entry.lock();
if dora_entry_thunk.is_null() {
*dora_entry_thunk = dora_entry::generate(self);
}
*dora_entry_thunk
}
pub fn throw_thunk(&self) -> Address {
let mut throw_thunk = self.throw_thunk.lock();
if throw_thunk.is_null() {
*throw_thunk = dora_throw::generate(self);
}
*throw_thunk
}
pub fn compiler_thunk(&self) -> Address {
let mut compiler_thunk = self.compiler_thunk.lock();
if compiler_thunk.is_null() {
*compiler_thunk = dora_compile::generate(self);
}
*compiler_thunk
}
pub fn trap_thunk(&self) -> Address {
let mut trap_thunk = self.trap_thunk.lock();
if trap_thunk.is_null() {
let ifct = InternalFct {
ptr: Address::from_ptr(stdlib::trap as *const u8),
args: &[BuiltinType::Int],
return_type: BuiltinType::Unit,
throws: false,
desc: InternalFctDescriptor::TrapThunk,
};
let jit_fct_id = dora_native::generate(self, ifct, false);
let jit_fct = self.jit_fcts.idx(jit_fct_id);
let fct_ptr = jit_fct.fct_ptr();
*trap_thunk = fct_ptr;
}
*trap_thunk
}
pub fn file(&self, idx: FileId) -> &File {
&self.files[idx.0 as usize]
}
}
unsafe impl<'ast> Sync for VM<'ast> {}
impl<'ast> GrowableVec<RwLock<Fct<'ast>>> {
pub fn idx(&self, index: FctId) -> Arc<RwLock<Fct<'ast>>> {
self.idx_usize(index.0)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct StructDefId(usize);
impl From<usize> for StructDefId {
fn from(data: usize) -> StructDefId {
StructDefId(data)
}
}
impl GrowableVec<Mutex<StructDef>> {
pub fn idx(&self, index: StructDefId) -> Arc<Mutex<StructDef>> {
self.idx_usize(index.0)
}
}
pub struct StructDef {
pub fields: Vec<StructFieldDef>,
pub size: i32,
pub align: i32,
pub ref_fields: Vec<i32>,
}
#[derive(Debug, Clone)]
pub struct StructFieldDef {
pub offset: i32,
pub ty: BuiltinType,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct GlobalId(u32);
impl GlobalId {
pub fn to_usize(self) -> usize {
self.0 as usize
}
}
impl From<u32> for GlobalId {
fn from(data: u32) -> GlobalId {
GlobalId(data)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct FileId(u32);
impl From<u32> for FileId {
fn from(data: u32) -> FileId {
FileId(data)
}
}
#[derive(Debug)]
pub struct GlobalData {
pub id: GlobalId,
pub file: FileId,
pub pos: Position,
pub ty: BuiltinType,
pub reassignable: bool,
pub name: Name,
pub getter: Option<FctId>,
pub address_init: Address,
pub address_value: Address,
}
impl GrowableVec<Mutex<GlobalData>> {
pub fn idx(&self, index: GlobalId) -> Arc<Mutex<GlobalData>> {
self.idx_usize(index.0 as usize)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ImplId(u32);
impl From<u32> for ImplId {
fn from(data: u32) -> ImplId {
ImplId(data)
}
}
#[derive(Debug)]
pub struct ImplData {
pub id: ImplId,
pub file: FileId,
pub pos: Position,
pub trait_id: Option<TraitId>,
pub class_id: Option<ClassId>,
pub methods: Vec<FctId>,
}
impl ImplData {
pub fn trait_id(&self) -> TraitId {
self.trait_id.expect("trait_id not initialized yet.")
}
pub fn cls_id(&self) -> ClassId {
self.class_id.expect("class_id not initialized yet.")
}
pub fn find_implements(&self, vm: &VM, fct_id: FctId) -> Option<FctId> {
for &mtd_id in &self.methods {
let mtd = vm.fcts.idx(mtd_id);
let mtd = mtd.read();
if mtd.impl_for == Some(fct_id) {
return Some(mtd_id);
}
}
None
}
}
impl Index<ImplId> for Vec<RwLock<ImplData>> {
type Output = RwLock<ImplData>;
fn index(&self, index: ImplId) -> &RwLock<ImplData> {
&self[index.0 as usize]
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct EnumId(u32);
impl From<usize> for EnumId {
fn from(data: usize) -> EnumId {
EnumId(data.try_into().unwrap())
}
}
impl Index<EnumId> for Vec<RwLock<EnumData>> {
type Output = RwLock<EnumData>;
fn index(&self, index: EnumId) -> &RwLock<EnumData> {
&self[index.0 as usize]
}
}
#[derive(Debug)]
pub struct EnumData {
pub id: EnumId,
pub file: FileId,
pub pos: Position,
pub name: Name,
pub values: Vec<Name>,
pub name_to_value: HashMap<Name, u32>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct TraitId(u32);
impl From<u32> for TraitId {
fn from(data: u32) -> TraitId {
TraitId(data)
}
}
#[derive(Debug)]
pub struct TraitData {
pub id: TraitId,
pub file: FileId,
pub pos: Position,
pub name: Name,
pub methods: Vec<FctId>,
}
impl TraitData {
pub fn find_method(&self, vm: &VM, name: Name, is_static: bool) -> Option<FctId> {
for &method in &self.methods {
let method = vm.fcts.idx(method);
let method = method.read();
if method.name == name && method.is_static == is_static {
return Some(method.id);
}
}
None
}
pub fn find_method_with_replace(
&self,
vm: &VM,
is_static: bool,
name: Name,
replace: Option<BuiltinType>,
args: &[BuiltinType],
) -> Option<FctId> {
for &method in &self.methods {
let method = vm.fcts.idx(method);
let method = method.read();
if method.name == name
&& method.is_static == is_static
&& params_match(replace, method.params_without_self(), args)
{
return Some(method.id);
}
}
None
}
}
fn params_match(
replace: Option<BuiltinType>,
trait_args: &[BuiltinType],
args: &[BuiltinType],
) -> bool {
if trait_args.len() != args.len() {
return false;<|fim▁hole|> for (ind, &ty) in trait_args.iter().enumerate() {
let other = args[ind];
let found = if ty == BuiltinType::This {
replace.is_none() || replace.unwrap() == other
} else {
ty == other
};
if !found {
return false;
}
}
true
}
impl Index<TraitId> for Vec<RwLock<TraitData>> {
type Output = RwLock<TraitData>;
fn index(&self, index: TraitId) -> &RwLock<TraitData> {
&self[index.0 as usize]
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct StructId(u32);
impl GrowableVec<Mutex<StructData>> {
pub fn idx(&self, index: StructId) -> Arc<Mutex<StructData>> {
self.idx_usize(index.0 as usize)
}
}
impl From<u32> for StructId {
fn from(data: u32) -> StructId {
StructId(data)
}
}
#[derive(Debug)]
pub struct StructData {
pub id: StructId,
pub file: FileId,
pub pos: Position,
pub name: Name,
pub fields: Vec<StructFieldData>,
pub specializations: RwLock<HashMap<TypeList, StructDefId>>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct StructFieldId(u32);
impl From<u32> for StructFieldId {
fn from(data: u32) -> StructFieldId {
StructFieldId(data)
}
}
#[derive(Debug)]
pub struct StructFieldData {
pub id: StructFieldId,
pub pos: Position,
pub name: Name,
pub ty: BuiltinType,
}
#[derive(Debug)]
pub struct KnownElements {
pub bool_class: ClassId,
pub byte_class: ClassId,
pub char_class: ClassId,
pub int_class: ClassId,
pub long_class: ClassId,
pub float_class: ClassId,
pub double_class: ClassId,
pub object_class: ClassId,
pub string_class: ClassId,
pub array_class: ClassId,
pub cls: KnownClasses,
pub fct: KnownFunctions,
pub testing_class: ClassId,
pub throwable_class: ClassId,
pub error_class: ClassId,
pub exception_class: ClassId,
pub stack_trace_element_class: ClassId,
pub equals_trait: TraitId,
pub comparable_trait: TraitId,
pub stringable_trait: TraitId,
pub iterator_trait: Mutex<Option<TraitId>>,
int_array_def: Mutex<Option<ClassDefId>>,
str_class_def: Mutex<Option<ClassDefId>>,
obj_class_def: Mutex<Option<ClassDefId>>,
ste_class_def: Mutex<Option<ClassDefId>>,
ex_class_def: Mutex<Option<ClassDefId>>,
pub free_object_class_def: ClassDefId,
pub free_array_class_def: ClassDefId,
}
#[derive(Debug)]
pub struct KnownClasses {
pub string_buffer: ClassId,
}
#[derive(Debug)]
pub struct KnownFunctions {
pub string_buffer_empty: FctId,
pub string_buffer_append: FctId,
pub string_buffer_to_string: FctId,
}
impl KnownElements {
pub fn iterator(&self) -> TraitId {
self.iterator_trait.lock().expect("iterator trait not set")
}
pub fn int_array(&self, vm: &VM) -> ClassDefId {
let mut int_array_def = self.int_array_def.lock();
if let Some(cls_id) = *int_array_def {
cls_id
} else {
let type_args = TypeList::single(BuiltinType::Int);
let cls_id = specialize_class_id_params(vm, self.array_class, &type_args);
*int_array_def = Some(cls_id);
cls_id
}
}
pub fn str(&self, vm: &VM) -> ClassDefId {
let mut str_class_def = self.str_class_def.lock();
if let Some(cls_id) = *str_class_def {
cls_id
} else {
let cls_id = specialize_class_id(vm, self.string_class);
*str_class_def = Some(cls_id);
cls_id
}
}
pub fn obj(&self, vm: &VM) -> ClassDefId {
let mut obj_class_def = self.obj_class_def.lock();
if let Some(cls_id) = *obj_class_def {
cls_id
} else {
let cls_id = specialize_class_id(vm, self.object_class);
*obj_class_def = Some(cls_id);
cls_id
}
}
pub fn stack_trace_element(&self, vm: &VM) -> ClassDefId {
let mut ste_class_def = self.ste_class_def.lock();
if let Some(cls_id) = *ste_class_def {
cls_id
} else {
let cls_id = specialize_class_id(vm, self.stack_trace_element_class);
*ste_class_def = Some(cls_id);
cls_id
}
}
pub fn exception(&self, vm: &VM) -> ClassDefId {
let mut ex_class_def = self.ex_class_def.lock();
if let Some(cls_id) = *ex_class_def {
cls_id
} else {
let cls_id = specialize_class_id(vm, self.exception_class);
*ex_class_def = Some(cls_id);
cls_id
}
}
pub fn find_class(&self, ty: BuiltinType) -> Option<ClassId> {
match ty {
BuiltinType::Bool => Some(self.bool_class),
BuiltinType::Byte => Some(self.byte_class),
BuiltinType::Char => Some(self.char_class),
BuiltinType::Int => Some(self.int_class),
BuiltinType::Long => Some(self.long_class),
BuiltinType::Float => Some(self.float_class),
BuiltinType::Double => Some(self.double_class),
_ => None,
}
}
}
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
pub struct FctId(pub usize);
impl FctId {
pub fn to_usize(self) -> usize {
self.0
}
}
impl From<usize> for FctId {
fn from(id: usize) -> FctId {
FctId(id)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FctParent {
Class(ClassId),
Trait(TraitId),
Impl(ImplId),
None,
}
impl FctParent {
pub fn is_none(&self) -> bool {
match self {
&FctParent::None => true,
_ => false,
}
}
pub fn cls_id(&self) -> ClassId {
match self {
&FctParent::Class(id) => id,
_ => unreachable!(),
}
}
}
#[derive(Clone, Debug)]
pub struct TypeParam {
pub name: Name,
pub class_bound: Option<ClassId>,
pub trait_bounds: HashSet<TraitId>,
}
impl TypeParam {
pub fn new(name: Name) -> TypeParam {
TypeParam {
name,
class_bound: None,
trait_bounds: HashSet::new(),
}
}
}
#[derive(Debug)]
pub struct Fct<'ast> {
pub id: FctId,
pub ast: &'ast ast::Function,
pub pos: Position,
pub name: Name,
pub parent: FctParent,
pub has_open: bool,
pub has_override: bool,
pub has_final: bool,
pub has_optimize_immediately: bool,
pub is_static: bool,
pub is_pub: bool,
pub is_abstract: bool,
pub is_test: bool,
pub use_cannon: bool,
pub internal: bool,
pub internal_resolved: bool,
pub overrides: Option<FctId>,
pub param_types: Vec<BuiltinType>,
pub return_type: BuiltinType,
pub is_constructor: bool,
pub file: FileId,
pub vtable_index: Option<u32>,
pub impl_for: Option<FctId>,
pub initialized: bool,
pub throws: bool,
pub type_params: Vec<TypeParam>,
pub kind: FctKind,
}
impl<'ast> Fct<'ast> {
pub fn is_virtual(&self) -> bool {
(self.has_open || self.has_override) && !self.has_final
}
pub fn in_class(&self) -> bool {
match self.parent {
FctParent::Class(_) => true,
_ => false,
}
}
pub fn in_trait(&self) -> bool {
match self.parent {
FctParent::Trait(_) => true,
_ => false,
}
}
pub fn cls_id(&self) -> ClassId {
match self.parent {
FctParent::Class(clsid) => clsid,
_ => unreachable!(),
}
}
pub fn trait_id(&self) -> TraitId {
match self.parent {
FctParent::Trait(traitid) => traitid,
_ => unreachable!(),
}
}
pub fn full_name(&self, vm: &VM) -> String {
let mut repr = String::new();
if let FctParent::Class(class_id) = self.parent {
let cls = vm.classes.idx(class_id);
let cls = cls.read();
let name = cls.name;
repr.push_str(&vm.interner.str(name));
if self.is_static {
repr.push_str("::");
} else {
repr.push_str(".");
}
}
repr.push_str(&vm.interner.str(self.name));
if self.type_params.len() > 0 {
repr.push('[');
repr.push_str(
&self
.type_params
.iter()
.map(|n| vm.interner.str(n.name).to_string())
.collect::<Vec<_>>()
.join(", "),
);
repr.push(']');
}
repr.push_str("(");
for (ind, ty) in self.params_without_self().iter().enumerate() {
if ind > 0 {
repr.push_str(", ");
}
let name = ty.name(vm);
repr.push_str(&name);
}
repr.push_str(")");
if self.return_type != BuiltinType::Unit {
repr.push_str(" -> ");
let name = self.return_type.name(vm);
repr.push_str(&name);
}
repr
}
pub fn is_src(&self) -> bool {
match self.kind {
FctKind::Source(_) => true,
_ => false,
}
}
pub fn pos(&self) -> Position {
self.ast.pos
}
pub fn src(&self) -> &RwLock<FctSrc> {
match self.kind {
FctKind::Source(ref src) => src,
_ => panic!("source expected"),
}
}
pub fn has_self(&self) -> bool {
match self.parent {
FctParent::Class(_) | FctParent::Trait(_) | FctParent::Impl(_) => !self.is_static,
_ => false,
}
}
pub fn params_with_self(&self) -> &[BuiltinType] {
&self.param_types
}
pub fn params_without_self(&self) -> &[BuiltinType] {
if self.has_self() {
&self.param_types[1..]
} else {
&self.param_types
}
}
}
#[derive(Debug)]
pub enum FctKind {
Source(RwLock<FctSrc>),
Definition,
Native(Address),
Builtin(Intrinsic),
}
impl FctKind {
pub fn is_src(&self) -> bool {
match *self {
FctKind::Source(_) => true,
_ => false,
}
}
pub fn is_intrinsic(&self) -> bool {
match *self {
FctKind::Builtin(_) => true,
_ => false,
}
}
pub fn is_definition(&self) -> bool {
match *self {
FctKind::Definition => true,
_ => false,
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Intrinsic {
GenericArrayCtorEmpty,
GenericArrayCtorElem,
GenericArrayLen,
GenericArrayGet,
GenericArraySet,
DefaultValue,
Assert,
Debug,
Shl,
StrLen,
StrGet,
StrSet,
BoolEq,
BoolNot,
BoolToInt,
BoolToLong,
ByteEq,
ByteCmp,
ByteNot,
ByteToInt,
ByteToLong,
CharEq,
CharCmp,
CharToInt,
CharToLong,
IntToByte,
IntToChar,
IntToLong,
IntToFloat,
IntToDouble,
IntAsFloat,
EnumEq,
EnumNe,
IntEq,
IntCmp,
IntAdd,
IntSub,
IntMul,
IntDiv,
IntMod,
IntOr,
IntAnd,
IntXor,
IntShl,
IntSar,
IntShr,
IntNot,
IntNeg,
IntPlus,
LongToInt,
LongToChar,
LongToByte,
LongToFloat,
LongToDouble,
LongAsDouble,
LongEq,
LongCmp,
LongAdd,
LongSub,
LongMul,
LongDiv,
LongMod,
LongOr,
LongAnd,
LongXor,
LongShl,
LongSar,
LongShr,
LongNot,
LongNeg,
LongPlus,
FloatToInt,
FloatToLong,
FloatToDouble,
FloatAsInt,
FloatEq,
FloatCmp,
FloatAdd,
FloatSub,
FloatMul,
FloatDiv,
FloatPlus,
FloatNeg,
FloatIsNan,
FloatSqrt,
FloatArrayLen,
FloatArrayGet,
FloatArraySet,
DoubleToInt,
DoubleToLong,
DoubleToFloat,
DoubleAsLong,
DoubleEq,
DoubleCmp,
DoubleAdd,
DoubleSub,
DoubleMul,
DoubleDiv,
DoublePlus,
DoubleNeg,
DoubleIsNan,
DoubleSqrt,
DoubleArrayLen,
DoubleArrayGet,
DoubleArraySet,
}
#[derive(Debug)]
pub struct FctSrc {
pub map_calls: NodeMap<Arc<CallType>>, // maps function call to FctId
pub map_idents: NodeMap<IdentType>,
pub map_tys: NodeMap<BuiltinType>,
pub map_vars: NodeMap<VarId>,
pub map_convs: NodeMap<ConvInfo>,
pub map_cls: NodeMap<ClassId>,
pub map_fors: NodeMap<ForTypeInfo>,
pub always_returns: bool, // true if the function always exits via a return statement;
// false if execution could reach the closing } of the function
pub specializations: RwLock<HashMap<(TypeList, TypeList), JitFctId>>,
pub vars: Vec<Var>, // variables in functions
}
impl Clone for FctSrc {
fn clone(&self) -> FctSrc {
FctSrc {
map_calls: self.map_calls.clone(),
map_idents: self.map_idents.clone(),
map_tys: self.map_tys.clone(),
map_vars: self.map_vars.clone(),
map_convs: self.map_convs.clone(),
map_cls: self.map_cls.clone(),
map_fors: self.map_fors.clone(),
vars: self.vars.clone(),
always_returns: self.always_returns,
specializations: RwLock::new(HashMap::new()),
}
}
}
impl FctSrc {
pub fn new() -> FctSrc {
FctSrc {
map_calls: NodeMap::new(),
map_idents: NodeMap::new(),
map_tys: NodeMap::new(),
map_vars: NodeMap::new(),
map_convs: NodeMap::new(),
map_cls: NodeMap::new(),
map_fors: NodeMap::new(),
vars: Vec::new(),
always_returns: false,
specializations: RwLock::new(HashMap::new()),
}
}
pub fn set_ty(&mut self, id: ast::NodeId, ty: BuiltinType) {
self.map_tys.insert_or_replace(id, ty);
}
pub fn ty(&self, id: ast::NodeId) -> BuiltinType {
self.map_tys.get(id).expect("no type found").clone()
}
pub fn var_self(&self) -> &Var {
&self.vars[0]
}
pub fn var_self_mut(&mut self) -> &mut Var {
&mut self.vars[0]
}
}
#[derive(Clone, Debug)]
pub struct NodeMap<V>
where
V: Clone,
{
map: HashMap<ast::NodeId, V>,
}
impl<V> NodeMap<V>
where
V: Clone,
{
pub fn new() -> NodeMap<V> {
NodeMap {
map: HashMap::new(),
}
}
pub fn get(&self, id: ast::NodeId) -> Option<&V> {
self.map.get(&id)
}
pub fn get_mut(&mut self, id: ast::NodeId) -> Option<&mut V> {
self.map.get_mut(&id)
}
pub fn insert(&mut self, id: ast::NodeId, data: V) {
let old = self.map.insert(id, data);
assert!(old.is_none());
}
pub fn replace(&mut self, id: ast::NodeId, data: V) {
let old = self.map.insert(id, data);
assert!(old.is_some());
}
pub fn insert_or_replace(&mut self, id: ast::NodeId, data: V) {
self.map.insert(id, data);
}
pub fn clear(&mut self) {
self.map.clear();
}
pub fn iter(&self) -> Iter<ast::NodeId, V> {
self.map.iter()
}
}
#[derive(Debug, Copy, Clone)]
pub struct ConvInfo {
pub check_type: BuiltinType,
pub valid: bool,
}
#[derive(Debug, Copy, Clone)]
pub enum Store {
Reg,
Temp(i32, BuiltinType),
}
impl Store {
pub fn offset(&self) -> i32 {
match *self {
Store::Temp(offset, _) => offset,
Store::Reg => panic!(),
}
}
}
#[derive(Debug, Clone)]
pub enum IdentType {
/// name of local variable
Var(VarId),
/// name of a global variable
Global(GlobalId),
/// field expression: <expr>.<field_name>
Field(BuiltinType, FieldId),
/// name of structure
Struct(StructId),
/// name of constant
Const(ConstId),
/// name of function
Fct(FctId),
/// name of function with type params: some_fct[T1, T2, ...]
FctType(FctId, TypeList),
/// name of class
Class(ClassId),
/// name of class with type params: SomeClass[T1, T2, ...]
ClassType(ClassId, TypeList),
/// method expression: <expr>.<method_name>
Method(BuiltinType, Name),
/// method expression with type params: <expr>.<method_name>[T1, T2, ...]
MethodType(BuiltinType, Name, TypeList),
/// static method expression: SomeClass[T1, T2, ...]::<name>
StaticMethod(BuiltinType, Name),
/// static method expression: SomeClass[T1, T2, ...]::<name>[T1, T2, ...]
StaticMethodType(BuiltinType, Name, TypeList),
/// function or class type param: e.g. T
TypeParam(BuiltinType),
/// static method call on type param: <T>::<name>
TypeParamStaticMethod(BuiltinType, Name),
/// name of enum
Enum(EnumId),
/// specific value in enum
EnumValue(EnumId, u32),
}
impl IdentType {
pub fn var_id(&self) -> VarId {
match *self {
IdentType::Var(varid) => varid,
_ => panic!(),
}
}
pub fn struct_id(&self) -> StructId {
match self {
&IdentType::Struct(sid) => sid,
_ => panic!(),
}
}
pub fn is_var(&self) -> bool {
match *self {
IdentType::Var(_) => true,
_ => false,
}
}
pub fn is_field(&self) -> bool {
match *self {
IdentType::Field(_, _) => true,
_ => false,
}
}
pub fn is_class(&self) -> bool {
match *self {
IdentType::Class(_) => true,
IdentType::ClassType(_, _) => true,
_ => false,
}
}
pub fn is_fct(&self) -> bool {
match *self {
IdentType::Fct(_) => true,
IdentType::FctType(_, _) => true,
_ => false,
}
}
}
#[derive(Debug, Clone)]
pub struct ForTypeInfo {
pub make_iterator: FctId,
pub next: FctId,
pub has_next: FctId,
pub iterator_type: BuiltinType,
}
#[derive(Debug, Clone)]
pub enum CallType {
Fct(FctId, TypeList, TypeList),
Method(BuiltinType, FctId, TypeList),
CtorNew(ClassId, FctId, TypeList),
Ctor(ClassId, FctId, TypeList),
Expr(BuiltinType, FctId),
Trait(TraitId, FctId),
TraitStatic(TypeParamId, TraitId, FctId),
Intrinsic(Intrinsic),
}
impl CallType {
pub fn is_ctor_new(&self) -> bool {
match *self {
CallType::CtorNew(_, _, _) => true,
_ => false,
}
}
pub fn is_ctor(&self) -> bool {
match *self {
CallType::Ctor(_, _, _) => true,
_ => false,
}
}
pub fn is_method(&self) -> bool {
match *self {
CallType::Method(_, _, _) => true,
_ => false,
}
}
pub fn is_expr(&self) -> bool {
match *self {
CallType::Expr(_, _) => true,
_ => false,
}
}
pub fn to_intrinsic(&self) -> Option<Intrinsic> {
match *self {
CallType::Intrinsic(intrinsic) => Some(intrinsic),
_ => None,
}
}
pub fn fct_id(&self) -> Option<FctId> {
match *self {
CallType::Fct(fctid, _, _) => Some(fctid),
CallType::Method(_, fctid, _) => Some(fctid),
CallType::CtorNew(_, fctid, _) => Some(fctid),
CallType::Ctor(_, fctid, _) => Some(fctid),
CallType::Expr(_, fctid) => Some(fctid),
CallType::Trait(_, fctid) => Some(fctid),
CallType::TraitStatic(_, _, fctid) => Some(fctid),
CallType::Intrinsic(_) => None,
}
}
}
#[derive(Clone, Debug)]
pub struct CallSite<'ast> {
pub callee: FctId,
pub cls_type_params: TypeList,
pub fct_type_params: TypeList,
pub args: Vec<Arg<'ast>>,
pub argsize: i32,
pub super_call: bool,
pub return_type: BuiltinType,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ConstId(usize);
impl From<usize> for ConstId {
fn from(data: usize) -> ConstId {
ConstId(data)
}
}
impl GrowableVec<Mutex<ConstData>> {
pub fn idx(&self, index: ConstId) -> Arc<Mutex<ConstData>> {
self.idx_usize(index.0 as usize)
}
}
#[derive(Clone, Debug)]
pub struct ConstData {
pub id: ConstId,
pub file: FileId,
pub pos: Position,
pub name: Name,
pub ty: BuiltinType,
pub expr: Box<ast::Expr>,
pub value: ConstValue,
}
#[derive(Clone, Debug, PartialEq)]
pub enum ConstValue {
None,
Bool(bool),
Char(char),
Int(i64),
Float(f64),
}
impl ConstValue {
pub fn to_bool(&self) -> bool {
match self {
&ConstValue::Bool(b) => b,
_ => unreachable!(),
}
}
pub fn to_char(&self) -> char {
match self {
&ConstValue::Char(c) => c,
_ => unreachable!(),
}
}
pub fn to_int(&self) -> i64 {
match self {
&ConstValue::Int(i) => i,
_ => unreachable!(),
}
}
pub fn to_float(&self) -> f64 {
match self {
&ConstValue::Float(f) => f,
_ => unreachable!(),
}
}
}
#[derive(Copy, Clone, Debug)]
pub enum Arg<'ast> {
Expr(&'ast ast::Expr, BuiltinType, i32),
Stack(i32, BuiltinType, i32),
SelfieNew(BuiltinType, i32),
Selfie(BuiltinType, i32),
}
impl<'ast> Arg<'ast> {
pub fn offset(&self) -> i32 {
match *self {
Arg::Expr(_, _, offset) => offset,
Arg::Stack(_, _, offset) => offset,
Arg::Selfie(_, offset) => offset,
Arg::SelfieNew(_, offset) => offset,
}
}
pub fn ty(&self) -> BuiltinType {
match *self {
Arg::Expr(_, ty, _) => ty,
Arg::Stack(_, ty, _) => ty,
Arg::Selfie(ty, _) => ty,
Arg::SelfieNew(ty, _) => ty,
}
}
}
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
pub struct VarId(pub usize);
#[derive(Clone, Debug)]
pub struct Var {
pub id: VarId,
pub name: Name,
pub ty: BuiltinType,
pub reassignable: bool,
pub node_id: ast::NodeId,
}
impl Index<VarId> for Vec<Var> {
type Output = Var;
fn index(&self, index: VarId) -> &Var {
&self[index.0]
}
}
impl IndexMut<VarId> for Vec<Var> {
fn index_mut(&mut self, index: VarId) -> &mut Var {
&mut self[index.0]
}
}<|fim▁end|> | }
|
<|file_name|>testrun.js<|end_file_name|><|fim▁begin|>var scroungejs = require('scroungejs'),
startutils = require('./startutil');
startutils.createFileIfNotExist({
pathSrc : './test/indexSrc.html',
pathFin : './test/index.html'
}, function (err, res) {
if (err) return console.log(err);
scroungejs.build({
inputPath : [
'./test/testbuildSrc',
'./node_modules',
'./bttnsys.js'
],
outputPath : './test/testbuildFin',
isRecursive : true,
isSourcePathUnique : true,<|fim▁hole|> return (err) ? console.log(err) : console.log('finished!');
});
});<|fim▁end|> | isCompressed : false,
isConcatenated : false,
basepage : './test/index.html'
}, function (err, res) { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import os.path<|fim▁hole|><|fim▁end|> |
RNASEQ_PIPELINE_DIR = os.path.dirname(__file__) |
<|file_name|>SpeciesLearnset.java<|end_file_name|><|fim▁begin|>/*
* PokeDat - A Pokemon Data API.
* Copyright (C) 2015
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.github.kaioru.species;
import java.io.Serializable;
/**
* @todo Class Description<|fim▁hole|>
private static final long serialVersionUID = 5370581555765470935L;
}<|fim▁end|> | *
* @author Kaioru
**/
public class SpeciesLearnset implements Serializable { |
<|file_name|>http_uri_name_test.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//<|fim▁hole|>// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package aip0131
import (
"testing"
"github.com/googleapis/api-linter/rules/internal/testutils"
)
func TestHttpNameField(t *testing.T) {
tests := []struct {
testName string
URI string
MethodName string
problems testutils.Problems
}{
{"Valid", "/v1/{name=publishers/*/books/*}", "GetBook", testutils.Problems{}},
{"InvalidVarName", "/v1/{book=publishers/*/books/*}", "GetBook", testutils.Problems{{Message: "`name`"}}},
{"NoVarName", "/v1/publishers/*/books/*", "GetBook", testutils.Problems{{Message: "`name`"}}},
{"Irrelevant", "/v1/{book=publishers/*/books/*}", "AcquireBook", testutils.Problems{}},
}
for _, test := range tests {
t.Run(test.testName, func(t *testing.T) {
f := testutils.ParseProto3Tmpl(t, `
import "google/api/annotations.proto";
service Library {
rpc {{.MethodName}}({{.MethodName}}Request) returns ({{.MethodName}}Response) {
option (google.api.http) = {
get: "{{.URI}}"
};
}
}
message {{.MethodName}}Request {}
message {{.MethodName}}Response {}
`, test)
method := f.GetServices()[0].GetMethods()[0]
if diff := test.problems.SetDescriptor(method).Diff(httpNameField.Lint(f)); diff != "" {
t.Error(diff)
}
})
}
}<|fim▁end|> | // https://www.apache.org/licenses/LICENSE-2.0
// |
<|file_name|>0003_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime<|fim▁hole|>from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
}
complete_apps = ['website']<|fim▁end|> | from south.db import db
from south.v2 import SchemaMigration |
<|file_name|>DebugCamera.cpp<|end_file_name|><|fim▁begin|>#include <Core/Platform.h>
#include <Shared/misc.h>
#include <Core/Core.h>
#include "DebugCamera.h"
#include "SceneTools.h"
DebugCamera::DebugCamera(Pimp::World* world) :
world(world)
, isEnabled(false)
, isLookingAt(false)
{
ASSERT(nullptr != world);
camera = new Pimp::Camera(world);
world->GetElements().push_back(camera);
camera->SetFOVy(0.563197f);
xform = new Pimp::Xform(world);
world->GetElements().push_back(xform);
AddChildToParent(xform,world->GetRootNode());
AddChildToParent(camera,xform);
}
void DebugCamera::SetEnabled( bool enabled )<|fim▁hole|> return;
else
{
isEnabled = enabled;
if (true == isEnabled)
{
// Adopt current camera.
Pimp::Camera* prevCam = world->GetCamera();
ASSERT(prevCam->GetParents().size() == 1);
Pimp::Node* prevCamParent = prevCam->GetParents()[0];
ASSERT(prevCamParent->GetType() == Pimp::ET_Xform);
Pimp::Xform* prevDirectedCamXform = static_cast<Pimp::Xform*>(prevCamParent);
// And then set it as ours.
xform->SetTranslation(prevDirectedCamXform->GetTranslation());
xform->SetRotation(prevDirectedCamXform->GetRotation());
world->SetCamera(camera);
}
}
}
void DebugCamera::Move( const Vector3& directionViewSpace )
{
float speed = 1.0f; //< Totally framerate-dependent movement speed
Vector3 dirWorld = xform->GetWorldTransform()->TransformNormal(directionViewSpace);
Vector3 pos = xform->GetTranslation();
pos += dirWorld * speed;
xform->SetTranslation(pos);
}
void DebugCamera::Roll(bool positive)
{
Quaternion rot = xform->GetRotation();
const float rollAmount = 0.10f; //< Totally framerate-dependent roll amount
rot = CreateQuaternionFromYawPitchRoll(0, 0, positive ? rollAmount : -rollAmount) * rot;
xform->SetRotation(rot);
}
void DebugCamera::StartLookAt()
{
ASSERT(!isLookingAt);
isLookingAt = true;
lookAtInitialRotation = xform->GetRotation();
}
void DebugCamera::EndLookAt()
{
ASSERT(isLookingAt);
isLookingAt = false;
}
void DebugCamera::LookAt(int deltaMouseX, int deltaMouseY)
{
ASSERT(isLookingAt);
// Calculate new orientation
const float mouseSensitivity = -0.01f;
float yaw = deltaMouseX * mouseSensitivity;
float pitch = deltaMouseY * mouseSensitivity;
Quaternion camOrientationDelta = CreateQuaternionFromYawPitchRoll(yaw, pitch, 0);
Quaternion newRot = camOrientationDelta * lookAtInitialRotation;
xform->SetRotation(newRot);
}
void DebugCamera::DumpCurrentTransformToOutputWindow()
{
Quaternion rot = xform->GetRotation();
Vector3 pos = xform->GetTranslation();
Vector3 rotEulerXYZ = rot.GetEulerAnglesXYZ();
DEBUG_LOG("Current debug camera transform:");
DEBUG_LOG("X = %.2ff", pos.x);
DEBUG_LOG("Y = %.2ff", pos.y);
DEBUG_LOG("Z = %.2ff", pos.z);
DEBUG_LOG("X = %.2ff", rot.x);
DEBUG_LOG("Y = %.2ff", rot.y);
DEBUG_LOG("Z = %.2ff", rot.z);
DEBUG_LOG("W = %.2ff", rot.w);
}<|fim▁end|> | {
if (enabled == isEnabled) |
<|file_name|>parameterWithNestedDestructuring.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts'/>
////[[{foo: 'hello', bar: [1]}]]<|fim▁hole|> 1: "var foo: string",
2: "var baz: number"
});<|fim▁end|> | //// .map(([{foo, bar: [baz]}]) => /*1*/foo + /*2*/baz);
verify.quickInfos({ |
<|file_name|>testSelectionExtensionExprFail.py<|end_file_name|><|fim▁begin|>class A:
def test(self):
print "I##|nitializing A", "test"##|
attribute = "hello"
def my_method(self):
print self.attribute
a = A()
a.test()
##r Should expand to Full String "Initializing A"
<|fim▁hole|><|fim▁end|> | # Invalid selection:
# nitializing A", "test" |
<|file_name|>NotExistingRecipeException.java<|end_file_name|><|fim▁begin|>package com.bellini.recipecatalog.exception.recipe;
public class NotExistingRecipeException extends RuntimeException {
private static final long serialVersionUID = 2975419159984559986L;
private Long id;
public NotExistingRecipeException(Long id) {
super();
if (id == null) {
throw new IllegalArgumentException("Null object used as initializer of the exception");
}<|fim▁hole|> @Override
public String getMessage() {
return "Recipe " + id + " not found in database";
}
}<|fim▁end|> | this.id = id;
}
|
<|file_name|>crate-method-reexport-grrrrrrr.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>// crate.
// aux-build:crate-method-reexport-grrrrrrr2.rs
extern crate crate_method_reexport_grrrrrrr2;
use std::gc::GC;
pub fn main() {
use crate_method_reexport_grrrrrrr2::rust::add;
use crate_method_reexport_grrrrrrr2::rust::cx;
let x = box(GC) ();
x.cx();
let y = ();
y.add("hi".to_string());
}<|fim▁end|> | #![feature(managed_boxes)]
// This is a regression test that the metadata for the
// name_pool::methods impl in the other crate is reachable from this |
<|file_name|>nexus_db_v2.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Rohit Agarwalla, Cisco Systems, Inc.
# @author: Arvind Somya, Cisco Systems, Inc. ([email protected])
#
from sqlalchemy.orm import exc
import quantum.db.api as db
from quantum.openstack.common import log as logging
from quantum.plugins.cisco.common import cisco_exceptions as c_exc
from quantum.plugins.cisco.db import nexus_models_v2
LOG = logging.getLogger(__name__)
def initialize():
"""Establish database connection and load models"""
db.configure_db()
def get_all_nexusport_bindings():
"""Lists all the nexusport bindings"""
LOG.debug(_("get_all_nexusport_bindings() called"))
session = db.get_session()
try:
bindings = session.query(nexus_models_v2.NexusPortBinding).all()
return bindings
except exc.NoResultFound:
return []
def get_nexusport_binding(port_id, vlan_id, switch_ip, instance_id):
"""Lists a nexusport binding"""
LOG.debug(_("get_nexusport_binding() called"))
session = db.get_session()
try:
binding = (session.query(nexus_models_v2.NexusPortBinding).
filter_by(vlan_id=vlan_id).filter_by(switch_ip=switch_ip).
filter_by(port_id=port_id).
filter_by(instance_id=instance_id).all())
return binding
except exc.NoResultFound:
raise c_exc.NexusPortBindingNotFound(vlan_id=vlan_id)
def get_nexusvlan_binding(vlan_id, switch_ip):
"""Lists a vlan and switch binding"""
LOG.debug(_("get_nexusvlan_binding() called"))
session = db.get_session()
try:
binding = (session.query(nexus_models_v2.NexusPortBinding).
filter_by(vlan_id=vlan_id).filter_by(switch_ip=switch_ip).
all())
return binding
except exc.NoResultFound:
raise c_exc.NexusPortBindingNotFound(vlan_id=vlan_id)
def add_nexusport_binding(port_id, vlan_id, switch_ip, instance_id):
"""Adds a nexusport binding"""
LOG.debug(_("add_nexusport_binding() called"))
session = db.get_session()
binding = nexus_models_v2.NexusPortBinding(
port_id, vlan_id, switch_ip, instance_id)<|fim▁hole|> session.add(binding)
session.flush()
return binding
def remove_nexusport_binding(port_id, vlan_id, switch_ip, instance_id):
"""Removes a nexusport binding"""
LOG.debug(_("remove_nexusport_binding() called"))
session = db.get_session()
try:
binding = (session.query(nexus_models_v2.NexusPortBinding).
filter_by(vlan_id=vlan_id).filter_by(switch_ip=switch_ip).
filter_by(port_id=port_id).
filter_by(instance_id=instance_id).all())
for bind in binding:
session.delete(bind)
session.flush()
return binding
except exc.NoResultFound:
pass
def update_nexusport_binding(port_id, new_vlan_id):
"""Updates nexusport binding"""
LOG.debug(_("update_nexusport_binding called"))
session = db.get_session()
try:
binding = (session.query(nexus_models_v2.NexusPortBinding).
filter_by(port_id=port_id).one())
if new_vlan_id:
binding["vlan_id"] = new_vlan_id
session.merge(binding)
session.flush()
return binding
except exc.NoResultFound:
raise c_exc.NexusPortBindingNotFound()
def get_nexusvm_binding(vlan_id, instance_id):
"""Lists nexusvm bindings"""
LOG.debug(_("get_nexusvm_binding() called"))
session = db.get_session()
try:
binding = (session.query(nexus_models_v2.NexusPortBinding).
filter_by(instance_id=instance_id).
filter_by(vlan_id=vlan_id).first())
return binding
except exc.NoResultFound:
raise c_exc.NexusPortBindingNotFound(vlan_id=vlan_id)
def get_port_vlan_switch_binding(port_id, vlan_id, switch_ip):
"""Lists nexusvm bindings"""
LOG.debug(_("get_port_vlan_switch_binding() called"))
session = db.get_session()
try:
binding = (session.query(nexus_models_v2.NexusPortBinding).
filter_by(port_id=port_id).filter_by(switch_ip=switch_ip).
filter_by(vlan_id=vlan_id).all())
return binding
except exc.NoResultFound:
raise c_exc.NexusPortBindingNotFound(vlan_id=vlan_id)<|fim▁end|> | |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for pybr11_tutorial project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
<|fim▁hole|># Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4b5prz%55i#ay!qf=7w=61p^am-4a_jknjf8&jzu1d6ib@-*d^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pybr11_tutorial',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'pybr11_tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pybr11_tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'<|fim▁end|> | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
<|file_name|>PasswordRecoveryService.java<|end_file_name|><|fim▁begin|>package ru.ncedu.ecomm.servlets.services.passwordRecovery;
import ru.ncedu.ecomm.data.models.dao.UserDAOObject;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import static ru.ncedu.ecomm.data.DAOFactory.getDAOFactory;
public class PasswordRecoveryService {
private final static String ERROR_FOUND_EMAIL = "ErrorEmailNotFound";
private final static String SUCCESS_SEND = "SuccessSend";
private final static String ERROR_SEND = "ErrorSend";
private final static int MAX_HASH = 10;
private final static int MAX_NUMBER = 9;
private PasswordRecoveryService() {
}
private static PasswordRecoveryService instance;
public static synchronized PasswordRecoveryService getInstance() {
if (instance == null) {
instance = new PasswordRecoveryService();
}
return instance;
}
public String sendMailToUser(String toEmail, String contextPath) {
UserDAOObject userByEmail = getDAOFactory().getUserDAO().getUserByEmail(toEmail);
if (userByEmail == null)
return ERROR_FOUND_EMAIL;
userByEmail.setRecoveryHash(getRecoveryHash());
return sendMailToUser(userByEmail, contextPath);<|fim▁hole|>
private String getRecoveryHash() {
String recoveryHash;
UserDAOObject userByHash;
do {
recoveryHash = generateRecoveryHash();
userByHash = getDAOFactory().getUserDAO().getUserByRecoveryHash(recoveryHash);
} while (userByHash != null);
return recoveryHash;
}
private String generateRecoveryHash() {
List<Integer> uniqueHashCollection = new ArrayList<>();
addHashToCollection(uniqueHashCollection);
return getHashFromCollection(uniqueHashCollection);
}
private void addHashToCollection(List<Integer> uniqueHash) {
Random random = new Random();
while (uniqueHash.size() < MAX_HASH) {
uniqueHash.add(random.nextInt(MAX_NUMBER));
}
}
private String getHashFromCollection(List<Integer> uniqueHashCollection) {
StringBuilder recoveryHash = new StringBuilder(MAX_HASH);
for (int hash : uniqueHashCollection) {
recoveryHash.append(hash);
}
return recoveryHash.toString();
}
private String sendMailToUser(UserDAOObject user, String contextPath) {
String textHTML = getTextHtml(user.getEmail(), user.getRecoveryHash(), contextPath);
getDAOFactory().getUserDAO().updateUser(user);
return SendMailService.getInstance().isSentLetterToEmail(user.getEmail(), textHTML) ?
SUCCESS_SEND
: ERROR_SEND;
}
private String getTextHtml(String toEmail, String recoveryHash, String contextPath) {
return "<p>Please change your password in here:</p>" +
"<a href='" + contextPath + "/passwordChange?email="
+ toEmail + "&recoveryHash=" + recoveryHash + "'>Change Password</a>";
}
}<|fim▁end|> | }
|
<|file_name|>select-harness.spec.ts<|end_file_name|><|fim▁begin|>import {MatSelectModule} from '@angular/material/select';
import {MatSelectHarness} from './select-harness';
import {runHarnessTests} from './shared.spec';
<|fim▁hole|><|fim▁end|> | describe('Non-MDC-based MatSelectHarness', () => {
runHarnessTests(MatSelectModule, MatSelectHarness);
}); |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# BbQuick documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 10 20:55:10 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'BbQuick'
copyright = u'2012, Ian A Wilson'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0a'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the<|fim▁hole|># documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'BbQuickdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'BbQuick.tex', u'BbQuick Documentation',
u'Ian A Wilson', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'bbquick', u'BbQuick Documentation',
[u'Ian A Wilson'], 1)
]<|fim▁end|> | |
<|file_name|>ConfPeriodo.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package pe.edu.upeu.modelo;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* @author hp
*/
@Entity
@Table(name = "conf_periodo")
@XmlRootElement
@NamedQueries({
@NamedQuery(name = "ConfPeriodo.findAll", query = "SELECT c FROM ConfPeriodo c")})
public class ConfPeriodo implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Basic(optional = false)
@Column(name = "id_periodo")
private Integer idPeriodo;
@Basic(optional = false)
@Column(name = "periodo")
private String periodo;
@Basic(optional = false)
@Column(name = "descripcion")
private String descripcion;
@Basic(optional = false)
@Column(name = "fecha_inicio")
@Temporal(TemporalType.DATE)
private Date fechaInicio;
@Basic(optional = false)
@Column(name = "fecha_fin")
@Temporal(TemporalType.DATE)
private Date fechaFin;
@Basic(optional = false)
@Column(name = "estado")
private String estado;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloAreaEje> gloAreaEjeCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloEstadoArea> gloEstadoAreaCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloEstadoDepartamento> gloEstadoDepartamentoCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloEstadoFilial> gloEstadoFilialCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloDepartCoordinador> gloDepartCoordinadorCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloMeta> gloMetaCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloDepartareaCoordinador> gloDepartareaCoordinadorCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<FinPartidapresupuestaria> finPartidapresupuestariaCollection;
@JoinColumn(name = "id_temporada", referencedColumnName = "id_temporada")
@ManyToOne(optional = false)
private ConfTemporada idTemporada;
public ConfPeriodo() {
}
public ConfPeriodo(Integer idPeriodo) {
this.idPeriodo = idPeriodo;
}
public ConfPeriodo(Integer idPeriodo, String periodo, String descripcion, Date fechaInicio, Date fechaFin, String estado) {
this.idPeriodo = idPeriodo;
this.periodo = periodo;
this.descripcion = descripcion;
this.fechaInicio = fechaInicio;
this.fechaFin = fechaFin;
this.estado = estado;
}
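    // Illustrative sketch (assumption, not project code): as a JPA entity this
    // class is normally persisted through an EntityManager, e.g.
    //
    //   ConfPeriodo p = new ConfPeriodo(null, "2019-I", "Primer periodo", inicio, fin, "A");
    //   entityManager.persist(p);
    //
    // where `entityManager`, `inicio` and `fin` are hypothetical local variables.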
public Integer getIdPeriodo() {
return idPeriodo;
}
public void setIdPeriodo(Integer idPeriodo) {
this.idPeriodo = idPeriodo;
}
public String getPeriodo() {
return periodo;
}
public void setPeriodo(String periodo) {
this.periodo = periodo;
}
public String getDescripcion() {
return descripcion;
}
public void setDescripcion(String descripcion) {
this.descripcion = descripcion;
}
public Date getFechaInicio() {
return fechaInicio;
}
public void setFechaInicio(Date fechaInicio) {
this.fechaInicio = fechaInicio;
}
public Date getFechaFin() {
return fechaFin;
}
public void setFechaFin(Date fechaFin) {
this.fechaFin = fechaFin;
}
public String getEstado() {
return estado;
}
public void setEstado(String estado) {
this.estado = estado;
}
@XmlTransient
public Collection<GloAreaEje> getGloAreaEjeCollection() {
return gloAreaEjeCollection;
}
public void setGloAreaEjeCollection(Collection<GloAreaEje> gloAreaEjeCollection) {
this.gloAreaEjeCollection = gloAreaEjeCollection;
}
@XmlTransient
public Collection<GloEstadoArea> getGloEstadoAreaCollection() {
return gloEstadoAreaCollection;
}
public void setGloEstadoAreaCollection(Collection<GloEstadoArea> gloEstadoAreaCollection) {
this.gloEstadoAreaCollection = gloEstadoAreaCollection;
}
@XmlTransient
public Collection<GloEstadoDepartamento> getGloEstadoDepartamentoCollection() {
return gloEstadoDepartamentoCollection;
}
public void setGloEstadoDepartamentoCollection(Collection<GloEstadoDepartamento> gloEstadoDepartamentoCollection) {
this.gloEstadoDepartamentoCollection = gloEstadoDepartamentoCollection;
}
@XmlTransient
public Collection<GloEstadoFilial> getGloEstadoFilialCollection() {
return gloEstadoFilialCollection;
}
public void setGloEstadoFilialCollection(Collection<GloEstadoFilial> gloEstadoFilialCollection) {
this.gloEstadoFilialCollection = gloEstadoFilialCollection;
}
@XmlTransient
public Collection<GloDepartCoordinador> getGloDepartCoordinadorCollection() {
return gloDepartCoordinadorCollection;
}
public void setGloDepartCoordinadorCollection(Collection<GloDepartCoordinador> gloDepartCoordinadorCollection) {
this.gloDepartCoordinadorCollection = gloDepartCoordinadorCollection;
}
@XmlTransient
public Collection<GloMeta> getGloMetaCollection() {
return gloMetaCollection;
}
public void setGloMetaCollection(Collection<GloMeta> gloMetaCollection) {
<|fim▁hole|> }
@XmlTransient
public Collection<GloDepartareaCoordinador> getGloDepartareaCoordinadorCollection() {
return gloDepartareaCoordinadorCollection;
}
public void setGloDepartareaCoordinadorCollection(Collection<GloDepartareaCoordinador> gloDepartareaCoordinadorCollection) {
this.gloDepartareaCoordinadorCollection = gloDepartareaCoordinadorCollection;
}
@XmlTransient
public Collection<FinPartidapresupuestaria> getFinPartidapresupuestariaCollection() {
return finPartidapresupuestariaCollection;
}
public void setFinPartidapresupuestariaCollection(Collection<FinPartidapresupuestaria> finPartidapresupuestariaCollection) {
this.finPartidapresupuestariaCollection = finPartidapresupuestariaCollection;
}
public ConfTemporada getIdTemporada() {
return idTemporada;
}
public void setIdTemporada(ConfTemporada idTemporada) {
this.idTemporada = idTemporada;
}
@Override
public int hashCode() {
int hash = 0;
hash += (idPeriodo != null ? idPeriodo.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof ConfPeriodo)) {
return false;
}
ConfPeriodo other = (ConfPeriodo) object;
if ((this.idPeriodo == null && other.idPeriodo != null) || (this.idPeriodo != null && !this.idPeriodo.equals(other.idPeriodo))) {
return false;
}
return true;
}
@Override
public String toString() {
return "pe.edu.upeu.modelo.ConfPeriodo[ idPeriodo=" + idPeriodo + " ]";
}
}<|fim▁end|> | this.gloMetaCollection = gloMetaCollection;
|
<|file_name|>cmd_away.py<|end_file_name|><|fim▁begin|>from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.config import ConfigValidationError
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from txircd.utils import trimStringToByteLength
from zope.interface import implementer
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
@implementer(IPlugin, IModuleData, ICommand)
class AwayCommand(ModuleData, Command):
name = "AwayCommand"
core = True
def userCommands(self) -> List[Tuple[str, int, Command]]:
return [ ("AWAY", 1, self) ]
def actions(self) -> List[Tuple[str, int, Callable]]:
return [ ("commandextra-PRIVMSG", 10, self.notifyAway),
("commandextra-NOTICE", 10, self.notifyAway),
("extrawhois", 10, self.addWhois),
("buildisupport", 1, self.buildISupport),
("usermetadataupdate", 10, self.sendAwayNotice) ]
def verifyConfig(self, config: Dict[str, Any]) -> None:
if "away_length" in config:
if not isinstance(config["away_length"], int) or config["away_length"] < 0:
raise ConfigValidationError("away_length", "invalid number")
elif config["away_length"] > 200:
config["away_length"] = 200
self.ircd.logConfigValidationWarning("away_length", "value is too large", 200)
def notifyAway(self, user: "IRCUser", data: Dict[Any, Any]) -> None:
if "targetusers" not in data:
return
for u in data["targetusers"].keys():
if u.metadataKeyExists("away"):
user.sendMessage(irc.RPL_AWAY, u.nick, u.metadataValue("away"))
def addWhois(self, user: "IRCUser", targetUser: "IRCUser") -> None:
if targetUser.metadataKeyExists("away"):
user.sendMessage(irc.RPL_AWAY, targetUser.nick, targetUser.metadataValue("away"))
def buildISupport(self, data: Dict[str, Union[str, int]]) -> None:
data["AWAYLEN"] = self.ircd.config.get("away_length", 200)
def sendAwayNotice(self, user: "IRCUser", key: str, oldValue: str, value: str, fromServer: Optional["IRCServer"]) -> None:
if key == "away":
if value:
user.sendMessage(irc.RPL_NOWAWAY, "You have been marked as being away")<|fim▁hole|>
def parseParams(self, user: "IRCUser", params: List[str], prefix: str, tags: Dict[str, Optional[str]]) -> Optional[Dict[Any, Any]]:
if not params:
return {}
message = " ".join(params)
message = trimStringToByteLength(message, self.ircd.config.get("away_length", 200))
return {
"message": message
}
def execute(self, user: "IRCUser", data: Dict[Any, Any]) -> bool:
if "message" in data and data["message"]:
user.setMetadata("away", data["message"])
else:
user.setMetadata("away", None)
return True
awayCommand = AwayCommand()<|fim▁end|> | else:
user.sendMessage(irc.RPL_UNAWAY, "You are no longer marked as being away") |
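# Illustrative flow sketch (assumption, not part of txircd itself): a client
# sending "AWAY :Gone fishing" exercises parseParams/execute roughly like this:
#
#   data = awayCommand.parseParams(user, ["Gone", "fishing"], "", {})
#   awayCommand.execute(user, data)   # sets the "away" metadata -> RPL_NOWAWAY
#   awayCommand.execute(user, {})     # clears it again -> RPL_UNAWAY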
<|file_name|>IAS.MachineAccountValidation.js<|end_file_name|><|fim▁begin|>class ias_machineaccountvalidation {
constructor() {
}
// System.Runtime.Remoting.ObjRef CreateObjRef(type requestedType)
CreateObjRef() {
}
// bool Equals(System.Object obj)
Equals() {
}
// int GetHashCode()
GetHashCode() {
}
// System.Object GetLifetimeService()
GetLifetimeService() {
}
// type GetType()
GetType() {
}
// System.Object InitializeLifetimeService()
InitializeLifetimeService() {
}
<|fim▁hole|> ToString() {
}
}
module.exports = ias_machineaccountvalidation;<|fim▁end|> | // string ToString() |
<|file_name|>instr_vextractf64x4.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn vextractf64x4_1() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTF64x4, operand1: Some(Direct(YMM2)), operand2: Some(Direct(ZMM7)), operand3: Some(Literal8(77)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 243, 253, 204, 27, 250, 77], OperandSize::Dword)
}
#[test]
fn vextractf64x4_2() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTF64x4, operand1: Some(Indirect(EBX, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(ZMM1)), operand3: Some(Literal8(80)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 243, 253, 72, 27, 11, 80], OperandSize::Dword)
}
#[test]
fn vextractf64x4_3() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTF64x4, operand1: Some(Direct(YMM23)), operand2: Some(Direct(ZMM11)), operand3: Some(Literal8(52)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 51, 253, 206, 27, 223, 52], OperandSize::Qword)
}
#[test]
fn vextractf64x4_4() {<|fim▁hole|><|fim▁end|> | run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTF64x4, operand1: Some(IndirectDisplaced(RAX, 1609800228, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(ZMM10)), operand3: Some(Literal8(118)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 115, 253, 72, 27, 144, 36, 154, 243, 95, 118], OperandSize::Qword)
} |
<|file_name|>hejbejse.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
#/*
# * Copyright (C) 2011 Ivo Brhel
# *
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# */
import re,os,urllib,urllib2,cookielib
import util,resolver
from provider import ContentProvider
class HejbejseContentProvider(ContentProvider):
def __init__(self,username=None,password=None,filter=None):
ContentProvider.__init__(self,'hejbejse.tv','http://www.kynychova-tv.cz/',username,password,filter)
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookielib.LWPCookieJar()))
urllib2.install_opener(opener)
def capabilities(self):
return ['resolve','categories','list']
def categories(self):
page = util.parse_html('http://www.kynychova-tv.cz/index.php?id=5')
result = []
for title,uri in [(x.h3.text,x.h3.a['href']) for x in page.select('div.entry5') if x.h3]:
item = self.dir_item()
item['title'] = title
item['url'] = uri
result.append(item)
return result
def list(self, url):<|fim▁hole|> url = self._url(url)
page = util.parse_html(url)
result = []
for title,uri in [(x.img['title'],x['href']) for x in page.select('div.entry3')[0].findAll('a')]:
item = self.video_item()
item['title'] = title
item['url'] = uri
result.append(item)
return result
def resolve(self,item,captcha_cb=None,select_cb=None):
item = item.copy()
url = self._url(item['url'])
page = util.parse_html(url)
result = []
data=str(page.select('div.entry3 > center')[0])
resolved = resolver.findstreams(data,['<iframe(.+?)src=[\"\'](?P<url>.+?)[\'\"]'])
try:
for i in resolved:
item = self.video_item()
item['title'] = i['name']
item['url'] = i['url']
item['quality'] = i['quality']
item['surl'] = i['surl']
result.append(item)
except:
print '===Unknown resolver==='
if len(result)==1:
return result[0]
elif len(result) > 1 and select_cb:
return select_cb(result)<|fim▁end|> | |
<|file_name|>pylexotron.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from .saferscanner import SaferScanner
class LexingError(Exception):
@classmethod
def from_text(cls, rulestr, unmatched, msg='Lexing error'):
bad_char = len(rulestr) - len(unmatched)
linenum = rulestr[:bad_char].count('\n') + 1
charnum = len(rulestr[:bad_char].rsplit('\n', 1)[-1]) + 1
snippet_start = max(0, min(len(rulestr), bad_char - 10))
snippet_end = max(0, min(len(rulestr), bad_char + 10))
msg += " (Error at: '...%s...')" % (rulestr[snippet_start:snippet_end],)
raise cls(linenum, charnum, msg)
def __init__(self, linenum, charnum, msg='Lexing error'):
self.linenum = linenum
self.charnum = charnum
self.msg = msg
self.args = (linenum, charnum, msg)
def __str__(self):
return '%s at line %d, char %d' % (self.msg, self.linenum, self.charnum)
class Hint:
def __init__(self, text):
self.text = text
def __hash__(self):
return hash((id(self.__class__), self.text))
def __eq__(self, other):
return isinstance(other, self.__class__) and other.text == self.text
def __repr__(self):
return '%s(%r)' % (self.__class__, self.text)
def is_hint(x):
return isinstance(x, Hint)
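# Illustrative note: completion sets mix plain strings (concrete tokens) with
# Hint objects (placeholders such as '<identifier>'), so consumers can filter:
#
#   compls = {'SELECT', Hint('<tablename>')}
#   concrete = [c for c in compls if not is_hint(c)]   # -> ['SELECT']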
class ParseContext:
"""
These are meant to be immutable, although it would be something of a
pain to enforce that in python.
"""
def __init__(self, ruleset, bindings, matched, remainder, productionname):
self.ruleset = ruleset
self.bindings = bindings
self.matched = matched
self.remainder = remainder
self.productionname = productionname
def get_production_by_name(self, name):
return self.ruleset[name]
def get_completer(self, symname):
return self.ruleset[(self.productionname, symname)]
def get_binding(self, name, default=None):
return self.bindings.get(name, default)
def with_binding(self, name, val):
newbinds = self.bindings.copy()
newbinds[name] = val
return self.__class__(self.ruleset, newbinds, self.matched,
self.remainder, self.productionname)
def with_match(self, num):
return self.__class__(self.ruleset, self.bindings,
self.matched + self.remainder[:num],
self.remainder[num:], self.productionname)
def with_production_named(self, newname):
return self.__class__(self.ruleset, self.bindings, self.matched,
self.remainder, newname)
def extract_orig(self, tokens=None):
if tokens is None:
tokens = self.matched
if not tokens:
return ''
orig = self.bindings.get('*SRC*', None)
if orig is None:
# pretty much just guess
return ' '.join([t[1] for t in tokens])
# low end of span for first token, to high end of span for last token
orig_text = orig[tokens[0][2][0]:tokens[-1][2][1]]
# Convert all unicode tokens to ascii, where possible. This
# helps avoid problems with performing unicode-incompatible
# operations on tokens (like .lower()). See CASSANDRA-9083
# for one example of this.
try:
orig_text = orig_text.encode('ascii')
except UnicodeEncodeError:
pass
return orig_text
def __repr__(self):
return '<%s matched=%r remainder=%r prodname=%r bindings=%r>' \
% (self.__class__.__name__, self.matched, self.remainder, self.productionname, self.bindings)
class matcher:
def __init__(self, arg):
self.arg = arg
def match(self, ctxt, completions):
raise NotImplementedError
def match_with_results(self, ctxt, completions):
matched_before = len(ctxt.matched)
newctxts = self.match(ctxt, completions)
return [(newctxt, newctxt.matched[matched_before:]) for newctxt in newctxts]
@staticmethod
def try_registered_completion(ctxt, symname, completions):
debugging = ctxt.get_binding('*DEBUG*', False)
if ctxt.remainder or completions is None:
return False
try:
completer = ctxt.get_completer(symname)
except KeyError:
return False
if debugging:
print "Trying completer %r with %r" % (completer, ctxt)
try:
new_compls = completer(ctxt)
except Exception:
if debugging:
import traceback
traceback.print_exc()
return False
if debugging:
print "got %r" % (new_compls,)
completions.update(new_compls)
return True
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.arg)
class choice(matcher):
def match(self, ctxt, completions):
foundctxts = []
for a in self.arg:
subctxts = a.match(ctxt, completions)
foundctxts.extend(subctxts)
return foundctxts
class one_or_none(matcher):
def match(self, ctxt, completions):
return [ctxt] + list(self.arg.match(ctxt, completions))
class repeat(matcher):
def match(self, ctxt, completions):
found = [ctxt]
ctxts = [ctxt]
while True:
new_ctxts = []
for c in ctxts:
new_ctxts.extend(self.arg.match(c, completions))
if not new_ctxts:
return found
found.extend(new_ctxts)
ctxts = new_ctxts
class rule_reference(matcher):
def match(self, ctxt, completions):
prevname = ctxt.productionname
try:
rule = ctxt.get_production_by_name(self.arg)
except KeyError:
raise ValueError("Can't look up production rule named %r" % (self.arg,))
output = rule.match(ctxt.with_production_named(self.arg), completions)
return [c.with_production_named(prevname) for c in output]
class rule_series(matcher):
def match(self, ctxt, completions):
ctxts = [ctxt]
for patpiece in self.arg:
new_ctxts = []
for c in ctxts:
new_ctxts.extend(patpiece.match(c, completions))
if not new_ctxts:
return ()
ctxts = new_ctxts
return ctxts
class named_symbol(matcher):
def __init__(self, name, arg):
matcher.__init__(self, arg)
self.name = name
def match(self, ctxt, completions):
pass_in_compls = completions
if self.try_registered_completion(ctxt, self.name, completions):
# don't collect other completions under this; use a dummy
pass_in_compls = set()
results = self.arg.match_with_results(ctxt, pass_in_compls)
return [c.with_binding(self.name, ctxt.extract_orig(matchtoks)) for (c, matchtoks) in results]
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.name, self.arg)
class named_collector(named_symbol):
def match(self, ctxt, completions):
pass_in_compls = completions
if self.try_registered_completion(ctxt, self.name, completions):
# don't collect other completions under this; use a dummy
pass_in_compls = set()
output = []
for ctxt, matchtoks in self.arg.match_with_results(ctxt, pass_in_compls):
oldval = ctxt.get_binding(self.name, ())
output.append(ctxt.with_binding(self.name, oldval + (ctxt.extract_orig(matchtoks),)))
return output
class terminal_matcher(matcher):
def pattern(self):
raise NotImplementedError
class regex_rule(terminal_matcher):
def __init__(self, pat):
terminal_matcher.__init__(self, pat)
self.regex = pat
self.re = re.compile(pat + '$', re.I | re.S)
def match(self, ctxt, completions):
if ctxt.remainder:
if self.re.match(ctxt.remainder[0][1]):
return [ctxt.with_match(1)]
elif completions is not None:
completions.add(Hint('<%s>' % ctxt.productionname))
return []
def pattern(self):
return self.regex
class text_match(terminal_matcher):
alpha_re = re.compile(r'[a-zA-Z]')
def __init__(self, text):
try:
terminal_matcher.__init__(self, eval(text))
except SyntaxError:
print "bad syntax %r" % (text,)
def match(self, ctxt, completions):
if ctxt.remainder:
if self.arg.lower() == ctxt.remainder[0][1].lower():
return [ctxt.with_match(1)]
elif completions is not None:
completions.add(self.arg)
return []
def pattern(self):
# can't use (?i) here- Scanner component regex flags won't be applied
def ignorecaseify(matchobj):
c = matchobj.group(0)
return '[%s%s]' % (c.upper(), c.lower())
return self.alpha_re.sub(ignorecaseify, re.escape(self.arg))
class case_match(text_match):
def match(self, ctxt, completions):
if ctxt.remainder:
if self.arg == ctxt.remainder[0][1]:
return [ctxt.with_match(1)]
elif completions is not None:
completions.add(self.arg)
return []
def pattern(self):
return re.escape(self.arg)
class word_match(text_match):
def pattern(self):
return r'\b' + text_match.pattern(self) + r'\b'
class case_word_match(case_match):
def pattern(self):
return r'\b' + case_match.pattern(self) + r'\b'
class terminal_type_matcher(matcher):
def __init__(self, tokentype, submatcher):
matcher.__init__(self, tokentype)
self.tokentype = tokentype
self.submatcher = submatcher
def match(self, ctxt, completions):
if ctxt.remainder:
if ctxt.remainder[0][0] == self.tokentype:
return [ctxt.with_match(1)]
elif completions is not None:
self.submatcher.match(ctxt, completions)
return []
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.tokentype, self.submatcher)
class ParsingRuleSet:
RuleSpecScanner = SaferScanner([
(r'::=', lambda s,t: t),
(r'\[[a-z0-9_]+\]=', lambda s,t: ('named_collector', t[1:-2])),
(r'[a-z0-9_]+=', lambda s,t: ('named_symbol', t[:-1])),
(r'/(\[\^?.[^]]*\]|[^/]|\\.)*/', lambda s,t: ('regex', t[1:-1].replace(r'\/', '/'))),
(r'"([^"]|\\.)*"', lambda s,t: ('litstring', t)),
(r'<[^>]*>', lambda s,t: ('reference', t[1:-1])),
(r'\bJUNK\b', lambda s,t: ('junk', t)),
(r'[@()|?*;]', lambda s,t: t),
(r'\s+', None),
(r'#[^\n]*', None),
], re.I | re.S)
def __init__(self):
self.ruleset = {}
self.scanner = None
self.terminals = []
@classmethod
def from_rule_defs(cls, rule_defs):
prs = cls()
prs.ruleset, prs.terminals = cls.parse_rules(rule_defs)
return prs
@classmethod
def parse_rules(cls, rulestr):
tokens, unmatched = cls.RuleSpecScanner.scan(rulestr)
if unmatched:
raise LexingError.from_text(rulestr, unmatched, msg="Syntax rules unparseable")
rules = {}<|fim▁hole|> for t in tokeniter:
if isinstance(t, tuple) and t[0] in ('reference', 'junk'):
assign = tokeniter.next()
if assign != '::=':
raise ValueError('Unexpected token %r; expected "::="' % (assign,))
name = t[1]
production = cls.read_rule_tokens_until(';', tokeniter)
if isinstance(production, terminal_matcher):
terminals.append((name, production))
production = terminal_type_matcher(name, production)
rules[name] = production
else:
raise ValueError('Unexpected token %r; expected name' % (t,))
return rules, terminals
@staticmethod
def mkrule(pieces):
if isinstance(pieces, (tuple, list)):
if len(pieces) == 1:
return pieces[0]
return rule_series(pieces)
return pieces
@classmethod
def read_rule_tokens_until(cls, endtoks, tokeniter):
if isinstance(endtoks, basestring):
endtoks = (endtoks,)
counttarget = None
if isinstance(endtoks, int):
counttarget = endtoks
endtoks = ()
countsofar = 0
myrules = []
mybranches = [myrules]
for t in tokeniter:
countsofar += 1
if t in endtoks:
if len(mybranches) == 1:
return cls.mkrule(mybranches[0])
return choice(map(cls.mkrule, mybranches))
if isinstance(t, tuple):
if t[0] == 'reference':
t = rule_reference(t[1])
elif t[0] == 'litstring':
if t[1][1].isalnum() or t[1][1] == '_':
t = word_match(t[1])
else:
t = text_match(t[1])
elif t[0] == 'regex':
t = regex_rule(t[1])
elif t[0] == 'named_collector':
t = named_collector(t[1], cls.read_rule_tokens_until(1, tokeniter))
elif t[0] == 'named_symbol':
t = named_symbol(t[1], cls.read_rule_tokens_until(1, tokeniter))
elif t == '(':
t = cls.read_rule_tokens_until(')', tokeniter)
elif t == '?':
t = one_or_none(myrules.pop(-1))
elif t == '*':
t = repeat(myrules.pop(-1))
elif t == '@':
x = tokeniter.next()
if not isinstance(x, tuple) or x[0] != 'litstring':
raise ValueError("Unexpected token %r following '@'" % (x,))
t = case_match(x[1])
elif t == '|':
myrules = []
mybranches.append(myrules)
continue
else:
raise ValueError('Unparseable rule token %r after %r' % (t, myrules[-1]))
myrules.append(t)
if countsofar == counttarget:
if len(mybranches) == 1:
return cls.mkrule(mybranches[0])
return choice(map(cls.mkrule, mybranches))
raise ValueError('Unexpected end of rule tokens')
def append_rules(self, rulestr):
rules, terminals = self.parse_rules(rulestr)
self.ruleset.update(rules)
self.terminals.extend(terminals)
if terminals:
self.scanner = None # recreate it if/when necessary
def register_completer(self, func, rulename, symname):
self.ruleset[(rulename, symname)] = func
def make_lexer(self):
def make_handler(name):
if name == 'JUNK':
return None
return lambda s, t: (name, t, s.match.span())
regexes = [(p.pattern(), make_handler(name)) for (name, p) in self.terminals]
return SaferScanner(regexes, re.I | re.S).scan
def lex(self, text):
if self.scanner is None:
self.scanner = self.make_lexer()
tokens, unmatched = self.scanner(text)
if unmatched:
raise LexingError.from_text(text, unmatched, 'text could not be lexed')
return tokens
def parse(self, startsymbol, tokens, init_bindings=None):
if init_bindings is None:
init_bindings = {}
ctxt = ParseContext(self.ruleset, init_bindings, (), tuple(tokens), startsymbol)
pattern = self.ruleset[startsymbol]
return pattern.match(ctxt, None)
def whole_match(self, startsymbol, tokens, srcstr=None):
bindings = {}
if srcstr is not None:
bindings['*SRC*'] = srcstr
for c in self.parse(startsymbol, tokens, init_bindings=bindings):
if not c.remainder:
return c
def lex_and_parse(self, text, startsymbol='Start'):
return self.parse(startsymbol, self.lex(text), init_bindings={'*SRC*': text})
def lex_and_whole_match(self, text, startsymbol='Start'):
tokens = self.lex(text)
return self.whole_match(startsymbol, tokens, srcstr=text)
def complete(self, startsymbol, tokens, init_bindings=None):
if init_bindings is None:
init_bindings = {}
ctxt = ParseContext(self.ruleset, init_bindings, (), tuple(tokens), startsymbol)
pattern = self.ruleset[startsymbol]
if init_bindings.get('*DEBUG*', False):
completions = Debugotron(stream=sys.stderr)
else:
completions = set()
pattern.match(ctxt, completions)
return completions
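# Minimal usage sketch (the grammar below is made up purely for illustration):
#
#   prs = ParsingRuleSet.from_rule_defs('''
#       <Start> ::= "show" name=<ident> ;
#       <ident> ::= /[a-z][a-z0-9_]*/ ;
#   ''')
#   ctxt = prs.lex_and_whole_match('show foo')
#   ctxt.get_binding('name')                    # -> 'foo'
#   prs.complete('Start', prs.lex('show '))     # -> completion candidates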
import sys, traceback
class Debugotron(set):
depth = 10
def __init__(self, initializer=(), stream=sys.stdout):
set.__init__(self, initializer)
self.stream = stream
def add(self, item):
self._note_addition(item)
set.add(self, item)
def _note_addition(self, foo):
self.stream.write("\nitem %r added by:\n" % (foo,))
frame = sys._getframe().f_back.f_back
for i in range(self.depth):
name = frame.f_code.co_name
filename = frame.f_code.co_filename
lineno = frame.f_lineno
if 'self' in frame.f_locals:
clsobj = frame.f_locals['self']
line = '%s.%s() (%s:%d)' % (clsobj, name, filename, lineno)
else:
line = '%s (%s:%d)' % (name, filename, lineno)
self.stream.write(' - %s\n' % (line,))
if i == 0 and 'ctxt' in frame.f_locals:
self.stream.write(' - %s\n' % (frame.f_locals['ctxt'],))
frame = frame.f_back
def update(self, items):
if items:
self._note_addition(items)
set.update(self, items)<|fim▁end|> | terminals = []
tokeniter = iter(tokens) |
<|file_name|>wifi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from lazagne.config.module_info import ModuleInfo
try:
from ConfigParser import RawConfigParser # Python 2.7
except ImportError:
from configparser import RawConfigParser # Python 3
from collections import OrderedDict
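# For reference (illustrative, not an actual file from this repo): the
# NetworkManager keyfiles parsed below look roughly like
#
#   [wifi]
#   ssid=HomeNetwork
#   [wifi-security]
#   psk=secretpassword
#
# which is why run() reads the 'wifi' and 'wifi-security' sections with a
# RawConfigParser.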
class Wifi(ModuleInfo):
def __init__(self):
ModuleInfo.__init__(self, 'wifi', 'wifi')
def run(self):<|fim▁hole|> if os.path.exists(directory):
if os.getuid() == 0:
wireless_ssid = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
for w in wireless_ssid:
cp = RawConfigParser()
cp.read(os.path.join(directory, w))
values = OrderedDict()
try:
values['SSID'] = cp.get('wifi', 'ssid')
values['Password'] = cp.get('wifi-security', 'psk')
pwd_found.append(values)
except Exception:
pass
else:
self.info('You need sudo privileges')
return pwd_found<|fim▁end|> | pwd_found = []
directory = u'/etc/NetworkManager/system-connections'
|
<|file_name|>inputmask.date.extensions.js<|end_file_name|><|fim▁begin|>/*!
* inputmask.date.extensions.js
* https://github.com/RobinHerbots/Inputmask
* Copyright (c) 2010 - 2018 Robin Herbots
* Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php)
* Version: 4.0.1-beta.7
*/
!function(factory) {
"function" == typeof define && define.amd ? define([ "./dependencyLibs/inputmask.dependencyLib", "./inputmask" ], factory) : "object" == typeof exports ? module.exports = factory(require("./dependencyLibs/inputmask.dependencyLib"), require("./inputmask")) : factory(window.dependencyLib || jQuery, window.Inputmask);
}(function($, Inputmask) {
var formatCode = {
d: [ "[1-9]|[12][0-9]|3[01]", Date.prototype.setDate, "day", Date.prototype.getDate ],
dd: [ "0[1-9]|[12][0-9]|3[01]", Date.prototype.setDate, "day", function() {
return pad(Date.prototype.getDate.call(this), 2);
} ],
ddd: [ "" ],
dddd: [ "" ],
m: [ "[1-9]|1[012]", Date.prototype.setMonth, "month", function() {
return Date.prototype.getMonth.call(this) + 1;
} ],
mm: [ "0[1-9]|1[012]", Date.prototype.setMonth, "month", function() {
return pad(Date.prototype.getMonth.call(this) + 1, 2);
} ],
mmm: [ "" ],
mmmm: [ "" ],
yy: [ "[0-9]{2}", Date.prototype.setFullYear, "year", function() {
return pad(Date.prototype.getFullYear.call(this), 2);
} ],
yyyy: [ "[0-9]{4}", Date.prototype.setFullYear, "year", function() {
return pad(Date.prototype.getFullYear.call(this), 4);
} ],
h: [ "[1-9]|1[0-2]", Date.prototype.setHours, "hours", Date.prototype.getHours ],
hh: [ "0[1-9]|1[0-2]", Date.prototype.setHours, "hours", function() {
return pad(Date.prototype.getHours.call(this), 2);
} ],
hhh: [ "[0-9]+", Date.prototype.setHours, "hours", Date.prototype.getHours ],
H: [ "1?[0-9]|2[0-3]", Date.prototype.setHours, "hours", Date.prototype.getHours ],
HH: [ "[01][0-9]|2[0-3]", Date.prototype.setHours, "hours", function() {
return pad(Date.prototype.getHours.call(this), 2);
} ],
HHH: [ "[0-9]+", Date.prototype.setHours, "hours", Date.prototype.getHours ],
M: [ "[1-5]?[0-9]", Date.prototype.setMinutes, "minutes", Date.prototype.getMinutes ],
MM: [ "[0-5][0-9]", Date.prototype.setMinutes, "minutes", function() {
return pad(Date.prototype.getMinutes.call(this), 2);
} ],
s: [ "[1-5]?[0-9]", Date.prototype.setSeconds, "seconds", Date.prototype.getSeconds ],
ss: [ "[0-5][0-9]", Date.prototype.setSeconds, "seconds", function() {
return pad(Date.prototype.getSeconds.call(this), 2);
} ],
l: [ "[0-9]{3}", Date.prototype.setMilliseconds, "milliseconds", function() {
return pad(Date.prototype.getMilliseconds.call(this), 3);
} ],
L: [ "[0-9]{2}", Date.prototype.setMilliseconds, "milliseconds", function() {
return pad(Date.prototype.getMilliseconds.call(this), 2);
} ],
t: [ "[ap]" ],
tt: [ "[ap]m" ],
T: [ "[AP]" ],
TT: [ "[AP]M" ],
Z: [ "" ],
o: [ "" ],
S: [ "" ]
}, formatAlias = {
isoDate: "yyyy-mm-dd",
isoTime: "HH:MM:ss",
isoDateTime: "yyyy-mm-dd'T'HH:MM:ss",
isoUtcDateTime: "UTC:yyyy-mm-dd'T'HH:MM:ss'Z'"
};
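    // Usage sketch (illustrative): the "datetime" alias defined below is
    // typically applied to an input element like so:
    //
    //   Inputmask("datetime", {
    //       inputFormat: "dd/mm/yyyy",
    //       min: "01/01/1990"
    //   }).mask(document.getElementById("date"));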
function getTokenizer(opts) {
if (!opts.tokenizer) {
var tokens = [];
for (var ndx in formatCode) -1 === tokens.indexOf(ndx[0]) && tokens.push(ndx[0]);
opts.tokenizer = "(" + tokens.join("+|") + ")+?|.", opts.tokenizer = new RegExp(opts.tokenizer, "g");
}
return opts.tokenizer;
}
function parse(format, dateObjValue, opts) {
for (var match, mask = ""; match = getTokenizer(opts).exec(format); ) {
if (void 0 === dateObjValue) if (formatCode[match[0]]) mask += "(" + formatCode[match[0]][0] + ")"; else switch (match[0]) {
case "[":
mask += "(";
break;
case "]":
mask += ")?";
break;
default:
mask += Inputmask.escapeRegex(match[0]);
} else if (formatCode[match[0]]) mask += formatCode[match[0]][3].call(dateObjValue.date); else mask += match[0];
}
return mask;
}
function pad(val, len) {
for (val = String(val), len = len || 2; val.length < len; ) val = "0" + val;
return val;
}
function analyseMask(maskString, format, opts) {
var targetProp, match, dateOperation, targetValidator, dateObj = {
date: new Date(1, 0, 1)
}, mask = maskString;
function extendProperty(value) {
var correctedValue;
if (opts.min && opts.min[targetProp] || opts.max && opts.max[targetProp]) {
var min = opts.min && opts.min[targetProp] || opts.max[targetProp], max = opts.max && opts.max[targetProp] || opts.min[targetProp];
for (correctedValue = value.replace(/[^0-9]/g, ""), correctedValue += (min.indexOf(correctedValue) < max.indexOf(correctedValue) ? max : min).toString().substr(correctedValue.length); !new RegExp(targetValidator).test(correctedValue); ) correctedValue--;
} else correctedValue = value.replace(/[^0-9]/g, "0");
return correctedValue;
}
function setValue(dateObj, value, opts) {
dateObj[targetProp] = extendProperty(value), dateObj["raw" + targetProp] = value,
void 0 !== dateOperation && dateOperation.call(dateObj.date, "month" == targetProp ? parseInt(dateObj[targetProp]) - 1 : dateObj[targetProp]);
}
if ("string" == typeof mask) {
for (;match = getTokenizer(opts).exec(format); ) {
var value = mask.slice(0, match[0].length);
formatCode.hasOwnProperty(match[0]) && (targetValidator = formatCode[match[0]][0],
targetProp = formatCode[match[0]][2], dateOperation = formatCode[match[0]][1], setValue(dateObj, value)),
mask = mask.slice(value.length);
}
return dateObj;
}
}
return Inputmask.extendAliases({
datetime: {
mask: function(opts) {
return formatCode.S = opts.i18n.ordinalSuffix.join("|"), opts.inputFormat = formatAlias[opts.inputFormat] || opts.inputFormat,
opts.displayFormat = formatAlias[opts.displayFormat] || opts.displayFormat || opts.inputFormat,
opts.outputFormat = formatAlias[opts.outputFormat] || opts.outputFormat || opts.inputFormat,
opts.placeholder = "" !== opts.placeholder ? opts.placeholder : opts.inputFormat.replace(/[\[\]]/, ""),
opts.min = analyseMask(opts.min, opts.inputFormat, opts), opts.max = analyseMask(opts.max, opts.inputFormat, opts),
opts.regex = parse(opts.inputFormat, void 0, opts), null;
},
placeholder: "",
inputFormat: "isoDateTime",
displayFormat: void 0,
outputFormat: void 0,
min: null,
max: null,
i18n: {
dayNames: [ "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday" ],
monthNames: [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ],
ordinalSuffix: [ "st", "nd", "rd", "th" ]
},
postValidation: function(buffer, currentResult, opts) {
var result = currentResult, dateParts = analyseMask(buffer.join(""), opts.inputFormat, opts);
return result && dateParts.date.getTime() == dateParts.date.getTime() && (result = (result = function(dateParts, currentResult) {
return (!isFinite(dateParts.rawday) || "29" == dateParts.day && !isFinite(dateParts.rawyear) || new Date(dateParts.date.getFullYear(), isFinite(dateParts.rawmonth) ? dateParts.month : dateParts.date.getMonth() + 1, 0).getDate() >= dateParts.day) && currentResult;
}(dateParts, result)) && function(dateParts, opts) {
var result = !0;
if (opts.min) {
if (dateParts.rawyear) {
var rawYear = dateParts.rawyear.replace(/[^0-9]/g, "");
result = opts.min.year.substr(0, rawYear.length) <= rawYear;
}
dateParts.year === dateParts.rawyear && opts.min.date.getTime() == opts.min.date.getTime() && (result = opts.min.date.getTime() <= dateParts.date.getTime());<|fim▁hole|> },
onKeyDown: function(e, buffer, caretPos, opts) {
if (e.ctrlKey && e.keyCode === Inputmask.keyCode.RIGHT) {
for (var match, today = new Date(), date = ""; match = getTokenizer(opts).exec(opts.inputFormat); ) "d" === match[0].charAt(0) ? date += pad(today.getDate(), match[0].length) : "m" === match[0].charAt(0) ? date += pad(today.getMonth() + 1, match[0].length) : "yyyy" === match[0] ? date += today.getFullYear().toString() : "y" === match[0].charAt(0) && (date += pad(today.getYear(), match[0].length));
this.inputmask._valueSet(date), $(this).trigger("setvalue");
}
},
onUnMask: function(maskedValue, unmaskedValue, opts) {
return parse(opts.outputFormat, analyseMask(maskedValue, opts.inputFormat, opts), opts);
},
casing: function(elem, test, pos, validPositions) {
return 0 == test.nativeDef.indexOf("[ap]") ? elem.toLowerCase() : 0 == test.nativeDef.indexOf("[AP]") ? elem.toUpperCase() : elem;
},
insertMode: !1
}
}), Inputmask;
});<|fim▁end|> | }
return result && opts.max && opts.max.date.getTime() == opts.max.date.getTime() && (result = opts.max.date.getTime() >= dateParts.date.getTime()),
result;
}(dateParts, opts)), result; |
<|file_name|>load_run_model.py<|end_file_name|><|fim▁begin|># Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX-License-Identifier: GPL-3.0+
from __future__ import (absolute_import, division, print_function)
from Muon.GUI.Common.muon_load_data import MuonLoadData
import Muon.GUI.Common.utilities.load_utils as load_utils
class LoadRunWidgetModel(object):
"""Stores info on all currently loaded workspaces"""
def __init__(self, loaded_data_store=MuonLoadData(), context=None):
# Used with load thread
self._filenames = []
self._loaded_data_store = loaded_data_store
self._context = context
self._current_run = None
def remove_previous_data(self):
self._loaded_data_store.remove_last_added_data()
# Used with load thread
def loadData(self, filenames):
self._filenames = filenames
# Used with load thread
def execute(self):
failed_files = []
for filename in self._filenames:
try:
ws, run, filename = load_utils.load_workspace_from_filename(filename)
except Exception as error:
failed_files += [(filename, error)]
continue
self._loaded_data_store.remove_data(run=[run])
self._loaded_data_store.add_data(run=[run], workspace=ws, filename=filename, instrument=self._context.instrument)
if failed_files:
message = load_utils.exception_message_for_failed_files(failed_files)
raise ValueError(message)
# This is needed to work with thread model
def output(self):
pass
def cancel(self):
pass
def clear_loaded_data(self):
self._loaded_data_store.clear()
@property
def current_run(self):
return self._current_run
@current_run.setter
def current_run(self, run):
self._current_run = run
@property
def loaded_filenames(self):
return self._loaded_data_store.get_parameter("filename")
@property
def loaded_workspaces(self):
return self._loaded_data_store.get_parameter("workspace")<|fim▁hole|> def loaded_runs(self):
return self._loaded_data_store.get_parameter("run")<|fim▁end|> |
@property |
<|file_name|>is_send_sync.rs<|end_file_name|><|fim▁begin|>// NOTE: the following tests will compile iff. the tested structs are `Send + Sync + 'static`.
use ncollide3d::world::CollisionWorld;
fn is_send_sync<T: Send + Sync + 'static>(_: T) -> bool {
true
}
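// A related compile-time-only variant (sketch, not part of this test suite):
// the same guarantee can be asserted without constructing a value at all:
//
//   fn assert_send_sync<T: Send + Sync + 'static>() {}
//   assert_send_sync::<CollisionWorld<f32, ()>>();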
<|fim▁hole|> assert!(is_send_sync(world));
}<|fim▁end|> | #[test]
fn world_is_send_sync() {
let world = CollisionWorld::<f32, ()>::new(0.0f32); |
<|file_name|>js-borschik-include.js<|end_file_name|><|fim▁begin|>/**
* js-borschik-include
* ===================
*
* Собирает *js*-файлы инклудами борщика, сохраняет в виде `?.js`.
* Технология нужна, если в исходных *js*-файлах используются инклуды борщика.
*
* В последствии, получившийся файл `?.js` следует раскрывать с помощью технологии `borschik`.
*
* **Опции**
*
* * *String* **target** — Результирующий таргет. Обязательная опция.
* * *String* **filesTarget** — files-таргет, на основе которого получается список исходных файлов
* (его предоставляет технология `files`). По умолчанию — `?.files`.
* * *String[]* **sourceSuffixes** — суффиксы файлов, по которым строится files-таргет. По умолчанию — ['js'].
*
* **Пример**
*
* ```javascript
* nodeConfig.addTechs([
* [ require('enb-borschik/techs/js-borschik-include') ],<|fim▁hole|> * [ require('enb-borschik/techs/borschik'), {
* source: '?.js',
* target: '_?.js'
* } ]);
* ]);
* ```
*/
module.exports = require('enb/lib/build-flow').create()
.name('js-borschik-include')
.target('target', '?.js')
.useFileList(['js'])
.builder(function (files) {
var node = this.node;
return files.map(function (file) {
return '/*borschik:include:' + node.relativePath(file.fullname) + '*/';
}).join('\n');
})
.createTech();<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use test::Bencher;
use solver::Solution;
mod tools;
mod jnh01;
mod jnh02;
mod jnh03;
mod jnh12;
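// DIMACS CNF refresher for the benchmark below (informal): "p cnf 12 8"
// declares 12 variables and 8 clauses; each subsequent line is one clause,
// a 0-terminated list of literals where a negative number means a negated
// variable, e.g. "1 4 0" encodes (x1 OR x4).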
#[bench]
fn wikipedia(b: &mut Bencher) {
let p = tools::load_problem("
p cnf 12 8
1 4 0
1 -3 -8 0
1 8 12 0
2 11 0<|fim▁hole|>-7 8 -9 0
7 8 -10 0
7 10 -12");
b.iter(|| ::solver::solve(
&p.expression,
p.varcount,
Solution::new(p.varcount)).unwrap())
}<|fim▁end|> | -7 -3 9 0 |
<|file_name|>lang-item-public.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![no_std]
#[lang="fail_"]
fn fail(_: &'static str, _: &'static str, _: uint) -> ! { loop {} }
#[lang = "stack_exhausted"]
extern fn stack_exhausted() {}
#[lang = "eh_personality"]
extern fn eh_personality() {}<|fim▁end|> | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponseNotAllowed, HttpResponseServerError
from django.utils import simplejson as json
from util import to_json_response
from util import to_dojo_data
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.3, 2.4 fallback.
def expect_post_request(func):
"""Allow only POST requests to come in, throw an exception otherwise.
This relieves from checking every time that the request is
really a POST request, which it should be when using this
decorator.
"""
def _ret(*args, **kwargs):
ret = func(*args, **kwargs)
request = args[0]
if not request.method=='POST':
return HttpResponseNotAllowed(['POST'])
return ret
return _ret
def add_request_getdict(func):
"""Add the method getdict() to the request object.
This works just like getlist() only that it decodes any nested
JSON encoded object structure.
Since sending deep nested structures is not possible via
GET/POST by default, this enables it. Of course you need to
make sure that on the JavaScript side you are also sending
the data properly, which dojango.send() automatically does.
Example:
this is being sent:
one:1
two:{"three":3, "four":4}
using
request.POST.getdict('two')
returns a dict containing the values sent by the JavaScript.
"""
def _ret(*args, **kwargs):
args[0].POST.__class__.getdict = __getdict
ret = func(*args, **kwargs)
return ret
return _ret
<|fim▁hole|> except ValueError: # The value was not JSON encoded :-)
raise Exception('"%s" was not JSON encoded as expected (%s).' % (key, str(ret)))
return ret
def json_response(func):
"""
A simple json response decorator. Use it on views, where a python data object should be converted
to a json response:
@json_response
def my_view(request):
my_data = {'foo': 'bar'}
return my_data
"""
def inner(request, *args, **kwargs):
ret = func(request, *args, **kwargs)
return __prepare_json_ret(request, ret)
return wraps(func)(inner)
def jsonp_response_custom(callback_param_name):
"""
A jsonp (JSON with Padding) response decorator, where you can define your own callbackParamName.
It acts like the json_response decorator but with the difference, that it
wraps the returned json string into a client-specified function name (that is the Padding).
You can add this decorator to a function like that:
@jsonp_response_custom("my_callback_param")
def my_view(request):
my_data = {'foo': 'bar'}
return my_data
Your now can access this view from a foreign URL using JSONP.
An example with Dojo looks like that:
dojo.io.script.get({ url:"http://example.com/my_url/",
callbackParamName:"my_callback_param",
load: function(response){
console.log(response);
}
});
Note: the callback_param_name in the decorator and in your JavaScript JSONP call must be the same.
"""
def decorator(func):
def inner(request, *args, **kwargs):
ret = func(request, *args, **kwargs)
return __prepare_json_ret(request, ret, callback_param_name=callback_param_name)
return wraps(func)(inner)
return decorator
jsonp_response = jsonp_response_custom("jsonp_callback")
jsonp_response.__doc__ = "A predefined jsonp response decorator using 'jsoncallback' as a fixed callback_param_name."
def json_iframe_response(func):
"""
A simple json response decorator but wrapping the json response into a html page.
It helps when doing a json request using an iframe (e.g. file up-/download):
@json_iframe
def my_view(request):
my_data = {'foo': 'bar'}
return my_data
"""
def inner(request, *args, **kwargs):
ret = func(request, *args, **kwargs)
return __prepare_json_ret(request, ret, use_iframe=True)
return wraps(func)(inner)
def __prepare_json_ret(request, ret, callback_param_name=None, use_iframe=False):
if ret==False:
ret = {'success':False}
elif ret==None: # Sometimes there is no return.
ret = {}
# Add the 'ret'=True, since it was obviously no set yet and we got valid data, no exception.
func_name = None
if callback_param_name:
func_name = request.GET.get(callback_param_name, "callbackParamName")
try:
if not ret.has_key('success'):
ret['success'] = True
except AttributeError, e:
raise Exception("The returned data of your function must be a dictionary!")
json_ret = ""
try:
# Sometimes the serialization fails, i.e. when there are too deeply nested objects or even classes inside
json_ret = to_json_response(ret, func_name, use_iframe)
except Exception, e:
print '\n\n===============Exception=============\n\n'+str(e)+'\n\n'
print ret
print '\n\n'
return HttpResponseServerError(content=str(e))
return json_ret<|fim▁end|> | def __getdict(self, key):
ret = self.get(key)
try:
ret = json.loads(ret) |
<|file_name|>test_extras.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import io
import unittest
from prov.model import *
from prov.dot import prov_to_dot
from prov.serializers import Registry
from prov.tests.examples import primer_example, primer_example_alternate
EX_NS = Namespace('ex', 'http://example.org/')
EX2_NS = Namespace('ex2', 'http://example2.org/')
EX_OTHER_NS = Namespace('other', 'http://exceptions.example.org/')
def add_label(record):
record.add_attributes(
[('prov:label', Literal("hello"))]
)
def add_labels(record):
record.add_attributes([
('prov:label', Literal("hello")),
('prov:label', Literal("bye", langtag="en")),
('prov:label', Literal("bonjour", langtag="fr"))
])
def add_types(record):
record.add_attributes([
('prov:type', 'a'),
('prov:type', 1),
('prov:type', 1.0),
('prov:type', True),
('prov:type', EX_NS['abc']),
('prov:type', datetime.datetime.now()),
('prov:type', Literal('http://boiled-egg.example.com', datatype=XSD_ANYURI)),
])
def add_locations(record):
record.add_attributes([
('prov:Location', "Southampton"),
('prov:Location', 1),
('prov:Location', 1.0),
('prov:Location', True),
('prov:Location', EX_NS['london']),
('prov:Location', datetime.datetime.now()),
('prov:Location', EX_NS.uri + "london"),
('prov:Location', Literal(2002, datatype=XSD['gYear'])),
])
def add_value(record):
record.add_attributes([
('prov:value', EX_NS['avalue'])
])
def add_further_attributes(record):
record.add_attributes([
(EX_NS['tag1'], "hello"),
(EX_NS['tag2'], "bye"),
(EX2_NS['tag3'], "hi"),
(EX_NS['tag1'], "hello\nover\nmore\nlines"),
])
def add_further_attributes0(record):
record.add_attributes([
(EX_NS['tag1'], "hello"),
(EX_NS['tag2'], "bye"),
(EX_NS['tag2'], Literal("hola", langtag="es")),
(EX2_NS['tag3'], "hi"),
(EX_NS['tag'], 1),
# long on python 2, int on python 3
(EX_NS['tag'], six.integer_types[-1](1)),
(EX_NS['tag'], Literal(1, datatype=XSD_SHORT)),
(EX_NS['tag'], Literal(1, datatype=XSD_DOUBLE)),
(EX_NS['tag'], 1.0),
(EX_NS['tag'], True),
(EX_NS['tag'], EX_NS.uri + "southampton"),
])
add_further_attributes_with_qnames(record)
def add_further_attributes_with_qnames(record):
record.add_attributes([
(EX_NS['tag'], EX2_NS['newyork']),
(EX_NS['tag'], EX_NS['london']),
])
class TestExtras(unittest.TestCase):
def test_dot(self):
# This is naive.. since we can't programatically check the output is correct
document = ProvDocument()
bundle1 = ProvBundle(identifier=EX_NS['bundle1'])
bundle1.usage(activity=EX_NS['a1'], entity=EX_NS['e1'], identifier=EX_NS['use1'])
bundle1.entity(identifier=EX_NS['e1'], other_attributes={PROV_ROLE: "sausage"})
bundle1.activity(identifier=EX_NS['a1'])
document.activity(EX_NS['a2'])
bundle2 = ProvBundle(identifier=EX_NS['bundle2'])
bundle2.usage(activity=EX_NS['aa1'], entity=EX_NS['ee1'], identifier=EX_NS['use2'])
bundle2.entity(identifier=EX_NS['ee1'])
bundle2.activity(identifier=EX_NS['aa1'])
document.add_bundle(bundle1)
document.add_bundle(bundle2)
prov_to_dot(document)
def test_extra_attributes(self):
document = ProvDocument()
inf = document.influence(EX_NS['a2'], EX_NS['a1'], identifier=EX_NS['inf7'])
add_labels(inf)
add_types(inf)
add_further_attributes(inf)
self.assertEqual(len(inf.attributes), len(list(inf.formal_attributes) + inf.extra_attributes))
def test_serialize_to_path(self):
document = ProvDocument()
document.serialize("output.json")
os.remove('output.json')
document.serialize("http://netloc/outputmyprov/submit.php")
def test_bundle_no_id(self):
document = ProvDocument()
<|fim▁hole|> def test():
bundle = ProvBundle()
document.add_bundle(bundle)
self.assertRaises(ProvException, test)
def test_use_set_time_helpers(self):
dt = datetime.datetime.now()
document1 = ProvDocument()
document1.activity(EX_NS['a8'], startTime=dt, endTime=dt)
document2 = ProvDocument()
a = document2.activity(EX_NS['a8'])
a.set_time(startTime=dt, endTime=dt)
self.assertEqual(document1, document2)
self.assertEqual(a.get_startTime(), dt)
self.assertEqual(a.get_endTime(), dt)
def test_bundle_add_garbage(self):
document = ProvDocument()
def test():
document.add_bundle(document.entity(EX_NS['entity_trying_to_be_a_bundle']))
self.assertRaises(ProvException, test)
def test():
bundle = ProvBundle()
document.add_bundle(bundle)
self.assertRaises(ProvException, test)
def test_bundle_equality_garbage(self):
document = ProvBundle()
self.assertNotEqual(document, 1)
def test_bundle_is_bundle(self):
document = ProvBundle()
self.assertTrue(document.is_bundle())
def test_bundle_in_document(self):
document = ProvDocument()
bundle = document.bundle('b')
self.assertTrue(bundle in bundle.document.bundles)
def test_bundle_get_record_by_id(self):
document = ProvDocument()
self.assertEqual(document.get_record(None), None)
# record = document.entity(identifier=EX_NS['e1'])
# self.assertEqual(document.get_record(EX_NS['e1']), record)
#
# bundle = document.bundle(EX_NS['b'])
# self.assertEqual(bundle.get_record(EX_NS['e1']), record)
def test_bundle_get_records(self):
document = ProvDocument()
document.entity(identifier=EX_NS['e1'])
document.agent(identifier=EX_NS['e1'])
self.assertEqual(len(list(document.get_records(ProvAgent))), 1)
self.assertEqual(len(document.get_records()), 2)
def test_bundle_name_clash(self):
document = ProvDocument()
def test():
document.bundle(EX_NS['indistinct'])
document.bundle(EX_NS['indistinct'])
self.assertRaises(ProvException, test)
document = ProvDocument()
def test():
document.bundle(EX_NS['indistinct'])
bundle = ProvBundle(identifier=EX_NS['indistinct'])
document.add_bundle(bundle)
self.assertRaises(ProvException, test)
def test_document_helper_methods(self):
document = ProvDocument()
self.assertFalse(document.is_bundle())
self.assertFalse(document.has_bundles())
document.bundle(EX_NS['b'])
self.assertTrue(document.has_bundles())
self.assertEqual(u'<ProvDocument>', str(document))
def test_reading_and_writing_to_file_like_objects(self):
"""
Tests reading and writing to and from file like objects.
"""
# Create some random document.
document = ProvDocument()
document.entity(EX2_NS["test"])
objects = [io.BytesIO, io.StringIO]
Registry.load_serializers()
formats = Registry.serializers.keys()
for obj in objects:
for format in formats:
try:
buf = obj()
document.serialize(destination=buf, format=format)
buf.seek(0, 0)
new_document = ProvDocument.deserialize(source=buf,
format=format)
self.assertEqual(document, new_document)
except NotImplementedError:
                    # Some serializers might not implement the serialize or deserialize methods
pass # and this is fine in the context of this test
finally:
buf.close()
# def test_document_unification(self):
# # TODO: Improve testing of this...
# document = ProvDocument()
# bundle = document.bundle(identifier=EX_NS['b'])
# e1 = bundle.entity(EX_NS['e'])
# e2 = bundle.entity(EX_NS['e'])
# unified = document.unified()
#
# self.assertEqual(len(unified._bundles[0]._records), 1)
def test_primer_alternate(self):
g1 = primer_example()
g2 = primer_example_alternate()
self.assertEqual(g1, g2)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hischool.settings")
# Add the "core" and "extensions" folders to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "extensions"))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "core"))
<|fim▁hole|> try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>BSONType.java<|end_file_name|><|fim▁begin|>package com.mattinsler.guiceymongo.data.query;
import org.bson.BSON;
/**
* Created by IntelliJ IDEA.
* User: mattinsler
* Date: 12/29/10
* Time: 3:28 AM
* To change this template use File | Settings | File Templates.
*/
public enum BSONType {
Double(BSON.NUMBER),
String(BSON.STRING),
Object(BSON.OBJECT),
Array(BSON.ARRAY),
BinaryData(BSON.BINARY),
ObjectId(BSON.OID),
Boolean(BSON.BOOLEAN),
Date(BSON.DATE),
Null(BSON.NULL),
RegularExpression(BSON.REGEX),
Code(BSON.CODE),
Symbol(BSON.SYMBOL),
CodeWithScope(BSON.CODE_W_SCOPE),
Integer(BSON.NUMBER_INT),
Timestamp(BSON.TIMESTAMP),
Long(BSON.NUMBER_LONG),
MinKey(BSON.MINKEY),
MaxKey(BSON.MAXKEY);
private final byte _typeCode;
BSONType(byte typeCode) {
_typeCode = typeCode;<|fim▁hole|> }
}<|fim▁end|> | }
byte getTypeCode() {
return _typeCode; |
<|file_name|>aws.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var AWS = require('./core');
// Load all service classes
require('../clients/all');
/**
* @api private
*/
module.exports = AWS;<|fim▁end|> | require('./node_loader');
|
<|file_name|>macro-expr-uncommon-token.rs<|end_file_name|><|fim▁begin|>#![allow(
dead_code,
non_snake_case,
non_camel_case_types,<|fim▁hole|> non_upper_case_globals
)]
pub const MODBUS_WOOT: u32 = 3;
extern "C" {
pub fn foo();
}<|fim▁end|> | |
<|file_name|>_uirevision.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class UirevisionValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="uirevision", parent_name="heatmap", **kwargs):
super(UirevisionValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),<|fim▁hole|><|fim▁end|> | **kwargs
) |
<|file_name|>bf_constants.rs<|end_file_name|><|fim▁begin|>/// @copyright
/// Copyright (C) 2020 Assured Information Security, Inc.
///
/// @copyright
/// Permission is hereby granted, free of charge, to any person obtaining a copy
/// of this software and associated documentation files (the "Software"), to deal
/// in the Software without restriction, including without limitation the rights
/// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
/// copies of the Software, and to permit persons to whom the Software is
/// furnished to do so, subject to the following conditions:
///
/// @copyright
/// The above copyright notice and this permission notice shall be included in
/// all copies or substantial portions of the Software.
///
/// @copyright
/// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
/// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
/// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
/// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
/// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
/// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
/// SOFTWARE.
// -----------------------------------------------------------------------------
// Page Alignment
// -----------------------------------------------------------------------------
/// <!-- description -->
/// @brief Returns true if the provided address is 4k page aligned,
/// returns false otherwise.
///
/// <!-- inputs/outputs -->
/// @param addr the address to query
/// @return Returns true if the provided address is 4k page aligned,
/// returns false otherwise.
///
pub fn bf_is_page_aligned(addr: bsl::SafeU64) -> bool {
let mask: bsl::SafeU64 = crate::HYPERVISOR_PAGE_SIZE - 1;
return (addr & mask) == 0;
}
#[cfg(test)]
mod test_bf_is_page_aligned {
#[test]
fn test_bf_is_page_aligned() {
let addr: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
assert!(!super::bf_is_page_aligned(addr));
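        // Also check the aligned case (assuming HYPERVISOR_PAGE_SIZE == 0x1000,
        // i.e. 4k pages, per the brief above).
        let aligned: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABC000);
        assert!(super::bf_is_page_aligned(aligned));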
}
}
/// <!-- description -->
/// @brief Returns the page aligned version of the addr
///
/// <!-- inputs/outputs -->
/// @param addr the address to query
/// @return Returns the page aligned version of the addr
///
pub fn bf_page_aligned(addr: bsl::SafeU64) -> bsl::SafeU64 {
return addr & !(crate::HYPERVISOR_PAGE_SIZE - 1);
}
#[cfg(test)]
mod test_bf_page_aligned {
#[test]
fn test_bf_page_aligned() {
let addr: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
let expected: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABC000);
assert_eq!(super::bf_page_aligned(addr), expected);
}
}
// -----------------------------------------------------------------------------
// Special IDs
// -----------------------------------------------------------------------------
/// @brief Defines an invalid ID for an extension, VM, VP and VS
pub const BF_INVALID_ID: bsl::SafeU16 = bsl::SafeU16::new(0xFFFF);
/// @brief Defines an ID for any extension, VM, VP and VS
pub const BF_ANY_ID: bsl::SafeU16 = bsl::SafeU16::new(0xFFFE);
/// @brief Defines the bootstrap physical processor ID
pub const BF_BS_PPID: bsl::SafeU16 = bsl::SafeU16::new(0x0);
/// @brief Defines the root virtual machine ID
pub const BF_ROOT_VMID: bsl::SafeU16 = bsl::SafeU16::new(0x0);
// -----------------------------------------------------------------------------
// Syscall Status Codes
// -----------------------------------------------------------------------------
/// @brief Indicates the syscall returned successfully
pub const BF_STATUS_SUCCESS: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Indicates an unknown error occurred
pub const BF_STATUS_FAILURE_UNKNOWN: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000010001);
/// @brief Indicates the provided handle is invalid
pub const BF_STATUS_FAILURE_INVALID_HANDLE: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000020001);
/// @brief Indicates the syscall is unsupported
pub const BF_STATUS_FAILURE_UNSUPPORTED: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000040001);
/// @brief Indicates the policy engine denied the syscall
pub const BF_STATUS_INVALID_PERM_DENIED: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000010002);
/// @brief Indicates input reg0 is invalid
pub const BF_STATUS_INVALID_INPUT_REG0: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000010003);
/// @brief Indicates input reg1 is invalid
pub const BF_STATUS_INVALID_INPUT_REG1: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000020003);
/// @brief Indicates input reg2 is invalid
pub const BF_STATUS_INVALID_INPUT_REG2: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000040003);
/// @brief Indicates input reg3 is invalid
pub const BF_STATUS_INVALID_INPUT_REG3: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000080003);
/// @brief Indicates input reg4 is invalid
pub const BF_STATUS_INVALID_INPUT_REG4: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000100003);
/// @brief Indicates input reg5 is invalid
pub const BF_STATUS_INVALID_INPUT_REG5: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000200003);
/// @brief Indicates output reg0 is invalid
pub const BF_STATUS_INVALID_OUTPUT_REG0: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000400003);
/// @brief Indicates output reg1 is invalid
pub const BF_STATUS_INVALID_OUTPUT_REG1: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000000800003);
/// @brief Indicates output reg2 is invalid
pub const BF_STATUS_INVALID_OUTPUT_REG2: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000001000003);
/// @brief Indicates output reg3 is invalid
pub const BF_STATUS_INVALID_OUTPUT_REG3: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000002000003);
/// @brief Indicates output reg4 is invalid
pub const BF_STATUS_INVALID_OUTPUT_REG4: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000004000003);
/// @brief Indicates output reg5 is invalid
pub const BF_STATUS_INVALID_OUTPUT_REG5: bsl::SafeU64 = bsl::SafeU64::new(0xDEAD000008000003);
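// Encoding pattern (an inference from the constants above, not stated here by
// the spec): failure statuses live under 0xDEAD............; the low 16 bits
// pick the family (0x0001 general failure, 0x0002 permission, 0x0003 register
// validation), and for register checks a single bit in the middle field
// identifies the offending input/output register.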
// -----------------------------------------------------------------------------
// Syscall Inputs
// -----------------------------------------------------------------------------
/// @brief Defines the BF_SYSCALL_SIG field for RAX
pub const BF_SYSCALL_SIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000000000);
/// @brief Defines a mask for BF_SYSCALL_SIG
pub const BF_SYSCALL_SIG_MASK: bsl::SafeU64 = bsl::SafeU64::new(0xFFFF000000000000);
/// @brief Defines a mask for BF_SYSCALL_FLAGS
pub const BF_SYSCALL_FLAGS_MASK: bsl::SafeU64 = bsl::SafeU64::new(0x0000FFFF00000000);
/// @brief Defines a mask for BF_SYSCALL_OP
pub const BF_SYSCALL_OPCODE_MASK: bsl::SafeU64 = bsl::SafeU64::new(0xFFFF0000FFFF0000);
/// @brief Defines a mask for BF_SYSCALL_OP (with no signature added)
pub const BF_SYSCALL_OPCODE_NOSIG_MASK: bsl::SafeU64 = bsl::SafeU64::new(0x00000000FFFF0000);
/// @brief Defines a mask for BF_SYSCALL_IDX
pub const BF_SYSCALL_INDEX_MASK: bsl::SafeU64 = bsl::SafeU64::new(0x000000000000FFFF);
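// Field layout implied by the masks above (an inference from this file): a
// syscall value in RAX decomposes as
//
//   [63:48] signature | [47:32] flags | [31:16] opcode | [15:0] index
//
// and bf_syscall_sig/flags/opcode/index below each extract one field.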
/// <!-- description -->
/// @brief Returns the signature field (BF_SYSCALL_SIG) of a syscall value
///
/// <!-- inputs/outputs -->
/// @param rax the syscall value to decode
/// @return Returns rax masked with BF_SYSCALL_SIG_MASK
///
pub fn bf_syscall_sig(rax: bsl::SafeU64) -> bsl::SafeU64 {
return rax & BF_SYSCALL_SIG_MASK;
}
#[cfg(test)]
mod test_bf_syscall_sig {
#[test]
fn test_bf_syscall_sig() {
let syscall: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
let expected: bsl::SafeU64 = bsl::SafeU64::new(0x1234000000000000);
assert_eq!(super::bf_syscall_sig(syscall), expected);
}
}
/// <!-- description -->
/// @brief Returns the flags field of a syscall value
///
/// <!-- inputs/outputs -->
/// @param rax the syscall value to decode
/// @return Returns rax masked with BF_SYSCALL_FLAGS_MASK
///
pub fn bf_syscall_flags(rax: bsl::SafeU64) -> bsl::SafeU64 {
return rax & BF_SYSCALL_FLAGS_MASK;
}
#[cfg(test)]
mod test_bf_syscall_flags {
#[test]
fn test_bf_syscall_flags() {
let syscall: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
let expected: bsl::SafeU64 = bsl::SafeU64::new(0x0000567800000000);
assert_eq!(super::bf_syscall_flags(syscall), expected);
}
}
/// <!-- description -->
/// @brief Returns the opcode field (signature included) of a syscall value
///
/// <!-- inputs/outputs -->
/// @param rax the syscall value to decode
/// @return Returns rax masked with BF_SYSCALL_OPCODE_MASK
///
pub fn bf_syscall_opcode(rax: bsl::SafeU64) -> bsl::SafeU64 {
return rax & BF_SYSCALL_OPCODE_MASK;
}
#[cfg(test)]
mod test_bf_syscall_opcode {
#[test]
fn test_bf_syscall_opcode() {
let syscall: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
let expected: bsl::SafeU64 = bsl::SafeU64::new(0x1234000090AB0000);
assert_eq!(super::bf_syscall_opcode(syscall), expected);
}
}
/// <!-- description -->
/// @brief Returns the opcode field (signature removed) of a syscall value
///
/// <!-- inputs/outputs -->
/// @param rax the syscall value to decode
/// @return Returns rax masked with BF_SYSCALL_OPCODE_NOSIG_MASK
///
pub fn bf_syscall_opcode_nosig(rax: bsl::SafeU64) -> bsl::SafeU64 {
return rax & BF_SYSCALL_OPCODE_NOSIG_MASK;
}
#[cfg(test)]
mod test_bf_syscall_opcode_nosig {
#[test]
fn test_bf_syscall_opcode_nosig() {
let syscall: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
let expected: bsl::SafeU64 = bsl::SafeU64::new(0x0000000090AB0000);
assert_eq!(super::bf_syscall_opcode_nosig(syscall), expected);
}
}
/// <!-- description -->
/// @brief Returns the index field of a syscall value
///
/// <!-- inputs/outputs -->
/// @param rax the syscall value to decode
/// @return Returns rax masked with BF_SYSCALL_INDEX_MASK
///
pub fn bf_syscall_index(rax: bsl::SafeU64) -> bsl::SafeU64 {
return rax & BF_SYSCALL_INDEX_MASK;
}
#[cfg(test)]
mod test_bf_syscall_index {
#[test]
fn test_bf_syscall_index() {
let syscall: bsl::SafeU64 = bsl::SafeU64::new(0x1234567890ABCDEF);
let expected: bsl::SafeU64 = bsl::SafeU64::new(0x000000000000CDEF);
assert_eq!(super::bf_syscall_index(syscall), expected);
}
}
// -----------------------------------------------------------------------------
// Specification IDs
// -----------------------------------------------------------------------------
/// @brief Defines the ID for version #1 of this spec
pub const BF_SPEC_ID1_VAL: bsl::SafeU32 = bsl::SafeU32::new(0x31236642);
/// @brief Defines the mask for checking support for version #1 of this spec
pub const BF_SPEC_ID1_MASK: bsl::SafeU32 = bsl::SafeU32::new(0x2);
/// @brief Defines all versions supported
pub const BF_ALL_SPECS_SUPPORTED_VAL: bsl::SafeU32 = bsl::SafeU32::new(0x2);
/// @brief Defines an invalid version
pub const BF_INVALID_VERSION: bsl::SafeU32 = bsl::SafeU32::new(0x80000000);
/// <!-- description -->
/// @brief Returns true if version #1 of this spec is supported
///
/// <!-- inputs/outputs -->
/// @param version the version bitmask to check
/// @return Returns true if BF_SPEC_ID1_MASK is set in version
///
pub fn bf_is_spec1_supported(version: bsl::SafeU32) -> bool {
return (version & BF_SPEC_ID1_MASK) != 0;
}
#[cfg(test)]
mod test_bf_is_spec1_supported {
#[test]
fn test_bf_is_spec1_supported() {
        let ver1: bsl::SafeU32 = bsl::SafeU32::new(0x2);
        let ver2: bsl::SafeU32 = bsl::SafeU32::new(0x80000000);
assert!(super::bf_is_spec1_supported(ver1));
assert!(!super::bf_is_spec1_supported(ver2));
}
}
// -----------------------------------------------------------------------------
// Syscall Opcodes - Control Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_control_op
pub const BF_CONTROL_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000000000);
/// @brief Defines the syscall opcode for bf_control_op (nosig)
pub const BF_CONTROL_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - Handle Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_handle_op
pub const BF_HANDLE_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000010000);
/// @brief Defines the syscall opcode for bf_handle_op (nosig)
pub const BF_HANDLE_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000010000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - Debug Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_debug_op
pub const BF_DEBUG_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000020000);
/// @brief Defines the syscall opcode for bf_debug_op (nosig)
pub const BF_DEBUG_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000020000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - Callback Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_callback_op
pub const BF_CALLBACK_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000030000);
/// @brief Defines the syscall opcode for bf_callback_op (nosig)
pub const BF_CALLBACK_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000030000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - VM Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_vm_op
pub const BF_VM_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000040000);
/// @brief Defines the syscall opcode for bf_vm_op (nosig)
pub const BF_VM_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000040000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - VP Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_vp_op
pub const BF_VP_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000050000);
/// @brief Defines the syscall opcode for bf_vp_op (nosig)
pub const BF_VP_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000050000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - VS Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_vs_op
pub const BF_VS_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000060000);
/// @brief Defines the syscall opcode for bf_vs_op (nosig)
pub const BF_VS_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000060000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - Intrinsic Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_intrinsic_op
pub const BF_INTRINSIC_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000070000);
/// @brief Defines the syscall opcode for bf_intrinsic_op (nosig)
pub const BF_INTRINSIC_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000070000);
// -----------------------------------------------------------------------------
// Syscall Opcodes - Mem Support
// -----------------------------------------------------------------------------
/// @brief Defines the syscall opcode for bf_mem_op
pub const BF_MEM_OP_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x6642000000080000);
/// @brief Defines the syscall opcode for bf_mem_op (nosig)
pub const BF_MEM_OP_NOSIG_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000080000);
// -----------------------------------------------------------------------------
// TLS Offsets
// -----------------------------------------------------------------------------
/// @brief stores the offset for rax
pub const TLS_OFFSET_RAX: bsl::SafeU64 = bsl::SafeU64::new(0x800);
/// @brief stores the offset for rbx
pub const TLS_OFFSET_RBX: bsl::SafeU64 = bsl::SafeU64::new(0x808);
/// @brief stores the offset for rcx
pub const TLS_OFFSET_RCX: bsl::SafeU64 = bsl::SafeU64::new(0x810);
/// @brief stores the offset for rdx
pub const TLS_OFFSET_RDX: bsl::SafeU64 = bsl::SafeU64::new(0x818);
/// @brief stores the offset for rbp
pub const TLS_OFFSET_RBP: bsl::SafeU64 = bsl::SafeU64::new(0x820);
/// @brief stores the offset for rsi
pub const TLS_OFFSET_RSI: bsl::SafeU64 = bsl::SafeU64::new(0x828);
/// @brief stores the offset for rdi
pub const TLS_OFFSET_RDI: bsl::SafeU64 = bsl::SafeU64::new(0x830);
/// @brief stores the offset for r8
pub const TLS_OFFSET_R8: bsl::SafeU64 = bsl::SafeU64::new(0x838);
/// @brief stores the offset for r9
pub const TLS_OFFSET_R9: bsl::SafeU64 = bsl::SafeU64::new(0x840);
/// @brief stores the offset for r10
pub const TLS_OFFSET_R10: bsl::SafeU64 = bsl::SafeU64::new(0x848);
/// @brief stores the offset for r11
pub const TLS_OFFSET_R11: bsl::SafeU64 = bsl::SafeU64::new(0x850);
/// @brief stores the offset for r12
pub const TLS_OFFSET_R12: bsl::SafeU64 = bsl::SafeU64::new(0x858);
/// @brief stores the offset for r13
pub const TLS_OFFSET_R13: bsl::SafeU64 = bsl::SafeU64::new(0x860);
/// @brief stores the offset for r14
pub const TLS_OFFSET_R14: bsl::SafeU64 = bsl::SafeU64::new(0x868);
/// @brief stores the offset for r15
pub const TLS_OFFSET_R15: bsl::SafeU64 = bsl::SafeU64::new(0x870);
/// @brief stores the offset of the active extid
pub const TLS_OFFSET_ACTIVE_EXTID: bsl::SafeU64 = bsl::SafeU64::new(0xFF0);
/// @brief stores the offset of the active vmid
pub const TLS_OFFSET_ACTIVE_VMID: bsl::SafeU64 = bsl::SafeU64::new(0xFF2);
/// @brief stores the offset of the active vpid
pub const TLS_OFFSET_ACTIVE_VPID: bsl::SafeU64 = bsl::SafeU64::new(0xFF4);
/// @brief stores the offset of the active vsid
pub const TLS_OFFSET_ACTIVE_VSID: bsl::SafeU64 = bsl::SafeU64::new(0xFF6);
/// @brief stores the offset of the active ppid
pub const TLS_OFFSET_ACTIVE_PPID: bsl::SafeU64 = bsl::SafeU64::new(0xFF8);
/// @brief stores the number of PPs that are online
pub const TLS_OFFSET_ONLINE_PPS: bsl::SafeU64 = bsl::SafeU64::new(0xFFA);
// -----------------------------------------------------------------------------
// Hypercall Related Constants
// -----------------------------------------------------------------------------
/// @brief Defines an invalid handle
pub const BF_INVALID_HANDLE: bsl::SafeU64 = bsl::SafeU64::new(0xFFFFFFFFFFFFFFFF);
// -----------------------------------------------------------------------------
// Syscall Indexes
// -----------------------------------------------------------------------------
/// @brief Defines the index for bf_control_op_exit
pub const BF_CONTROL_OP_EXIT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_control_op_wait
pub const BF_CONTROL_OP_WAIT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_control_op_again
pub const BF_CONTROL_OP_AGAIN_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000002);
/// @brief Defines the index for bf_handle_op_open_handle
pub const BF_HANDLE_OP_OPEN_HANDLE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_handle_op_close_handle
pub const BF_HANDLE_OP_CLOSE_HANDLE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_debug_op_out
pub const BF_DEBUG_OP_OUT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_debug_op_dump_vm
pub const BF_DEBUG_OP_DUMP_VM_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_debug_op_dump_vp
pub const BF_DEBUG_OP_DUMP_VP_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000002);
/// @brief Defines the index for bf_debug_op_dump_vs
pub const BF_DEBUG_OP_DUMP_VS_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000003);
/// @brief Defines the index for bf_debug_op_dump_vmexit_log
pub const BF_DEBUG_OP_DUMP_VMEXIT_LOG_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000004);
/// @brief Defines the index for bf_debug_op_write_c
pub const BF_DEBUG_OP_WRITE_C_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000005);
/// @brief Defines the index for bf_debug_op_write_str
pub const BF_DEBUG_OP_WRITE_STR_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000006);
/// @brief Defines the index for bf_debug_op_dump_ext
pub const BF_DEBUG_OP_DUMP_EXT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000007);
/// @brief Defines the index for bf_debug_op_dump_page_pool
pub const BF_DEBUG_OP_DUMP_PAGE_POOL_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000008);
/// @brief Defines the index for bf_debug_op_dump_huge_pool
pub const BF_DEBUG_OP_DUMP_HUGE_POOL_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000009);
/// @brief Defines the index for bf_callback_op_register_bootstrap
pub const BF_CALLBACK_OP_REGISTER_BOOTSTRAP_IDX_VAL: bsl::SafeU64 =
bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_callback_op_register_vmexit
pub const BF_CALLBACK_OP_REGISTER_VMEXIT_IDX_VAL: bsl::SafeU64 =
bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_callback_op_register_fail
pub const BF_CALLBACK_OP_REGISTER_FAIL_IDX_VAL: bsl::SafeU64 =
bsl::SafeU64::new(0x0000000000000002);
/// @brief Defines the index for bf_vm_op_create_vm
pub const BF_VM_OP_CREATE_VM_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_vm_op_destroy_vm
pub const BF_VM_OP_DESTROY_VM_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_vm_op_map_direct
pub const BF_VM_OP_MAP_DIRECT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000002);
/// @brief Defines the index for bf_vm_op_unmap_direct
pub const BF_VM_OP_UNMAP_DIRECT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000003);
/// @brief Defines the index for bf_vm_op_unmap_direct_broadcast
pub const BF_VM_OP_UNMAP_DIRECT_BROADCAST_IDX_VAL: bsl::SafeU64 =
bsl::SafeU64::new(0x0000000000000004);<|fim▁hole|>
/// @brief Defines the index for bf_vp_op_create_vp
pub const BF_VP_OP_CREATE_VP_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_vp_op_destroy_vp
pub const BF_VP_OP_DESTROY_VP_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_vs_op_create_vs
pub const BF_VS_OP_CREATE_VS_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_vs_op_destroy_vs
pub const BF_VS_OP_DESTROY_VS_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_vs_op_init_as_root
pub const BF_VS_OP_INIT_AS_ROOT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000002);
/// @brief Defines the index for bf_vs_op_read_reg
pub const BF_VS_OP_READ_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000003);
/// @brief Defines the index for bf_vs_op_write_reg
pub const BF_VS_OP_WRITE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000004);
/// @brief Defines the index for bf_vs_op_run
pub const BF_VS_OP_RUN_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000005);
/// @brief Defines the index for bf_vs_op_run_current
pub const BF_VS_OP_RUN_CURRENT_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000006);
/// @brief Defines the index for bf_vs_op_advance_ip_and_run
pub const BF_VS_OP_ADVANCE_IP_AND_RUN_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000007);
/// @brief Defines the index for bf_vs_op_advance_ip_and_run_current
pub const BF_VS_OP_ADVANCE_IP_AND_RUN_CURRENT_IDX_VAL: bsl::SafeU64 =
bsl::SafeU64::new(0x0000000000000008);
/// @brief Defines the index for bf_vs_op_promote
pub const BF_VS_OP_PROMOTE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000009);
/// @brief Defines the index for bf_vs_op_clear
pub const BF_VS_OP_CLEAR_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x000000000000000A);
/// @brief Defines the index for bf_vs_op_migrate
pub const BF_VS_OP_MIGRATE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x000000000000000B);
/// @brief Defines the index for bf_vs_op_set_active
pub const BF_VS_OP_SET_ACTIVE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x000000000000000C);
/// @brief Defines the index for bf_vs_op_advance_ip_and_set_active
pub const BF_VS_OP_ADVANCE_IP_AND_SET_ACTIVE_IDX_VAL: bsl::SafeU64 =
bsl::SafeU64::new(0x000000000000000D);
/// @brief Defines the index for bf_vs_op_tlb_flush
pub const BF_VS_OP_TLB_FLUSH_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x000000000000000E);
/// @brief Defines the index for bf_intrinsic_op_rdmsr
pub const BF_INTRINSIC_OP_RDMSR_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_intrinsic_op_wrmsr
pub const BF_INTRINSIC_OP_WRMSR_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000001);
/// @brief Defines the index for bf_mem_op_alloc_page
pub const BF_MEM_OP_ALLOC_PAGE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000000);
/// @brief Defines the index for bf_mem_op_alloc_huge
pub const BF_MEM_OP_ALLOC_HUGE_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000002);<|fim▁end|> | /// @brief Defines the index for bf_vm_op_tlb_flush
pub const BF_VM_OP_TLB_FLUSH_IDX_VAL: bsl::SafeU64 = bsl::SafeU64::new(0x0000000000000005); |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = Logger;
var _lodash = require('lodash');
var _moment = require('moment');
var _moment2 = _interopRequireDefault(_moment);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var logLevels = {
TRACE: 'TRACE',
INFO: 'INFO',
WARN: 'WARN',
ERROR: 'ERROR'
};
function _log(category, level) {
var _console2;
var now = (0, _moment2.default)().format();
for (var _len = arguments.length, args = Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
if (level === logLevels.ERROR) {
var _console;
return (_console = console).error.apply(_console, [now + ' ' + level + ' [' + category + ']'].concat(args)); // eslint-disable-line no-console
}
return (_console2 = console).log.apply(_console2, [now + ' ' + level + ' [' + category + ']'].concat(args)); // eslint-disable-line no-console
}
function Logger(category, requestId) {
this.category = category;
this.requestId = requestId;
}
function createLogLevel(level) {
return function logWithLevel() {
for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
    if (this.requestId) {
      // Return here so a message with a request id is not logged twice.
      return _log.apply(undefined, [this.category, level, 'RequestId: ' + this.requestId].concat(args));
    }
_log.apply(undefined, [this.category, level].concat(args));
};
}
Logger.prototype.trace = createLogLevel(logLevels.TRACE);
Logger.prototype.info = createLogLevel(logLevels.INFO);
Logger.prototype.warn = createLogLevel(logLevels.WARN);
Logger.prototype.error = createLogLevel(logLevels.ERROR);
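// Usage sketch (category and request id here are made-up values):
//   var log = new Logger('billing', 'req-42');
//   log.info('charge complete'); // -> <ISO time> INFO [billing] RequestId: req-42 charge complete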
Logger.prototype.log = function log(level) {
for (var _len3 = arguments.length, args = Array(_len3 > 1 ? _len3 - 1 : 0), _key3 = 1; _key3 < _len3; _key3++) {
args[_key3 - 1] = arguments[_key3];<|fim▁hole|> if ((0, _lodash.size)(args) === 1 && (0, _lodash.isObject)(args[0])) {
_log(this.category, (0, _lodash.toUpper)(level), JSON.stringify(args[0]));
return;
}
_log.apply(undefined, [this.category, (0, _lodash.toUpper)(level)].concat(args));
};<|fim▁end|> | }
|
<|file_name|>unitfactory.py<|end_file_name|><|fim▁begin|>from model.commonfactory import CommonFactory
from model.slaveunit import SlaveUnit
from model.squireunit import SquireUnit
from model.swordfighterunit import SwordfighterUnit
from model.archerunit import ArcherUnit
from model.cavalryunit import CavalryUnit<|fim▁hole|>UNIT_TYPES = {
'slave': SlaveUnit,
'squire': SquireUnit,
'swordfighter': SwordfighterUnit,
'archer': ArcherUnit,
'cavalry': CavalryUnit
}
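# Usage sketch (assumes the concrete unit constructors accept whatever
# *args/**kwargs are forwarded through CommonFactory):
#   archer = UnitFactory('archer')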
def UnitFactory(unit_type = "none", *args, **kwargs):
return CommonFactory("unit", unit_type, UNIT_TYPES, *args, **kwargs)<|fim▁end|> | |
<|file_name|>test_dynamodb_consumedcapacity.py<|end_file_name|><|fim▁begin|>import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],<|fim▁hole|> ["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")<|fim▁end|> | ["TOTAL", True, False], |
<|file_name|>directory_storage_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2<|fim▁hole|># Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests of directory storage adapter."""
import os
import unittest
import directory_storage
import fake_storage
import gsd_storage
import hashing_tools
import hashing_tools_test
import working_directory
class TestDirectoryStorage(unittest.TestCase):
def setUp(self):
storage = fake_storage.FakeStorage()
self._dir_storage = directory_storage.DirectoryStorageAdapter(storage)
def test_WriteRead(self):
# Check that a directory can be written and then read back.
with working_directory.TemporaryWorkingDirectory() as work_dir:
temp1 = os.path.join(work_dir, 'temp1')
temp2 = os.path.join(work_dir, 'temp2')
hashing_tools_test.GenerateTestTree('write_read', temp1)
self._dir_storage.PutDirectory(temp1, 'foo')
self._dir_storage.GetDirectory('foo', temp2)
self.assertEqual(hashing_tools.StableHashPath(temp1),
hashing_tools.StableHashPath(temp2))
def test_InputUntouched(self):
# Check that PutDirectory doesn't alter its inputs.
with working_directory.TemporaryWorkingDirectory() as work_dir:
temp1 = os.path.join(work_dir, 'temp1')
hashing_tools_test.GenerateTestTree('input_untouched', temp1)
h1 = hashing_tools.StableHashPath(temp1)
self._dir_storage.PutDirectory(temp1, 'hello')
h2 = hashing_tools.StableHashPath(temp1)
self.assertEqual(h1, h2)
def test_URLsPropagate(self):
# Check that consistent non-None URLs come from get and put.
with working_directory.TemporaryWorkingDirectory() as work_dir:
temp1 = os.path.join(work_dir, 'temp1')
temp2 = os.path.join(work_dir, 'temp2')
hashing_tools_test.GenerateTestTree('url_propagate', temp1)
url1 = self._dir_storage.PutDirectory(temp1, 'me')
url2 = self._dir_storage.GetDirectory('me', temp2)
self.assertEqual(url1, url2)
self.assertNotEqual(None, url1)
def test_BadWrite(self):
def call(cmd):
return 1
storage = gsd_storage.GSDStorage(
gsutil=['mygsutil'],
write_bucket='mybucket',
read_buckets=[],
call=call)
dir_storage = directory_storage.DirectoryStorageAdapter(storage)
# Check that storage exceptions come thru on failure.
with working_directory.TemporaryWorkingDirectory() as work_dir:
temp1 = os.path.join(work_dir, 'temp1')
hashing_tools_test.GenerateTestTree('bad_write', temp1)
self.assertRaises(gsd_storage.GSDStorageError,
dir_storage.PutDirectory, temp1, 'bad')
def test_BadRead(self):
    # Check that reading a missing key returns None rather than raising.
with working_directory.TemporaryWorkingDirectory() as work_dir:
temp1 = os.path.join(work_dir, 'temp1')
self.assertEqual(None, self._dir_storage.GetDirectory('foo', temp1))
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>NodeJS.js<|end_file_name|><|fim▁begin|>/*
Siesta 3.0.1
Copyright(c) 2009-2015 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
/**
@class Siesta.Harness.NodeJS
@extends Siesta.Harness
Class representing the NodeJS harness. This class reports the output from all test files to the console.
This file is for reference only; for a getting-started guide and manual, please refer to the <a href="#!/guide/siesta_getting_started">Getting Started Guide</a>.
Synopsis
========
var Harness,
isNode = typeof process != 'undefined' && process.pid
if (isNode) {
Harness = require('siesta');
} else {
Harness = Siesta.Harness.Browser;
}
Harness.configure({
title : 'Awesome Test Suite',
transparentEx : true,
autoCheckGlobals : true,
expectedGlobals : [
'Ext',
'Sch'
],
preload : [
"http://cdn.sencha.io/ext-4.0.2a/ext-all-debug.js",
"../awesome-project-all.js",
{
text : "console.log('preload completed')"
}
]
})
Harness.start(
// simple string - url relative to harness file
'sanity.t.js',
// test file descriptor with own configuration options
{
url : 'basic.t.js',
// replace `preload` option of harness
preload : [
"http://cdn.sencha.io/ext-4.0.6/ext-all-debug.js",
"../awesome-project-all.js"
]
},
// groups ("folders") of test files (possibly with own options)
{
group : 'Sanity',
autoCheckGlobals : false,
items : [
'data/crud.t.js',
...
]
},
...
)
Running the test suite in NodeJS
================================
To run the suite in NodeJS, launch the harness javascript file:
> node t/index.js
*/
Class('Siesta.Harness.NodeJS', {<|fim▁hole|>
does : Siesta.Role.ConsoleReporter,
has : {
contentManagerClass : Siesta.Content.Manager.NodeJS,
scopeProvider : 'Scope.Provider.NodeJS',
chdirToIndex : true
},
before : {
start : function () {
this.runCore = 'sequential'
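                // With chdirToIndex (true by default above), relative test file URLs
                // resolve against the directory of the launched index file.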
if (this.chdirToIndex) {
var indexFile = process.argv[1]
var path = require('path')
process.chdir(path.dirname(indexFile))
}
}
},
methods : {
log : console.log,
exit : process.exit,
getScopeProviderConfigFor : function (desc, launchId) {
var config = this.SUPER(desc, launchId)
config.sourceURL = desc.url
return config
},
normalizeURL : function (url) {
// ref to lib in current dist (no trailing `.js`)
if (!/\.js$/.test(url)) {
url = '../lib/' + url.replace(/\./g, '/') + '.js'
}
return url
}
}
}
//eof my
})
//eof Siesta.Harness.NodeJS<|fim▁end|> |
// static
my : {
isa : Siesta.Harness, |
<|file_name|>expressions.py<|end_file_name|><|fim▁begin|># encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from jx_base.expressions import Variable, DateOp, TupleOp, LeavesOp, BinaryOp, OrOp, InequalityOp, extend, Literal, NullOp, TrueOp, FalseOp, DivOp, FloorOp, \
NeOp, NotOp, LengthOp, NumberOp, StringOp, CountOp, MultiOp, RegExpOp, CoalesceOp, MissingOp, ExistsOp, \
PrefixOp, UnixOp, FromUnixOp, NotLeftOp, RightOp, NotRightOp, FindOp, InOp, RangeOp, CaseOp, AndOp, \
ConcatOp, LeftOp, EqOp, WhenOp, BasicIndexOfOp, IntegerOp, MaxOp, BasicSubstringOp, FALSE, MinOp, BooleanOp, SuffixOp, BetweenOp, simplified, ZERO, SqlInstrOp, SqlSubstrOp, NULL, ONE, builtin_ops, TRUE, SqlEqOp, BasicMultiOp
from jx_base.queries import get_property_name
from jx_sqlite import quoted_GUID, GUID
from mo_dots import coalesce, wrap, Null, split_field, listwrap, startswith_field
from mo_dots import join_field, ROOT_PATH, relative_field
from mo_future import text_type
from mo_json import json2value
from mo_json.typed_encoder import OBJECT, BOOLEAN, EXISTS, NESTED
from mo_logs import Log
from mo_math import Math
from pyLibrary import convert
from pyLibrary.sql import SQL, SQL_AND, SQL_EMPTY_STRING, SQL_OR, SQL_TRUE, SQL_ZERO, SQL_FALSE, SQL_NULL, SQL_ONE, SQL_IS_NOT_NULL, sql_list, sql_iso, SQL_IS_NULL, SQL_END, SQL_ELSE, SQL_THEN, SQL_WHEN, SQL_CASE, sql_concat, sql_coalesce
from pyLibrary.sql.sqlite import quote_column, quote_value
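# The @extend decorators below monkey-patch a to_sql() method onto each jx_base
# expression class. Each to_sql() returns a list of parts shaped like
# {"name": ..., "sql": {<type letter>: <SQL text>}}, where the letters appear
# to mean b=boolean, s=string, n=number, j=json (see the "bsnj" loops below).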
@extend(Variable)
def to_sql(self, schema, not_null=False, boolean=False):
if self.var == GUID:
return wrap([{"name": ".", "sql": {"s": quoted_GUID}, "nested_path": ROOT_PATH}])
vars = schema[self.var]
if not vars:
# DOES NOT EXIST
return wrap([{"name": ".", "sql": {"0": SQL_NULL}, "nested_path": ROOT_PATH}])
var_name = list(set(listwrap(vars).name))
if len(var_name) > 1:
Log.error("do not know how to handle")
var_name = var_name[0]
cols = schema.leaves(self.var)
acc = {}
if boolean:
for col in cols:
cname = relative_field(col.name, var_name)
nested_path = col.nested_path[0]
if col.type == OBJECT:
value = SQL_TRUE
elif col.type == BOOLEAN:
value = quote_column(col.es_column)
else:
value = quote_column(col.es_column) + SQL_IS_NOT_NULL
tempa = acc.setdefault(nested_path, {})
tempb = tempa.setdefault(get_property_name(cname), {})
tempb['b'] = value
else:
for col in cols:
cname = relative_field(col.name, var_name)
if col.type == OBJECT:
prefix = self.var + "."
for cn, cs in schema.items():
if cn.startswith(prefix):
for child_col in cs:
tempa = acc.setdefault(child_col.nested_path[0], {})
tempb = tempa.setdefault(get_property_name(cname), {})
tempb[json_type_to_sql_type[col.type]] = quote_column(child_col.es_column)
else:
nested_path = col.nested_path[0]
tempa = acc.setdefault(nested_path, {})
tempb = tempa.setdefault(get_property_name(cname), {})
tempb[json_type_to_sql_type[col.type]] = quote_column(col.es_column)
return wrap([
{"name": cname, "sql": types, "nested_path": nested_path}
for nested_path, pairs in acc.items() for cname, types in pairs.items()
])
@extend(Literal)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.value
v = quote_value(value)
if v == None:
return wrap([{"name": "."}])
elif isinstance(value, text_type):
return wrap([{"name": ".", "sql": {"s": quote_value(value)}}])
elif Math.is_number(v):
return wrap([{"name": ".", "sql": {"n": quote_value(value)}}])
elif v in [True, False]:
return wrap([{"name": ".", "sql": {"b": quote_value(value)}}])
else:
return wrap([{"name": ".", "sql": {"j": quote_value(self.json)}}])
@extend(NullOp)
def to_sql(self, schema, not_null=False, boolean=False):
return Null
@extend(TrueOp)
def to_sql(self, schema, not_null=False, boolean=False):
return wrap([{"name": ".", "sql": {"b": SQL_TRUE}}])
@extend(FalseOp)
def to_sql(self, schema, not_null=False, boolean=False):
return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
@extend(DateOp)
def to_sql(self, schema, not_null=False, boolean=False):
return wrap([{"name": ".", "sql": {"n": quote_value(self.value)}}])
@extend(TupleOp)
def to_sql(self, schema, not_null=False, boolean=False):
return wrap([{"name": ".", "sql": t.to_sql(schema)[0].sql} for t in self.terms])
@extend(LeavesOp)
def to_sql(self, schema, not_null=False, boolean=False):
if not isinstance(self.term, Variable):
Log.error("Can only handle Variable")
term = self.term.var
prefix_length = len(split_field(term))
output = wrap([
{
"name": join_field(split_field(schema.get_column_name(c))[prefix_length:]),
"sql": Variable(schema.get_column_name(c)).to_sql(schema)[0].sql
}
for c in schema.columns
if startswith_field(c.name, term) and (
(c.jx_type not in (EXISTS, OBJECT, NESTED) and startswith_field(schema.nested_path[0], c.nested_path[0])) or
(c.jx_type not in (EXISTS, OBJECT) and schema.nested_path[0] == c.nested_path[0])
)
])
return output
@extend(EqOp)
def to_sql(self, schema, not_null=False, boolean=False):
lhs = self.lhs.to_sql(schema)
rhs = self.rhs.to_sql(schema)
acc = []
if len(lhs) != len(rhs):
Log.error("lhs and rhs have different dimensionality!?")
for l, r in zip(lhs, rhs):
for t in "bsnj":
if l.sql[t] == None:
if r.sql[t] == None:
pass
else:
acc.append(sql_iso(r.sql[t]) + SQL_IS_NULL)
else:
if r.sql[t] == None:
acc.append(sql_iso(l.sql[t]) + SQL_IS_NULL)
else:
acc.append(sql_iso(l.sql[t]) + " = " + sql_iso(r.sql[t]))
if not acc:
return FALSE.to_sql(schema)
else:
return wrap([{"name": ".", "sql": {"b": SQL_OR.join(acc)}}])
@extend(EqOp)
@simplified
def partial_eval(self):
lhs = self.lhs.partial_eval()
rhs = self.rhs.partial_eval()
if isinstance(lhs, Literal) and isinstance(rhs, Literal):
return TRUE if builtin_ops["eq"](lhs.value, rhs.value) else FALSE
else:
rhs_missing = rhs.missing().partial_eval()
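        # Null semantics: missing == missing evaluates to true, missing == value
        # to false; only when both sides exist does a plain SQL equality run.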
return CaseOp(
"case",
[
WhenOp("when", lhs.missing(), **{"then": rhs_missing}),
WhenOp("when", rhs_missing, **{"then": FALSE}),
SqlEqOp("eq", [lhs, rhs])
]
).partial_eval()
@extend(NeOp)
def to_sql(self, schema, not_null=False, boolean=False):
return NotOp('not', EqOp('eq', [self.lhs, self.rhs]).partial_eval()).partial_eval().to_sql(schema)
@extend(BasicIndexOfOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.value.to_sql(schema)[0].sql.s
find = self.find.to_sql(schema)[0].sql.s
start = self.start
if isinstance(start, Literal) and start.value == 0:
return wrap([{"name": ".", "sql": {"n": "INSTR" + sql_iso(value + "," + find) + "-1"}}])
else:
start_index = start.to_sql(schema)[0].sql.n
found = "INSTR(SUBSTR" + sql_iso(value + "," + start_index + "+1)," + find)
return wrap([{"name": ".", "sql": {"n": (
SQL_CASE +
SQL_WHEN + found +
SQL_THEN + found + "+" + start_index + "-1" +
SQL_ELSE + "-1" +
SQL_END
)}}])
@extend(BasicSubstringOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.value.to_sql(schema)[0].sql.s
start = MultiOp("add", [self.start, Literal(None, 1)]).partial_eval().to_sql(schema)[0].sql.n
length = BinaryOp("subtract", [self.end, self.start]).partial_eval().to_sql(schema)[0].sql.n
return wrap([{"name": ".", "sql": {"s": "SUBSTR" + sql_iso(value + "," + start + ", " + length)}}])
@extend(BinaryOp)
def to_sql(self, schema, not_null=False, boolean=False):
lhs = self.lhs.to_sql(schema)[0].sql.n
rhs = self.rhs.to_sql(schema)[0].sql.n
return wrap([{"name": ".", "sql": {"n": sql_iso(lhs) + " " + BinaryOp.operators[self.op] + " " + sql_iso(rhs)}}])
@extend(MinOp)
def to_sql(self, schema, not_null=False, boolean=False):
terms = [t.partial_eval().to_sql(schema)[0].sql.n for t in self.terms]
return wrap([{"name": ".", "sql": {"n": "min" + sql_iso((sql_list(terms)))}}])
@extend(MaxOp)
def to_sql(self, schema, not_null=False, boolean=False):
terms = [t.partial_eval().to_sql(schema)[0].sql.n for t in self.terms]
return wrap([{"name": ".", "sql": {"n": "max" + sql_iso((sql_list(terms)))}}])
@extend(InequalityOp)
def to_sql(self, schema, not_null=False, boolean=False):
lhs = self.lhs.to_sql(schema, not_null=True)[0].sql
rhs = self.rhs.to_sql(schema, not_null=True)[0].sql
lhs_exists = self.lhs.exists().to_sql(schema)[0].sql
rhs_exists = self.rhs.exists().to_sql(schema)[0].sql
if len(lhs) == 1 and len(rhs) == 1:
return wrap([{"name": ".", "sql": {
"b": sql_iso(lhs.values()[0]) + " " + InequalityOp.operators[self.op] + " " + sql_iso(rhs.values()[0])
}}])
ors = []
for l in "bns":
ll = lhs[l]
if not ll:
continue
for r in "bns":
rr = rhs[r]
if not rr:
continue
elif r == l:
ors.append(
sql_iso(lhs_exists[l]) + SQL_AND + sql_iso(rhs_exists[r]) + SQL_AND + sql_iso(lhs[l]) + " " +
InequalityOp.operators[self.op] + " " + sql_iso(rhs[r])
)
elif (l > r and self.op in ["gte", "gt"]) or (l < r and self.op in ["lte", "lt"]):
ors.append(
sql_iso(lhs_exists[l]) + SQL_AND + sql_iso(rhs_exists[r])
)
sql = sql_iso(SQL_OR.join(sql_iso(o) for o in ors))
<|fim▁hole|> return wrap([{"name": ".", "sql": {"b": sql}}])
@extend(DivOp)
def to_sql(self, schema, not_null=False, boolean=False):
lhs = self.lhs.to_sql(schema)[0].sql.n
rhs = self.rhs.to_sql(schema)[0].sql.n
d = self.default.to_sql(schema)[0].sql.n
if lhs and rhs:
if d == None:
return wrap([{
"name": ".",
"sql": {"n": sql_iso(lhs) + " / " + sql_iso(rhs)}
}])
else:
return wrap([{
"name": ".",
"sql": {"n": sql_coalesce([sql_iso(lhs) + " / " + sql_iso(rhs), d])}
}])
else:
return Null
@extend(FloorOp)
def to_sql(self, schema, not_null=False, boolean=False):
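    # NOTE: this body emits a null-tolerant equality test (the same shape as
    # EqOp.to_sql above); it does not emit a numeric FLOOR expression.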
lhs = self.lhs.to_sql(schema)
rhs = self.rhs.to_sql(schema)
acc = []
if len(lhs) != len(rhs):
Log.error("lhs and rhs have different dimensionality!?")
for l, r in zip(lhs, rhs):
for t in "bsnj":
if l.sql[t] == None:
if r.sql[t] == None:
pass
else:
acc.append(sql_iso(r.sql[t]) + " IS " + SQL_NULL)
else:
if r.sql[t] == None:
acc.append(sql_iso(l.sql[t]) + " IS " + SQL_NULL)
else:
acc.append("(" + sql_iso(l.sql[t]) + " = " + sql_iso(r.sql[t]) + " OR (" + sql_iso(l.sql[t]) + " IS" + SQL_NULL + SQL_AND + "(" + r.sql[
t] + ") IS NULL))")
if not acc:
return FALSE.to_sql(schema)
else:
return wrap([{"name": ".", "sql": {"b": SQL_OR.join(acc)}}])
# @extend(NeOp)
# def to_sql(self, schema, not_null=False, boolean=False):
# return NotOp("not", EqOp("eq", [self.lhs, self.rhs])).to_sql(schema, not_null, boolean)
@extend(NotOp)
def to_sql(self, schema, not_null=False, boolean=False):
not_expr = NotOp("not", BooleanOp("boolean", self.term)).partial_eval()
if isinstance(not_expr, NotOp):
return wrap([{"name": ".", "sql": {"b": "NOT " + sql_iso(not_expr.term.to_sql(schema)[0].sql.b)}}])
else:
return not_expr.to_sql(schema)
@extend(BooleanOp)
def to_sql(self, schema, not_null=False, boolean=False):
term = self.term.partial_eval()
if term.type == "boolean":
sql = term.to_sql(schema)
return sql
else:
sql = term.exists().partial_eval().to_sql(schema)
return sql
@extend(AndOp)
def to_sql(self, schema, not_null=False, boolean=False):
if not self.terms:
return wrap([{"name": ".", "sql": {"b": SQL_TRUE}}])
elif all(self.terms):
return wrap([{"name": ".", "sql": {
"b": SQL_AND.join([sql_iso(t.to_sql(schema, boolean=True)[0].sql.b) for t in self.terms])
}}])
else:
return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
@extend(OrOp)
def to_sql(self, schema, not_null=False, boolean=False):
return wrap([{
"name": ".",
"sql": {"b": SQL_OR.join(
sql_iso(t.to_sql(schema, boolean=True)[0].sql.b)
for t in self.terms
)}
}])
@extend(LengthOp)
def to_sql(self, schema, not_null=False, boolean=False):
term = self.term.partial_eval()
if isinstance(term, Literal):
val = term.value
if isinstance(val, text_type):
return wrap([{"name": ".", "sql": {"n": convert.value2json(len(val))}}])
elif isinstance(val, (float, int)):
return wrap([{"name": ".", "sql": {"n": convert.value2json(len(convert.value2json(val)))}}])
else:
return Null
value = term.to_sql(schema)[0].sql.s
return wrap([{"name": ".", "sql": {"n": "LENGTH" + sql_iso(value)}}])
@extend(IntegerOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.term.to_sql(schema, not_null=True)
acc = []
for c in value:
for t, v in c.sql.items():
if t == "s":
acc.append("CAST(" + v + " as INTEGER)")
else:
acc.append(v)
if not acc:
return wrap([])
elif len(acc) == 1:
return wrap([{"name": ".", "sql": {"n": acc[0]}}])
else:
return wrap([{"name": ".", "sql": {"n": sql_coalesce(acc)}}])
@extend(NumberOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.term.to_sql(schema, not_null=True)
acc = []
for c in value:
for t, v in c.sql.items():
if t == "s":
acc.append("CAST(" + v + " as FLOAT)")
else:
acc.append(v)
if not acc:
return wrap([])
elif len(acc) == 1:
return wrap([{"name": ".", "sql": {"n": acc}}])
else:
return wrap([{"name": ".", "sql": {"n": sql_coalesce(acc)}}])
@extend(StringOp)
def to_sql(self, schema, not_null=False, boolean=False):
test = self.term.missing().to_sql(schema, boolean=True)[0].sql.b
value = self.term.to_sql(schema, not_null=True)[0].sql
acc = []
for t, v in value.items():
if t == "b":
acc.append(SQL_CASE+SQL_WHEN + sql_iso(test) + SQL_THEN + SQL_NULL + SQL_WHEN + sql_iso(v) + SQL_THEN+"'true'"+SQL_ELSE+"'false'"+SQL_END)
elif t == "s":
acc.append(v)
else:
acc.append("RTRIM(RTRIM(CAST" + sql_iso(v + " as TEXT), " + quote_value('0')) + ", " + quote_value(".") + ")")
if not acc:
return wrap([{}])
elif len(acc) == 1:
return wrap([{"name": ".", "sql": {"s": acc[0]}}])
else:
return wrap([{"name": ".", "sql": {"s": sql_coalesce(acc)}}])
@extend(CountOp)
def to_sql(self, schema, not_null=False, boolean=False):
acc = []
for term in self.terms:
sqls = term.to_sql(schema)
if len(sqls) > 1:
acc.append(SQL_TRUE)
else:
for t, v in sqls[0].sql.items():
if t in ["b", "s", "n"]:
acc.append(SQL_CASE+SQL_WHEN + sql_iso(v) + SQL_IS_NULL + SQL_THEN+"0"+SQL_ELSE+"1"+SQL_END)
else:
acc.append(SQL_TRUE)
if not acc:
return wrap([{}])
else:
return wrap([{"nanme": ".", "sql": {"n": SQL("+").join(acc)}}])
_sql_operators = {
"add": (SQL(" + "), SQL_ZERO), # (operator, zero-array default value) PAIR
"basic.add": (SQL(" + "), SQL_ZERO), # (operator, zero-array default value) PAIR
"sum": (SQL(" + "), SQL_ZERO),
"mul": (SQL(" * "), SQL_ONE),
"mult": (SQL(" * "), SQL_ONE),
"multiply": (SQL(" * "), SQL_ONE),
"basic.mult": (SQL(" * "), SQL_ONE)
}
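# Example: a BasicMultiOp with op "basic.add" over terms [a, b] joins the
# numeric SQL of each term, rendering as (a) + (b); the identity element
# paired with each operator is the natural default for an empty term list,
# though to_sql below does not currently use it.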
@extend(BasicMultiOp)
def to_sql(self, schema, not_null=False, boolean=False):
op, identity = _sql_operators[self.op]
sql = op.join(sql_iso(t.to_sql(schema)[0].sql.n) for t in self.terms)
return wrap([{"name": ".", "sql": {"n": sql}}])
@extend(RegExpOp)
def to_sql(self, schema, not_null=False, boolean=False):
pattern = quote_value(json2value(self.pattern.json))
value = self.var.to_sql(schema)[0].sql.s
return wrap([
{"name": ".", "sql": {"b": value + " REGEXP " + pattern}}
])
@extend(CoalesceOp)
def to_sql(self, schema, not_null=False, boolean=False):
acc = {
"b": [],
"s": [],
"n": []
}
for term in self.terms:
for t, v in term.to_sql(schema)[0].sql.items():
acc[t].append(v)
output = {}
for t, terms in acc.items():
if not terms:
continue
elif len(terms) == 1:
output[t] = terms[0]
else:
output[t] = sql_coalesce(terms)
return wrap([{"name": ".", "sql": output}])
@extend(MissingOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.expr.partial_eval()
missing_value = value.missing().partial_eval()
if not isinstance(missing_value, MissingOp):
return missing_value.to_sql(schema)
value_sql = value.to_sql(schema)
if len(value_sql) > 1:
return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
acc = []
for c in value_sql:
for t, v in c.sql.items():
if t == "b":
acc.append(sql_iso(v) + SQL_IS_NULL)
if t == "s":
acc.append(sql_iso(sql_iso(v) + SQL_IS_NULL) + SQL_OR + sql_iso(sql_iso(v) + "=" + SQL_EMPTY_STRING))
if t == "n":
acc.append(sql_iso(v) + SQL_IS_NULL)
if not acc:
return wrap([{"name": ".", "sql": {"b": SQL_TRUE}}])
else:
return wrap([{"name": ".", "sql": {"b": SQL_AND.join(acc)}}])
@extend(WhenOp)
def to_sql(self, schema, not_null=False, boolean=False):
when = self.when.partial_eval().to_sql(schema, boolean=True)[0].sql
then = self.then.partial_eval().to_sql(schema, not_null=not_null)[0].sql
els_ = self.els_.partial_eval().to_sql(schema, not_null=not_null)[0].sql
output = {}
for t in "bsn":
if then[t] == None:
if els_[t] == None:
pass
else:
output[t] = SQL_CASE+SQL_WHEN + when.b + SQL_THEN + SQL_NULL + SQL_ELSE + els_[t] + SQL_END
else:
if els_[t] == None:
output[t] = SQL_CASE+SQL_WHEN + when.b + SQL_THEN + then[t] + SQL_END
else:
output[t] = SQL_CASE+SQL_WHEN + when.b + SQL_THEN + then[t] + SQL_ELSE + els_[t] + SQL_END
if not output:
return wrap([{"name": ".", "sql": {"0": SQL_NULL}}])
else:
return wrap([{"name": ".", "sql": output}])
@extend(ExistsOp)
def to_sql(self, schema, not_null=False, boolean=False):
field = self.field.to_sql(schema)[0].sql
acc = []
for t, v in field.items():
if t in "bns":
acc.append(sql_iso(v + SQL_IS_NOT_NULL))
if not acc:
return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
else:
return wrap([{"name": ".", "sql": {"b": SQL_OR.join(acc)}}])
@extend(PrefixOp)
def to_sql(self, schema, not_null=False, boolean=False):
if not self.expr:
return wrap([{"name": ".", "sql": {"b": SQL_TRUE}}])
else:
return wrap([{"name": ".", "sql": {
"b": "INSTR" + sql_iso(self.expr.to_sql(schema)[0].sql.s + ", " + self.prefix.to_sql(schema)[0].sql.s) + "==1"
}}])
@extend(SuffixOp)
def to_sql(self, schema, not_null=False, boolean=False):
if not self.expr:
return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
elif isinstance(self.suffix, Literal) and not self.suffix.value:
return wrap([{"name": ".", "sql": {"b": SQL_TRUE}}])
else:
return EqOp(
"eq",
[
RightOp("right", [self.expr, LengthOp("length", self.suffix)]),
self.suffix
]
).partial_eval().to_sql(schema)
@extend(ConcatOp)
def to_sql(self, schema, not_null=False, boolean=False):
    default_sql = self.default.to_sql(schema)
    if len(self.terms) == 0:
        return default_sql
    default_sql = coalesce(default_sql[0].sql, SQL_NULL)
sep = self.separator.to_sql(schema)[0].sql.s
acc = []
for t in self.terms:
missing = t.missing().partial_eval()
term = t.to_sql(schema, not_null=True)[0].sql
if term.s:
term_sql = term.s
elif term.n:
term_sql = "cast(" + term.n + " as text)"
else:
term_sql = SQL_CASE + SQL_WHEN + term.b + SQL_THEN + quote_value("true") + SQL_ELSE + quote_value("false") + SQL_END
if isinstance(missing, TrueOp):
acc.append(SQL_EMPTY_STRING)
elif missing:
acc.append(
SQL_CASE +
SQL_WHEN + sql_iso(missing.to_sql(schema, boolean=True)[0].sql.b) +
SQL_THEN + SQL_EMPTY_STRING +
SQL_ELSE + sql_iso(sql_concat([sep, term_sql])) +
SQL_END
)
else:
acc.append(sql_concat([sep, term_sql]))
expr_ = "substr(" + sql_concat(acc) + ", " + LengthOp(None, self.separator).to_sql(schema)[0].sql.n + "+1)"
missing = self.missing()
if not missing:
return wrap([{"name": ".", "sql": {"s": expr_}}])
else:
return wrap([{
"name": ".",
"sql": {
"s": SQL_CASE+SQL_WHEN+"(" + missing.to_sql(schema, boolean=True)[0].sql.b +
")"+SQL_THEN+"(" + defult +
")"+SQL_ELSE+"(" + expr_ +
")"+SQL_END
}
}])
@extend(UnixOp)
def to_sql(self, schema, not_null=False, boolean=False):
v = self.value.to_sql(schema)[0].sql
return wrap([{
"name": ".",
"sql": {"n": "UNIX_TIMESTAMP" + sql_iso(v.n)}
}])
@extend(FromUnixOp)
def to_sql(self, schema, not_null=False, boolean=False):
v = self.value.to_sql(schema)[0].sql
return wrap([{
"name": ".",
"sql": {"n": "FROM_UNIXTIME" + sql_iso(v.n)}
}])
@extend(LeftOp)
def to_sql(self, schema, not_null=False, boolean=False):
return SqlSubstrOp(
"substr",
[
self.value,
ONE,
self.length
]
).partial_eval().to_sql(schema)
@extend(NotLeftOp)
def to_sql(self, schema, not_null=False, boolean=False):
# test_v = self.value.missing().to_sql(boolean=True)[0].sql.b
# test_l = self.length.missing().to_sql(boolean=True)[0].sql.b
v = self.value.to_sql(schema, not_null=True)[0].sql.s
l = "max(0, " + self.length.to_sql(schema, not_null=True)[0].sql.n + ")"
expr = "substr(" + v + ", " + l + "+1)"
return wrap([{"name": ".", "sql": {"s": expr}}])
@extend(RightOp)
def to_sql(self, schema, not_null=False, boolean=False):
v = self.value.to_sql(schema, not_null=True)[0].sql.s
r = self.length.to_sql(schema, not_null=True)[0].sql.n
l = "max(0, length" + sql_iso(v) + "-max(0, " + r + "))"
expr = "substr(" + v + ", " + l + "+1)"
return wrap([{"name": ".", "sql": {"s": expr}}])
@extend(RightOp)
@simplified
def partial_eval(self):
value = self.value.partial_eval()
length = self.length.partial_eval()
max_length = LengthOp("length", value)
return BasicSubstringOp("substring", [
value,
MaxOp("max", [ZERO, MinOp("min", [max_length, BinaryOp("sub", [max_length, length])])]),
max_length
])
@extend(NotRightOp)
def to_sql(self, schema, not_null=False, boolean=False):
v = self.value.to_sql(schema, not_null=True)[0].sql.s
r = self.length.to_sql(schema, not_null=True)[0].sql.n
l = "max(0, length" + sql_iso(v) + "-max(0, " + r + "))"
expr = "substr" + sql_iso(v + ", 1, " + l)
return wrap([{"name": ".", "sql": {"s": expr}}])
@extend(FindOp)
def to_sql(self, schema, not_null=False, boolean=False):
test = SqlInstrOp("substr", [
SqlSubstrOp("substr", [
self.value,
MultiOp("add", [self.start, ONE]),
NULL
]),
self.find
]).partial_eval()
if boolean:
return test.to_sql(schema)
else:
offset = BinaryOp("sub", [self.start, ONE]).partial_eval()
index = MultiOp("add", [test, offset]).partial_eval()
return WhenOp(
"when",
EqOp("eq", [test, ZERO]),
**{
"then": self.default,
"else": index
}
).partial_eval().to_sql(schema)
@extend(FindOp)
@simplified
def partial_eval(self):
return FindOp(
"find",
[
self.value.partial_eval(),
self.find.partial_eval()
],
**{
"start": self.start.partial_eval(),
"default": self.default.partial_eval()
}
)
@extend(BetweenOp)
def to_sql(self, schema, not_null=False, boolean=False):
return self.partial_eval().to_sql(schema)
@extend(InOp)
def to_sql(self, schema, not_null=False, boolean=False):
if not isinstance(self.superset, Literal):
Log.error("Not supported")
j_value = json2value(self.superset.json)
if j_value:
        var = self.value.to_sql(schema)[0].sql.values()[0]
        return wrap([{"name": ".", "sql": {
            "b": SQL_OR.join(sql_iso(var + "==" + quote_value(v)) for v in j_value)
        }}])
else:
return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
@extend(RangeOp)
def to_sql(self, schema, not_null=False, boolean=False):
when = self.when.to_sql(schema, boolean=True)[0].sql
then = self.then.to_sql(schema, not_null=not_null)[0].sql
els_ = self.els_.to_sql(schema, not_null=not_null)[0].sql
output = {}
for t in "bsn":
if then[t] == None:
if els_[t] == None:
pass
else:
output[t] = SQL_CASE+SQL_WHEN + when.b + SQL_THEN + SQL_NULL + SQL_ELSE + els_[t] + SQL_END
else:
if els_[t] == None:
output[t] = SQL_CASE+SQL_WHEN + when.b + SQL_THEN + then[t] + SQL_END
else:
output[t] = SQL_CASE+SQL_WHEN + when.b + SQL_THEN + then[t] + SQL_ELSE + els_[t] + SQL_END
if not output:
return wrap([{"name": ".", "sql": {"0": SQL_NULL}}])
else:
return wrap([{"name": ".", "sql": output}])
@extend(CaseOp)
def to_sql(self, schema, not_null=False, boolean=False):
if len(self.whens) == 1:
return self.whens[-1].to_sql(schema)
output = {}
for t in "bsn": # EXPENSIVE LOOP to_sql() RUN 3 TIMES
els_ = coalesce(self.whens[-1].to_sql(schema)[0].sql[t], SQL_NULL)
acc = SQL_ELSE + els_ + SQL_END
for w in reversed(self.whens[0:-1]):
acc = SQL_WHEN + w.when.to_sql(schema, boolean=True)[0].sql.b + SQL_THEN + coalesce(w.then.to_sql(schema)[0].sql[t], SQL_NULL) + acc
output[t] = SQL_CASE + acc
return wrap([{"name": ".", "sql": output}])
@extend(SqlEqOp)
def to_sql(self, schema, not_null=False, boolean=False):
lhs = self.lhs.partial_eval().to_sql(schema)[0].sql.values()[0]
rhs = self.rhs.partial_eval().to_sql(schema)[0].sql.values()[0]
return wrap([{"name": ".", "sql": {
"b": sql_iso(lhs) + "=" + sql_iso(rhs)
}}])
@extend(SqlInstrOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.value.to_sql(schema)[0].sql.s
find = self.find.to_sql(schema)[0].sql.s
return wrap([{"name": ".", "sql": {
"n": "INSTR" + sql_iso(sql_list([value, find]))
}}])
@extend(SqlInstrOp)
@simplified
def partial_eval(self):
value = self.value.partial_eval()
find = self.find.partial_eval()
return SqlInstrOp("instr", [value, find])
@extend(SqlSubstrOp)
def to_sql(self, schema, not_null=False, boolean=False):
value = self.value.to_sql(schema)[0].sql.s
start = self.start.to_sql(schema)[0].sql.n
if self.length is NULL:
return wrap([{"name": ".", "sql": {
"s": "SUBSTR" + sql_iso(sql_list([value, start]))
}}])
else:
length = self.length.to_sql(schema)[0].sql.n
return wrap([{"name": ".", "sql": {
"s": "SUBSTR" + sql_iso(sql_list([value, start, length]))
}}])
@extend(SqlSubstrOp)
@simplified
def partial_eval(self):
value = self.value.partial_eval()
start = self.start.partial_eval()
length = self.length.partial_eval()
if isinstance(start, Literal) and start.value == 1:
if length is NULL:
return value
return SqlSubstrOp("substr", [value, start, length])
json_type_to_sql_type = {
"null": "0",
"boolean": "b",
"number": "n",
"string": "s",
"object": "j",
"nested": "j"
}
sql_type_to_json_type = {
"0": "null",
"b": "boolean",
"n": "number",
"s": "string",
"j": "object"
}<|fim▁end|> | |
<|file_name|>project-suggested-contributions.js<|end_file_name|><|fim▁begin|>/**
* window.c.ProjectSuggestedContributions component
* A Project-show page helper to show suggested amounts of contributions
*
* Example of use:
* view: () => {
* ...
* m.component(c.ProjectSuggestedContributions, {project: project})
* ...<|fim▁hole|> */
import m from 'mithril';
import _ from 'underscore';
const projectSuggestedContributions = {
view(ctrl, args) {
const project = args.project();
const suggestionUrl = amount => `/projects/${project.project_id}/contributions/new?amount=${amount}`,
suggestedValues = [10, 25, 50, 100];
return m('#suggestions', _.map(suggestedValues, amount => project ? m(`a[href="${suggestionUrl(amount)}"].card-reward.card-big.card-secondary.u-marginbottom-20`, [
m('.fontsize-larger', `R$ ${amount}`)
]) : ''));
}
};
export default projectSuggestedContributions;<|fim▁end|> | * } |
<|file_name|>LocalDatePatternFormat.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.bindy.format;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import org.apache.camel.dataformat.bindy.PatternFormat;
import org.apache.camel.util.ObjectHelper;<|fim▁hole|> private Locale locale;
public LocalDatePatternFormat() {
}
public LocalDatePatternFormat(String pattern, Locale locale) {
this.pattern = pattern;
this.locale = locale;
}
public String format(LocalDate object) throws Exception {
ObjectHelper.notNull(this.pattern, "pattern");
return this.getDateFormat().format(object);
}
public LocalDate parse(String string) throws Exception {
LocalDate date;
        ObjectHelper.notNull(this.pattern, "pattern");
        DateTimeFormatter df = this.getDateFormat();
if (doesStringFitLengthOfPattern(string)) {
date = LocalDate.parse(string, df);
return date;
} else {
throw new FormatException("Date provided does not fit the pattern defined");
}
}
private boolean doesStringFitLengthOfPattern(String string) {
return string.length() <= this.pattern.length();
}
protected DateTimeFormatter getDateFormat() {
DateTimeFormatter result;
if (locale != null) {
result = DateTimeFormatter.ofPattern(pattern, locale);
} else {
result = DateTimeFormatter.ofPattern(pattern);
}
return result;
}
public String getPattern() {
return pattern;
}
/**
* Sets the pattern
*
* @param pattern the pattern
*/
public void setPattern(String pattern) {
this.pattern = pattern;
}
}<|fim▁end|> |
public class LocalDatePatternFormat implements PatternFormat<LocalDate> {
private String pattern; |
<|file_name|>require.js<|end_file_name|><|fim▁begin|>/** vim: et:ts=4:sw=4:sts=4
* @license RequireJS 2.1.5 Copyright (c) 2010-2012, The Dojo Foundation All Rights Reserved.
* Available via the MIT or new BSD license.
* see: http://github.com/jrburke/requirejs for details
*/
//Not using strict: uneven strict support in browsers, #392, and causes
//problems with requirejs.exec()/transpiler plugins that may not be strict.
/*jslint regexp: true, nomen: true, sloppy: true */
/*global window, navigator, document, importScripts, setTimeout, opera */
var requirejs, require, define;
(function (global) {
var req, s, head, baseElement, dataMain, src,
interactiveScript, currentlyAddingScript, mainScript, subPath,
version = '2.1.5',
commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg,
cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,
jsSuffixRegExp = /\.js$/,
currDirRegExp = /^\.\//,
op = Object.prototype,
ostring = op.toString,
hasOwn = op.hasOwnProperty,
ap = Array.prototype,
apsp = ap.splice,
isBrowser = !!(typeof window !== 'undefined' && navigator && document),
isWebWorker = !isBrowser && typeof importScripts !== 'undefined',
//PS3 indicates loaded and complete, but need to wait for complete
//specifically. Sequence is 'loading', 'loaded', execution,
// then 'complete'. The UA check is unfortunate, but not sure how
//to feature test w/o causing perf issues.
readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ?
/^complete$/ : /^(complete|loaded)$/,
defContextName = '_',
//Oh the tragedy, detecting opera. See the usage of isOpera for reason.
isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]',
contexts = {},
cfg = {},
globalDefQueue = [],
useInteractive = false;
function isFunction(it) {
return ostring.call(it) === '[object Function]';
}
function isArray(it) {
return ostring.call(it) === '[object Array]';
}
/**
* Helper function for iterating over an array. If the func returns
* a true value, it will break out of the loop.
*/
function each(ary, func) {
if (ary) {
var i;
for (i = 0; i < ary.length; i += 1) {
if (ary[i] && func(ary[i], i, ary)) {
break;
}
}
}
}
/**
* Helper function for iterating over an array backwards. If the func
* returns a true value, it will break out of the loop.
*/
function eachReverse(ary, func) {
if (ary) {
var i;
for (i = ary.length - 1; i > -1; i -= 1) {
if (ary[i] && func(ary[i], i, ary)) {
break;
}
}
}
}
function hasProp(obj, prop) {
return hasOwn.call(obj, prop);
}
function getOwn(obj, prop) {
return hasProp(obj, prop) && obj[prop];
}
/**
* Cycles over properties in an object and calls a function for each
* property value. If the function returns a truthy value, then the
* iteration is stopped.
*/
function eachProp(obj, func) {
var prop;
for (prop in obj) {
if (hasProp(obj, prop)) {
if (func(obj[prop], prop)) {
break;
}
}
}
}
/**
* Simple function to mix in properties from source into target,
* but only if target does not already have a property of the same name.
*/
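    //e.g. mixin({a: 1}, {a: 2, b: 3}) returns {a: 1, b: 3};
    //pass force=true to overwrite properties the target already has.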
function mixin(target, source, force, deepStringMixin) {
if (source) {
eachProp(source, function (value, prop) {
if (force || !hasProp(target, prop)) {
if (deepStringMixin && typeof value !== 'string') {
if (!target[prop]) {
target[prop] = {};
}
mixin(target[prop], value, force, deepStringMixin);
} else {
target[prop] = value;
}
}
});
}
return target;
}
//Similar to Function.prototype.bind, but the 'this' object is specified
//first, since it is easier to read/figure out what 'this' will be.
function bind(obj, fn) {
return function () {
return fn.apply(obj, arguments);
};
}
function scripts() {
return document.getElementsByTagName('script');
}
//Allow getting a global that expressed in
//dot notation, like 'a.b.c'.
function getGlobal(value) {
if (!value) {
return value;
}
var g = global;
each(value.split('.'), function (part) {
g = g[part];
});
return g;
}
/**
* Constructs an error with a pointer to an URL with more information.
* @param {String} id the error ID that maps to an ID on a web page.
* @param {String} message human readable error.
* @param {Error} [err] the original error, if there is one.
*
* @returns {Error}
*/
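    //e.g. makeError('timeout', 'Load timeout for modules: a,b') returns an
    //Error whose message links to requirejs.org/docs/errors.html#timeout and
    //whose requireType is 'timeout'.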
function makeError(id, msg, err, requireModules) {
var e = new Error(msg + '\nhttp://requirejs.org/docs/errors.html#' + id);
e.requireType = id;
e.requireModules = requireModules;
if (err) {
e.originalError = err;
}
return e;
}
if (typeof define !== 'undefined') {
//If a define is already in play via another AMD loader,
//do not overwrite.
return;
}
if (typeof requirejs !== 'undefined') {
if (isFunction(requirejs)) {
            //Do not overwrite an existing requirejs instance.
return;
}
cfg = requirejs;
requirejs = undefined;
}
//Allow for a require config object
if (typeof require !== 'undefined' && !isFunction(require)) {
//assume it is a config object.
cfg = require;
require = undefined;
}
function newContext(contextName) {
var inCheckLoaded, Module, context, handlers,
checkLoadedTimeoutId,
config = {
//Defaults. Do not set a default for map
//config to speed up normalize(), which
//will run faster if there is no default.
waitSeconds: 7,
baseUrl: './',
paths: {},
pkgs: {},
shim: {},
config: {}
},
registry = {},
//registry of just enabled modules, to speed
//cycle breaking code when lots of modules
//are registered, but not activated.
enabledRegistry = {},
undefEvents = {},
defQueue = [],
defined = {},
urlFetched = {},
requireCounter = 1,
unnormalizedCounter = 1;
/**
* Trims the . and .. from an array of path segments.
* It will keep a leading path segment if a .. will become
* the first path segment, to help with module name lookups,
         * which act like paths, but can be remapped. In the end result,
         * all paths that use this function should look normalized.
* NOTE: this method MODIFIES the input array.
* @param {Array} ary the array of path segments.
*/
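        //e.g. ['a', 'b', '..', 'c'] becomes ['a', 'c']; a leading '..' is
        //kept so the segment can still be mapped to a path.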
function trimDots(ary) {
var i, part;
for (i = 0; ary[i]; i += 1) {
part = ary[i];
if (part === '.') {
ary.splice(i, 1);
i -= 1;
} else if (part === '..') {
if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
//End of the line. Keep at least one non-dot
//path segment at the front so it can be mapped
//correctly to disk. Otherwise, there is likely
//no path mapping for a path starting with '..'.
//This can still fail, but catches the most reasonable
//uses of ..
break;
} else if (i > 0) {
ary.splice(i - 1, 2);
i -= 2;
}
}
}
}
/**
* Given a relative module name, like ./something, normalize it to
* a real name that can be mapped to a path.
* @param {String} name the relative name
* @param {String} baseName a real name that the name arg is relative
* to.
* @param {Boolean} applyMap apply the map config to the value. Should
* only be done if this normalization is for a dependency ID.
* @returns {String} normalized name
*/
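        //e.g. normalize('./view', 'app/main/ctrl') -> 'app/main/view' and
        //normalize('../util', 'app/main') -> 'util', before any map config
        //is applied.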
function normalize(name, baseName, applyMap) {
var pkgName, pkgConfig, mapValue, nameParts, i, j, nameSegment,
foundMap, foundI, foundStarMap, starI,
baseParts = baseName && baseName.split('/'),
normalizedBaseParts = baseParts,
map = config.map,
starMap = map && map['*'];
//Adjust any relative paths.
if (name && name.charAt(0) === '.') {
//If have a base name, try to normalize against it,
//otherwise, assume it is a top-level require that will
//be relative to baseUrl in the end.
if (baseName) {
if (getOwn(config.pkgs, baseName)) {
//If the baseName is a package name, then just treat it as one
//name to concat the name with.
normalizedBaseParts = baseParts = [baseName];
} else {
//Convert baseName to array, and lop off the last part,
//so that . matches that 'directory' and not name of the baseName's
//module. For instance, baseName of 'one/two/three', maps to
//'one/two/three.js', but we want the directory, 'one/two' for
//this normalization.
normalizedBaseParts = baseParts.slice(0, baseParts.length - 1);
}
name = normalizedBaseParts.concat(name.split('/'));
trimDots(name);
//Some use of packages may use a . path to reference the
//'main' module name, so normalize for that.
pkgConfig = getOwn(config.pkgs, (pkgName = name[0]));
name = name.join('/');
if (pkgConfig && name === pkgName + '/' + pkgConfig.main) {
name = pkgName;
}
} else if (name.indexOf('./') === 0) {
// No baseName, so this is ID is resolved relative
// to baseUrl, pull off the leading dot.
name = name.substring(2);
}
}
//Apply map config if available.
if (applyMap && map && (baseParts || starMap)) {
nameParts = name.split('/');
for (i = nameParts.length; i > 0; i -= 1) {
nameSegment = nameParts.slice(0, i).join('/');
if (baseParts) {
//Find the longest baseName segment match in the config.
//So, do joins on the biggest to smallest lengths of baseParts.
for (j = baseParts.length; j > 0; j -= 1) {
mapValue = getOwn(map, baseParts.slice(0, j).join('/'));
//baseName segment has config, find if it has one for
//this name.
if (mapValue) {
mapValue = getOwn(mapValue, nameSegment);
if (mapValue) {
//Match, update name to the new value.
foundMap = mapValue;
foundI = i;
break;
}
}
}
}
if (foundMap) {
break;
}
//Check for a star map match, but just hold on to it,
//if there is a shorter segment match later in a matching
//config, then favor over this star map.
if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) {
foundStarMap = getOwn(starMap, nameSegment);
starI = i;
}
}
if (!foundMap && foundStarMap) {
foundMap = foundStarMap;
foundI = starI;
}
if (foundMap) {
nameParts.splice(0, foundI, foundMap);
name = nameParts.join('/');
}
}
return name;
}
function removeScript(name) {
if (isBrowser) {
each(scripts(), function (scriptNode) {
if (scriptNode.getAttribute('data-requiremodule') === name &&
scriptNode.getAttribute('data-requirecontext') === context.contextName) {
scriptNode.parentNode.removeChild(scriptNode);
return true;
}
});
}
}
function hasPathFallback(id) {
var pathConfig = getOwn(config.paths, id);
if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) {
removeScript(id);
//Pop off the first array value, since it failed, and
//retry
pathConfig.shift();
context.require.undef(id);
context.require([id]);
return true;
}
}
//Turns a plugin!resource to [plugin, resource]
//with the plugin being undefined if the name
//did not have a plugin prefix.
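        //e.g. splitPrefix('text!tmpl/row.html') -> ['text', 'tmpl/row.html'],
        //splitPrefix('app/main') -> [undefined, 'app/main'].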
function splitPrefix(name) {
var prefix,
index = name ? name.indexOf('!') : -1;
if (index > -1) {
prefix = name.substring(0, index);
name = name.substring(index + 1, name.length);
}
return [prefix, name];
}
/**
* Creates a module mapping that includes plugin prefix, module
* name, and path. If parentModuleMap is provided it will
* also normalize the name via require.normalize()
*
* @param {String} name the module name
* @param {String} [parentModuleMap] parent module map
* for the module name, used to resolve relative names.
* @param {Boolean} isNormalized: is the ID already normalized.
* This is true if this call is done for a define() module ID.
* @param {Boolean} applyMap: apply the map config to the ID.
* Should only be true if this map is for a dependency.
*
* @returns {Object}
*/
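        //e.g. makeModuleMap('text!row.html', parentMap) yields a map with
        //prefix 'text', name 'row.html' and id 'text!row.html', with the
        //name normalized relative to the parent module.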
function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
var url, pluginModule, suffix, nameParts,
prefix = null,
parentName = parentModuleMap ? parentModuleMap.name : null,
originalName = name,
isDefine = true,
normalizedName = '';
//If no name, then it means it is a require call, generate an
//internal name.
if (!name) {
isDefine = false;
name = '_@r' + (requireCounter += 1);
}
nameParts = splitPrefix(name);
prefix = nameParts[0];
name = nameParts[1];
if (prefix) {
prefix = normalize(prefix, parentName, applyMap);
pluginModule = getOwn(defined, prefix);
}
//Account for relative paths if there is a base name.
if (name) {
if (prefix) {
if (pluginModule && pluginModule.normalize) {
//Plugin is loaded, use its normalize method.
normalizedName = pluginModule.normalize(name, function (name) {
return normalize(name, parentName, applyMap);
});
} else {
normalizedName = normalize(name, parentName, applyMap);
}
} else {
//A regular module.
normalizedName = normalize(name, parentName, applyMap);
//Normalized name may be a plugin ID due to map config
//application in normalize. The map config values must
//already be normalized, so do not need to redo that part.
nameParts = splitPrefix(normalizedName);
prefix = nameParts[0];
normalizedName = nameParts[1];
isNormalized = true;
url = context.nameToUrl(normalizedName);
}
}
//If the id is a plugin id that cannot be determined if it needs
//normalization, stamp it with a unique ID so two matching relative
//ids that may conflict can be separate.
suffix = prefix && !pluginModule && !isNormalized ?
'_unnormalized' + (unnormalizedCounter += 1) :
'';
return {
prefix: prefix,
name: normalizedName,
parentMap: parentModuleMap,
unnormalized: !!suffix,
url: url,
originalName: originalName,
isDefine: isDefine,
id: (prefix ?
prefix + '!' + normalizedName :
normalizedName) + suffix
};
}
function getModule(depMap) {
var id = depMap.id,
mod = getOwn(registry, id);
if (!mod) {
mod = registry[id] = new context.Module(depMap);
}
return mod;
}
function on(depMap, name, fn) {
var id = depMap.id,
mod = getOwn(registry, id);
if (hasProp(defined, id) &&
(!mod || mod.defineEmitComplete)) {
if (name === 'defined') {
fn(defined[id]);
}
} else {
getModule(depMap).on(name, fn);
}
}
function onError(err, errback) {
var ids = err.requireModules,
notified = false;
if (errback) {
errback(err);
} else {
each(ids, function (id) {
var mod = getOwn(registry, id);
if (mod) {
//Set error on module, so it skips timeout checks.
mod.error = err;
if (mod.events.error) {
notified = true;
mod.emit('error', err);
}
}
});
if (!notified) {
req.onError(err);
}
}
}
/**
* Internal method to transfer globalQueue items to this context's
* defQueue.
*/
function takeGlobalQueue() {
//Push all the globalDefQueue items into the context's defQueue
if (globalDefQueue.length) {
//Array splice in the values since the context code has a
//local var ref to defQueue, so cannot just reassign the one
//on context.
apsp.apply(defQueue,
[defQueue.length - 1, 0].concat(globalDefQueue));
globalDefQueue = [];
}
}
handlers = {
'require': function (mod) {
if (mod.require) {
return mod.require;
} else {
return (mod.require = context.makeRequire(mod.map));
}
},
'exports': function (mod) {
mod.usingExports = true;
if (mod.map.isDefine) {
if (mod.exports) {
return mod.exports;
} else {
return (mod.exports = defined[mod.map.id] = {});
}
}
},
'module': function (mod) {
if (mod.module) {
return mod.module;
} else {
return (mod.module = {
id: mod.map.id,
uri: mod.map.url,
config: function () {
return (config.config && getOwn(config.config, mod.map.id)) || {};
},
exports: defined[mod.map.id]
});
}
}
};
function cleanRegistry(id) {
//Clean up machinery used for waiting modules.
delete registry[id];
delete enabledRegistry[id];
}
function breakCycle(mod, traced, processed) {
var id = mod.map.id;
if (mod.error) {
mod.emit('error', mod.error);
} else {
traced[id] = true;
each(mod.depMaps, function (depMap, i) {
var depId = depMap.id,
dep = getOwn(registry, depId);
//Only force things that have not completed
//being defined, so still in the registry,
//and only if it has not been matched up
//in the module already.
if (dep && !mod.depMatched[i] && !processed[depId]) {
if (getOwn(traced, depId)) {
mod.defineDep(i, defined[depId]);
mod.check(); //pass false?
} else {
breakCycle(dep, traced, processed);
}
}
});
processed[id] = true;
}
}
function checkLoaded() {
var map, modId, err, usingPathFallback,
waitInterval = config.waitSeconds * 1000,
//It is possible to disable the wait interval by using waitSeconds of 0.
expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(),
noLoads = [],
reqCalls = [],
stillLoading = false,
needCycleCheck = true;
//Do not bother if this call was a result of a cycle break.
if (inCheckLoaded) {
return;
}
inCheckLoaded = true;
//Figure out the state of all the modules.
eachProp(enabledRegistry, function (mod) {
map = mod.map;
modId = map.id;
//Skip things that are not enabled or in error state.
if (!mod.enabled) {
return;
}
if (!map.isDefine) {
reqCalls.push(mod);
}
if (!mod.error) {
//If the module should be executed, and it has not
//been inited and time is up, remember it.
if (!mod.inited && expired) {
if (hasPathFallback(modId)) {
usingPathFallback = true;
stillLoading = true;
} else {
noLoads.push(modId);
removeScript(modId);
}
} else if (!mod.inited && mod.fetched && map.isDefine) {
stillLoading = true;
if (!map.prefix) {
//No reason to keep looking for unfinished
//loading. If the only stillLoading is a
//plugin resource though, keep going,
//because it may be that a plugin resource
//is waiting on a non-plugin cycle.
return (needCycleCheck = false);
}
}
}
});
if (expired && noLoads.length) {
//If wait time expired, throw error of unloaded modules.
err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads);
err.contextName = context.contextName;
return onError(err);
}
//Not expired, check for a cycle.
if (needCycleCheck) {
each(reqCalls, function (mod) {
breakCycle(mod, {}, {});
});
}
//If still waiting on loads, and the waiting load is something
//other than a plugin resource, or there are still outstanding
//scripts, then just try back later.
if ((!expired || usingPathFallback) && stillLoading) {
//Something is still waiting to load. Wait for it, but only
//if a timeout is not already in effect.
if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) {
checkLoadedTimeoutId = setTimeout(function () {
checkLoadedTimeoutId = 0;
checkLoaded();
}, 50);
}
}
inCheckLoaded = false;
}
Module = function (map) {
this.events = getOwn(undefEvents, map.id) || {};
this.map = map;
this.shim = getOwn(config.shim, map.id);
this.depExports = [];
this.depMaps = [];
this.depMatched = [];
this.pluginMaps = {};
this.depCount = 0;
/* this.exports this.factory
this.depMaps = [],
this.enabled, this.fetched
*/
};
Module.prototype = {
init: function (depMaps, factory, errback, options) {
options = options || {};
//Do not do more inits if already done. Can happen if there
//are multiple define calls for the same module. That is not
//a normal, common case, but it is also not unexpected.
if (this.inited) {
return;
}
this.factory = factory;
if (errback) {
//Register for errors on this module.
this.on('error', errback);
} else if (this.events.error) {
//If no errback already, but there are error listeners
//on this module, set up an errback to pass to the deps.
errback = bind(this, function (err) {
this.emit('error', err);
});
}
//Do a copy of the dependency array, so that
//source inputs are not modified. For example
//"shim" deps are passed in here directly, and
//doing a direct modification of the depMaps array
//would affect that config.
this.depMaps = depMaps && depMaps.slice(0);
this.errback = errback;
                //Indicate this module has been initialized
this.inited = true;
this.ignore = options.ignore;
//Could have option to init this module in enabled mode,
//or could have been previously marked as enabled. However,
//the dependencies are not known until init is called. So
//if enabled previously, now trigger dependencies as enabled.
if (options.enabled || this.enabled) {
//Enable this module and dependencies.
//Will call this.check()
this.enable();
} else {
this.check();
}
},
defineDep: function (i, depExports) {
//Because of cycles, defined callback for a given
//export can be called more than once.
if (!this.depMatched[i]) {
this.depMatched[i] = true;
this.depCount -= 1;
this.depExports[i] = depExports;
}
},
fetch: function () {
if (this.fetched) {
return;
}
this.fetched = true;
context.startTime = (new Date()).getTime();
var map = this.map;
//If the manager is for a plugin managed resource,
//ask the plugin to load it now.
if (this.shim) {
context.makeRequire(this.map, {
enableBuildCallback: true
})(this.shim.deps || [], bind(this, function () {
return map.prefix ? this.callPlugin() : this.load();
}));
} else {
//Regular dependency.
return map.prefix ? this.callPlugin() : this.load();
}
},
load: function () {
var url = this.map.url;
//Regular dependency.
if (!urlFetched[url]) {
urlFetched[url] = true;
context.load(this.map.id, url);
}
},
/**
* Checks if the module is ready to define itself, and if so,
* define it.
*/
check: function () {
if (!this.enabled || this.enabling) {
return;
}
var err, cjsModule,
id = this.map.id,
depExports = this.depExports,
exports = this.exports,
factory = this.factory;
if (!this.inited) {
this.fetch();
} else if (this.error) {
this.emit('error', this.error);
} else if (!this.defining) {
//The factory could trigger another require call
//that would result in checking this module to
//define itself again. If already in the process
//of doing that, skip this work.
this.defining = true;
if (this.depCount < 1 && !this.defined) {
if (isFunction(factory)) {
//If there is an error listener, favor passing
//to that instead of throwing an error.
if (this.events.error) {
try {
exports = context.execCb(id, factory, depExports, exports);
} catch (e) {
err = e;
}
} else {
exports = context.execCb(id, factory, depExports, exports);
}
if (this.map.isDefine) {
//If setting exports via 'module' is in play,
//favor that over return value and exports. After that,
//favor a non-undefined return value over exports use.
cjsModule = this.module;
if (cjsModule &&
cjsModule.exports !== undefined &&
//Make sure it is not already the exports value
cjsModule.exports !== this.exports) {
exports = cjsModule.exports;
} else if (exports === undefined && this.usingExports) {
//exports already set the defined value.
exports = this.exports;
}
}
if (err) {
err.requireMap = this.map;
err.requireModules = [this.map.id];
err.requireType = 'define';
return onError((this.error = err));
}
} else {
//Just a literal value
exports = factory;
}
this.exports = exports;
if (this.map.isDefine && !this.ignore) {
defined[id] = exports;
if (req.onResourceLoad) {
req.onResourceLoad(context, this.map, this.depMaps);
}
}
//Clean up
cleanRegistry(id);
this.defined = true;
}
//Finished the define stage. Allow calling check again
//to allow define notifications below in the case of a
//cycle.
this.defining = false;
if (this.defined && !this.defineEmitted) {
this.defineEmitted = true;
this.emit('defined', this.exports);
this.defineEmitComplete = true;
}
}
},
callPlugin: function () {
var map = this.map,
id = map.id,
//Map already normalized the prefix.
pluginMap = makeModuleMap(map.prefix);
//Mark this as a dependency for this plugin, so it
//can be traced for cycles.
this.depMaps.push(pluginMap);
on(pluginMap, 'defined', bind(this, function (plugin) {
var load, normalizedMap, normalizedMod,
name = this.map.name,
parentName = this.map.parentMap ? this.map.parentMap.name : null,
localRequire = context.makeRequire(map.parentMap, {
enableBuildCallback: true
});
//If current map is not normalized, wait for that
//normalized name to load instead of continuing.
if (this.map.unnormalized) {
//Normalize the ID if the plugin allows it.
if (plugin.normalize) {
name = plugin.normalize(name, function (name) {
return normalize(name, parentName, true);
}) || '';
}
//prefix and name should already be normalized, no need
//for applying map config again either.
normalizedMap = makeModuleMap(map.prefix + '!' + name,
this.map.parentMap);
on(normalizedMap,
'defined', bind(this, function (value) {
this.init([], function () { return value; }, null, {
enabled: true,
ignore: true
});
}));
normalizedMod = getOwn(registry, normalizedMap.id);
if (normalizedMod) {
//Mark this as a dependency for this plugin, so it
//can be traced for cycles.
this.depMaps.push(normalizedMap);
if (this.events.error) {
normalizedMod.on('error', bind(this, function (err) {
this.emit('error', err);
}));
}
normalizedMod.enable();
}
return;
}
load = bind(this, function (value) {
this.init([], function () { return value; }, null, {
enabled: true
});
});
load.error = bind(this, function (err) {
this.inited = true;
this.error = err;
err.requireModules = [id];
//Remove temp unnormalized modules for this module,
//since they will never be resolved otherwise now.
eachProp(registry, function (mod) {
if (mod.map.id.indexOf(id + '_unnormalized') === 0) {
cleanRegistry(mod.map.id);
}
});
onError(err);
});
//Allow plugins to load other code without having to know the
//context or how to 'complete' the load.
load.fromText = bind(this, function (text, textAlt) {
/*jslint evil: true */
var moduleName = map.name,
moduleMap = makeModuleMap(moduleName),
hasInteractive = useInteractive;
//As of 2.1.0, support just passing the text, to reinforce
//fromText only being called once per resource. Still
//support old style of passing moduleName but discard
//that moduleName in favor of the internal ref.
if (textAlt) {
text = textAlt;
}
//Turn off interactive script matching for IE for any define
//calls in the text, then turn it back on at the end.
if (hasInteractive) {
useInteractive = false;
}
//Prime the system by creating a module instance for
//it.
getModule(moduleMap);
//Transfer any config to this other module.
if (hasProp(config.config, id)) {
config.config[moduleName] = config.config[id];
}
try {
req.exec(text);
} catch (e) {
return onError(makeError('fromtexteval',
'fromText eval for ' + id +
' failed: ' + e,
e,
[id]));
}
if (hasInteractive) {
useInteractive = true;
}
//Mark this as a dependency for the plugin
//resource
this.depMaps.push(moduleMap);
//Support anonymous modules.
context.completeLoad(moduleName);
//Bind the value of that module to the value for this
//resource ID.
localRequire([moduleName], load);
});
//Use parentName here since the plugin's name is not reliable,
//could be some weird string with no path that actually wants to
//reference the parentName's path.
plugin.load(map.name, localRequire, load, config);
}));
context.enable(pluginMap, this);
this.pluginMaps[pluginMap.id] = pluginMap;
},
enable: function () {
enabledRegistry[this.map.id] = this;
this.enabled = true;
//Set flag mentioning that the module is enabling,
//so that immediate calls to the defined callbacks
//for dependencies do not trigger inadvertent load
//with the depCount still being zero.
this.enabling = true;
//Enable each dependency
each(this.depMaps, bind(this, function (depMap, i) {
var id, mod, handler;
if (typeof depMap === 'string') {
//Dependency needs to be converted to a depMap
//and wired up to this module.
depMap = makeModuleMap(depMap,
(this.map.isDefine ? this.map : this.map.parentMap),
false,
!this.skipMap);
this.depMaps[i] = depMap;
handler = getOwn(handlers, depMap.id);
if (handler) {
this.depExports[i] = handler(this);
return;
}
this.depCount += 1;
on(depMap, 'defined', bind(this, function (depExports) {
this.defineDep(i, depExports);
this.check();
}));
if (this.errback) {
on(depMap, 'error', this.errback);
}
}
id = depMap.id;
mod = registry[id];
//Skip special modules like 'require', 'exports', 'module'
//Also, don't call enable if it is already enabled,
//important in circular dependency cases.
if (!hasProp(handlers, id) && mod && !mod.enabled) {
context.enable(depMap, this);
}
}));
//Enable each plugin that is used in
//a dependency
eachProp(this.pluginMaps, bind(this, function (pluginMap) {
var mod = getOwn(registry, pluginMap.id);
if (mod && !mod.enabled) {
context.enable(pluginMap, this);
}
}));
this.enabling = false;
this.check();
},
on: function (name, cb) {
var cbs = this.events[name];
if (!cbs) {
cbs = this.events[name] = [];
}
cbs.push(cb);
},
emit: function (name, evt) {
each(this.events[name], function (cb) {
cb(evt);
});
if (name === 'error') {
//Now that the error handler was triggered, remove
//the listeners, since this broken Module instance
//can stay around for a while in the registry.
delete this.events[name];
}
}
};
function callGetModule(args) {
//Skip modules already defined.
if (!hasProp(defined, args[0])) {
getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]);
}
}
function removeListener(node, func, name, ieName) {
//Favor detachEvent because of IE9
//issue, see attachEvent/addEventListener comment elsewhere
//in this file.
if (node.detachEvent && !isOpera) {
//Probably IE. If not it will throw an error, which will be
//useful to know.
if (ieName) {
node.detachEvent(ieName, func);
}
} else {
node.removeEventListener(name, func, false);
}
}
/**
* Given an event from a script node, get the requirejs info from it,
* and then removes the event listeners on the node.
* @param {Event} evt
* @returns {Object}
*/
function getScriptData(evt) {
//Using currentTarget instead of target for Firefox 2.0's sake. Not
//all old browsers will be supported, but this one was easy enough
//to support and still makes sense.
var node = evt.currentTarget || evt.srcElement;
//Remove the listeners once here.
removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange');
removeListener(node, context.onScriptError, 'error');
return {
node: node,
id: node && node.getAttribute('data-requiremodule')
};
}
function intakeDefines() {
var args;
//Any defined modules in the global queue, intake them now.
takeGlobalQueue();
//Make sure any remaining defQueue items get properly processed.
while (defQueue.length) {
args = defQueue.shift();
if (args[0] === null) {
return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' + args[args.length - 1]));
} else {
//args are id, deps, factory. Should be normalized by the
//define() function.
callGetModule(args);
}
}
}
context = {
config: config,
contextName: contextName,
registry: registry,
defined: defined,
urlFetched: urlFetched,
defQueue: defQueue,
Module: Module,
makeModuleMap: makeModuleMap,
nextTick: req.nextTick,
onError: onError,
/**
* Set a configuration for the context.
* @param {Object} cfg config object to integrate.
*/
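            //e.g. configure({baseUrl: 'js/lib', paths: {app: '../app'}})
            //merges additively into the current config; a missing trailing
            //slash on baseUrl is appended below.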
configure: function (cfg) {
//Make sure the baseUrl ends in a slash.
if (cfg.baseUrl) {
if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') {
cfg.baseUrl += '/';
}
}
//Save off the paths and packages since they require special processing,
//they are additive.
var pkgs = config.pkgs,
shim = config.shim,
objs = {
paths: true,
config: true,
map: true
};
eachProp(cfg, function (value, prop) {
if (objs[prop]) {
if (prop === 'map') {
if (!config.map) {
config.map = {};
}
mixin(config[prop], value, true, true);
} else {
mixin(config[prop], value, true);
}
} else {
config[prop] = value;
}
});
//Merge shim
if (cfg.shim) {
eachProp(cfg.shim, function (value, id) {
//Normalize the structure
if (isArray(value)) {
value = {
deps: value
};
}
if ((value.exports || value.init) && !value.exportsFn) {
value.exportsFn = context.makeShimExports(value);
}
shim[id] = value;
});
config.shim = shim;
}
//Adjust packages if necessary.
if (cfg.packages) {
each(cfg.packages, function (pkgObj) {
var location;
pkgObj = typeof pkgObj === 'string' ? { name: pkgObj } : pkgObj;
location = pkgObj.location;
//Create a brand new object on pkgs, since currentPackages can
//be passed in again, and config.pkgs is the internal transformed
//state for all package configs.
pkgs[pkgObj.name] = {
name: pkgObj.name,
location: location || pkgObj.name,
//Remove leading dot in main, so main paths are normalized,
//and remove any trailing .js, since different package
//envs have different conventions: some use a module name,
//some use a file name.
main: (pkgObj.main || 'main')
.replace(currDirRegExp, '')
.replace(jsSuffixRegExp, '')
};
});
                    //Done with modifications, assign packages back to context config
config.pkgs = pkgs;
}
//If there are any "waiting to execute" modules in the registry,
//update the maps for them, since their info, like URLs to load,
//may have changed.
eachProp(registry, function (mod, id) {
//If module already has init called, since it is too
//late to modify them, and ignore unnormalized ones
//since they are transient.
if (!mod.inited && !mod.map.unnormalized) {
mod.map = makeModuleMap(id);
}
});
//If a deps array or a config callback is specified, then call
//require with those args. This is useful when require is defined as a
//config object before require.js is loaded.
if (cfg.deps || cfg.callback) {
context.require(cfg.deps || [], cfg.callback);
}
},
makeShimExports: function (value) {
function fn() {
var ret;
if (value.init) {
ret = value.init.apply(global, arguments);
}
return ret || (value.exports && getGlobal(value.exports));
}
return fn;
},
makeRequire: function (relMap, options) {
options = options || {};
function localRequire(deps, callback, errback) {
var id, map, requireMod;
if (options.enableBuildCallback && callback && isFunction(callback)) {
callback.__requireJsBuild = true;
}
if (typeof deps === 'string') {
if (isFunction(callback)) {
//Invalid call
return onError(makeError('requireargs', 'Invalid require call'), errback);
}
//If require|exports|module are requested, get the
//value for them from the special handlers. Caveat:
//this only works while module is being defined.
if (relMap && hasProp(handlers, deps)) {
return handlers[deps](registry[relMap.id]);
}
//Synchronous access to one module. If require.get is
//available (as in the Node adapter), prefer that.
if (req.get) {
return req.get(context, deps, relMap, localRequire);
}
//Normalize module name, if it contains . or ..
map = makeModuleMap(deps, relMap, false, true);
id = map.id;
if (!hasProp(defined, id)) {
return onError(makeError('notloaded', 'Module name "' +
id +
'" has not been loaded yet for context: ' +
contextName +
(relMap ? '' : '. Use require([])')));
}
return defined[id];
}
//Grab defines waiting in the global queue.
intakeDefines();
//Mark all the dependencies as needing to be loaded.
context.nextTick(function () {
//Some defines could have been added since the
//require call, collect them.
intakeDefines();
requireMod = getModule(makeModuleMap(null, relMap));
//Store if map config should be applied to this require
//call for dependencies.
requireMod.skipMap = options.skipMap;
requireMod.init(deps, callback, errback, {
enabled: true
});
checkLoaded();
});
return localRequire;
}
mixin(localRequire, {
isBrowser: isBrowser,
/**
* Converts a module name + .extension into an URL path.
* *Requires* the use of a module name. It does not support using
* plain URLs like nameToUrl.
*/
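                //e.g. with no matching paths config, toUrl('views/row.html')
                //resolves to baseUrl + 'views/row.html'; the '.html' extension
                //is split off so the name can be normalized as a module ID.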
toUrl: function (moduleNamePlusExt) {
var ext,
index = moduleNamePlusExt.lastIndexOf('.'),
segment = moduleNamePlusExt.split('/')[0],
isRelative = segment === '.' || segment === '..';
//Have a file extension alias, and it is not the
//dots from a relative path.
if (index !== -1 && (!isRelative || index > 1)) {
ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
}
return context.nameToUrl(normalize(moduleNamePlusExt,
relMap && relMap.id, true), ext, true);
},
defined: function (id) {
return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
},
specified: function (id) {
id = makeModuleMap(id, relMap, false, true).id;
return hasProp(defined, id) || hasProp(registry, id);
}
});
//Only allow undef on top level require calls
if (!relMap) {
localRequire.undef = function (id) {
//Bind any waiting define() calls to this context,
//fix for #408
takeGlobalQueue();
var map = makeModuleMap(id, relMap, true),
mod = getOwn(registry, id);
delete defined[id];
delete urlFetched[map.url];
delete undefEvents[id];
if (mod) {
//Hold on to listeners in case the
//module will be attempted to be reloaded
//using a different config.
if (mod.events.defined) {
undefEvents[id] = mod.events;
}
cleanRegistry(id);
}
};
}
return localRequire;
},
/**
* Called to enable a module if it is still in the registry
* awaiting enablement. A second arg, parent, the parent module,
             * is passed in for context, when this method is overridden by
* the optimizer. Not shown here to keep code compact.
*/
enable: function (depMap) {
var mod = getOwn(registry, depMap.id);
if (mod) {
getModule(depMap).enable();
}
},
/**
* Internal method used by environment adapters to complete a load event.
* A load event could be a script load or just a load pass from a synchronous
* load call.
* @param {String} moduleName the name of the module to potentially complete.
*/
completeLoad: function (moduleName) {
var found, args, mod,
shim = getOwn(config.shim, moduleName) || {},
shExports = shim.exports;
takeGlobalQueue();
while (defQueue.length) {
args = defQueue.shift();
if (args[0] === null) {
args[0] = moduleName;
//If already found an anonymous module and bound it
//to this name, then this is some other anon module
//waiting for its completeLoad to fire.
if (found) {
break;
}
found = true;
} else if (args[0] === moduleName) {
//Found matching define call for this script!
found = true;
}
callGetModule(args);
}
//Do this after the cycle of callGetModule in case the result
//of those calls/init calls changes the registry.
mod = getOwn(registry, moduleName);
if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
if (hasPathFallback(moduleName)) {
return;
} else {
return onError(makeError('nodefine',
'No define call for ' + moduleName,
null,
[moduleName]));
}
} else {
//A script that does not call define(), so just simulate
//the call for it.
callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
}
}
checkLoaded();
},
/**
* Converts a module name to a file path. Supports cases where
* moduleName may actually be just an URL.
* Note that it **does not** call normalize on the moduleName,
* it is assumed to have already been normalized. This is an
* internal API, not a public one. Use toUrl for the public API.
*/
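            //e.g. with baseUrl 'js/' and no paths entry, nameToUrl('app/main')
            //returns 'js/app/main.js', while 'http://x/y.js' is returned
            //unchanged because it already looks like a URL.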
nameToUrl: function (moduleName, ext, skipExt) {
var paths, pkgs, pkg, pkgPath, syms, i, parentModule, url,
parentPath;
//If a colon is in the URL, it indicates a protocol is used and it is just
//an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
//or ends with .js, then assume the user meant to use an url and not a module id.
//The slash is important for protocol-less URLs as well as full paths.
if (req.jsExtRegExp.test(moduleName)) {
//Just a plain path, not module name lookup, so just return it.
//Add extension if it is included. This is a bit wonky, only non-.js things pass
//an extension, this method probably needs to be reworked.
url = moduleName + (ext || '');
} else {
//A module that needs to be converted to a path.
paths = config.paths;
pkgs = config.pkgs;
syms = moduleName.split('/');
//For each module name segment, see if there is a path
//registered for it. Start with most specific name
//and work up from it.
for (i = syms.length; i > 0; i -= 1) {
parentModule = syms.slice(0, i).join('/');
pkg = getOwn(pkgs, parentModule);
parentPath = getOwn(paths, parentModule);
if (parentPath) {
//If an array, it means there are a few choices,
//Choose the one that is desired
if (isArray(parentPath)) {
parentPath = parentPath[0];
}
syms.splice(0, i, parentPath);
break;
} else if (pkg) {
//If module name is just the package name, then looking
//for the main module.
if (moduleName === pkg.name) {
pkgPath = pkg.location + '/' + pkg.main;
} else {
pkgPath = pkg.location;
}
syms.splice(0, i, pkgPath);
break;
}
}
//Join the path parts together, then figure out if baseUrl is needed.
url = syms.join('/');
url += (ext || (/\?/.test(url) || skipExt ? '' : '.js'));
url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
}
return config.urlArgs ? url +
((url.indexOf('?') === -1 ? '?' : '&') +
config.urlArgs) : url;
},
//Delegates to req.load. Broken out as a separate function to
//allow overriding in the optimizer.
load: function (id, url) {
req.load(context, id, url);
},
/**
             * Executes a module callback function. Broken out as a separate function
* solely to allow the build system to sequence the files in the built
* layer in the right sequence.
*
* @private
*/
execCb: function (name, callback, args, exports) {
return callback.apply(exports, args);
},
/**
* callback for script loads, used to check status of loading.
*
* @param {Event} evt the event from the browser for the script
* that was loaded.
*/
onScriptLoad: function (evt) {
//Using currentTarget instead of target for Firefox 2.0's sake. Not
//all old browsers will be supported, but this one was easy enough
//to support and still makes sense.
if (evt.type === 'load' ||
(readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
//Reset interactive script so a script node is not held onto for
//to long.
interactiveScript = null;
//Pull out the name of the module and the context.
var data = getScriptData(evt);
context.completeLoad(data.id);
}
},
/**
* Callback for script errors.
*/
onScriptError: function (evt) {
var data = getScriptData(evt);
if (!hasPathFallback(data.id)) {
return onError(makeError('scripterror', 'Script error', evt, [data.id]));
}
}
};
context.require = context.makeRequire();
return context;
}
/**
* Main entry point.
*
* If the only argument to require is a string, then the module that
* is represented by that string is fetched for the appropriate context.
*
* If the first argument is an array, then it will be treated as an array
* of dependency string names to fetch. An optional function callback can
* be specified to execute when all of those dependencies are available.
*
* Make a local req variable to help Caja compliance (it assumes things
* on a require that are not standardized), and to give a short
* name for minification/local scope use.
*/
req = requirejs = function (deps, callback, errback, optional) {
//Find the right context, use default
var context, config,
contextName = defContextName;
// Determine if have config object in the call.
if (!isArray(deps) && typeof deps !== 'string') {
// deps is a config object
config = deps;
if (isArray(callback)) {
// Adjust args if there are dependencies
deps = callback;
callback = errback;
errback = optional;
} else {
deps = [];
}
}
if (config && config.context) {
contextName = config.context;
}
context = getOwn(contexts, contextName);
if (!context) {
context = contexts[contextName] = req.s.newContext(contextName);
}
if (config) {
context.configure(config);
}
return context.require(deps, callback, errback);
};
/**
* Support require.config() to make it easier to cooperate with other
* AMD loaders on globally agreed names.
*/
req.config = function (config) {
return req(config);
};
/**
* Execute something after the current tick
* of the event loop. Override for other envs
* that have a better solution than setTimeout.
* @param {Function} fn function to execute later.
*/
req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) {
setTimeout(fn, 4);
} : function (fn) { fn(); };
/**
* Export require as a global, but only if it does not already exist.
*/
if (!require) {
require = req;
}
req.version = version;
//Used to filter out dependencies that are already paths.
req.jsExtRegExp = /^\/|:|\?|\.js$/;
req.isBrowser = isBrowser;
s = req.s = {
contexts: contexts,
newContext: newContext
};
//Create default context.
req({});
    //Export some context-sensitive methods on the global require.
each([
'toUrl',
'undef',
'defined',
'specified'
], function (prop) {
//Reference from contexts instead of early binding to default context,
//so that during builds, the latest instance of the default context
//with its config gets used.
req[prop] = function () {
var ctx = contexts[defContextName];
return ctx.require[prop].apply(ctx, arguments);
};
});
if (isBrowser) {
head = s.head = document.getElementsByTagName('head')[0];
//If BASE tag is in play, using appendChild is a problem for IE6.
//When that browser dies, this can be removed. Details in this jQuery bug:
//http://dev.jquery.com/ticket/2709
baseElement = document.getElementsByTagName('base')[0];
if (baseElement) {
head = s.head = baseElement.parentNode;
}
}
/**
* Any errors that require explicitly generates will be passed to this
* function. Intercept/override it if you want custom error handling.
* @param {Error} err the error object.
*/
req.onError = function (err) {
throw err;
};
/**
* Does the request to load a module for the browser case.
* Make this a separate function to allow other environments
* to override it.
*
* @param {Object} context the require context to find state.
* @param {String} moduleName the name of the module.
* @param {Object} url the URL to the module.
*/
req.load = function (context, moduleName, url) {
var config = (context && context.config) || {},
node;
if (isBrowser) {
//In the browser so use a script tag
node = config.xhtml ?
document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') :
document.createElement('script');
node.type = config.scriptType || 'text/javascript';
node.charset = 'utf-8';
node.async = true;
node.setAttribute('data-requirecontext', context.contextName);
node.setAttribute('data-requiremodule', moduleName);
//Set up load listener. Test attachEvent first because IE9 has
//a subtle issue in its addEventListener and script onload firings
//that do not match the behavior of all other browsers with
//addEventListener support, which fire the onload event for a
//script right after the script execution. See:
//https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
            //UNFORTUNATELY Opera implements attachEvent but does not follow the
            //script execution mode.
if (node.attachEvent &&
//Check if node.attachEvent is artificially added by custom script or
//natively supported by browser
//read https://github.com/jrburke/requirejs/issues/187
                //if we can NOT find [native code] then it must NOT be natively supported.
//in IE8, node.attachEvent does not have toString()
//Note the test for "[native code" with no closing brace, see:
//https://github.com/jrburke/requirejs/issues/273
!(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
!isOpera) {
//Probably IE. IE (at least 6-8) do not fire
//script onload right after executing the script, so
//we cannot tie the anonymous define call to a name.
//However, IE reports the script as being in 'interactive'
//readyState at the time of the define call.
useInteractive = true;
node.attachEvent('onreadystatechange', context.onScriptLoad);
//It would be great to add an error handler here to catch
//404s in IE9+. However, onreadystatechange will fire before
//the error handler, so that does not help. If addEventListener
//is used, then IE will fire error before load, but we cannot
//use that pathway given the connect.microsoft.com issue
//mentioned above about not doing the 'script execute,
//then fire the script load event listener before execute
//next script' that other browsers do.
//Best hope: IE10 fixes the issues,
//and then destroys all installs of IE 6-9.
//node.attachEvent('onerror', context.onScriptError);
} else {
node.addEventListener('load', context.onScriptLoad, false);
node.addEventListener('error', context.onScriptError, false);
}
node.src = url;
//For some cache cases in IE 6-8, the script executes before the end
//of the appendChild execution, so to tie an anonymous define
//call to the module name (which is stored on the node), hold on
//to a reference to this node, but clear after the DOM insertion.
currentlyAddingScript = node;
if (baseElement) {
head.insertBefore(node, baseElement);
} else {
head.appendChild(node);
}
currentlyAddingScript = null;
return node;
} else if (isWebWorker) {
try {
//In a web worker, use importScripts. This is not a very
//efficient use of importScripts, importScripts will block until
//its script is downloaded and evaluated. However, if web workers
                //are in play, the expectation is that a build has been done so that
//only one script needs to be loaded anyway. This may need to be
//reevaluated if other use cases become common.
importScripts(url);
//Account for anonymous modules
context.completeLoad(moduleName);
} catch (e) {
context.onError(makeError('importscripts',
'importScripts failed for ' +<|fim▁hole|> }
};
function getInteractiveScript() {
if (interactiveScript && interactiveScript.readyState === 'interactive') {
return interactiveScript;
}
eachReverse(scripts(), function (script) {
if (script.readyState === 'interactive') {
return (interactiveScript = script);
}
});
return interactiveScript;
}
//Look for a data-main script attribute, which could also adjust the baseUrl.
if (isBrowser) {
//Figure out baseUrl. Get it from the script tag with require.js in it.
eachReverse(scripts(), function (script) {
//Set the 'head' where we can append children by
//using the script's parent.
if (!head) {
head = script.parentNode;
}
//Look for a data-main attribute to set main script for the page
//to load. If it is there, the path to data main becomes the
//baseUrl, if it is not already set.
dataMain = script.getAttribute('data-main');
if (dataMain) {
//Set final baseUrl if there is not already an explicit one.
if (!cfg.baseUrl) {
//Pull off the directory of data-main for use as the
//baseUrl.
src = dataMain.split('/');
mainScript = src.pop();
subPath = src.length ? src.join('/') + '/' : './';
cfg.baseUrl = subPath;
dataMain = mainScript;
}
//Strip off any trailing .js since dataMain is now
//like a module name.
dataMain = dataMain.replace(jsSuffixRegExp, '');
//Put the data-main script in the files to load.
cfg.deps = cfg.deps ? cfg.deps.concat(dataMain) : [dataMain];
return true;
}
});
}
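    //Illustrative sketch only: the data-main attribute handled above is
    //typically set in markup like the following, where 'scripts/main' is a
    //hypothetical path:
    //  <script data-main="scripts/main" src="require.js"></script>
    //With that tag, baseUrl defaults to 'scripts/' and main.js is loaded.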
/**
* The function that handles definitions of modules. Differs from
* require() in that a string for the module should be the first argument,
* and the function to execute after dependencies are loaded should
* return a value to define the module corresponding to the first argument's
* name.
*/
define = function (name, deps, callback) {
var node, context;
//Allow for anonymous modules
if (typeof name !== 'string') {
//Adjust args appropriately
callback = deps;
deps = name;
name = null;
}
//This module may not have dependencies
if (!isArray(deps)) {
callback = deps;
deps = [];
}
        //If no name, and callback is a function, then figure out if it is a
//CommonJS thing with dependencies.
if (!deps.length && isFunction(callback)) {
//Remove comments from the callback string,
//look for require calls, and pull them into the dependencies,
//but only if there are function args.
if (callback.length) {
callback
.toString()
.replace(commentRegExp, '')
.replace(cjsRequireRegExp, function (match, dep) {
deps.push(dep);
});
//May be a CommonJS thing even without require calls, but still
//could use exports, and module. Avoid doing exports and module
//work though if it just needs require.
//REQUIRES the function to expect the CommonJS variables in the
//order listed below.
deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps);
}
}
//If in IE 6-8 and hit an anonymous define() call, do the interactive
//work.
if (useInteractive) {
node = currentlyAddingScript || getInteractiveScript();
if (node) {
if (!name) {
name = node.getAttribute('data-requiremodule');
}
context = contexts[node.getAttribute('data-requirecontext')];
}
}
//Always save off evaluating the def call until the script onload handler.
//This allows multiple modules to be in a file without prematurely
//tracing dependencies, and allows for anonymous module support,
//where the module name is not known until the script onload event
//occurs. If no context, use the global queue, and get it processed
//in the onscript load callback.
(context ? context.defQueue : globalDefQueue).push([name, deps, callback]);
};
define.amd = {
jQuery: true
};
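    //Illustrative sketch only: the argument shuffling above supports these
    //call shapes, with hypothetical module ids:
    //  define('name', ['dep'], function (dep) { return {}; }); //named
    //  define(['dep'], function (dep) { return {}; });         //anonymous
    //  define(function (require) { var a = require('a'); });   //CommonJS-style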
/**
* Executes the text. Normally just uses eval, but can be modified
* to use a better, environment-specific call. Only used for transpiling
* loader plugins, not for plain JS modules.
* @param {String} text the text to execute/evaluate.
*/
req.exec = function (text) {
/*jslint evil: true */
return eval(text);
};
//Set up with config info.
req(cfg);
}(this));<|fim▁end|> | moduleName + ' at ' + url,
e,
[moduleName]));
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from api import Api<|fim▁hole|>def full_url(resource):
return Api.url_base.format(resource=resource)<|fim▁end|> | |
<|file_name|>review.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
import tempfile
from copy import copy
from datetime import datetime
import BTrees.OIBTree as OIBTree
from BTrees.IOBTree import IOBTree
from BTrees.OOBTree import OOBTree, intersection, union
from persistent import Persistent
from persistent.list import PersistentList
from pytz import timezone
from indico.core.config import Config
from indico.modules.events.abstracts.legacy import (contribution_from_abstract,
AbstractFieldManagerAdapter,
AbstractJudgmentLegacyMixin,
AbstractLegacyMixin,
AbstractManagerLegacyMixin,
AbstractStatusAcceptedLegacyMixin)
from indico.modules.events.contributions.models.types import ContributionType
from indico.util.string import safe_slice, safe_upper
from MaKaC.common.Counter import Counter
from MaKaC.common.timezoneUtils import nowutc
from MaKaC.errors import MaKaCError, NoReportError
from MaKaC.i18n import _
from MaKaC.trashCan import TrashCanManager
class _AbstractParticipationIndex(Persistent):
"""This class allows to index abstract participations (submitters)
for a single CFA process; this means that clients will be able to
efficiently perform queries of the type "give me all the abstracts
in which a certain registered user is implied".
For being able to perform this indexing, it is supposed that the Avatar
identifier is unique among other avatars and that it cannot change.
This index must be maintained by clients (i.e. the CFAMgr) as it doesn't
keep track of the changes on Participantons.
The key of the index is the Avatar and the values the different
Participations that user has within the current CFA process. For
performance reasons, the Avatar id will be used as index key (using the
whole Avatar object would make the index bigger and as the Avatar id
cannot change it's enough); the clients would have to keep the
integrity of the index.
"""
def __init__(self):
self._idx = OOBTree()
def index(self, participation):
"""Add a new participation to the index
"""
#if the Participation is not linked to an Avatar there's no point to
# index it
a = participation.getAvatar()
if not a:
return
#ToDo: if the Participation corresponds to an abstract which doesn't
# correspond to the current CFAMgr, then an error must be raised
if not self._idx.has_key(a.getId()):
self._idx[a.getId()] = PersistentList()
#if the participation is already in the index, no need for adding it
if participation in self._idx[a.getId()]:
return
self._idx[a.getId()].append(participation)
def unindex(self, participation):
"""Remove an existing participation from the index
"""
#if the Participation is not linked to an Avatar there's no point to
# unindex it
a = participation.getAvatar()
if not a:
return
#if the Avatar associated to the participation isn't in the index do
# nothing
if not self._idx.has_key(a.getId()):
return
#if the given participation is indexed remove it, otherwise do nothing
if participation in self._idx[a.getId()]:
self._idx[a.getId()].remove(participation)
def getParticipationList(self, av):
try:
return self._idx[av.getId()]
except KeyError, e:
return []
class AbstractParticipation(Persistent):
def __init__(self, abstract, **data):
self._abstract = abstract
self._firstName = ""
self._surName = ""
self._email = ""
self._affilliation = ""
self._address = ""
self._telephone = ""
self._fax = ""
self._title = ""
self.setData(**data)
def setFromAvatar(self, av):
data = {"title": av.getTitle(),
"firstName": av.getName(),
"surName": av.getSurName(),
"email": av.getEmail(),
"affiliation": av.getOrganisation(),
"address": av.getAddress(),
"telephone": av.getTelephone(),
"fax": av.getFax()}
self.setData(**data)
def setFromAbstractParticipation(self, part):
data = {"title": part.getTitle(),
"firstName": part.getFirstName(),
"surName": part.getSurName(),
"email": part.getEmail(),
"affiliation": part.getAffiliation(),
"address": part.getAddress(),
"telephone": part.getTelephone(),
"fax": part.getFax()}
self.setData(**data)
def setData(self, **data):
if "firstName" in data:
self.setFirstName(data["firstName"])
if "surName" in data:
self.setSurName(data["surName"])
if "email" in data:
self.setEmail(data["email"])
if "affiliation" in data:
self.setAffiliation(data["affiliation"])
if "address" in data:
self.setAddress(data["address"])
if "telephone" in data:
self.setTelephone(data["telephone"])
if "fax" in data:
self.setFax(data["fax"])
if "title" in data:
self.setTitle(data["title"])
setValues = setData
def getData(self):
data = {}
data["firstName"] = self.getFirstName()
data["surName"] = self.getSurName()
data["email"] = self.getEmail()
data["affiliation"] = self.getAffiliation()
data["address"] = self.getAddress()
data["telephone"] = self.getTelephone()
data["fax"] = self.getFax()
data["title"] = self.getTitle()
return data
getValues = getData
def clone(self, abstract):
        ap = AbstractParticipation(abstract, **self.getData())
return ap
def _notifyModification(self):
self._abstract._notifyModification()
def _unindex(self):
abs = self.getAbstract()
if abs is not None:
mgr = abs.getOwner()
if mgr is not None:
mgr.unindexAuthor(self)
def _index(self):
abs = self.getAbstract()
if abs is not None:
mgr = abs.getOwner()
if mgr is not None:
mgr.indexAuthor(self)
def setFirstName(self, name):
tmp = name.strip()
if tmp == self.getFirstName():
return
self._unindex()
self._firstName = tmp
self._index()
self._notifyModification()
def getFirstName(self):
return self._firstName
def getName(self):
return self._firstName
def setSurName(self, name):
tmp = name.strip()
if tmp == self.getSurName():
return
self._unindex()
self._surName = tmp
self._index()
self._notifyModification()<|fim▁hole|> def getFamilyName(self):
return self._surName
def setEmail(self, email):
email = email.strip().lower()
if email != self.getEmail():
self._unindex()
self._email = email
self._index()
self._notifyModification()
def getEmail(self):
return self._email
def setAffiliation(self, af):
self._affilliation = af.strip()
self._notifyModification()
setAffilliation = setAffiliation
def getAffiliation(self):
return self._affilliation
@property
def affiliation(self):
return self._affilliation
def setAddress(self, address):
self._address = address.strip()
self._notifyModification()
def getAddress(self):
return self._address
def setTelephone(self, telf):
self._telephone = telf.strip()
self._notifyModification()
def getTelephone(self):
return self._telephone
def setFax(self, fax):
self._fax = fax.strip()
self._notifyModification()
def getFax(self):
return self._fax
def setTitle(self, title):
self._title = title.strip()
self._notifyModification()
def getTitle(self):
return self._title
def getFullName(self):
res = safe_upper(self.getSurName())
tmp = []
for name in self.getFirstName().lower().split(" "):
if not name.strip():
continue
name = name.strip()
tmp.append(safe_upper(safe_slice(name, 0, 1)) + safe_slice(name, 1))
firstName = " ".join(tmp)
if firstName:
res = "%s, %s" % (res, firstName)
if self.getTitle():
res = "%s %s" % (self.getTitle(), res)
return res
@property
def full_name(self):
return self.getFullName()
def getStraightFullName(self):
name = ""
if self.getName():
name = "%s " % self.getName()
return "%s%s" % (name, self.getSurName())
def getAbrName(self):
res = self.getSurName()
if self.getFirstName():
if res:
res = "%s, " % res
res = "%s%s." % (res, safe_upper(safe_slice(self.getFirstName(), 0, 1)))
return res
def getAbstract(self):
return self._abstract
def setAbstract(self, abs):
self._abstract = abs
def delete(self):
self._unindex()
self._abstract = None
TrashCanManager().add(self)
def recover(self):
TrashCanManager().remove(self)
class Author(AbstractParticipation):
def __init__(self, abstract, **data):
AbstractParticipation.__init__(self, abstract, **data)
self._abstractId = ""
def getId(self):
return self._id
def setId(self, newId):
self._id = str(newId)
def clone(self, abstract):
        auth = Author(abstract, **self.getData())
return auth
def isSpeaker(self):
return self._abstract.isSpeaker(self)
class Submitter(AbstractParticipation):
def __init__(self, abstract, av):
if av is None:
raise MaKaCError(_("abstract submitter cannot be None"))
AbstractParticipation.__init__(self, abstract)
self._user = None
self._setUser(av)
self.setFromAvatar(av)
def _setUser(self, av):
if self.getUser() == av:
return
#if currently there's an association with a registered user, we notify
# the unidexation of the participation
if self.getUser():
self.getAbstract().getOwner().unregisterParticipation(self)
self._user = av
#if the participation is associated to any avatar, we make the
# association and index it
if self.getUser():
self.getAbstract().getOwner().registerParticipation(self)
def clone(self, abstract):
sub = Submitter(abstract, self.getAvatar())
        sub.setData(**self.getData())
return sub
def getUser(self):
return self._user
def getAvatar(self):
return self._user
def representsUser(self, av):
return self.getUser() == av
class _AuthIdx(Persistent):
def __init__(self, mgr):
self._mgr = mgr
self._idx = OOBTree()
def _getKey(self, auth):
return "%s %s" % (auth.getSurName().lower(), auth.getFirstName().lower())
def index(self, auth):
if auth.getAbstract() is None:
raise MaKaCError(_("cannot index an author of an abstract which is not included in a conference"))
if auth.getAbstract().getOwner() != self._mgr:
raise MaKaCError(_("cannot index an author of an abstract which does not belong to this conference"))
key = self._getKey(auth)
abstractId = str(auth.getAbstract().getId())
if not self._idx.has_key(key):
self._idx[key] = OIBTree.OIBTree()
if not self._idx[key].has_key(abstractId):
self._idx[key][abstractId] = 0
self._idx[key][abstractId] += 1
def unindex(self, auth):
if auth.getAbstract() is None:
raise MaKaCError(_("cannot unindex an author of an abstract which is not included in a conference"))
if auth.getAbstract().getOwner() != self._mgr:
raise MaKaCError(_("cannot unindex an author of an abstract which does not belong to this conference"))
key = self._getKey(auth)
if not self._idx.has_key(key):
return
abstractId = str(auth.getAbstract().getId())
if abstractId not in self._idx[key]:
return
self._idx[key][abstractId] -= 1
if self._idx[key][abstractId] <= 0:
del self._idx[key][abstractId]
if len(self._idx[key]) <= 0:
del self._idx[key]
def match(self, query):
query = query.lower().strip()
res = OIBTree.OISet()
for k in self._idx.keys():
if k.find(query) != -1:
res = OIBTree.union(res, self._idx[k])
return res
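    # Illustrative sketch only: match() is a case-insensitive substring scan
    # over the "surname firstname" keys, so a hypothetical idx.match('smith')
    # returns the ids of all abstracts having an author whose key contains
    # "smith".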
class _PrimAuthIdx(_AuthIdx):
def __init__(self, mgr):
_AuthIdx.__init__(self, mgr)
for abs in self._mgr.getAbstractList():
for auth in abs.getPrimaryAuthorList():
self.index(auth)
class _AuthEmailIdx(_AuthIdx):
def __init__(self, mgr):
_AuthIdx.__init__(self, mgr)
for abs in self._mgr.getAbstractList():
for auth in abs.getPrimaryAuthorList():
self.index(auth)
for auth in abs.getCoAuthorList():
self.index(auth)
def _getKey(self, auth):
return auth.getEmail().lower()
class AbstractMgr(AbstractManagerLegacyMixin, Persistent):
def __init__(self, owner):
self._owner = owner
self._abstracts = OOBTree()
self._participationIdx = _AbstractParticipationIndex()
self.__abstractGenerator = Counter()
self._activated = False
self.setStartSubmissionDate(datetime.now())
self.setEndSubmissionDate(datetime.now())
## self._contribTypes = PersistentList()
self.setAnnouncement("")
self._notifTpls = IOBTree()
self._notifTplsOrder = PersistentList()
self.__notifTplsCounter = Counter()
self._authorizedSubmitter = PersistentList()
self._primAuthIdx = _PrimAuthIdx(self)
self._authEmailIdx = _AuthEmailIdx(self)
self._submissionNotification = SubmissionNotification()
self._multipleTracks = True
self._tracksMandatory = False
self._attachFiles = False
self._showSelectAsSpeaker = True
self._selectSpeakerMandatory = True
self._showAttachedFilesContribList = False
def getMultipleTracks(self):
try:
return self._multipleTracks
except:
self.setMultipleTracks(True)
return self._multipleTracks
def setMultipleTracks(self, multipleTracks=True):
self._multipleTracks = multipleTracks
def areTracksMandatory(self):
try:
return self._tracksMandatory
except:
self.setTracksMandatory(False)
return self._tracksMandatory
def canAttachFiles(self):
try:
return self._attachFiles
except:
self.setAllowAttachFiles(False)
return self._attachFiles
def setAllowAttachFiles(self, attachedFiles):
self._attachFiles = attachedFiles
def setTracksMandatory(self, tracksMandatory=False):
self._tracksMandatory = tracksMandatory
def showSelectAsSpeaker(self):
try:
return self._showSelectAsSpeaker
except:
self._showSelectAsSpeaker = True
return self._showSelectAsSpeaker
def setShowSelectAsSpeaker(self, showSelectAsSpeaker):
self._showSelectAsSpeaker = showSelectAsSpeaker
def isSelectSpeakerMandatory(self):
try:
return self._selectSpeakerMandatory
except:
self._selectSpeakerMandatory = True
return self._selectSpeakerMandatory
def setSelectSpeakerMandatory(self, selectSpeakerMandatory):
self._selectSpeakerMandatory = selectSpeakerMandatory
def showAttachedFilesContribList(self):
try:
return self._showAttachedFilesContribList
except:
self._showAttachedFilesContribList = False
return self._showAttachedFilesContribList
    def setSwitchShowAttachedFilesContribList(self, showAttachedFilesContribList):
        self._showAttachedFilesContribList = showAttachedFilesContribList
def getAbstractFieldsMgr(self):
return AbstractFieldManagerAdapter(self._owner.as_event)
def clone(self, conference):
# XXX: Couldn't find any calls of this, but raise an exception just in case...
raise NotImplementedError('Abstract manager should never be cloned')
amgr = AbstractMgr(conference)
amgr.setAnnouncement(self.getAnnouncement())
timeDifference = conference.getStartDate() - self.getOwner().getStartDate()
amgr.setStartSubmissionDate(self.getStartSubmissionDate() + timeDifference)
amgr.setEndSubmissionDate(self.getEndSubmissionDate() + timeDifference)
modifDeadline = self.getModificationDeadline()
if modifDeadline is not None:
amgr.setModificationDeadline(self.getModificationDeadline() + timeDifference)
amgr.setActive(self.isActive())
if self.getCFAStatus():
amgr.activeCFA()
else:
amgr.desactiveCFA()
for a in self.getAbstractList():
amgr.addAbstract(a.clone(conference, amgr._generateNewAbstractId()))
for tpl in self.getNotificationTplList():
amgr.addNotificationTpl(tpl.clone())
# Cloning submission notification:
amgr.setSubmissionNotification(self.getSubmissionNotification().clone())
return amgr
def getOwner(self):
return self._owner
getConference = getOwner
def getTimezone(self):
return self.getConference().getTimezone()
def activeCFA(self):
self._activated = True
def desactiveCFA(self):
self._activated = False
def getAuthorizedSubmitterList(self):
try:
return self._authorizedSubmitter
except AttributeError:
self._authorizedSubmitter = PersistentList()
return self._authorizedSubmitter
def addAuthorizedSubmitter(self, av):
try:
if self._authorizedSubmitter:
pass
except AttributeError:
self._authorizedSubmitter = PersistentList()
if not av in self._authorizedSubmitter:
self._authorizedSubmitter.append(av)
def removeAuthorizedSubmitter(self, av):
try:
if self._authorizedSubmitter:
pass
except:
self._authorizedSubmitter = PersistentList()
if av in self._authorizedSubmitter:
self._authorizedSubmitter.remove(av)
def getCFAStatus(self):
return self._activated
def setActive(self, value):
if value:
self.activeCFA()
else:
self.desactiveCFA()
def isActive(self):
return self._activated
def setStartSubmissionDate(self, date):
self._submissionStartDate = datetime(date.year, date.month, date.day, 0, 0, 0)
def getStartSubmissionDate(self):
return timezone(self.getTimezone()).localize(self._submissionStartDate)
def setEndSubmissionDate(self, date):
self._submissionEndDate = datetime(date.year, date.month, date.day, 23, 59, 59)
def getEndSubmissionDate(self):
return timezone(self.getTimezone()).localize(self._submissionEndDate)
def inSubmissionPeriod(self, date=None):
if date is None:
date = nowutc()
sd = self.getStartSubmissionDate()
ed = self.getEndSubmissionDate()
return date <= ed and date >= sd
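    # Illustrative sketch only: callers check the window either for "now" or
    # for a hypothetical timezone-aware datetime `dt`:
    #   mgr.inSubmissionPeriod()    # uses nowutc()
    #   mgr.inSubmissionPeriod(dt)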
def getModificationDeadline(self):
"""Returns the deadline for modifications on the submitted abstracts.
"""
try:
if self._modifDeadline:
pass
except AttributeError, e:
self._modifDeadline = None
if self._modifDeadline is not None:
return timezone(self.getTimezone()).localize(self._modifDeadline)
else:
return None
def setModificationDeadline(self, newDL):
"""Sets a new deadline for modifications on the submitted abstracts.
"""
if newDL is not None:
self._modifDeadline = datetime(newDL.year, newDL.month, newDL.day, 23, 59, 59)
else:
self._modifDeadline = newDL
def inModificationPeriod(self, date=None):
"""Tells whether is possible to modify a submitted abstract in a
certain date.
"""
if date is None:
date = nowutc()
if not self.getModificationDeadline():
return True
return date <= self.getModificationDeadline()
def getAnnouncement(self):
#to be removed
try:
if self._announcement:
pass
except AttributeError, e:
self._announcement = ""
return self._announcement
def setAnnouncement(self, newAnnouncement):
self._announcement = newAnnouncement.strip()
def _getOldAbstractCounter(self):
return self.__abstractGenerator._getCount()
def newAbstract(self, av, **data):
"""Creates a new abstract under this manager
"""
from indico.modules.events.contributions import Contribution
new_abstract = self._new_abstract(self.getConference().as_event)
# sanity checks to avoid collisions
assert str(new_abstract.id) not in self._abstracts
assert not Contribution.query.with_parent(new_abstract.event_new).filter_by(friendly_id=new_abstract.id).count()
a = Abstract(self, str(new_abstract.friendly_id), av, **data)
self._abstracts[str(new_abstract.friendly_id)] = a
for auth in a.getPrimaryAuthorList():
self.indexAuthor(auth)
return a
def addAbstract(self, abstract):
if abstract in self.getAbstractList():
return
if isinstance(abstract.getCurrentStatus(), AbstractStatusWithdrawn):
raise MaKaCError(_("Cannot add an abstract which has been withdrawn"), ("Event"))
abstract._setOwner(self)
self._abstracts[abstract.getId()] = abstract
for auth in abstract.getPrimaryAuthorList():
self.indexAuthor(auth)
def removeAbstract(self, abstract):
if self._abstracts.has_key(abstract.getId()):
#for auth in abstract.getPrimaryAuthorList():
# self.unindexAuthor(auth)
# * Remove dependencies with another abstracts:
# - If it's an accepted abstract-->remove abstract from contribution
if isinstance(abstract.getCurrentStatus(), AbstractStatusAccepted):
raise NoReportError(_("Cannot remove an accepted abstract before removing the contribution linked to it"))
# If it's a withdrawn abstract-->remove abstract from contribution
if abstract.as_new.contribution:
raise NoReportError(_("Cannot remove the abstract before removing the contribution linked to it"))
for abs in self._abstracts.values():
if abs != abstract:
st = abs.getCurrentStatus()
if isinstance(st, AbstractStatusDuplicated):
                    #if the abstract to delete is the original in another "duplicated", change status to submitted
if st.getOriginal() == abstract:
abs.setCurrentStatus(AbstractStatusSubmitted(abs))
elif isinstance(st, AbstractStatusMerged):
#if the abstract to delete is the target one in another "merged", change status to submitted
if st.getTargetAbstract() == abstract:
abs.setCurrentStatus(AbstractStatusSubmitted(abs))
#unindex participations!!!
self.unregisterParticipation(abstract.getSubmitter())
self._remove_abstract(abstract)
del self._abstracts[abstract.getId()]
abstract.delete()
def recoverAbstract(self, abstract):
self.addAbstract(abstract)
abstract.recoverFromTrashCan()
def getAbstractList(self):
return self._abstracts.values()
def getAbstractById(self, id):
return self._abstracts.get(str(id), None)
def registerParticipation(self, p):
self._participationIdx.index(p)
def unregisterParticipation(self, p):
self._participationIdx.unindex(p)
def getAbstractListForAvatar(self, av):
try:
if self._participationIdx:
pass
except AttributeError, e:
self._participationIdx = self._partipationIdx
self._partipationIdx = None
res = []
for participation in self._participationIdx.getParticipationList(av):
abstract = participation.getAbstract()
if abstract is not None and abstract.isSubmitter(av):
if abstract not in res:
res.append(abstract)
return res
def getAbstractListForAuthorEmail(self, email):
""" Get list of abstracts where the email belongs to an author"""
return [self.getAbstractById(i) for i in self._getAuthEmailIndex().match(email)]
def getNotificationTplList(self):
try:
if self._notifTpls:
pass
except AttributeError:
self._notifTpls = IOBTree()
try:
if self._notifTplsOrder:
pass
except AttributeError:
self._notifTplsOrder = PersistentList()
for tpl in self._notifTpls.values():
self._notifTplsOrder.append(tpl)
return self._notifTplsOrder
def addNotificationTpl(self, tpl):
try:
if self._notifTpls:
pass
except AttributeError:
self._notifTpls = IOBTree()
try:
if self._notifTplsOrder:
pass
except AttributeError:
self._notifTplsOrder = PersistentList()
            for t in self._notifTpls.values():
                self._notifTplsOrder.append(t)
try:
if self._notifTplsCounter:
pass
except AttributeError:
self._notifTplsCounter = Counter()
if tpl.getOwner() == self and self._notifTpls.has_key(tpl.getId()):
return
id = tpl.getId()
if id == "":
id = self._notifTplsCounter.newCount()
tpl.includeInOwner(self, id)
self._notifTpls[int(id)] = tpl
self._notifTplsOrder.append(tpl)
def removeNotificationTpl(self, tpl):
try:
if self._notifTpls:
pass
except AttributeError:
self._notifTpls = IOBTree()
try:
if self._notifTplsOrder:
pass
except AttributeError:
self._notifTplsOrder = PersistentList()
            for t in self._notifTpls.values():
                self._notifTplsOrder.append(t)
if tpl.getOwner() != self or not self._notifTpls.has_key(int(tpl.getId())):
return
del self._notifTpls[int(tpl.getId())]
self._notifTplsOrder.remove(tpl)
tpl.includeInOwner(None, tpl.getId()) # We don't change the id for
# recovery purposes.
tpl.delete()
def recoverNotificationTpl(self, tpl):
self.addNotificationTpl(tpl)
tpl.recover()
def getNotificationTplById(self, id):
try:
if self._notifTpls:
pass
except AttributeError:
self._notifTpls = IOBTree()
return self._notifTpls.get(int(id), None)
def getNotifTplForAbstract(self, abs):
"""
"""
for tpl in self.getNotificationTplList():
if tpl.satisfies(abs):
return tpl
return None
def moveUpNotifTpl(self, tpl):
"""
"""
try:
if self._notifTplsOrder:
pass
except AttributeError:
self._notifTplsOrder = PersistentList()
            for t in self._notifTpls.values():
                self._notifTplsOrder.append(t)
if tpl not in self._notifTplsOrder:
return
idx = self._notifTplsOrder.index(tpl)
if idx == 0:
return
self._notifTplsOrder.remove(tpl)
self._notifTplsOrder.insert(idx-1, tpl)
def moveDownNotifTpl(self, tpl):
"""
"""
try:
if self._notifTplsOrder:
pass
except AttributeError:
self._notifTplsOrder = PersistentList()
            for t in self._notifTpls.values():
                self._notifTplsOrder.append(t)
        idx = self._notifTplsOrder.index(tpl)
        if idx == len(self._notifTplsOrder) - 1:
            return
self._notifTplsOrder.remove(tpl)
self._notifTplsOrder.insert(idx+1, tpl)
def indexAuthor(self, auth):
a = auth.getAbstract()
if a.isPrimaryAuthor(auth):
self._getPrimAuthIndex().index(auth)
self._getAuthEmailIndex().index(auth)
def unindexAuthor(self, auth):
a = auth.getAbstract()
if a.isPrimaryAuthor(auth):
self._getPrimAuthIndex().unindex(auth)
self._getAuthEmailIndex().unindex(auth)
def _getPrimAuthIndex(self):
try:
if self._primAuthIdx:
pass
except AttributeError:
self._primAuthIdx = _PrimAuthIdx(self)
return self._primAuthIdx
def _getAuthEmailIndex(self):
if not hasattr(self, '_authEmailIdx'):
self._authEmailIdx = _AuthEmailIdx(self)
return self._authEmailIdx
def getAbstractsMatchingAuth(self, query, onlyPrimary=True):
if str(query).strip() == "":
return self.getAbstractList()
res = self._getPrimAuthIndex().match(query)
return [self.getAbstractById(id) for id in res]
def hasAnyEnabledAbstractField(self):
return self.getAbstractFieldsMgr().hasAnyActiveField()
def hasEnabledAbstractField(self, key):
return self.getAbstractFieldsMgr().hasActiveField(key)
def getSubmissionNotification(self):
try:
if self._submissionNotification:
pass
except AttributeError, e:
self._submissionNotification = SubmissionNotification()
return self._submissionNotification
def setSubmissionNotification(self, sn):
self._submissionNotification = sn
def recalculateAbstractsRating(self, scaleLower, scaleHigher):
''' recalculate the values of the rating for all the abstracts in the conference '''
for abs in self.getAbstractList():
abs.updateRating((scaleLower, scaleHigher))
def removeAnswersOfQuestion(self, questionId):
''' Remove a question results for each abstract '''
for abs in self.getAbstractList():
abs.removeAnswersOfQuestion(questionId)
def notifyModification(self):
self._p_changed = 1
class SubmissionNotification(Persistent):
def __init__(self):
self._toList = PersistentList()
self._ccList = PersistentList()
def hasDestination(self):
        return self._toList != [] or self._ccList != []
def getToList(self):
return self._toList
def setToList(self, tl):
self._toList = tl
def addToList(self, to):
self._toList.append(to)
def clearToList(self):
self._toList = PersistentList()
def getCCList(self):
return self._ccList
def setCCList(self, cl):
self._ccList = cl
def addCCList(self, cc):
self._ccList.append(cc)
def clearCCList(self):
self._ccList = PersistentList()
def clone(self):
nsn = SubmissionNotification()
for i in self.getToList():
nsn.addToList(i)
for i in self.getCCList():
nsn.addCCList(i)
return nsn
class Comment(Persistent):
def __init__(self, res, content=""):
self._abstract = None
self._id = ""
self._responsible = res
self._content = ""
self._creationDate = nowutc()
self._modificationDate = nowutc()
def getLocator(self):
loc = self._abstract.getLocator()
loc["intCommentId"] = self._id
return loc
def includeInAbstract(self, abstract, id):
self._abstract = abstract
self._id = id
def delete(self):
self._abstract = None
TrashCanManager().add(self)
def recover(self):
TrashCanManager().remove(self)
def _notifyModification(self, dt=None):
if dt:
self._modificationDate = dt
else:
self._modificationDate = nowutc()
def getResponsible(self):
return self._responsible
def getAbstract(self):
return self._abstract
def getId(self):
return self._id
def getContent(self):
return self._content
def setContent(self, newContent):
self._content = newContent
self._notifyModification()
def getCreationDate(self):
return self._creationDate
def getModificationDate(self):
return self._modificationDate
def canModify(self, aw_or_user):
if hasattr(aw_or_user, 'getUser'):
aw_or_user = aw_or_user.getUser()
return self.canUserModify(aw_or_user)
def canUserModify(self, user):
abstract = self.getAbstract()
conf = abstract.getConference()
return self.getResponsible() == user and \
(abstract.canUserModify(user) or \
len(conf.getConference().getCoordinatedTracks(user)) > 0)
class Abstract(AbstractLegacyMixin, Persistent):
def __init__(self, owner, id, submitter, **abstractData):
self._setOwner( owner )
self._setId( id )
self._title = ""
self._authorGen = Counter()
self._authors = OOBTree()
self._primaryAuthors = PersistentList()
self._coAuthors = PersistentList()
self._speakers = PersistentList()
self._tracks = OOBTree()
self._contribTypes = PersistentList( [""] )
self._setSubmissionDate( nowutc() )
self._modificationDate = nowutc()
self._currentStatus = AbstractStatusSubmitted( self )
self._trackAcceptances = OOBTree()
self._trackRejections = OOBTree()
self._trackReallocations = OOBTree()
self._trackJudgementsHistorical={}
self._comments = ""
self._contribution = None
self._intCommentGen=Counter()
self._intComments=PersistentList()
self._mergeFromList = PersistentList()
self._notifLog=NotificationLog(self)
self._submitter=None
self._setSubmitter( submitter )
self._rating = None # It needs to be none to avoid the case of having the same value as the lowest value in the judgement
self._attachments = {}
self._attachmentsCounter = Counter()
def __cmp__(self, other):
if type(self) is not type(other):
# This is actually dangerous and the ZODB manual says not to do this
# because it relies on memory order. However, this branch should never
# be taken anyway since we do not store different types in the same set
# or use them as keys.
return cmp(hash(self), hash(other))
if self.getConference() == other.getConference():
return cmp(self.getId(), other.getId())
return cmp(self.getConference(), other.getConference())
def clone(self, conference, abstractId):
# abstractId - internal in abstract manager of the conference
abs = Abstract(conference.getAbstractMgr(), abstractId, self.getSubmitter().getAvatar())
abs.setTitle(self.getTitle())
for key in self.getFields().keys():
abs.setField(key,self.getField(key))
abs.setComments(self.getComments())
abs._setSubmissionDate(self.getSubmissionDate())
abs._modificationDate = self.getModificationDate()
# Cloning of primary- and coauthors
# if an author is also a speaker, an appropriate object will be
# appended also to the speaker list
for pa in self.getPrimaryAuthorList() :
npa = abs.newPrimaryAuthor(**(pa.getData()))
if self.isSpeaker(pa) :
abs.addSpeaker(npa)
for ca in self.getCoAuthorList() :
nca = abs.newCoAuthor(**(ca.getData()))
if self.isSpeaker(ca) :
abs.addSpeaker(nca)
# Cloning of speakers
# only those, who are not authors :
for sp in self.getSpeakerList() :
if not self.isAuthor(sp) :
abs.addSpeaker(sp.clone())
abs.setSubmitter(self.getSubmitter().getAvatar())
abs.as_new.type = self.as_new.type
# the track, to which the abstract belongs to
# legacy list implementation
for tr in self.getTrackList() :
for newtrack in conference.getTrackList():
if newtrack.getTitle() == tr.getTitle() :
abs.addTrack(newtrack)
# overall abstract status (accepted / rejected)
abs._currentStatus = self._currentStatus.clone(abs)
abs.as_new.accepted_track_id = self.as_new.track.id if self.as_new.track else None
abs.as_new.accepted_type = self.as_new.type
for ta in self.getTrackAcceptanceList() :
for newtrack in conference.getTrackList():
if newtrack.getTitle() == ta.getTrack().getTitle() :
newta = ta.clone(newtrack)
abs._addTrackAcceptance(newta)
abs._addTrackJudgementToHistorical(newta)
for trj in self.getTrackRejections().values() :
for newtrack in conference.getTrackList():
if newtrack.getTitle() == trj.getTrack().getTitle() :
newtrj = trj.clone(newtrack)
abs._addTrackRejection(newtrj)
abs._addTrackJudgementToHistorical(newtrj)
for trl in self.getTrackReallocations().values() :
for newtrack in conference.getTrackList():
if newtrack.getTitle() == trl.getTrack().getTitle() :
newtrl = trl.clone(newtrack)
abs._addTrackReallocation(newtrl)
abs._addTrackJudgementToHistorical(newtrl)
# Cloning materials
for f in self.getAttachments().values():
newFile = f.clone(abs, protection=False)
abs.__addFile(newFile)
return abs
    def getUniqueId( self ):
        """Returns (string) the unique identifier of the item.
        Used only in the web session access key table; it is the same as the
        conference's, since only the conference can be protected with an
        access key.
        """
        return self.getConference().getUniqueId()
def getMergeFromList(self):
try:
return self._mergeFromList
except AttributeError:
self._mergeFromList = PersistentList()
return self._mergeFromList
def addMergeFromAbstract(self, abstract):
try:
if self._mergeFromList:
pass
except AttributeError:
self._mergeFromList = PersistentList()
self._mergeFromList.append(abstract)
def removeMergeFromAbstract(self, abstract):
try:
if self._mergeFromList:
pass
except AttributeError:
self._mergeFromList = PersistentList()
if abstract in self._mergeFromList:
self._mergeFromList.remove(abstract)
def getComments(self):
try:
return self._comments
except AttributeError:
self._comments = ""
return self._comments
def setComments(self, comments):
self._comments = comments
def __addFile(self, file):
file.archive(self.getConference()._getRepository())
self.getAttachments()[file.getId()] = file
self._notifyModification()
def saveFiles(self, files):
cfg = Config.getInstance()
from MaKaC.conference import LocalFile
for fileUploaded in files:
if fileUploaded.filename:
# create a temp file
tempPath = cfg.getUploadedFilesTempDir()
tempFileName = tempfile.mkstemp(suffix="IndicoAbstract.tmp", dir=tempPath)[1]
f = open(tempFileName, "wb")
f.write(fileUploaded.file.read() )
f.close()
file = LocalFile()
file.setFileName(fileUploaded.filename)
file.setFilePath(tempFileName)
file.setOwner(self)
file.setId(self._getAttachmentsCounter())
self.__addFile(file)
def deleteFilesNotInList(self, keys):
"""This method is used in order to delete all the files that are not present (by id) in the
parameter "keys".
This is useful when files are deleted from the abstract form using Javascript, and so it is
the only way to know that they are deleted.
"""
existingKeys = self.getAttachments().keys()
for key in existingKeys:
if not key in keys:
self._deleteFile(key)
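    # Illustrative sketch only: a form submission that kept attachments with
    # hypothetical ids '1' and '3' would call
    #   abstract.deleteFilesNotInList(['1', '3'])
    # and every other attached file would be removed by id.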
def _deleteFile(self, key):
file = self.getAttachments()[key]
file.delete()
del self.getAttachments()[key]
self._notifyModification()
def _setOwner( self, owner ):
self._owner = owner
def getOwner( self ):
return self._owner
def _setId( self, id ):
self._id = str( id )
def getId(self):
return self._id
def _setSubmissionDate( self, newDate ):
self._submissionDate = newDate
def setModificationDate(self, dt = None):
if dt:
self._modificationDate = dt
else:
self._modificationDate = nowutc()
def _notifyModification( self, dt=None ):
self.setModificationDate(dt)
self._p_changed = 1
def getModificationDate( self ):
return self._modificationDate
def _setSubmitter( self, av ):
if not av:
raise MaKaCError( _("An abstract must have a submitter"))
if self._submitter:
self.getOwner().unregisterParticipation( self._submitter )
self._submitter.getUser().unlinkTo(self, "submitter")
self._submitter.delete()
self._submitter=Submitter( self, av )
av.linkTo(self, "submitter")
self.getOwner().registerParticipation( self._submitter )
self._notifyModification()
def recoverSubmitter(self, subm):
if not subm:
raise MaKaCError( _("An abstract must have a submitter"))
if self._submitter:
self.getOwner().unregisterParticipation( self._submitter )
self._submitter.delete()
self._submitter = subm
self._submitter.setAbstract(self)
self.getOwner().registerParticipation( self._submitter )
subm.recover()
self._notifyModification()
def setSubmitter( self, av ):
self._setSubmitter(av)
def getSubmitter( self ):
return self._submitter
def isSubmitter( self, av ):
return self.getSubmitter().representsUser( av )
def setTitle(self, title):
self._title = title.strip()
self._notifyModification()
def getTitle(self):
return self._title
@property
def title(self):
return self._title
def getSubmissionDate( self ):
try:
if self._submissionDate:
pass
except AttributeError:
self._submissionDate=nowutc()
return self._submissionDate
def getConference( self ):
mgr = self.getOwner()
return mgr.getOwner() if mgr else None
def _newAuthor( self, **data ):
author = Author( self, **data )
author.setId( self._authorGen.newCount() )
self._authors[ author.getId() ] = author
return author
def _removeAuthor(self,part):
if not self.isAuthor(part):
return
part.delete()
del self._authors[part.getId()]
def isAuthor( self, part ):
return self._authors.has_key( part.getId() )
def getAuthorList( self ):
return self._authors.values()
def getAuthorById(self, id):
return self._authors.get(str(id), None)
def clearAuthors( self ):
self.clearPrimaryAuthors()
self.clearCoAuthors()
self._notifyModification()
def newPrimaryAuthor(self,**data):
auth=self._newAuthor(**data)
self._addPrimaryAuthor(auth)
self._notifyModification()
return auth
def isPrimaryAuthor( self, part ):
return part in self._primaryAuthors
def getPrimaryAuthorList( self ):
return self._primaryAuthors
#XXX: I keep it for compatibility but it should be removed
getPrimaryAuthorsList = getPrimaryAuthorList
def getPrimaryAuthorEmailList(self, lower=False):
emailList = []
for pAuthor in self.getPrimaryAuthorList():
emailList.append(pAuthor.getEmail().lower() if lower else pAuthor.getEmail())
return emailList
def clearPrimaryAuthors(self):
while len(self._primaryAuthors)>0:
self._removePrimaryAuthor(self._primaryAuthors[0])
self._notifyModification()
def _addPrimaryAuthor( self, part ):
if not self.isAuthor( part ):
raise MaKaCError( _("The participation you want to set as primary author is not an author of the abstract"))
if part in self._primaryAuthors:
return
self._primaryAuthors.append( part )
self.getOwner().indexAuthor(part)
def _removePrimaryAuthor(self,part):
if not self.isPrimaryAuthor(part):
return
if self.isSpeaker(part):
self.removeSpeaker(part)
self.getOwner().unindexAuthor(part)
self._primaryAuthors.remove(part)
self._removeAuthor(part)
def recoverPrimaryAuthor(self, auth):
self._authors[ auth.getId() ] = auth
auth.setAbstract(self)
self._addPrimaryAuthor(auth)
auth.recover()
self._notifyModification()
def newCoAuthor(self,**data):
auth=self._newAuthor(**data)
self._addCoAuthor(auth)
self._notifyModification()
return auth
def _comp_CoAuthors(self):
try:
if self._coAuthors!=None:
return
except AttributeError:
self._coAuthors=PersistentList()
for auth in self._authors.values():
if not self.isPrimaryAuthor(auth):
self._addCoAuthor(auth)
def isCoAuthor( self, part ):
self._comp_CoAuthors()
return part in self._coAuthors
def getCoAuthorList( self ):
self._comp_CoAuthors()
return self._coAuthors
def getCoAuthorEmailList(self, lower=False):
emailList = []
for coAuthor in self.getCoAuthorList():
emailList.append(coAuthor.getEmail().lower() if lower else coAuthor.getEmail())
return emailList
def clearCoAuthors(self):
while len(self._coAuthors)>0:
self._removeCoAuthor(self._coAuthors[0])
self._notifyModification()
def _addCoAuthor( self, part ):
self._comp_CoAuthors()
if not self.isAuthor( part ):
raise MaKaCError( _("The participation you want to set as primary author is not an author of the abstract"))
if part in self._coAuthors:
return
self._coAuthors.append( part )
def _removeCoAuthor(self,part):
if not self.isCoAuthor(part):
return
if self.isSpeaker(part):
self.removeSpeaker(part)
self._coAuthors.remove(part)
self._removeAuthor(part)
def recoverCoAuthor(self, auth):
self._authors[ auth.getId() ] = auth
auth.setAbstract(self)
self._addCoAuthor(auth)
auth.recover()
self._notifyModification()
def addSpeaker( self, part ):
if not self.isAuthor( part ):
raise MaKaCError( _("The participation you want to set as speaker is not an author of the abstract"))
if part in self._speakers:
return
self._speakers.append( part )
self._notifyModification()
def removeSpeaker(self,part):
if part not in self._speakers:
return
self._speakers.remove(part)
def clearSpeakers( self ):
while len(self.getSpeakerList()) > 0:
self.removeSpeaker(self.getSpeakerList()[0])
self._speakers = PersistentList()
def getSpeakerList( self ):
return self._speakers
def isSpeaker( self, part ):
return part in self._speakers
def _addTrack( self, track ):
"""Adds the specified track to the suggested track list. Any
verification must be done by the caller.
"""
self._tracks[ track.getId() ] = track
track.addAbstract( self )
self._notifyModification()
def addTrack( self, track ):
self._changeTracksImpl()
if not self._tracks.has_key( track.getId() ):
self._addTrack( track )
self.getCurrentStatus().update()
def _removeTrack( self, track ):
"""Removes the specified track from the track list. Any verification
must be done by the caller.
"""
del self._tracks[ track.getId() ]
track.removeAbstract( self )
self._notifyModification()
def removeTrack( self, track ):
if self._tracks.has_key( track.getId() ):
self._removeTrack( track )
self.getCurrentStatus().update()
if isinstance(self.getCurrentStatus(), AbstractStatusAccepted):
self.getCurrentStatus()._setTrack(None)
def _changeTracksImpl( self ):
if self._tracks.__class__ != OOBTree:
oldTrackList = self._tracks
self._tracks = OOBTree()
for track in oldTrackList:
self._addTrack( track )
self.getCurrentStatus().update()
def getTrackList( self ):
self._changeTracksImpl()
return self._tracks.values()
def getAcceptedTrack(self):
status = self.getCurrentStatus()
if status is None:
return None
if isinstance(status, AbstractStatusAccepted):
return status.getTrack()
def hasTrack( self, track ):
self._changeTracksImpl()
return self._tracks.has_key( track.getId() )
def getTrackListSorted( self ):
self._changeTracksImpl()
return self.getConference().sortTrackList( self._tracks.values() )
def clearTracks( self ):
self._changeTracksImpl()
while len(self.getTrackList())>0:
track = self.getTrackList()[0]
self._removeTrack( track )
self.getCurrentStatus().update()
def setTracks( self, trackList ):
"""Set the suggested track classification of the current abstract to
the specified list
"""
#We need to do it in 2 steps otherwise the list over which we are
# iterating gets modified
toBeRemoved = []
toBeAdded = copy( trackList )
for track in self.getTrackList():
if track not in trackList:
toBeRemoved.append( track )
else:
toBeAdded.remove( track )
for track in toBeRemoved:
self._removeTrack( track )
for track in toBeAdded:
self._addTrack( track )
self.getCurrentStatus().update()
def isProposedForTrack( self, track ):
return self._tracks.has_key( track.getId() )
def getNumTracks(self):
return len( self._tracks )
def getLocator(self):
loc = self.getConference().getLocator()
loc["abstractId"] = self.getId()
return loc
def isAllowedToCoordinate(self, av):
"""Tells whether or not the specified user can coordinate any of the
tracks of this abstract
"""
for track in self.getTrackList():
if track.canUserCoordinate(av):
return True
return False
def canAuthorAccess(self, user):
if user is None:
return False
el = self.getCoAuthorEmailList(True)+self.getPrimaryAuthorEmailList(True)
for e in user.getEmails():
if e.lower() in el:
return True
return False
def isAllowedToAccess(self, av):
"""Tells whether or not an avatar can access an abstract independently
of the protection
"""
#any author is allowed to access
#CFA managers are allowed to access
#any user being able to modify is also allowed to access
#any TC is allowed to access
if self.canAuthorAccess(av):
return True
if self.isAllowedToCoordinate(av):
return True
return self.canUserModify(av)
def canAccess(self, aw):
#if the conference is protected, then only allowed AW can access
return self.isAllowedToAccess(aw.getUser())
def canView(self, aw):
#in the future it would be possible to add an access control
#only those users allowed to access are allowed to view
return self.isAllowedToAccess(aw.getUser())
def canModify(self, aw_or_user):
if hasattr(aw_or_user, 'getUser'):
aw_or_user = aw_or_user.getUser()
return self.canUserModify(aw_or_user)
def canUserModify(self, av):
#the submitter can modify
if self.isSubmitter(av):
return True
#??? any CFA manager can modify
#??? any user granted with modification privileges can modify
#conference managers can modify
conf = self.getConference()
return conf.canUserModify(av)
def getModifKey(self):
return ""
def getAccessKey(self):
return ""
def getAccessController(self):
return self.getConference().getAccessController()
def isProtected(self):
return self.getConference().isProtected()
def delete(self):
if self._owner:
self.getOwner().unregisterParticipation(self._submitter)
self._submitter.getUser().unlinkTo(self, "submitter")
self._submitter.delete()
self._submitter = None
self.clearAuthors()
self.clearSpeakers()
self.clearTracks()
owner = self._owner
self._owner = None
owner.removeAbstract(self)
self.setCurrentStatus(AbstractStatusNone(self))
TrashCanManager().add(self)
def recoverFromTrashCan(self):
TrashCanManager().remove(self)
def getCurrentStatus(self):
try:
if self._currentStatus:
pass
except AttributeError, e:
self._currentStatus = AbstractStatusSubmitted(self)
return self._currentStatus
def setCurrentStatus(self, newStatus):
self._currentStatus = newStatus
#If we want to keep a history of status changes we should add here
# the old status to a list
def accept(self, responsible, destTrack, type, comments="", session=None):
"""
"""
self.getCurrentStatus().accept(responsible, destTrack, type, comments)
# add the abstract to the track for which it has been accepted so it
# is visible for it.
if destTrack is not None:
destTrack.addAbstract(self)
contrib = contribution_from_abstract(self, session)
self.as_new.contribution = contrib
def reject(self, responsible, comments=""):
"""
"""
self.getCurrentStatus().reject(responsible, comments)
def _cmpByDate(self, tj1, tj2):
return cmp(tj1.getDate(), tj2.getDate())
def getTrackJudgementsHistorical(self):
try:
if self._trackJudgementsHistorical:
pass
if type(self._trackJudgementsHistorical) == tuple:
self._trackJudgementsHistorical = {}
except AttributeError:
self._trackJudgementsHistorical = {}
for track in self.getTrackList():
judgement = None
if self.getTrackAcceptances().has_key(track.getId()):
judgement = self.getTrackAcceptances()[track.getId()]
elif self.getTrackRejections().has_key(track.getId()):
judgement = self.getTrackRejections()[track.getId()]
elif self.getTrackReallocations().has_key(track.getId()):
judgement = self.getTrackReallocations()[track.getId()]
self._trackJudgementsHistorical[track.getId()] = [judgement]
self._notifyModification()
return self._trackJudgementsHistorical
def getJudgementHistoryByTrack(self, track):
id = "notrack"
if track is not None:
id = track.getId()
if self.getTrackJudgementsHistorical().has_key(id):
return self.getTrackJudgementsHistorical()[id]
return []
def _addTrackJudgementToHistorical(self, tj):
id = "notrack"
if tj.getTrack() is not None:
id = tj.getTrack().getId()
if self.getTrackJudgementsHistorical().has_key(id):
if tj not in self.getTrackJudgementsHistorical()[id]:
self.getTrackJudgementsHistorical()[id].insert(0, tj)
else:
self.getTrackJudgementsHistorical()[id] = [tj]
self._notifyModification()
def _removeTrackAcceptance( self, track ):
"""
"""
if self.getTrackAcceptances().has_key( track.getId() ):
del self.getTrackAcceptances()[ track.getId() ]
def _addTrackAcceptance( self, judgement ):
"""
"""
self._removeTrackRejection( judgement.getTrack() )
self._removeTrackReallocation( judgement.getTrack() )
self.getTrackAcceptances()[ judgement.getTrack().getId() ] = judgement
self._addTrackJudgementToHistorical(judgement)
def _removeTrackRejection( self, track ):
"""
"""
if self.getTrackRejections().has_key( track.getId() ):
del self.getTrackRejections()[ track.getId() ]
def _addTrackRejection( self, judgement ):
"""
"""
self._removeTrackAcceptance( judgement.getTrack() )
self._removeTrackReallocation( judgement.getTrack() )
self.getTrackRejections()[ judgement.getTrack().getId() ] = judgement
self._addTrackJudgementToHistorical(judgement)
def _removeTrackReallocation( self, track ):
"""
"""
if self.getTrackReallocations().has_key( track.getId() ):
del self.getTrackReallocations()[ track.getId() ]
def _addTrackReallocation( self, judgement ):
"""
"""
self._removeTrackAcceptance( judgement.getTrack() )
self._removeTrackRejection( judgement.getTrack() )
self.getTrackReallocations()[ judgement.getTrack().getId() ] = judgement
self._addTrackJudgementToHistorical(judgement)
def _clearTrackRejections( self ):
while len(self.getTrackRejections().values())>0:
t = self.getTrackRejections().values()[0].getTrack()
self._removeTrackRejection( t )
def _clearTrackAcceptances( self ):
while len(self.getTrackAcceptances().values())>0:
t = self.getTrackAcceptances().values()[0].getTrack()
self._removeTrackAcceptance( t )
def _clearTrackReallocations( self ):
while len(self.getTrackReallocations().values())>0:
t = self.getTrackReallocations().values()[0].getTrack()
self._removeTrackReallocation(t)
def _removePreviousJud(self, responsible, track):
''' Check if there is a previous judgement and remove it '''
toDelete = [] # list of judgements to delete
for jud in self.getJudgementHistoryByTrack(track):
if jud.getResponsible() == responsible:
toDelete.append(jud)
        trackId = "notrack" if track is None else track.getId()
        for x in toDelete:
            self.getTrackJudgementsHistorical()[trackId].remove(x)
if isinstance(x, AbstractAcceptance):
self._del_judgment(x)
def proposeToAccept( self, responsible, track, contribType, comment="", answers=[] ):
"""
"""
# the proposal has to be done for a track
if track is None:
raise MaKaCError( _("You have to choose a track in order to do the proposal. If there are not tracks to select, please change the track assignment of the abstract"))
#We check the track for which the abstract is proposed to be accepted
# is in the current abstract
if not self.isProposedForTrack( track ):
raise MaKaCError( _("Cannot propose to accept an abstract which is not proposed for the specified track"))
# check if there is a previous judgement of this author in for this abstract in this track
self._removePreviousJud(responsible, track)
# Create the new judgement
jud = AbstractAcceptance(self, track, responsible, contribType, answers)
self._add_judgment(jud)
jud.setComment( comment )
self._addTrackAcceptance( jud )
# Update the rating of the abstract
self.updateRating()
#We trigger the state transition
self.getCurrentStatus().proposeToAccept()
def proposeToReject( self, responsible, track, comment="", answers=[] ):
"""
"""
# the proposal has to be done for a track
if track is None:
raise MaKaCError( _("You have to choose a track in order to do the proposal. If there are not tracks to select, please change the track assignment of the abstract"))
#We check the track for which the abstract is proposed to be accepted
# is in the current abstract
if not self.isProposedForTrack( track ):
raise MaKaCError( _("Cannot propose to reject an abstract which is not proposed for the specified track"))
# check if there is a previous judgement of this author in for this abstract in this track
self._removePreviousJud(responsible, track)
# Create the new judgement
jud = AbstractRejection(self, track, responsible, answers)
jud.setComment( comment )
self._addTrackRejection( jud )
# Update the rating of the abstract
self.updateRating()
#We trigger the state transition
self.getCurrentStatus().proposeToReject()
def proposeForOtherTracks( self, responsible, track, comment, propTracks, answers=[] ):
"""
"""
#We check the track which proposes to allocate the abstract is in the
# current abstract
if not self.isProposedForTrack( track ):
raise MaKaCError( _("Cannot propose to reallocate an abstract which is not proposed for the specified track"))
# check if there is a previous judgement of this author in for this abstract in this track
self._removePreviousJud(responsible, track)
#We keep the track judgement
jud = AbstractReallocation(self, track, responsible, propTracks, answers)
jud.setComment( comment )
self._addTrackReallocation( jud )
#We add the proposed tracks to the abstract
for track in propTracks:
self._addTrack( track )
#We trigger the state transition
self.getCurrentStatus().proposeToReallocate()
# Update the rating of the abstract
self.updateRating()
def withdraw(self,resp,comment=""):
"""
"""
self.getCurrentStatus().withdraw(resp,comment)
def recover( self ):
"""Puts a withdrawn abstract back in the list of submitted abstracts.
HAS NOTHING TO DO WITH THE RECOVERY PROCESS...
"""
#we must clear any track judgement
#self._clearTrackAcceptances()
#self._clearTrackRejections()
#self._clearTrackReallocations()
self.getCurrentStatus().recover() #status change
#if succeeded we must reset the submission date
self._setSubmissionDate( nowutc() )
self._notifyModification()
def getTrackJudgement( self, track ):
if not self.getJudgementHistoryByTrack(track):
return None
lastJud = self.getJudgementHistoryByTrack(track)[0]
        # check whether all judgements for the specified track agree; if not, there is a conflict.
if all(jud.__class__ == lastJud.__class__ for jud in self.getJudgementHistoryByTrack(track)):
return lastJud
return AbstractInConflict(self, track)
def getTrackAcceptances( self ):
try:
if self._trackAcceptances:
pass
except AttributeError, e:
self._trackAcceptances = OOBTree()
return self._trackAcceptances
def getTrackAcceptanceList( self ):
res = []
for trackId in intersection( self._tracks, self.getTrackAcceptances() ):
res.append( self.getTrackAcceptances()[ trackId ] )
return res
def getNumProposedToAccept( self ):
return len( intersection( self._tracks, self.getTrackAcceptances() ) )
def getTrackRejections( self ):
try:
if self._trackRejections:
pass
except AttributeError, e:
self._trackRejections = OOBTree()
return self._trackRejections
def getNumProposedToReject( self ):
return len( intersection( self._tracks, self.getTrackRejections() ) )
def getTrackReallocations( self ):
try:
if self._trackReallocations:
pass
except AttributeError, e:
self._trackReallocations = OOBTree()
return self._trackReallocations
def getNumProposedToReallocate( self ):
return len( intersection( self._tracks, self.getTrackReallocations() ) )
def getNumJudgements( self ):
"""
Returns the number of tracks for which some proposal has been done.
For instance, let's suppose:
Track 1: 2 propose to accept, 3 propose to reject
Track 2: 1 propose to accept
Track 3: None
The result would be 2 (out of 3)
"""
tmp1 = union( self.getTrackAcceptances(), self.getTrackRejections() )
judgements = union( tmp1, self.getTrackReallocations() )
return len( intersection( self._tracks, judgements ) )
def getReallocationTargetedList( self, track ):
#XXX: not optimal
res = []
for r in self.getTrackReallocations().values():
if track in r.getProposedTrackList():
res.append( r )
return res
def getIntCommentList(self):
try:
if self._intComments:
pass
except AttributeError:
self._intComments=PersistentList()
return self._intComments
def addIntComment(self,newComment):
try:
if self._intComments:
pass
except AttributeError:
self._intComments=PersistentList()
try:
if self._intCommentsGen:
pass
except AttributeError:
self._intCommentsGen=Counter()
if newComment in self._intComments:
return
id = newComment.getId()
if id == "":
            id = self._intCommentsGen.newCount()
newComment.includeInAbstract(self, id)
self._intComments.append(newComment)
def getIntCommentById(self,id):
try:
if self._intComments:
pass
except AttributeError:
self._intComments=PersistentList()
for comment in self._intComments:
if id.strip()==comment.getId():
return comment
return None
def clearIntCommentList(self):
while len(self.getIntCommentList()) > 0:
self.removeIntComment(self.getIntCommentList()[0])
def removeIntComment(self,comment):
try:
if self._intComments:
pass
except AttributeError:
self._intComments=PersistentList()
if comment not in self._intComments:
return
self._intComments.remove(comment)
comment.delete()
def recoverIntComment(self, comment):
self.addIntComment(comment)
comment.recover()
def markAsDuplicated(self,responsible,originalAbstract,comments="", track=None, answers=[]):
"""
"""
self.getCurrentStatus().markAsDuplicated(responsible,originalAbstract,comments)
# check if there is a previous judgement of this author in for this abstract in this track
self._removePreviousJud(responsible, track)
if track is not None:
jud = AbstractMarkedAsDuplicated(self, track, responsible, originalAbstract, answers)
jud.setComment( comments )
self._addTrackJudgementToHistorical(jud)
else:
for t in self.getTrackList():
jud = AbstractMarkedAsDuplicated(self, t, responsible, originalAbstract, answers)
jud.setComment( comments )
self._addTrackJudgementToHistorical(jud)
# Update the rating of the abstract
self.updateRating()
def unMarkAsDuplicated(self,responsible,comments="", track=None, answers=[]):
"""
"""
#we must clear any track judgement
self._clearTrackAcceptances()
self._clearTrackRejections()
self._clearTrackReallocations()
#self.getCurrentStatus().recover() #status change
self.getCurrentStatus().unMarkAsDuplicated(responsible,comments)
# check if there is a previous judgement of this author in for this abstract in this track
self._removePreviousJud(responsible, track)
if track is not None:
jud = AbstractUnMarkedAsDuplicated(self, track, responsible, answers)
jud.setComment( comments )
self._addTrackJudgementToHistorical(jud)
else:
for t in self.getTrackList():
jud = AbstractUnMarkedAsDuplicated(self, t, responsible, answers )
jud.setComment( comments )
self._addTrackJudgementToHistorical(jud)
# Update the rating of the abstract
self.updateRating()
self._notifyModification()
def mergeInto(self,responsible,targetAbs,mergeAuthors=False,comments=""):
"""
"""
self.getCurrentStatus().mergeInto(responsible,targetAbs,comments)
targetAbs.addMergeFromAbstract(self)
if mergeAuthors:
#for auth in self.getAuthorList():
# newAuth=targetAbs.newAuthor()
# newAuth.setFromAbstractParticipation(auth)
# if self.isPrimaryAuthor(auth):
# targetAbs.addPrimaryAuthor(newAuth)
for auth in self.getPrimaryAuthorList():
newAuth=targetAbs.newPrimaryAuthor()
newAuth.setFromAbstractParticipation(auth)
for auth in self.getCoAuthorList():
newAuth=targetAbs.newCoAuthor()
newAuth.setFromAbstractParticipation(auth)
def notify(self,notificator,responsible):
"""notifies the abstract responsibles with a matching template
"""
tpl=self.getOwner().getNotifTplForAbstract(self)
if not tpl:
return
notificator.notify(self,tpl)
self.getNotificationLog().addEntry(NotifLogEntry(responsible,tpl))
def unMerge(self,responsible,comments=""):
#we must clear any track judgement
self._clearTrackAcceptances()
self._clearTrackRejections()
self._clearTrackReallocations()
self.getCurrentStatus().getTargetAbstract().removeMergeFromAbstract(self)
self.getCurrentStatus().unMerge(responsible,comments)
self._notifyModification()
def getNotificationLog(self):
try:
if self._notifLog:
pass
except AttributeError:
self._notifLog=NotificationLog(self)
return self._notifLog
# Rating methods
def getRating(self):
""" Get the average rating of the abstract """
try:
if self._rating:
pass
except AttributeError:
self._rating = None
return self._rating
def updateRating(self, scale = None):
"""
        Update the average rating of the abstract, which is calculated as the average of its judgements.
        If a scale (tuple of (lower, higher)) is passed, the judgements are re-adjusted to the new scale.
"""
self._rating = None
# calculate the total valoration
judNum = 0
ratingSum = 0
for track in self.getTrackListSorted():
for jud in self.getJudgementHistoryByTrack(track):
if scale:
# calculate the new values for each judgement
scaleLower, scaleHigher = scale
jud.recalculateJudgementValues(scaleLower, scaleHigher)
if jud.getJudValue() != None: # it means there is a numeric value for the judgement
ratingSum += jud.getJudValue()
judNum += 1
# Calculate the average
if judNum != 0:
self._rating = float(ratingSum) / judNum
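        # Worked example (sketch): judgements rated 2.0 and 4.0 across the
        # abstract's tracks give judNum == 2 and ratingSum == 6.0, so the
        # stored average becomes 3.0; with no numeric judgements the rating
        # stays None.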
def getQuestionsAverage(self):
'''Get the list of questions answered in the reviews for an abstract '''
dTotals = {} # {idQ1: total_value, idQ2: total_value ...}
dTimes = {} # {idQ1: times_answered, idQ2: times_answered}
for track in self.getTrackListSorted():
for jud in self.getJudgementHistoryByTrack(track):
for answer in jud.getAnswers():
# check if the question is in d and sum the answers value or insert in d the new question
if dTotals.has_key(answer.getQuestion().getText()):
dTotals[answer.getQuestion().getText()] += answer.getValue()
dTimes[answer.getQuestion().getText()] += 1
else: # first time
dTotals[answer.getQuestion().getText()] = answer.getValue()
dTimes[answer.getQuestion().getText()] = 1
# get the questions average
questionsAverage = {}
for q, v in dTotals.iteritems():
# insert the element and calculate the average for the value
questionsAverage[q] = float(v)/dTimes[q]
return questionsAverage
def removeAnswersOfQuestion(self, questionId):
''' Remove the answers of the question with questionId value '''
for track in self.getTrackListSorted():
for jud in self.getJudgementHistoryByTrack(track):
jud.removeAnswer(questionId)
def getRatingPerReviewer(self, user, track):
"""
Get the rating of the user for the abstract in the track given.
"""
for jud in self.getJudgementHistoryByTrack(track):
if (jud.getResponsible() == user):
return jud.getJudValue()
def getLastJudgementPerReviewer(self, user, track):
"""
Get the last judgement of the user for the abstract in the track given.
"""
for jud in self.getJudgementHistoryByTrack(track):
if (jud.getResponsible() == user):
return jud
def _getAttachmentsCounter(self):
try:
if self._attachmentsCounter:
pass
except AttributeError:
self._attachmentsCounter = Counter()
return self._attachmentsCounter.newCount()
def setAttachments(self, attachments):
self._attachments = attachments
def getAttachments(self):
try:
if self._attachments:
pass
except AttributeError:
self._attachments = {}
return self._attachments
def getAttachmentById(self, id):
return self.getAttachments().get(id, None)
class AbstractJudgement(AbstractJudgmentLegacyMixin, Persistent):
"""This class represents each of the judgements made by a track about a
certain abstract. Each track for which an abstract is proposed can
make a judgement proposing the abstract to be accepted or rejected.
Different track judgements must be kept so the referees who have to
        take the final decision can review the different opinions from the
track coordinators.
Together with the judgement some useful information like the date when
it was done and the user who did it will be kept.
"""
def __init__( self, abstract, track, responsible, answers ):
self._abstract = abstract
self._track = track
self._setResponsible( responsible )
self._comment = ""
self._date = nowutc()
self._answers = answers
self._judValue = self.calculateJudgementAverage() # judgement average value
self._totalJudValue = self.calculateAnswersTotalValue()
def _setResponsible( self, newRes ):
self._responsible = newRes
def getResponsible( self ):
return self._responsible
def getDate( self ):
return self._date
def setDate(self, date):
self._date = date
def getTrack( self ):
return self._track
def setComment( self, newComment ):
self._comment = newComment.strip()
def getComment( self ):
return self._comment
def getAnswers(self):
try:
if self._answers:
pass
except AttributeError:
self._answers = []
return self._answers
def calculateJudgementAverage(self):
'''Calculate the average value of the given answers'''
result = 0
if (len(self.getAnswers()) != 0):
# convert the values into float types
            floatList = [float(ans.getValue()) for ans in self._answers]
result = sum(floatList) / float(len(floatList)) # calculate the average
else:
# there are no questions
result = None
return result
def getJudValue(self):
try:
if self._judValue:
pass
except AttributeError:
self._judValue = self.calculateJudgementAverage() # judgement average value
return self._judValue
def getTotalJudValue(self):
try:
if self._totalJudValue:
pass
except AttributeError:
self._totalJudValue = self.calculateAnswersTotalValue()
return self._totalJudValue
def calculateAnswersTotalValue(self):
''' Calculate the sum of all the ratings '''
result = 0
for ans in self.getAnswers():
result += ans.getValue()
return result
def recalculateJudgementValues(self, scaleLower, scaleHigher):
''' Update the values of the judgement. This function is called when the scale is changed.'''
for ans in self.getAnswers():
ans.calculateRatingValue(scaleLower, scaleHigher)
self._judValue = self.calculateJudgementAverage()
self._totalJudValue = self.calculateAnswersTotalValue()
def removeAnswer(self, questionId):
''' Remove the current answers of the questionId '''
for ans in self.getAnswers():
if ans.getQuestion().getId() == questionId:
self._answers.remove(ans)
self._notifyModification()
def _notifyModification(self):
self._p_changed = 1
class AbstractAcceptance( AbstractJudgement ):
def __init__(self, abstract, track, responsible, contribType, answers):
AbstractJudgement.__init__(self, abstract, track, responsible, answers)
self._contribType = contribType
def getDate( self ):
return self.as_new.creation_dt
def setDate(self, date):
self.as_new.creation_dt = date
def clone(self,track):
aa = AbstractAcceptance(self._abstract, track, self.getResponsible(), self.getContribType(), self.getAnswers())
return aa
class AbstractRejection( AbstractJudgement ):
def clone(self, track):
arj = AbstractRejection(self._abstract, track, self.getResponsible(), self.getAnswers())
return arj
class AbstractReallocation( AbstractJudgement ):
def __init__(self, abstract, track, responsible, propTracks, answers):
AbstractJudgement.__init__(self, abstract, track, responsible, answers)
self._proposedTracks = PersistentList( propTracks )
def clone(self, track):
arl = AbstractReallocation(self._abstract, track, self.getResponsible(), self.getProposedTrackList(),
self.getAnswers())
return arl
def getProposedTrackList( self ):
return self._proposedTracks
class AbstractInConflict( AbstractJudgement ):
def __init__(self, abstract, track):
        AbstractJudgement.__init__(self, abstract, track, None, [])

    def clone(self, track):
        aic = AbstractInConflict(self._abstract, track)
return aic
class AbstractMarkedAsDuplicated( AbstractJudgement ):
def __init__(self, abstract, track, responsible, originalAbst, answers):
AbstractJudgement.__init__(self, abstract, track, responsible, answers)
self._originalAbst = originalAbst
def clone(self, track):
amad = AbstractMarkedAsDuplicated(self._abstract, track, self.getResponsible(), self.getOriginalAbstract(),
self.getAnswers())
return amad
def getOriginalAbstract(self):
return self._originalAbst
class AbstractUnMarkedAsDuplicated( AbstractJudgement ):
def clone(self, track):
        auad = AbstractUnMarkedAsDuplicated(self._abstract, track, self.getResponsible(), self.getAnswers())
return auad
class AbstractStatus( Persistent ):
"""This class represents any of the status in which an abstract can be.
From the moment they are submitted (and therefore created), abstracts
can go throuugh different status each having a different meaning.
As there can be many status, the transitions between them are quite
complex and as the system evolves we could require to add or delete
new status the "Status" pattern is applied. This is the base class.
Apart from giving information about the status of an abstract, this
class is responsible to store information about how the status was
reached (who provoke the transition, when, ...).
"""
_name = ""
def __init__( self, abstract ):
self._setAbstract( abstract )
self._setDate( nowutc() )
def getName(self):
return self._name
def _setAbstract( self, abs ):
self._abstract = abs
def getAbstract( self ):
return self._abstract
def _setDate( self, date ):
self._date = date
def getDate( self ):
return self._date
def accept(self, responsible, destTrack, type_, comments=""):
"""
"""
abstract = self.getAbstract()
s = AbstractStatusAccepted(abstract, responsible, comments)
abstract.as_new.accepted_track_id = destTrack.id if destTrack else None
abstract.as_new.accepted_type = type_
self.getAbstract().setCurrentStatus(s)
def reject( self, responsible, comments = "" ):
"""
"""
s = AbstractStatusRejected(self.getAbstract(), responsible, comments)
self.getAbstract().setCurrentStatus(s)
def _getStatusClass( self ):
"""
"""
numAccepts = self._abstract.getNumProposedToAccept() # number of tracks that have at least one proposal to accept
numReallocate = self._abstract.getNumProposedToReallocate() # number of tracks that have at least one proposal to reallocate
numJudgements = self._abstract.getNumJudgements() # number of tracks that have at least one judgement
if numJudgements > 0:
# If at least one track status is in conflict the abstract status is in conflict too.
if any(isinstance(self._abstract.getTrackJudgement(track), AbstractInConflict) for track in self._abstract.getTrackList()):
return AbstractStatusInConflict
numTracks = self._abstract.getNumTracks() # number of tracks that this abstract has assigned
if numTracks == numJudgements: # Do we have judgements for all tracks?
if numReallocate == numTracks:
return AbstractStatusInConflict
elif numAccepts == 1:
return AbstractStatusProposedToAccept
elif numAccepts == 0:
return AbstractStatusProposedToReject
return AbstractStatusInConflict
return AbstractStatusUnderReview
return AbstractStatusSubmitted
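        # Summary of the mapping above: any in-conflict track -> InConflict;
        # judgements on every track -> ProposedToAccept if exactly one
        # acceptance, ProposedToReject if none, otherwise InConflict
        # (including the all-reallocated case); some but not all tracks
        # judged -> UnderReview; no judgements at all -> Submitted.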
def update( self ):
"""
"""
newStatusClass = self._getStatusClass()
if self.__class__ != newStatusClass:
self.getAbstract().setCurrentStatus( newStatusClass( self._abstract ) )
def proposeToAccept( self ):
"""
"""
s = self._getStatusClass()( self._abstract )
self.getAbstract().setCurrentStatus( s )
def proposeToReject( self ):
"""
"""
s = self._getStatusClass()( self._abstract )
self.getAbstract().setCurrentStatus( s )
def proposeToReallocate( self ):
"""
"""
s = self._getStatusClass()( self._abstract )
self.getAbstract().setCurrentStatus( s )
def withdraw(self,resp,comments=""):
"""
"""
s=AbstractStatusWithdrawn(self.getAbstract(), resp, self, comments)
self.getAbstract().setCurrentStatus(s)
def recover( self ):
"""
"""
raise MaKaCError( _("only withdrawn abstracts can be recovered"))
def markAsDuplicated(self,responsible,originalAbs,comments=""):
"""
"""
if self.getAbstract()==originalAbs:
raise MaKaCError( _("the original abstract is the same as the duplicated one"))
if isinstance(originalAbs.getCurrentStatus(),AbstractStatusDuplicated):
raise MaKaCError( _("cannot set as original abstract one which is already marked as duplicated"))
s=AbstractStatusDuplicated(self.getAbstract(),responsible,originalAbs,comments)
self.getAbstract().setCurrentStatus(s)
def unMarkAsDuplicated(self,responsible,comments=""):
"""
"""
raise MaKaCError( _("Only duplicated abstract can be unmark as duplicated"))
def mergeInto(self,responsible,targetAbs,comments=""):
"""
"""
if self.getAbstract()==targetAbs:
raise MaKaCError( _("An abstract cannot be merged into itself"))
if targetAbs.getCurrentStatus().__class__ not in [AbstractStatusSubmitted,AbstractStatusUnderReview,AbstractStatusProposedToAccept,AbstractStatusProposedToReject,AbstractStatusInConflict]:
raise MaKaCError(_("Target abstract is in a status which cannot receive mergings"))
s=AbstractStatusMerged(self.getAbstract(),responsible,targetAbs,comments)
self.getAbstract().setCurrentStatus(s)
def unMerge(self,responsible,comments=""):
"""
"""
raise MaKaCError( _("Only merged abstracts can be unmerged"))
def getComments(self):
return ""
class AbstractStatusSubmitted( AbstractStatus ):
"""
"""
def clone(self,abstract):
ass = AbstractStatusSubmitted(abstract)
return ass
def update( self ):
#if an abstract that has been submitted has no judgement it
# must remain in the submitted status
if self._abstract.getNumJudgements() == 0:
return
AbstractStatus.update( self )
class AbstractStatusAccepted(AbstractStatusAcceptedLegacyMixin, AbstractStatus):
"""
"""
def __init__(self, abstract, responsible, comments=""):
AbstractStatus.__init__(self, abstract)
self._setResponsible(responsible)
self._setComments(comments)
def clone(self, abstract):
return AbstractStatusAccepted(abstract, self.getResponsible(), self.getComments())
def _setResponsible( self, res ):
self._responsible = res
def getResponsible( self ):
return self._responsible
def _setComments( self, comments ):
self._comments = str( comments ).strip()
def getComments( self ):
try:
if self._comments:
pass
except AttributeError:
self._comments = ""
return self._comments
def update( self ):
return
def accept(self,responsible,destTrack,type,comments="" ):
raise MaKaCError( _("Cannot accept an abstract which is already accepted"))
def reject( self, responsible, comments="" ):
raise MaKaCError( _("Cannot reject an abstract which is already accepted"))
def proposeToAccept( self ):
raise MaKaCError( _("Cannot propose for acceptance an abstract which is already accepted"))
def proposeToReject( self ):
raise MaKaCError( _("Cannot propose for rejection an abstract which is already accepted"))
def proposeToReallocate( self ):
raise MaKaCError( _("Cannot propose for reallocation an abstract which is already accepted"))
def markAsDuplicated(self,responsible,originalAbs,comments=""):
raise MaKaCError( _("Cannot mark as duplicated an abstract which is accepted"))
def unMarkAsDuplicated(self,responsible,comments=""):
"""
"""
raise MaKaCError( _("Only duplicated abstract can be unmark as duplicated"))
def mergeInto(self,responsible,targetAbs,comments=""):
raise MaKaCError( _("Cannot merge an abstract which is already accepted"))
class AbstractStatusRejected( AbstractStatus ):
"""
"""
def __init__( self, abstract, responsible, comments = "" ):
AbstractStatus.__init__( self, abstract )
self._setResponsible( responsible )
self._setComments( comments )
def clone(self,abstract):
asr = AbstractStatusRejected(abstract, self.getResponsible(), self.getComments())
return asr
def _setResponsible( self, res ):
self._responsible = res
def getResponsible( self ):
return self._responsible
def _setComments( self, comments ):
self._comments = str( comments ).strip()
def getComments( self ):
try:
if self._comments:
pass
except AttributeError:
self._comments = ""
return self._comments
def update( self ):
return
def reject( self, responsible, comments="" ):
raise MaKaCError( _("Cannot reject an abstract which is already rejected"))
def proposeToAccept( self ):
raise MaKaCError( _("Cannot propose for acceptance an abstract which is already rejected"))
def proposeToReject( self ):
raise MaKaCError( _("Cannot propose for rejection an abstract which is already rejected"))
def proposeToReallocate( self ):
raise MaKaCError( _("Cannot propose for reallocation an abstract which is already rejected"))
def withdraw(self,resp,comments=""):
raise MaKaCError( _("Cannot withdraw a REJECTED abstract"))
def markAsDuplicated(self,responsible,originalAbs,comments=""):
raise MaKaCError( _("Cannot mark as duplicated an abstract which is rejected"))
def unMarkAsDuplicated(self,responsible,comments=""):
"""
"""
raise MaKaCError( _("Only duplicated abstract can be unmark as duplicated"))
def mergeInto(self,responsible,targetAbs,comments=""):
raise MaKaCError( _("Cannot merge an abstract which is rejected"))
class AbstractStatusUnderReview( AbstractStatus ):
"""
"""
def clone(self,abstract):
asur = AbstractStatusUnderReview(abstract)
return asur
class AbstractStatusProposedToAccept( AbstractStatus ):
"""
"""
def clone(self, abstract):
aspta = AbstractStatusProposedToAccept(abstract)
return aspta
def getTrack(self):
jud=self.getAbstract().getTrackAcceptanceList()[0]
return jud.getTrack()
def getType(self):
jud=self.getAbstract().getTrackAcceptanceList()[0]
return jud.getContribType()
class AbstractStatusProposedToReject( AbstractStatus ):
"""
"""
def clone(self, abstract):
asptr = AbstractStatusProposedToReject(abstract)
return asptr
class AbstractStatusInConflict( AbstractStatus ):
"""
"""
def clone(self,abstract):
asic = AbstractStatusInConflict(abstract)
return asic
class AbstractStatusWithdrawn(AbstractStatus):
"""
"""
def __init__(self,abstract,responsible, prevStatus,comments=""):
AbstractStatus.__init__(self,abstract)
self._setComments(comments)
self._setResponsible(responsible)
self._prevStatus=prevStatus
def clone(self,abstract):
        asw = AbstractStatusWithdrawn(abstract, self.getResponsible(), self.getPrevStatus(), self.getComments())
return asw
def _setResponsible(self,newResp):
self._responsible=newResp
def getResponsible(self):
try:
if self._responsible:
pass
except AttributeError,e:
self._responsible=self._abstract.getSubmitter().getAvatar()
return self._responsible
def getPrevStatus(self):
try:
if self._prevStatus:
pass
except AttributeError,e:
self._prevStatus=None
return self._prevStatus
def _setComments( self, comments ):
self._comments = str( comments ).strip()
def getComments( self ):
return self._comments
def update( self ):
return
def accept(self,responsible,destTrack,type,comments=""):
raise MaKaCError( _("Cannot accept an abstract wich is withdrawn"))
def reject( self, responsible, comments="" ):
raise MaKaCError( _("Cannot reject an abstract which is withdrawn"))
def proposeToAccept( self ):
raise MaKaCError( _("Cannot propose for acceptance an abstract which withdrawn"))
def proposeToReject( self ):
raise MaKaCError( _("Cannot propose for rejection an abstract which is withdrawn"))
def recover(self):
abstract = self.getAbstract()
contrib = abstract.as_new.contribution
if self.getPrevStatus() is None:
# reset all the judgments
            # the track judgements live on the abstract, not on the status
            abstract._clearTrackAcceptances()
            abstract._clearTrackRejections()
            abstract._clearTrackReallocations()
# setting the status
if contrib is None:
s = AbstractStatusSubmitted(abstract)
else:
s = AbstractStatusAccepted(abstract, self.getResponsible(), "")
else:
if contrib is not None and not isinstance(self.getPrevStatus(), AbstractStatusAccepted):
s = AbstractStatusAccepted(abstract, self.getResponsible(), "")
else:
s = self.getPrevStatus()
        abstract.setCurrentStatus(s)
        if contrib is not None:
            abstract.as_new.accepted_track_id = int(contrib.track.id) if contrib.track else None
            abstract.as_new.accepted_type = contrib.type
def markAsDuplicated(self,responsible,originalAbs,comments=""):
raise MaKaCError( _("Cannot mark as duplicated an abstract which is withdrawn"))
def unMarkAsDuplicated(self,responsible,comments=""):
"""
"""
raise MaKaCError( _("Only duplicated abstract can be unmark as duplicated"))
def mergeInto(self,responsible,targetAbs,comments=""):
raise MaKaCError( _("Cannot merge an abstract which is withdrawn"))
def withdraw(self,resp,comments=""):
raise MaKaCError( _("This abstract is already withdrawn"))
class AbstractStatusDuplicated(AbstractStatus):
"""
"""
def __init__( self,abstract,responsible,originalAbstract,comments=""):
AbstractStatus.__init__(self,abstract)
self._setResponsible(responsible)
self._setComments(comments)
self._setOriginalAbstract(originalAbstract)
def clone(self, abstract):
asd = AbstractStatusDuplicated(abstract,self.getResponsible(),self.getOriginal(),self.getComments())
return asd
def _setResponsible( self, res ):
self._responsible = res
def getResponsible(self):
return self._responsible
def _setComments( self, comments ):
self._comments = str( comments ).strip()
def getComments( self ):
return self._comments
def _setOriginalAbstract(self,abs):
self._original=abs
def getOriginal(self):
return self._original
def update( self ):
return
def reject( self, responsible, comments="" ):
raise MaKaCError( _("Cannot reject an abstract which is duplicated"))
def proposeToAccept( self ):
raise MaKaCError( _("Cannot propose for acceptance an abstract which is duplicated"))
def proposeToReject( self ):
raise MaKaCError( _("Cannot propose for rejection an abstract which is duplicated"))
def proposeToReallocate( self ):
raise MaKaCError( _("Cannot propose for reallocation an abstract which is duplicated"))
def withdraw(self,resp,comments=""):
raise MaKaCError( _("Cannot withdraw a duplicated abstract"))
def markAsDuplicated(self,responsible,originalAbs,comments=""):
raise MaKaCError( _("This abstract is already duplicated"))
def unMarkAsDuplicated(self,responsible,comments=""):
s = AbstractStatusSubmitted( self.getAbstract() )
self.getAbstract().setCurrentStatus( s )
def mergeInto(self,responsible,targetAbs,comments=""):
raise MaKaCError( _("Cannot merge an abstract which is marked as a duplicate"))
class AbstractStatusMerged(AbstractStatus):
"""
"""
def __init__(self,abstract,responsible,targetAbstract,comments=""):
AbstractStatus.__init__(self,abstract)
self._setResponsible(responsible)
self._setComments(comments)
self._setTargetAbstract(targetAbstract)
def clone(self,abstract):
asm = AbstractStatusMerged(abstract,self.getResponsible(),self.getTargetAbstract(),self.getComments())
return asm
def _setResponsible( self, res ):
self._responsible = res
def getResponsible( self ):
return self._responsible
def _setComments( self, comments ):
self._comments = str( comments ).strip()
def getComments( self ):
return self._comments
def _setTargetAbstract(self,abstract):
self._target=abstract
def getTargetAbstract(self):
return self._target
def update( self ):
return
def reject( self, responsible, comments="" ):
raise MaKaCError( _("Cannot reject an abstract which is merged into another one"))
def proposeToAccept( self ):
raise MaKaCError( _("Cannot propose for acceptance an abstract which is merged into another one"))
def proposeToReject( self ):
raise MaKaCError( _("Cannot propose for rejection an abstract which is merged into another one"))
def proposeToReallocate( self ):
raise MaKaCError( _("Cannot propose for reallocation an abstract which is merged into another one"))
def withdraw(self,resp,comments=""):
raise MaKaCError( _("Cannot withdraw an abstract which is merged into another one"))
def markAsDuplicated(self,responsible,originalAbs,comments=""):
raise MaKaCError( _("Cannot mark as duplicated an abstract which is merged into another one"))
def unMarkAsDuplicated(self,responsible,comments=""):
"""
"""
raise MaKaCError( _("Only duplicated abstract can be unmark as duplicated"))
def mergeInto(self,responsible,target,comments=""):
raise MaKaCError( _("This abstract is already merged into another one"))
def unMerge(self,responsible,comments=""):
s = AbstractStatusSubmitted( self.getAbstract() )
self.getAbstract().setCurrentStatus( s )
class AbstractStatusNone(AbstractStatus):
# This is a special status we assign to abstracts that are put in the trash can.
def __init__(self,abstract):
AbstractStatus.__init__(self,abstract)
def clone(self,abstract):
asn = AbstractStatusNone(abstract)
return asn
class NotificationTemplate(Persistent):
def __init__(self):
self._owner=None
self._id=""
self._name=""
self._description=""
self._tplSubject=""
self._tplBody=""
self._fromAddr = ""
self._CAasCCAddr = False
self._ccAddrList=PersistentList()
self._toAddrs = PersistentList()
self._conditions=PersistentList()
self._toAddrGenerator=Counter()
self._condGenerator=Counter()
def clone(self):
tpl = NotificationTemplate()
tpl.setName(self.getName())
tpl.setDescription(self.getDescription())
        # the stored subject/body are already parsed, so copy them directly
        # instead of re-parsing them through setTplSubject/setTplBody
        tpl._tplSubject = self.getTplSubject()
        tpl._tplBody = self.getTplBody()
tpl.setFromAddr(self.getFromAddr())
tpl.setCAasCCAddr(self.getCAasCCAddr())
for cc in self.getCCAddrList() :
tpl.addCCAddr(cc)
for to in self.getToAddrList() :
tpl.addToAddr(to)
for con in self.getConditionList() :
            tpl.addCondition(con.clone(self.getConference(), tpl))
return tpl
def delete(self):
self.clearToAddrs()
self.clearCCAddrList()
self.clearConditionList()
TrashCanManager().add(self)
def recover(self):
TrashCanManager().remove(self)
## def getResponsible( self ):
## return self._responsible
##
## def _setComments( self, comments ):
## self._comments = str( comments ).strip()
##
## def getComments( self ):
## return self._comments
##
## def _setOriginalAbstract(self,abstract):
## self._original=abstract
def canModify(self, aw_or_user):
return self.getConference().canModify(aw_or_user)
def getLocator(self):
loc = self.getOwner().getConference().getLocator()
loc["notifTplId"] = self._id
return loc
def getConference(self):
return self._owner.getConference()
def includeInOwner(self,owner,id):
self._owner=owner
self._id=id
def getOwner(self):
return self._owner
def getId(self):
return self._id
def setName(self,newName):
self._name=newName.strip()
def getName(self):
return self._name
def setDescription(self,newDesc):
self._description=newDesc.strip()
def getDescription(self):
return self._description
def setTplSubject(self,newSubject, varList):
self._tplSubject=self.parseTplContent(newSubject, varList).strip()
def getTplSubject(self):
return self._tplSubject
def getTplSubjectShow(self, varList):
return self.parseTplContentUndo(self._tplSubject, varList)
def setTplBody(self,newBody, varList):
self._tplBody=self.parseTplContent(newBody, varList).strip()
def getTplBody(self):
return self._tplBody
def getTplBodyShow(self, varList):
return self.parseTplContentUndo(self._tplBody, varList)
def getCCAddrList(self):
try:
if self._ccAddrList:
pass
except AttributeError:
self._ccAddrList=PersistentList()
return self._ccAddrList
def addCCAddr(self,newAddr):
try:
if self._ccAddrList:
pass
except AttributeError:
self._ccAddrList=PersistentList()
ccAddr=newAddr.strip()
if ccAddr!="" and ccAddr not in self._ccAddrList:
self._ccAddrList.append(ccAddr)
def setCCAddrList(self,l):
self.clearCCAddrList()
for addr in l:
self.addCCAddr(addr)
def setCAasCCAddr(self, CAasCCAddr):
self._CAasCCAddr = CAasCCAddr
def getCAasCCAddr(self):
try:
if self._CAasCCAddr:
pass
except AttributeError:
self._CAasCCAddr = False
return self._CAasCCAddr
def clearCCAddrList(self):
self._ccAddrList=PersistentList()
def getFromAddr(self):
try:
return self._fromAddr
except AttributeError:
self._fromAddr = self._owner.getConference().getSupportInfo().getEmail()
return self._fromAddr
def setFromAddr(self, addr):
self._fromAddr = addr
def addToAddr(self,toAddr):
"""
"""
if self.hasToAddr(toAddr.__class__):
return
try:
if self._toAddrGenerator:
pass
except AttributeError, e:
self._toAddrGenerator = Counter()
id = toAddr.getId()
if id == -1:
id = int(self._toAddrGenerator.newCount())
toAddr.includeInTpl(self,id)
self.getToAddrList().append(toAddr)
def removeToAddr(self,toAddr):
"""
"""
if not self.hasToAddr(toAddr.__class__):
return
self.getToAddrList().remove(toAddr)
toAddr.includeInTpl(None,toAddr.getId())
toAddr.delete()
def recoverToAddr(self, toAddr):
self.addToAddr(toAddr)
toAddr.recover()
def getToAddrs(self, abs):
users = []
for toAddr in self.getToAddrList():
users += toAddr.getToAddrList(abs)
return users
def getToAddrList(self):
"""
"""
try:
if self._toAddrs:
pass
except AttributeError, e:
self._toAddrs = PersistentList()
return self._toAddrs
def getToAddrById(self,id):
"""
"""
for toAddr in self.getToAddrList():
if toAddr.getId()==int(id):
return toAddr
return None
def hasToAddr(self,toAddrKlass):
"""Returns True if the TPL contains a "toAddr" which class is "toAddrKlass"
"""
for toAddr in self.getToAddrList():
if toAddr.__class__ == toAddrKlass:
return True
return False
def clearToAddrs(self):
while(len(self.getToAddrList())>0):
self.removeToAddr(self.getToAddrList()[0])
def addCondition(self,cond):
"""
"""
if cond in self._conditions:
return
id = cond.getId()
if id == -1:
id = int(self._condGenerator.newCount())
cond.includeInTpl(self, id)
self._conditions.append(cond)
def removeCondition(self,cond):
"""
"""
if cond not in self._conditions:
return
self._conditions.remove(cond)
cond.delete()
def recoverCondition(self, cond):
self.addCondition(cond)
cond.recover()
def getConditionList(self):
"""
"""
return self._conditions
def getConditionById(self,id):
"""
"""
for cond in self._conditions:
if cond.getId()==int(id):
return cond
return None
def clearConditionList(self):
while(len(self.getConditionList())>0):
self.removeCondition(self.getConditionList()[0])
def satisfies(self,abs):
"""
"""
for cond in self._conditions:
if cond.satisfies(abs):
return True
return False
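        # Note: the conditions combine with OR -- a single satisfied
        # condition is enough for this template to apply to an abstract.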
def parseTplContent(self, content, varList):
# replace the % in order to avoid exceptions
result = content.replace("%", "%%")
        # find the template vars and turn them into python string-format expressions
for var in varList:
result = result.replace("{"+var.getName()+"}", "%("+var.getName()+")s")
return result
def parseTplContentUndo(self, content, varList):
        # show the content without "%()" wrappers and with "%" instead of "%%"; the stored value is not modified
result = content
for var in varList:
result = result.replace("%("+var.getName()+")s", "{"+var.getName()+"}")
# replace the %% by %
result = result.replace("%%", "%")
return result
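        # Round-trip sketch (variable name hypothetical): for a template var
        # "abstract_title", parseTplContent turns "{abstract_title}" into
        # "%(abstract_title)s" and doubles any literal "%", while
        # parseTplContentUndo reverses both substitutions for display.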
def getModifKey( self ):
return self.getConference().getModifKey()
class NotifTplToAddr(Persistent):
"""
"""
def __init__(self):
self._tpl=None
self._id=-1
def clone(self):
ntta = NotifTplToAddr()
return ntta
def delete(self):
TrashCanManager().add(self)
def recover(self):
TrashCanManager().remove(self)
def includeInTpl(self,newTpl,newId):
self._tpl=newTpl
self._id=newId
def getTpl(self):
return self._tpl
def getId(self):
return self._id
def getToAddrList(self,absList):
"""
Return a list with all the emails for a group.
"""
return []
class NotifTplToAddrSubmitter(NotifTplToAddr):
def getToAddrList(self,abs):
l = []
l.append(abs.getSubmitter())
return l
def clone(self):
nttas = NotifTplToAddrSubmitter()
return nttas
class NotifTplToAddrPrimaryAuthors(NotifTplToAddr):
def getToAddrList(self,abs):
l = []
for pa in abs.getPrimaryAuthorList():
l.append(pa)
return l
def clone(self):
nttapa = NotifTplToAddrPrimaryAuthors()
return nttapa
class NotifTplCondition(Persistent):
"""
"""
def __init__(self):
self._tpl=None
self._id=-1
    def clone(self, conference, template):
        con = NotifTplCondition()
        con.includeInTpl(template, self.getId())
        return con
def delete(self):
TrashCanManager().add(self)
def recover(self):
TrashCanManager().remove(self)
def includeInTpl(self,newTpl,newId):
self._tpl=newTpl
self._id=newId
def getTpl(self):
return self._tpl
def getId(self):
return self._id
def satisfies(self,abs):
return True
class NotifTplCondAccepted(NotifTplCondition):
def __init__(self, track="--any--", contribType="--any--"):
NotifTplCondition.__init__(self)
self._track = track
self._contrib_type_id = contribType if isinstance(contribType, basestring) else contribType.id
def clone(self, conference, template):
ntca = NotifTplCondAccepted()
        if self.getTrack() != "--any--":
            for newtrack in conference.getTrackList():
                if newtrack.getTitle() == self.getTrack().getTitle():
                    ntca.setTrack(newtrack)
for newtype in conference.as_event.contribution_types:
if newtype.name == self.getContribType():
ntca.setContribType(newtype)
return ntca
def setContribType(self, ct="--any--"):
self._contrib_type_id = '--any--' if ct == '--any--' else ct.id
def getContribType(self):
# Ugly, but only way to handle '--any--'
return (ContributionType.get(self._contrib_type_id) if isinstance(self._contrib_type_id, int)
else self._contrib_type_id)
def setTrack(self, tr="--any--"):
self._track = tr
def getTrack(self):
try:
if self._track:
pass
except AttributeError:
self._track="--any--"
return self._track
    def _satisfiesContribType(self, abs_wrap):
        abstract_type = abs_wrap.getCurrentStatus().getAbstract().as_new.accepted_type
        if self._contrib_type_id == "--any--":
            return True
        else:
            if self._contrib_type_id == '--none--':
                return not abstract_type
            if not abstract_type:
                return False
            # TODO: use ids in db, instead of objects!
            return abstract_type.id == self._contrib_type_id

    def _satisfiesTrack(self, abs_wrap):
        accepted_track = abs_wrap.getCurrentStatus().getTrack()
        target_track = self.getTrack()
        if target_track == "--any--":
            return True
        else:
            if not target_track or target_track == '--none--':
                return not bool(accepted_track)
            return accepted_track == target_track

    def satisfies(self, abs):
        if not isinstance(abs.getCurrentStatus(), AbstractStatusAccepted):
            return False
        else:
            return self._satisfiesContribType(abs) and self._satisfiesTrack(abs)
class NotifTplCondRejected(NotifTplCondition):
def satisfies(self,abs):
return isinstance(abs.getCurrentStatus(),AbstractStatusRejected)
def clone(self, conference, template):
ntcr = NotifTplCondRejected()
        ntcr.includeInTpl(template, self.getId())
return ntcr
class NotifTplCondMerged(NotifTplCondition):
def __init__(self, track=None, contrib_type=None):
NotifTplCondition.__init__(self)
self._track = track
        self._contrib_type_id = contrib_type if contrib_type is None or isinstance(contrib_type, basestring) else contrib_type.id
def satisfies(self, abstract):
if not isinstance(abstract.getCurrentStatus(), AbstractStatusMerged):
return False
else:
return self._satisfiesContribType(abstract) and self._satisfiesTrack(abstract)
def _satisfiesContribType(self, abs_wrap):
if self._contrib_type_id == '--any--':
return True
else:
abstract_type = abs_wrap.getCurrentStatus().getAbstract().as_new.type
if self._contrib_type_id == '--none--':
return not abstract_type
if not abstract_type:
return False
# TODO: use ids in db, instead of objects!
return abstract_type.id == int(self._contrib_type_id)
def _satisfiesTrack(self, abs_wrap):
target_track = self.getTrack()
if target_track == "--any--":
return True
else:
tracks = abs_wrap.getCurrentStatus().getAbstract().getTrackListSorted()
if not target_track or target_track == '--none--':
return not tracks
return target_track in tracks
def getTrack(self):
return self._track
def getContribType(self):
# Ugly, but only way to handle '--any--'
return (ContributionType.get(self._contrib_type_id) if isinstance(self._contrib_type_id, int)
else self._contrib_type_id)
    def clone(self, conference, template):
        ntcm = NotifTplCondMerged()
        # NotifTplCondMerged defines no setTrack/setContribType, so assign
        # the attributes directly; also guard against the default None track
        if self.getTrack() is not None:
            for newtrack in conference.getTrackList():
                if newtrack.getTitle() == self.getTrack().getTitle():
                    ntcm._track = newtrack
                    break
        for newtype in conference.as_event.contribution_types:
            if newtype.name == self.getContribType():
                ntcm._contrib_type_id = newtype.id
                break
        return ntcm
class NotificationLog(Persistent):
def __init__(self,abstract):
self._abstract=abstract
self._entries=PersistentList()
def getAbstract(self):
return self._abstract
def addEntry(self,newEntry):
if newEntry!=None and newEntry not in self._entries:
self._entries.append(newEntry)
def getEntryList(self):
return self._entries
    # The 3 following methods are used only for recovery purposes:
def removeEntry(self, entry):
if entry!=None and entry in self._entries:
self._entries.remove(entry)
entry.delete()
def recoverEntry(self, entry):
self.addEntry(entry)
entry.recover()
def clearEntryList(self):
while len(self.getEntryList()) > 0:
self.removeEntry(self.getEntryList()[0])
# -----------------------------------------------------------
class NotifLogEntry(Persistent):
def __init__(self,responsible,tpl):
self._setDate(nowutc())
self._setResponsible(responsible)
self._setTpl(tpl)
def _setDate(self,newDate):
self._date=newDate
def getDate(self):
return self._date
def _setResponsible(self,newResp):
self._responsible=newResp
def getResponsible(self):
return self._responsible
def _setTpl(self,newTpl):
self._tpl=newTpl
def getTpl(self):
return self._tpl
def delete(self):
TrashCanManager().add(self)
def recover(self):
        TrashCanManager().remove(self)
def getSurName(self):
return self._surName
|
<|file_name|>async_steps35.py<|end_file_name|><|fim▁begin|># -- REQUIRES: Python >= 3.5
from behave import step
from behave.api.async_step import async_run_until_complete<|fim▁hole|>@step('an async-step waits {duration:f} seconds')
@async_run_until_complete
async def step_async_step_waits_seconds_py35(context, duration):
"""Simple example of a coroutine as async-step (in Python 3.5)"""
await asyncio.sleep(duration)<|fim▁end|> | import asyncio
|
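# Usage sketch: a feature file would exercise the step above with a line
# such as "When an async-step waits 0.3 seconds"; the
# async_run_until_complete decorator runs the coroutine on an event loop
# until the sleep completes.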
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># finances.settings
# The common Django settings for the Finance Analysis project.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Tue Jan 19 20:43:16 2016 -0500
#
# Copyright (C) 2015 Bengfort.com
# For license information, see LICENSE.txt
#
# ID: settings.py [] [email protected] $
"""
Django settings for finances project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
<|fim▁hole|>
import os
import dj_database_url
##########################################################################
## Helper function for environmental settings
##########################################################################
def environ_setting(name, default=None):
"""
    Fetch a setting from the environment; if it is not found and no default
    is given, the setting is ImproperlyConfigured.
"""
if name not in os.environ and default is None:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(
"The {0} ENVVAR is not set.".format(name)
)
return os.environ.get(name, default)
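# Usage sketch: required settings raise ImproperlyConfigured when the
# environment variable is missing, while optional ones fall back to the
# given default, e.g.
#   SECRET_KEY = environ_setting("SECRET_KEY")           # required
#   DEBUG_MODE = environ_setting("DEBUG_MODE", "false")  # optional; hypothetical variable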
##########################################################################
## Build Paths inside of project with os.path.join
##########################################################################
PROJECT = os.path.dirname(os.path.abspath(__file__))
REPOSITORY = os.path.dirname(PROJECT)
##########################################################################
## Database Settings
##########################################################################
## Database
## https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(),
}
DATABASES['default']['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
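# dj_database_url.config() reads the DATABASE_URL environment variable; the
# engine is then pinned to the psycopg2 PostgreSQL backend regardless of the
# URL scheme.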
##########################################################################
## Secret settings - do not store!
##########################################################################
## SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = environ_setting("SECRET_KEY")
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
##########################################################################
## Runtime settings
##########################################################################
## Debugging settings
## SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
## Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
## Hosts
ALLOWED_HOSTS = ["*"]
INTERNAL_IPS = ('127.0.0.1', '198.168.1.10')
## WSGI Configuration
ROOT_URLCONF = 'finances.urls'
WSGI_APPLICATION = 'finances.wsgi.application'
## Application definition
INSTALLED_APPS = [
# Django apps
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Third party apps
'django_gravatar',
]
## Request Handling
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
## Internationalization
## https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
##########################################################################
## Content (Static, Media, Templates)
##########################################################################
## Templates
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(PROJECT, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
## Static files (CSS, JavaScript, Images)
## https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/assets/'
STATICFILES_DIRS = (
os.path.join(PROJECT, 'assets'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
## Content
MEDIA_ROOT = os.path.join(PROJECT, 'media')
STATIC_ROOT = 'staticfiles'
##########################################################################
## Logging and Error Reporting
##########################################################################
ADMINS = (
('Benjamin Bengfort', '[email protected]'),
)
SERVER_EMAIL = 'Dakota Server <[email protected]>'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = environ_setting("EMAIL_HOST_USER")
EMAIL_HOST_PASSWORD = environ_setting("EMAIL_HOST_PASSWORD")
EMAIL_PORT = 587
EMAIL_SUBJECT_PREFIX = '[FINANCES] '<|fim▁end|> | ##########################################################################
## Imports
########################################################################## |
<|file_name|>FATSuite.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.jpa.tests.spec10.entity;
import org.junit.ClassRule;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
import com.ibm.ws.jpa.tests.spec10.entity.tests.AbstractFATSuite;
import com.ibm.ws.jpa.tests.spec10.entity.tests.Entity_EJB;
import com.ibm.ws.jpa.tests.spec10.entity.tests.Entity_Web;
import componenttest.rules.repeater.FeatureReplacementAction;
import componenttest.rules.repeater.RepeatTests;
@RunWith(Suite.class)
@SuiteClasses({
Entity_EJB.class,
Entity_Web.class,
componenttest.custom.junit.runner.AlwaysPassesTest.class
})
public class FATSuite extends AbstractFATSuite {
@ClassRule
public static RepeatTests r = RepeatTests.with(FeatureReplacementAction.EE9_FEATURES());
}<|fim▁end|> | /*******************************************************************************
* Copyright (c) 2021 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials |
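// Note: the RepeatTests class rule runs the suite members with the
// Jakarta EE 9 feature replacement (FeatureReplacementAction.EE9_FEATURES)
// applied to the server features.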
<|file_name|>test_coordination_geometries.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__author__ = 'waroquiers'
import unittest
import numpy as np
from pymatgen.util.testing import PymatgenTest
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import ExplicitPermutationsAlgorithm
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import SeparationPlane
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import AllCoordinationGeometries
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import CoordinationGeometry
allcg = AllCoordinationGeometries()
class FakeSite:
def __init__(self, coords):
self.coords = coords
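# FakeSite is a minimal stand-in for a pymatgen site: the geometry helpers
# exercised below only read the .coords attribute.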
class CoordinationGeometriesTest(PymatgenTest):
def test_algorithms(self):
expl_algo = ExplicitPermutationsAlgorithm(permutations=[[0, 1, 2], [1, 2, 3]])
expl_algo2 = ExplicitPermutationsAlgorithm.from_dict(expl_algo.as_dict)
self.assertEqual(expl_algo.permutations, expl_algo2.permutations)
sepplane_algos_oct = allcg['O:6'].algorithms
self.assertEqual(len(sepplane_algos_oct[0].safe_separation_permutations()), 24)
self.assertEqual(len(sepplane_algos_oct[1].safe_separation_permutations()), 36)
sepplane_algos_oct_0 = SeparationPlane.from_dict(sepplane_algos_oct[0].as_dict)
self.assertEqual(sepplane_algos_oct[0].plane_points, sepplane_algos_oct_0.plane_points)
self.assertEqual(sepplane_algos_oct[0].mirror_plane, sepplane_algos_oct_0.mirror_plane)
self.assertEqual(sepplane_algos_oct[0].ordered_plane, sepplane_algos_oct_0.ordered_plane)
self.assertEqual(sepplane_algos_oct[0].point_groups, sepplane_algos_oct_0.point_groups)
self.assertEqual(sepplane_algos_oct[0].ordered_point_groups, sepplane_algos_oct_0.ordered_point_groups)
self.assertTrue(all([np.array_equal(perm, sepplane_algos_oct_0.explicit_optimized_permutations[iperm])
for iperm, perm in enumerate(sepplane_algos_oct[0].explicit_optimized_permutations)]))
self.assertEqual(sepplane_algos_oct[0].__str__(),
'Separation plane algorithm with the following reference separation :\n'
'[[4]] | [[0, 2, 1, 3]] | [[5]]')
def test_hints(self):
hints = CoordinationGeometry.NeighborsSetsHints(hints_type='single_cap',
options={'cap_index': 2, 'csm_max': 8})
myhints = hints.hints({'csm': 12.0})
self.assertEqual(myhints, [])
hints2 = CoordinationGeometry.NeighborsSetsHints.from_dict(hints.as_dict())
self.assertEqual(hints.hints_type, hints2.hints_type)
self.assertEqual(hints.options, hints2.options)
def test_coordination_geometry(self):
cg_oct = allcg['O:6']
cg_oct2 = CoordinationGeometry.from_dict(cg_oct.as_dict())
self.assertArrayAlmostEqual(cg_oct.central_site, cg_oct2.central_site)
self.assertArrayAlmostEqual(cg_oct.points, cg_oct2.points)
self.assertEqual(cg_oct.__str__(), 'Coordination geometry type : Octahedron (IUPAC: OC-6 || IUCr: [6o])\n'<|fim▁hole|> ' - [0.0, 0.0, -1.0]\n'
' - [1.0, 0.0, 0.0]\n'
' - [-1.0, 0.0, 0.0]\n'
' - [0.0, 1.0, 0.0]\n'
' - [0.0, -1.0, 0.0]\n'
'------------------------------------------------------------\n')
self.assertEqual(cg_oct.__len__(), 6)
self.assertEqual(cg_oct.ce_symbol, cg_oct.mp_symbol)
self.assertTrue(cg_oct.is_implemented())
self.assertEqual(cg_oct.get_name(), 'Octahedron')
self.assertEqual(cg_oct.IUPAC_symbol, 'OC-6')
self.assertEqual(cg_oct.IUPAC_symbol_str, 'OC-6')
self.assertEqual(cg_oct.IUCr_symbol, '[6o]')
self.assertEqual(cg_oct.IUCr_symbol_str, '[6o]')
cg_oct.permutations_safe_override = True
self.assertEqual(cg_oct.number_of_permutations, 720.0)
self.assertEqual(cg_oct.ref_permutation([0, 3, 2, 4, 5, 1]), (0, 3, 1, 5, 2, 4))
sites = [FakeSite(coords=pp) for pp in cg_oct.points]
faces = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
[[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]],
[[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
[[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, -1.0, 0.0]],
[[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]]]
self.assertArrayAlmostEqual(cg_oct.faces(sites=sites, permutation=[0, 3, 2, 4, 5, 1]), faces)
faces = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, 1.0], [-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, 1.0], [-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, -1.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, -1.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, -1.0], [-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, -1.0], [-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]]]
self.assertArrayAlmostEqual(cg_oct.faces(sites=sites), faces)
edges = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]],
[[0.0, 0.0, 1.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, 1.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, 1.0], [0.0, 0.0, -1.0]],
[[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
[[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[-1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
[[1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
[[0.0, 1.0, 0.0], [0.0, -1.0, 0.0]],
[[0.0, 1.0, 0.0], [0.0, 0.0, -1.0]]]
self.assertArrayAlmostEqual(cg_oct.edges(sites=sites, permutation=[0, 3, 2, 4, 5, 1]), edges)
edges = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]],
[[0.0, 0.0, 1.0], [-1.0, 0.0, 0.0]],
[[0.0, 0.0, 1.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, 1.0], [0.0, -1.0, 0.0]],
[[0.0, 0.0, -1.0], [1.0, 0.0, 0.0]],
[[0.0, 0.0, -1.0], [-1.0, 0.0, 0.0]],
[[0.0, 0.0, -1.0], [0.0, 1.0, 0.0]],
[[0.0, 0.0, -1.0], [0.0, -1.0, 0.0]],
[[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
[[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
[[-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]]]
self.assertArrayAlmostEqual(cg_oct.edges(sites=sites), edges)
self.assertArrayAlmostEqual(cg_oct.solid_angles(),
[2.0943951, 2.0943951, 2.0943951, 2.0943951, 2.0943951, 2.0943951])
pmeshes = cg_oct.get_pmeshes(sites=sites)
self.assertEqual(pmeshes[0]['pmesh_string'],
'14\n 0.00000000 0.00000000 1.00000000\n'
' 0.00000000 0.00000000 -1.00000000\n'
' 1.00000000 0.00000000 0.00000000\n'
' -1.00000000 0.00000000 0.00000000\n'
' 0.00000000 1.00000000 0.00000000\n'
' 0.00000000 -1.00000000 0.00000000\n'
' 0.33333333 0.33333333 0.33333333\n'
' 0.33333333 -0.33333333 0.33333333\n'
' -0.33333333 0.33333333 0.33333333\n'
' -0.33333333 -0.33333333 0.33333333\n'
' 0.33333333 0.33333333 -0.33333333\n'
' 0.33333333 -0.33333333 -0.33333333\n'
' -0.33333333 0.33333333 -0.33333333\n'
' -0.33333333 -0.33333333 -0.33333333\n'
'8\n4\n0\n2\n4\n0\n4\n0\n2\n5\n0\n4\n0\n3\n4\n0\n'
'4\n0\n3\n5\n0\n4\n1\n2\n4\n1\n4\n1\n2\n5\n1\n4\n'
'1\n3\n4\n1\n4\n1\n3\n5\n1\n')
allcg_str = allcg.__str__()
self.assertTrue('\n#=======================================================#\n'
'# List of coordination geometries currently implemented #\n'
'#=======================================================#\n'
'\nCoordination geometry type : Single neighbor (IUCr: [1l])\n\n'
' - coordination number : 1\n'
' - list of points :\n'
' - [0.0, 0.0, 1.0]\n'
'------------------------------------------------------------\n\n' in allcg_str)
self.assertTrue('Coordination geometry type : Trigonal plane (IUPAC: TP-3 || IUCr: [3l])\n\n'
' - coordination number : 3\n'
' - list of points :\n' in allcg_str)
all_symbols = [u'S:1', u'L:2', u'A:2', u'TL:3', u'TY:3', u'TS:3', u'T:4', u'S:4', u'SY:4', u'SS:4',
u'PP:5', u'S:5', u'T:5', u'O:6', u'T:6', u'PP:6', u'PB:7', u'ST:7', u'ET:7', u'FO:7',
u'C:8', u'SA:8', u'SBT:8', u'TBT:8', u'DD:8', u'DDPN:8', u'HB:8', u'BO_1:8', u'BO_2:8',
u'BO_3:8', u'TC:9', u'TT_1:9', u'TT_2:9', u'TT_3:9', u'HD:9', u'TI:9', u'SMA:9', u'SS:9',
u'TO_1:9', u'TO_2:9', u'TO_3:9', u'PP:10', u'PA:10', u'SBSA:10', u'MI:10', u'S:10',
u'H:10', u'BS_1:10', u'BS_2:10', u'TBSA:10', u'PCPA:11', u'H:11', u'SH:11', u'CO:11',
u'DI:11', u'I:12', u'PBP:12', u'TT:12', u'C:12', u'AC:12', u'SC:12', u'S:12', u'HP:12',
u'HA:12', u'SH:13', u'DD:20', u'UNKNOWN', u'UNCLEAR']
self.assertEqual(len(allcg.get_geometries()), 68)
self.assertEqual(len(allcg.get_geometries(coordination=3)), 3)
self.assertEqual(sorted(allcg.get_geometries(returned='mp_symbol')), sorted(all_symbols))
self.assertEqual(sorted(allcg.get_geometries(returned='mp_symbol', coordination=3)),
['TL:3', 'TS:3', 'TY:3'])
self.assertEqual(allcg.get_symbol_name_mapping(coordination=3),
{u'TY:3': u'Triangular non-coplanar', u'TL:3': u'Trigonal plane', u'TS:3': u'T-shaped'})
self.assertEqual(allcg.get_symbol_cn_mapping(coordination=3),
{u'TY:3': 3, u'TL:3': 3, u'TS:3': 3})
self.assertEqual(sorted(allcg.get_implemented_geometries(coordination=4, returned='mp_symbol')),
[u'S:4', u'SS:4', u'SY:4', u'T:4'])
self.assertEqual(sorted(allcg.get_not_implemented_geometries(returned='mp_symbol')),
[u'CO:11', u'DD:20', u'H:10', u'S:10', u'S:12', u'UNCLEAR', u'UNKNOWN'])
self.assertEqual(allcg.get_geometry_from_name('Octahedron').mp_symbol, cg_oct.mp_symbol)
with self.assertRaises(LookupError) as cm:
allcg.get_geometry_from_name('Octahedran')
self.assertEqual(str(cm.exception), 'No coordination geometry found with name "Octahedran"')
self.assertEqual(allcg.get_geometry_from_IUPAC_symbol('OC-6').mp_symbol, cg_oct.mp_symbol)
with self.assertRaises(LookupError) as cm:
allcg.get_geometry_from_IUPAC_symbol('OC-7')
self.assertEqual(str(cm.exception), 'No coordination geometry found with IUPAC symbol "OC-7"')
self.assertEqual(allcg.get_geometry_from_IUCr_symbol('[6o]').mp_symbol, cg_oct.mp_symbol)
with self.assertRaises(LookupError) as cm:
allcg.get_geometry_from_IUCr_symbol('[6oct]')
self.assertEqual(str(cm.exception), 'No coordination geometry found with IUCr symbol "[6oct]"')
with self.assertRaises(LookupError) as cm:
allcg.get_geometry_from_mp_symbol('O:7')
self.assertEqual(str(cm.exception), 'No coordination geometry found with mp_symbol "O:7"')
self.assertEqual(allcg.pretty_print(maxcn=4),
'+-------------------------+\n| Coordination geometries |\n+-------------------------+\n'
'\n==>> CN = 1 <<==\n - S:1 : Single neighbor\n\n'
'==>> CN = 2 <<==\n'
' - L:2 : Linear\n - A:2 : Angular\n\n'
'==>> CN = 3 <<==\n'
' - TL:3 : Trigonal plane\n - TY:3 : Triangular non-coplanar\n - TS:3 : T-shaped\n\n'
'==>> CN = 4 <<==\n - T:4 : Tetrahedron\n - S:4 : Square plane\n'
' - SY:4 : Square non-coplanar\n - SS:4 : See-saw\n\n')
self.assertEqual(allcg.pretty_print(maxcn=2, type='all_geometries_latex'),
'\\subsection*{Coordination 1}\n\n\\begin{itemize}\n'
'\\item S:1 $\\rightarrow$ Single neighbor (IUPAC : None - IUCr : $[$1l$]$)\n'
'\\end{itemize}\n\n\\subsection*{Coordination 2}\n\n\\begin{itemize}\n'
'\\item L:2 $\\rightarrow$ Linear (IUPAC : L-2 - IUCr : $[$2l$]$)\n'
'\\item A:2 $\\rightarrow$ Angular (IUPAC : A-2 - IUCr : $[$2n$]$)\n'
'\\end{itemize}\n\n')
self.assertEqual(allcg.pretty_print(maxcn=2, type='all_geometries_latex_images'),
'\\section*{Coordination 1}\n\n\\subsubsection*{S:1 : Single neighbor}\n\n'
'IUPAC : None\n\nIUCr : [1l]\n\n\\begin{center}\n'
'\\includegraphics[scale=0.15]{images/S_1.png}\n'
'\\end{center}\n\n\\section*{Coordination 2}\n\n'
'\\subsubsection*{L:2 : Linear}\n\nIUPAC : L-2\n\n'
'IUCr : [2l]\n\n\\begin{center}\n\\includegraphics[scale=0.15]{images/L_2.png}\n'
'\\end{center}\n\n\\subsubsection*{A:2 : Angular}\n\nIUPAC : A-2\n\nIUCr : [2n]\n\n'
'\\begin{center}\n\\includegraphics[scale=0.15]{images/A_2.png}\n\\end{center}\n\n')
self.assertDictEqual(allcg.minpoints, {6: 2, 7: 2, 8: 2, 9: 2, 10: 2, 11: 2, 12: 2, 13: 3})
self.assertDictEqual(allcg.maxpoints, {6: 5, 7: 5, 8: 6, 9: 7, 10: 6, 11: 5, 12: 8, 13: 6})
self.assertDictEqual(allcg.maxpoints_inplane, {6: 5, 7: 5, 8: 6, 9: 7, 10: 6, 11: 5, 12: 8, 13: 6})
self.assertDictEqual(allcg.separations_cg, {6: {(0, 3, 3): [u'O:6', u'T:6'],
(1, 4, 1): [u'O:6'],
(0, 5, 1): [u'PP:6'],
(2, 2, 2): [u'PP:6'],
(0, 4, 2): [u'T:6']},
7: {(1, 3, 3): [u'ET:7', u'FO:7'],
(2, 3, 2): [u'PB:7', u'ST:7', u'ET:7'],
(1, 4, 2): [u'ST:7', u'FO:7'],
(1, 5, 1): [u'PB:7']},
8: {(1, 6, 1): [u'HB:8'],
(0, 4, 4):
[u'C:8', u'SA:8', u'SBT:8'],
(1, 4, 3): [u'SA:8', u'SBT:8', u'BO_2:8', u'BO_3:8'],
(2, 4, 2): [u'C:8', u'TBT:8', u'DD:8', u'DDPN:8', u'HB:8',
u'BO_1:8', u'BO_1:8', u'BO_2:8', u'BO_2:8',
u'BO_3:8', u'BO_3:8']},
9: {(3, 3, 3): [u'TT_1:9', u'TT_1:9', u'TT_2:9', u'SMA:9',
u'SMA:9', u'TO_1:9', u'TO_3:9'],
(0, 6, 3): [u'TC:9'],
(2, 4, 3): [u'TC:9', u'TT_2:9', u'TT_3:9', u'TI:9',
u'SS:9', u'TO_1:9', u'TO_1:9', u'TO_2:9',
u'TO_3:9'],
(1, 3, 5): [u'TI:9'],
(1, 4, 4): [u'TT_1:9', u'SMA:9', u'SS:9'],
(2, 3, 4): [u'TC:9'],
(2, 5, 2): [u'TT_3:9', u'SS:9', u'TO_2:9'],
(1, 7, 1): [u'HD:9']},
10: {(0, 5, 5): [u'PP:10', u'PA:10'],
(3, 4, 3): [u'PA:10', u'SBSA:10', u'MI:10',
u'BS_2:10', u'TBSA:10'],
(2, 6, 2): [u'BS_1:10'],
(2, 4, 4): [u'PP:10', u'MI:10', u'BS_2:10'],
(3, 3, 4): [u'SBSA:10'],
(1, 4, 5): [u'BS_2:10'],
(0, 4, 6): [u'BS_1:10', u'TBSA:10']},
11: {(4, 3, 4): [u'PCPA:11'],
(3, 4, 4): [u'DI:11'],
(1, 5, 5): [u'PCPA:11', u'DI:11'],
(3, 5, 3): [u'H:11']},
12: {(3, 3, 6): [u'TT:12'],
(2, 4, 6): [u'TT:12'],
(0, 6, 6): [u'HP:12', u'HA:12'],
(3, 6, 3): [u'C:12', u'AC:12'],
(4, 4, 4): [u'I:12', u'PBP:12', u'C:12', u'HP:12'],
(0, 8, 4): [u'SC:12']},
13: {(0, 6, 7): [u'SH:13']}})
if __name__ == "__main__":
unittest.main()<|fim▁end|> | '\n'
' - coordination number : 6\n'
' - list of points :\n'
' - [0.0, 0.0, 1.0]\n' |
<|file_name|>test-conv-samevalue.js<|end_file_name|><|fim▁begin|>/*
* SameValue() (E5 Section 9.12).
*
* SameValue() is difficult to test indirectly. It appears in E5 Section
* 8.12.9, [[DefineOwnProperty]] several times.
*
* One relatively simple approach is to create a non-configurable, non-writable
* property, and attempt to use Object.defineProperty() to set a new value for
* the property. If SameValue(oldValue,newValue), no exception is thrown.
* Otherwise, reject (TypeError); see E5 Section 8.12.9, step 10.a.ii.1.
*/
function sameValue(x,y) {
var obj = {};
try {
Object.defineProperty(obj, 'test', {
writable: false,
enumerable: false,
configurable: false,
value: x
});
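        // Redefining the value succeeds only if SameValue(x, y) holds;
        // otherwise E5 Section 8.12.9, step 10.a.ii.1 rejects with TypeError.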
Object.defineProperty(obj, 'test', {
value: y
});
} catch (e) {
if (e.name === 'TypeError') {
return false;
} else {
throw e;
}
}
return true;
}
function test(x,y) {
print(sameValue(x,y));
}
/*===
test: different types, first undefined
false
false
false
false
false
false
===*/
/* Different types, first is undefined */
print('test: different types, first undefined')
test(undefined, null);
test(undefined, true);
test(undefined, false);
test(undefined, 123.0);
test(undefined, 'foo');
test(undefined, {});
/*===
test: different types, first null
false
false
false
false
false
false
===*/
/* Different types, first is null */
print('test: different types, first null')
test(null, undefined);
test(null, true);
test(null, false);
test(null, 123.0);
test(null, 'foo');
test(null, {});
/*===
test: different types, first boolean
false
false
false
false
false
false
false
false
false
false
===*/
/* Different types, first is boolean */
print('test: different types, first boolean')
test(true, undefined);
test(true, null);
test(true, 123.0);
test(true, 'foo');
test(true, {});
test(false, undefined);
test(false, null);
test(false, 123.0);
test(false, 'foo');
test(false, {});
/*===
test: different types, first number
false
false
false
false
false
false
===*/
/* Different types, first is number */
print('test: different types, first number')
test(123.0, undefined);
test(123.0, null);
test(123.0, true);
test(123.0, false);
test(123.0, 'foo');
test(123.0, {});
/*===
test: different types, first string
false
false
false
false
false
false
===*/
/* Different types, first is string */
print('test: different types, first string')
test('foo', undefined);
test('foo', null);
test('foo', true);
test('foo', false);
test('foo', 123.0);
test('foo', {});
/*===
test: different types, first object
false
false
false
false
false
false
===*/
/* Different types, first is object */<|fim▁hole|>test({}, undefined);
test({}, null);
test({}, true);
test({}, false);
test({}, 123.0);
test({}, 'foo');
/*===
test: same types, undefined
true
===*/
/* Same types: undefined */
print('test: same types, undefined')
test(undefined, undefined);
/*===
test: same types, null
true
===*/
/* Same types: null */
print('test: same types, null')
test(null, null);
/*===
test: same types, boolean
true
false
false
true
===*/
/* Same types: boolean */
print('test: same types, boolean')
test(true, true);
test(true, false);
test(false, true);
test(false, false);
/*===
test: same types, number
true
true
false
false
true
true
false
false
true
true
true
===*/
/* Same types: number */
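// Note: unlike ===, SameValue treats NaN as equal to NaN and
// distinguishes +0 from -0.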
print('test: same types, number')
test(NaN, NaN);
test(-0, -0);
test(-0, +0);
test(+0, -0);
test(+0, +0);
test(Number.NEGATIVE_INFINITY, Number.NEGATIVE_INFINITY);
test(Number.NEGATIVE_INFINITY, Number.POSITIVE_INFINITY);
test(Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY);
test(Number.POSITIVE_INFINITY, Number.POSITIVE_INFINITY);
test(-123.0, -123.0);
test(123.0, 123.0);
/*===
test: same types, string
true
false
false
true
===*/
/* Same types: string */
print('test: same types, string')
test('', '');
test('foo', '');
test('', 'foo');
test('foo', 'foo');
/*===
test: same types, object
true
false
false
true
===*/
/* Same types: object */
var obj1 = {};
var obj2 = {};
print('test: same types, object')
test(obj1, obj1);
test(obj1, obj2);
test(obj2, obj1);
test(obj2, obj2);<|fim▁end|> |
print('test: different types, first object') |
<|file_name|>TestIpPrefixOperators.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.google.common.net.InetAddresses;
import io.airlift.slice.Slices;
import org.testng.annotations.Test;
import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.spi.function.OperatorType.INDETERMINATE;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.type.IpAddressType.IPADDRESS;
import static com.facebook.presto.type.IpPrefixType.IPPREFIX;
import static java.lang.System.arraycopy;
<|fim▁hole|> @Test
public void testVarcharToIpPrefixCast()
{
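        // The cast canonicalizes its input: host bits beyond the prefix length
        // are zeroed, and IPv4-mapped IPv6 input is rendered in dotted-quad form.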
assertFunction("CAST('::ffff:1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
assertFunction("CAST('192.168.0.0/24' AS IPPREFIX)", IPPREFIX, "192.168.0.0/24");
assertFunction("CAST('255.2.3.4/0' AS IPPREFIX)", IPPREFIX, "0.0.0.0/0");
assertFunction("CAST('255.2.3.4/1' AS IPPREFIX)", IPPREFIX, "128.0.0.0/1");
assertFunction("CAST('255.2.3.4/2' AS IPPREFIX)", IPPREFIX, "192.0.0.0/2");
assertFunction("CAST('255.2.3.4/4' AS IPPREFIX)", IPPREFIX, "240.0.0.0/4");
assertFunction("CAST('1.2.3.4/8' AS IPPREFIX)", IPPREFIX, "1.0.0.0/8");
assertFunction("CAST('1.2.3.4/16' AS IPPREFIX)", IPPREFIX, "1.2.0.0/16");
assertFunction("CAST('1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
assertFunction("CAST('1.2.3.255/25' AS IPPREFIX)", IPPREFIX, "1.2.3.128/25");
assertFunction("CAST('1.2.3.255/26' AS IPPREFIX)", IPPREFIX, "1.2.3.192/26");
assertFunction("CAST('1.2.3.255/28' AS IPPREFIX)", IPPREFIX, "1.2.3.240/28");
assertFunction("CAST('1.2.3.255/30' AS IPPREFIX)", IPPREFIX, "1.2.3.252/30");
assertFunction("CAST('1.2.3.255/32' AS IPPREFIX)", IPPREFIX, "1.2.3.255/32");
assertFunction("CAST('2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
assertFunction("CAST('2001:db8::ff00:42:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
assertFunction("CAST('2001:db8:0:0:1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
assertFunction("CAST('2001:db8:0:0:1::1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
assertFunction("CAST('2001:db8::1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
assertFunction("CAST('2001:DB8::FF00:ABCD:12EF/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:abcd:12ef/128");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/0' AS IPPREFIX)", IPPREFIX, "::/0");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/1' AS IPPREFIX)", IPPREFIX, "8000::/1");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/2' AS IPPREFIX)", IPPREFIX, "c000::/2");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/4' AS IPPREFIX)", IPPREFIX, "f000::/4");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/8' AS IPPREFIX)", IPPREFIX, "ff00::/8");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/16' AS IPPREFIX)", IPPREFIX, "ffff::/16");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/32' AS IPPREFIX)", IPPREFIX, "ffff:ffff::/32");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/48' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff::/48");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff::/64");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/80' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff::/80");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/96' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff::/96");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/112' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0/112");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/120' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00/120");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/124' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0/124");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/126' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc/126");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128");
assertFunction("IPPREFIX '10.0.0.0/32'", IPPREFIX, "10.0.0.0/32");
assertFunction("IPPREFIX '64:ff9b::10.0.0.0/128'", IPPREFIX, "64:ff9b::a00:0/128");
assertInvalidCast("CAST('facebook.com/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: facebook.com/32");
assertInvalidCast("CAST('localhost/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: localhost/32");
assertInvalidCast("CAST('2001:db8::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:db8::1::1/128");
assertInvalidCast("CAST('2001:zxy::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:zxy::1::1/128");
assertInvalidCast("CAST('789.1.1.1/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 789.1.1.1/32");
assertInvalidCast("CAST('192.1.1.1' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1");
assertInvalidCast("CAST('192.1.1.1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1/128");
}
@Test
public void testIpPrefixToVarcharCast()
{
assertFunction("CAST(IPPREFIX '::ffff:1.2.3.4/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
assertFunction("CAST(IPPREFIX '::ffff:102:304/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
assertFunction("CAST(IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
assertFunction("CAST(IPPREFIX '2001:db8:0:0:1:0:0:1/128' AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
assertFunction("CAST(CAST('1.2.3.4/32' AS IPPREFIX) AS VARCHAR)", VARCHAR, "1.2.3.4/32");
assertFunction("CAST(CAST('2001:db8:0:0:1::1/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
assertFunction("CAST(CAST('64:ff9b::10.0.0.0/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "64:ff9b::a00:0/128");
}
@Test
public void testIpPrefixToIpAddressCast()
{
assertFunction("CAST(IPPREFIX '1.2.3.4/32' AS IPADDRESS)", IPADDRESS, "1.2.3.4");
assertFunction("CAST(IPPREFIX '1.2.3.4/24' AS IPADDRESS)", IPADDRESS, "1.2.3.0");
assertFunction("CAST(IPPREFIX '::1/128' AS IPADDRESS)", IPADDRESS, "::1");
assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS IPADDRESS)", IPADDRESS, "2001:db8::ff00:42:8329");
assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/64' AS IPADDRESS)", IPADDRESS, "2001:db8::");
}
@Test
public void testIpAddressToIpPrefixCast()
{
assertFunction("CAST(IPADDRESS '1.2.3.4' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
assertFunction("CAST(IPADDRESS '::ffff:102:304' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
assertFunction("CAST(IPADDRESS '::1' AS IPPREFIX)", IPPREFIX, "::1/128");
assertFunction("CAST(IPADDRESS '2001:db8::ff00:42:8329' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
}
@Test
public void testEquals()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' = IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '192.168.0.0/32' = IPPREFIX '::ffff:192.168.0.0/32'", BOOLEAN, true);
assertFunction("IPPREFIX '10.0.0.0/32' = IPPREFIX '::ffff:a00:0/32'", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/24' AS IPPREFIX) = IPPREFIX '1.2.3.5/24'", BOOLEAN, true);
assertFunction("IPPREFIX '2001:db8::ff00:42:8329/128' = IPPREFIX '2001:db8::ff00:42:8300/128'", BOOLEAN, false);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = IPPREFIX '1.2.3.5/32'", BOOLEAN, false);
assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) = IPPREFIX '1.2.0.0/25'", BOOLEAN, false);
}
@Test
public void testDistinctFrom()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, false);
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, true);
assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
}
@Test
public void testNotEquals()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' != IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) != IPPREFIX '1.2.3.4/32'", BOOLEAN, false);
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' <> IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
}
@Test
public void testOrderOperators()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' > IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
assertFunction("IPPREFIX '1.2.3.4/32' > IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128'", BOOLEAN, false);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) < CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("CAST('1.2.3.5/32' AS IPPREFIX) < CAST('1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) < CAST('1.2.0.0/25' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '::1/128' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '1.2.3.5/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '1.2.3.6/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, false);
assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::/128'", BOOLEAN, true);
assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::1/128'", BOOLEAN, true);
assertFunction("IPPREFIX '::/128' >= IPPREFIX '::1/128'", BOOLEAN, false);
assertFunction("IPPREFIX '::1/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, true);
assertFunction("IPPREFIX '::2222/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, false);
}
@Test
public void testIndeterminate()
{
assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true);
assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false);
}
@Test
public void testHash()
{
assertOperator(HASH_CODE, "CAST(null AS IPPREFIX)", BIGINT, null);
assertOperator(HASH_CODE, "IPPREFIX '::2222/128'", BIGINT, hashFromType("::2222/128"));
}
private static long hashFromType(String address)
{
BlockBuilder blockBuilder = IPPREFIX.createBlockBuilder(null, 1);
String[] parts = address.split("/");
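        // An IPPREFIX value is a fixed-size slice: a 16-byte (IPv4-mapped) IPv6
        // address followed by a single trailing prefix-length byte.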
byte[] bytes = new byte[IPPREFIX.getFixedSize()];
byte[] addressBytes = InetAddresses.forString(parts[0]).getAddress();
arraycopy(addressBytes, 0, bytes, 0, 16);
bytes[IPPREFIX.getFixedSize() - 1] = (byte) Integer.parseInt(parts[1]);
IPPREFIX.writeSlice(blockBuilder, Slices.wrappedBuffer(bytes));
Block block = blockBuilder.build();
return IPPREFIX.hash(block, 0);
}
}<|fim▁end|> | public class TestIpPrefixOperators
extends AbstractTestFunctions
{ |
<|file_name|>window.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::WindowBinding;
use dom::bindings::js::JS;
use dom::bindings::trace::Untraceable;
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::Element;
use dom::eventtarget::{EventTarget, WindowTypeId};
use dom::console::Console;
use dom::location::Location;
use dom::navigator::Navigator;
use layout_interface::{ReflowForDisplay, DocumentDamageLevel};
use script_task::{ExitWindowMsg, FireTimerMsg, Page, ScriptChan};
use servo_msg::compositor_msg::ScriptListener;
use servo_net::image_cache_task::ImageCacheTask;
use servo_util::str::DOMString;
use servo_util::task::{spawn_named};
use js::jsapi::{JSObject, JSContext, JS_DefineProperty, JS_PropertyStub, JS_StrictPropertyStub};
use js::jsval::{NullValue, ObjectValue, JSVal};
use js::JSPROP_ENUMERATE;
use collections::hashmap::HashMap;
use std::cmp;
use std::comm::{channel, Sender, Receiver};
use std::comm::Select;
use std::hash::{Hash, sip};
use std::io::timer::Timer;
use std::rc::Rc;
use serialize::{Encoder, Encodable};
use url::Url;
pub enum TimerControlMsg {
TimerMessageFire(~TimerData),
TimerMessageClose,
TimerMessageTriggerExit //XXXjdm this is just a quick hack to talk to the script task
}
pub struct TimerHandle {
handle: i32,
cancel_chan: Option<Sender<()>>,
}
impl<S: Encoder> Encodable<S> for TimerHandle {
fn encode(&self, _: &mut S) {
}
}
impl Hash for TimerHandle {
fn hash(&self, state: &mut sip::SipState) {
self.handle.hash(state);
}
}
impl Eq for TimerHandle {
fn eq(&self, other: &TimerHandle) -> bool {
self.handle == other.handle
}
}
impl TotalEq for TimerHandle {
fn equals(&self, other: &TimerHandle) -> bool {
self.eq(other)
}
}
impl TimerHandle {
fn cancel(&self) {
self.cancel_chan.as_ref().map(|chan| chan.send(()));
}
}
#[deriving(Encodable)]
pub struct Window {
eventtarget: EventTarget,
script_chan: ScriptChan,
console: Option<JS<Console>>,
location: Option<JS<Location>>,
navigator: Option<JS<Navigator>>,
image_cache_task: ImageCacheTask,
active_timers: ~HashMap<i32, TimerHandle>,
next_timer_handle: i32,
compositor: Untraceable<~ScriptListener>,
timer_chan: Untraceable<Sender<TimerControlMsg>>,
page: Rc<Page>,
}
impl Window {
pub fn get_cx(&self) -> *JSObject {
let js_info = self.page().js_info();
js_info.get_ref().js_compartment.deref().cx.deref().ptr
}
pub fn page<'a>(&'a self) -> &'a Page {
&*self.page
}
pub fn get_url(&self) -> Url {
self.page().get_url()
}
}
#[unsafe_destructor]
impl Drop for Window {
fn drop(&mut self) {
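        // Shut down the timer control task, then cancel any timers still pending.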
self.timer_chan.send(TimerMessageClose);
for timer_handle in self.active_timers.values() {
timer_handle.cancel();
}
}
}
// Holder for the various JS values associated with setTimeout
// (i.e. the function value to invoke and all arguments to pass
// to the function when calling it)
pub struct TimerData {
handle: i32,
is_interval: bool,
funval: JSVal,
args: ~[JSVal],
}
impl Window {
pub fn Alert(&self, s: DOMString) {
// Right now, just print to the console
println!("ALERT: {:s}", s);
}
pub fn Close(&self) {
self.timer_chan.deref().send(TimerMessageTriggerExit);
}
pub fn Document(&self) -> JS<Document> {
let frame = self.page().frame();
frame.get_ref().document.clone()
}
pub fn Name(&self) -> DOMString {
~""
}
pub fn SetName(&self, _name: DOMString) {
}
pub fn Status(&self) -> DOMString {
~""
}
pub fn SetStatus(&self, _status: DOMString) {
}
pub fn Closed(&self) -> bool {
false
}
pub fn Stop(&self) {
}
pub fn Focus(&self) {
}
pub fn Blur(&self) {
}
pub fn GetFrameElement(&self) -> Option<JS<Element>> {
None
}
pub fn Location(&mut self, abstract_self: &JS<Window>) -> JS<Location> {
if self.location.is_none() {
self.location = Some(Location::new(abstract_self, self.page.clone()));
}
self.location.get_ref().clone()
}
pub fn Console(&mut self, abstract_self: &JS<Window>) -> JS<Console> {
if self.console.is_none() {
self.console = Some(Console::new(abstract_self));
}
self.console.get_ref().clone()
}
pub fn Navigator(&mut self, abstract_self: &JS<Window>) -> JS<Navigator> {
if self.navigator.is_none() {
self.navigator = Some(Navigator::new(abstract_self));
}
self.navigator.get_ref().clone()
}
pub fn Confirm(&self, _message: DOMString) -> bool {
false
}
pub fn Prompt(&self, _message: DOMString, _default: DOMString) -> Option<DOMString> {
None
}
pub fn Print(&self) {
}
pub fn ShowModalDialog(&self, _cx: *JSContext, _url: DOMString, _argument: Option<JSVal>) -> JSVal {
NullValue()
}
}
impl Reflectable for Window {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.eventtarget.reflector()
}
fn mut_reflector<'a>(&'a mut self) -> &'a mut Reflector {
self.eventtarget.mut_reflector()<|fim▁hole|>impl Window {
fn set_timeout_or_interval(&mut self, callback: JSVal, timeout: i32, is_interval: bool) -> i32 {
let timeout = cmp::max(0, timeout) as u64;
let handle = self.next_timer_handle;
self.next_timer_handle += 1;
// Post a delayed message to the per-window timer task; it will dispatch it
// to the relevant script handler that will deal with it.
let tm = Timer::new().unwrap();
let (cancel_chan, cancel_port) = channel();
let chan = self.timer_chan.clone();
let spawn_name = if is_interval {
"Window:SetInterval"
} else {
"Window:SetTimeout"
};
spawn_named(spawn_name, proc() {
let mut tm = tm;
let timeout_port = if is_interval {
tm.periodic(timeout)
} else {
tm.oneshot(timeout)
};
let cancel_port = cancel_port;
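            // Block until either the timer fires or a cancellation message
            // arrives from ClearTimeout/ClearInterval.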
let select = Select::new();
let mut timeout_handle = select.handle(&timeout_port);
unsafe { timeout_handle.add() };
let mut cancel_handle = select.handle(&cancel_port);
unsafe { cancel_handle.add() };
loop {
let id = select.wait();
if id == timeout_handle.id() {
timeout_port.recv();
chan.send(TimerMessageFire(~TimerData {
handle: handle,
is_interval: is_interval,
funval: callback,
args: ~[],
}));
if !is_interval {
break;
}
} else if id == cancel_handle.id() {
break;
}
}
});
self.active_timers.insert(handle, TimerHandle { handle: handle, cancel_chan: Some(cancel_chan) });
handle
}
pub fn SetTimeout(&mut self, _cx: *JSContext, callback: JSVal, timeout: i32) -> i32 {
self.set_timeout_or_interval(callback, timeout, false)
}
pub fn ClearTimeout(&mut self, handle: i32) {
let timer_handle = self.active_timers.pop(&handle);
match timer_handle {
Some(handle) => handle.cancel(),
None => { }
}
}
pub fn SetInterval(&mut self, _cx: *JSContext, callback: JSVal, timeout: i32) -> i32 {
self.set_timeout_or_interval(callback, timeout, true)
}
pub fn ClearInterval(&mut self, handle: i32) {
self.ClearTimeout(handle);
}
pub fn damage_and_reflow(&self, damage: DocumentDamageLevel) {
        // FIXME This should probably be ReflowForQuery, not Display. All queries
        // currently rely on the display list, which means we can't destroy it by
// doing a query reflow.
self.page().damage(damage);
self.page().reflow(ReflowForDisplay, self.script_chan.clone(), *self.compositor);
}
pub fn wait_until_safe_to_modify_dom(&self) {
// FIXME: This disables concurrent layout while we are modifying the DOM, since
// our current architecture is entirely unsafe in the presence of races.
self.page().join_layout();
}
pub fn new(cx: *JSContext,
page: Rc<Page>,
script_chan: ScriptChan,
compositor: ~ScriptListener,
image_cache_task: ImageCacheTask)
-> JS<Window> {
let script_chan_clone = script_chan.clone();
let (timer_chan, timer_port): (Sender<TimerControlMsg>, Receiver<TimerControlMsg>) = channel();
let id = page.id.clone();
spawn_named("timer controller", proc() {
let ScriptChan(script_chan) = script_chan;
loop {
match timer_port.recv() {
TimerMessageClose => break,
TimerMessageFire(td) => script_chan.send(FireTimerMsg(id, td)),
TimerMessageTriggerExit => script_chan.send(ExitWindowMsg(id)),
}
}
});
let win = ~Window {
eventtarget: EventTarget::new_inherited(WindowTypeId),
script_chan: script_chan_clone,
console: None,
compositor: Untraceable::new(compositor),
timer_chan: Untraceable::new(timer_chan),
page: page.clone(),
location: None,
navigator: None,
image_cache_task: image_cache_task,
active_timers: ~HashMap::new(),
next_timer_handle: 0
};
let global = WindowBinding::Wrap(cx, win);
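        // Alias the global object as both "window" and "self" properties on itself.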
let fn_names = ["window", "self"];
for str in fn_names.iter() {
(*str).to_c_str().with_ref(|name| {
let object = global.reflector().get_jsobject();
assert!(object.is_not_null());
unsafe {
JS_DefineProperty(cx, object, name,
ObjectValue(&*object),
Some(JS_PropertyStub),
Some(JS_StrictPropertyStub),
JSPROP_ENUMERATE);
}
})
}
global
}
}<|fim▁end|> | }
}
|
<|file_name|>api.pb.go<|end_file_name|><|fim▁begin|>/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: api.proto
/*
Package deviceplugin is a generated protocol buffer package.
It is generated from these files:
api.proto
It has these top-level messages:
RegisterRequest
Empty
ListAndWatchResponse
Device
AllocateRequest
AllocateResponse
Mount
DeviceSpec
*/
package deviceplugin
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gogo/protobuf/gogoproto"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
import strings "strings"
import reflect "reflect"
import github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type RegisterRequest struct {
// Version of the API the Device Plugin was built against
Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"`
// Name of the unix socket the device plugin is listening on
// PATH = path.Join(DevicePluginPath, endpoint)
Endpoint string `protobuf:"bytes,2,opt,name=endpoint,proto3" json:"endpoint,omitempty"`
// Schedulable resource name. As of now it's expected to be a DNS Label
ResourceName string `protobuf:"bytes,3,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
}
func (m *RegisterRequest) Reset() { *m = RegisterRequest{} }
func (*RegisterRequest) ProtoMessage() {}
func (*RegisterRequest) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{0} }
func (m *RegisterRequest) GetVersion() string {
if m != nil {
return m.Version
}
return ""
}
func (m *RegisterRequest) GetEndpoint() string {
if m != nil {
return m.Endpoint
}
return ""
}
func (m *RegisterRequest) GetResourceName() string {
if m != nil {
return m.ResourceName
}
return ""
}
type Empty struct {
}
func (m *Empty) Reset() { *m = Empty{} }
func (*Empty) ProtoMessage() {}
func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{1} }
// ListAndWatch returns a stream of List of Devices
// Whenever a Device state changes or a Device disappears, ListAndWatch
// returns the new list
type ListAndWatchResponse struct {
Devices []*Device `protobuf:"bytes,1,rep,name=devices" json:"devices,omitempty"`
}
func (m *ListAndWatchResponse) Reset() { *m = ListAndWatchResponse{} }
func (*ListAndWatchResponse) ProtoMessage() {}
func (*ListAndWatchResponse) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{2} }
func (m *ListAndWatchResponse) GetDevices() []*Device {
if m != nil {
return m.Devices
}
return nil
}
// E.g:
// struct Device {
// ID: "GPU-fef8089b-4820-abfc-e83e-94318197576e",
// State: "Healthy",
// }
type Device struct {
// A unique ID assigned by the device plugin used
	// to identify devices during communication
// Max length of this field is 63 characters
ID string `protobuf:"bytes,1,opt,name=ID,json=iD,proto3" json:"ID,omitempty"`
// Health of the device, can be healthy or unhealthy, see constants.go
Health string `protobuf:"bytes,2,opt,name=health,proto3" json:"health,omitempty"`
}
func (m *Device) Reset() { *m = Device{} }
func (*Device) ProtoMessage() {}
func (*Device) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{3} }
func (m *Device) GetID() string {
if m != nil {
return m.ID
}
return ""
}
func (m *Device) GetHealth() string {
if m != nil {
return m.Health
}
return ""
}
// - Allocate is expected to be called during pod creation since allocation
// failures for any container would result in pod startup failure.
// - Allocate allows kubelet to expose additional artifacts in a pod's
// environment as directed by the plugin.
// - Allocate allows Device Plugin to run device specific operations on
// the Devices requested
type AllocateRequest struct {
DevicesIDs []string `protobuf:"bytes,1,rep,name=devicesIDs" json:"devicesIDs,omitempty"`
}
func (m *AllocateRequest) Reset() { *m = AllocateRequest{} }
func (*AllocateRequest) ProtoMessage() {}
func (*AllocateRequest) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{4} }
func (m *AllocateRequest) GetDevicesIDs() []string {
if m != nil {
return m.DevicesIDs
}
return nil
}
// AllocateResponse includes the artifacts that need to be injected into
// a container for accessing 'deviceIDs' that were mentioned as part of
// 'AllocateRequest'.
// Failure Handling:
// If Kubelet sends an allocation request for dev1 and dev2, and
// allocation on dev1 succeeds but allocation on dev2 fails, the
// Device plugin should send a ListAndWatch update and fail the
// Allocation request.
type AllocateResponse struct {
	// List of environment variables to be set in the container to access one or more devices.
Envs map[string]string `protobuf:"bytes,1,rep,name=envs" json:"envs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
// Mounts for the container.
Mounts []*Mount `protobuf:"bytes,2,rep,name=mounts" json:"mounts,omitempty"`
// Devices for the container.
Devices []*DeviceSpec `protobuf:"bytes,3,rep,name=devices" json:"devices,omitempty"`
// Container annotations to pass to the container runtime
Annotations map[string]string `protobuf:"bytes,4,rep,name=annotations" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}
func (m *AllocateResponse) Reset() { *m = AllocateResponse{} }
func (*AllocateResponse) ProtoMessage() {}
func (*AllocateResponse) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{5} }
func (m *AllocateResponse) GetEnvs() map[string]string {
if m != nil {
return m.Envs
}
return nil
}
func (m *AllocateResponse) GetMounts() []*Mount {
if m != nil {
return m.Mounts
}
return nil
}
func (m *AllocateResponse) GetDevices() []*DeviceSpec {
if m != nil {
return m.Devices
}
return nil
}
func (m *AllocateResponse) GetAnnotations() map[string]string {
if m != nil {
return m.Annotations
}
return nil
}
// Mount specifies a host volume to mount into a container.
// where device library or tools are installed on host and container
type Mount struct {
// Path of the mount within the container.
ContainerPath string `protobuf:"bytes,1,opt,name=container_path,json=containerPath,proto3" json:"container_path,omitempty"`
// Path of the mount on the host.
HostPath string `protobuf:"bytes,2,opt,name=host_path,json=hostPath,proto3" json:"host_path,omitempty"`
// If set, the mount is read-only.
ReadOnly bool `protobuf:"varint,3,opt,name=read_only,json=readOnly,proto3" json:"read_only,omitempty"`
}
func (m *Mount) Reset() { *m = Mount{} }
func (*Mount) ProtoMessage() {}
func (*Mount) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{6} }
func (m *Mount) GetContainerPath() string {
if m != nil {
return m.ContainerPath
}
return ""
}
func (m *Mount) GetHostPath() string {
if m != nil {
return m.HostPath
}
return ""
}
func (m *Mount) GetReadOnly() bool {
if m != nil {
return m.ReadOnly
}
return false
}
// DeviceSpec specifies a host device to mount into a container.
type DeviceSpec struct {
// Path of the device within the container.
ContainerPath string `protobuf:"bytes,1,opt,name=container_path,json=containerPath,proto3" json:"container_path,omitempty"`
// Path of the device on the host.
HostPath string `protobuf:"bytes,2,opt,name=host_path,json=hostPath,proto3" json:"host_path,omitempty"`
// Cgroups permissions of the device, candidates are one or more of
// * r - allows container to read from the specified device.
// * w - allows container to write to the specified device.
// * m - allows container to create device files that do not yet exist.
Permissions string `protobuf:"bytes,3,opt,name=permissions,proto3" json:"permissions,omitempty"`
}
func (m *DeviceSpec) Reset() { *m = DeviceSpec{} }
func (*DeviceSpec) ProtoMessage() {}
func (*DeviceSpec) Descriptor() ([]byte, []int) { return fileDescriptorApi, []int{7} }
func (m *DeviceSpec) GetContainerPath() string {
if m != nil {
return m.ContainerPath
}
return ""
}
func (m *DeviceSpec) GetHostPath() string {
if m != nil {
return m.HostPath
}
return ""
}
func (m *DeviceSpec) GetPermissions() string {
if m != nil {
return m.Permissions
}
return ""
}
func init() {
proto.RegisterType((*RegisterRequest)(nil), "deviceplugin.RegisterRequest")
proto.RegisterType((*Empty)(nil), "deviceplugin.Empty")
proto.RegisterType((*ListAndWatchResponse)(nil), "deviceplugin.ListAndWatchResponse")
proto.RegisterType((*Device)(nil), "deviceplugin.Device")
proto.RegisterType((*AllocateRequest)(nil), "deviceplugin.AllocateRequest")
proto.RegisterType((*AllocateResponse)(nil), "deviceplugin.AllocateResponse")
proto.RegisterType((*Mount)(nil), "deviceplugin.Mount")
proto.RegisterType((*DeviceSpec)(nil), "deviceplugin.DeviceSpec")
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// Client API for Registration service
type RegistrationClient interface {
Register(ctx context.Context, in *RegisterRequest, opts ...grpc.CallOption) (*Empty, error)
}
type registrationClient struct {
cc *grpc.ClientConn
}
func NewRegistrationClient(cc *grpc.ClientConn) RegistrationClient {
return ®istrationClient{cc}
}
func (c *registrationClient) Register(ctx context.Context, in *RegisterRequest, opts ...grpc.CallOption) (*Empty, error) {
out := new(Empty)
err := grpc.Invoke(ctx, "/deviceplugin.Registration/Register", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// Server API for Registration service
type RegistrationServer interface {
Register(context.Context, *RegisterRequest) (*Empty, error)
}
func RegisterRegistrationServer(s *grpc.Server, srv RegistrationServer) {
s.RegisterService(&_Registration_serviceDesc, srv)
}
func _Registration_Register_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(RegisterRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(RegistrationServer).Register(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/deviceplugin.Registration/Register",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(RegistrationServer).Register(ctx, req.(*RegisterRequest))
}
return interceptor(ctx, in, info, handler)
}
var _Registration_serviceDesc = grpc.ServiceDesc{
ServiceName: "deviceplugin.Registration",
HandlerType: (*RegistrationServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Register",
Handler: _Registration_Register_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "api.proto",
}
// Client API for DevicePlugin service
type DevicePluginClient interface {
// ListAndWatch returns a stream of List of Devices
// Whenever a Device state changes or a Device disappears, ListAndWatch
// returns the new list
ListAndWatch(ctx context.Context, in *Empty, opts ...grpc.CallOption) (DevicePlugin_ListAndWatchClient, error)
// Allocate is called during container creation so that the Device
	// Plugin can run device-specific operations and instruct Kubelet
	// on the steps to make the Device available in the container
Allocate(ctx context.Context, in *AllocateRequest, opts ...grpc.CallOption) (*AllocateResponse, error)
}
type devicePluginClient struct {
cc *grpc.ClientConn
}
func NewDevicePluginClient(cc *grpc.ClientConn) DevicePluginClient {
return &devicePluginClient{cc}
}
func (c *devicePluginClient) ListAndWatch(ctx context.Context, in *Empty, opts ...grpc.CallOption) (DevicePlugin_ListAndWatchClient, error) {
stream, err := grpc.NewClientStream(ctx, &_DevicePlugin_serviceDesc.Streams[0], c.cc, "/deviceplugin.DevicePlugin/ListAndWatch", opts...)
if err != nil {
return nil, err
}
x := &devicePluginListAndWatchClient{stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
return x, nil
}
type DevicePlugin_ListAndWatchClient interface {
Recv() (*ListAndWatchResponse, error)
grpc.ClientStream
}
type devicePluginListAndWatchClient struct {
grpc.ClientStream
}
func (x *devicePluginListAndWatchClient) Recv() (*ListAndWatchResponse, error) {
m := new(ListAndWatchResponse)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func (c *devicePluginClient) Allocate(ctx context.Context, in *AllocateRequest, opts ...grpc.CallOption) (*AllocateResponse, error) {
out := new(AllocateResponse)
err := grpc.Invoke(ctx, "/deviceplugin.DevicePlugin/Allocate", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// Server API for DevicePlugin service
type DevicePluginServer interface {
// ListAndWatch returns a stream of List of Devices
// Whenever a Device state changes or a Device disappears, ListAndWatch
// returns the new list
ListAndWatch(*Empty, DevicePlugin_ListAndWatchServer) error
// Allocate is called during container creation so that the Device
	// Plugin can run device-specific operations and instruct Kubelet
	// on the steps to make the Device available in the container
Allocate(context.Context, *AllocateRequest) (*AllocateResponse, error)
}
func RegisterDevicePluginServer(s *grpc.Server, srv DevicePluginServer) {
s.RegisterService(&_DevicePlugin_serviceDesc, srv)
}
func _DevicePlugin_ListAndWatch_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(Empty)
if err := stream.RecvMsg(m); err != nil {
return err
}
return srv.(DevicePluginServer).ListAndWatch(m, &devicePluginListAndWatchServer{stream})
}
type DevicePlugin_ListAndWatchServer interface {
Send(*ListAndWatchResponse) error
grpc.ServerStream
}
type devicePluginListAndWatchServer struct {
grpc.ServerStream
}
func (x *devicePluginListAndWatchServer) Send(m *ListAndWatchResponse) error {
return x.ServerStream.SendMsg(m)
}
func _DevicePlugin_Allocate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AllocateRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DevicePluginServer).Allocate(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/deviceplugin.DevicePlugin/Allocate",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DevicePluginServer).Allocate(ctx, req.(*AllocateRequest))
}
return interceptor(ctx, in, info, handler)
}
var _DevicePlugin_serviceDesc = grpc.ServiceDesc{
ServiceName: "deviceplugin.DevicePlugin",
HandlerType: (*DevicePluginServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Allocate",
Handler: _DevicePlugin_Allocate_Handler,
},
},
Streams: []grpc.StreamDesc{
{
StreamName: "ListAndWatch",
Handler: _DevicePlugin_ListAndWatch_Handler,
ServerStreams: true,
},
},
Metadata: "api.proto",
}
func (m *RegisterRequest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *RegisterRequest) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Version) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.Version)))
i += copy(dAtA[i:], m.Version)
}
if len(m.Endpoint) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.Endpoint)))
i += copy(dAtA[i:], m.Endpoint)
}
if len(m.ResourceName) > 0 {
dAtA[i] = 0x1a
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.ResourceName)))
i += copy(dAtA[i:], m.ResourceName)
}
return i, nil
}
func (m *Empty) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Empty) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
return i, nil
}
func (m *ListAndWatchResponse) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *ListAndWatchResponse) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Devices) > 0 {
for _, msg := range m.Devices {
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
return i, nil
}
func (m *Device) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Device) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.ID) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.ID)))
i += copy(dAtA[i:], m.ID)
}
if len(m.Health) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.Health)))
i += copy(dAtA[i:], m.Health)
}
return i, nil
}
func (m *AllocateRequest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *AllocateRequest) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.DevicesIDs) > 0 {
for _, s := range m.DevicesIDs {
dAtA[i] = 0xa
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
return i, nil
}
func (m *AllocateResponse) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *AllocateResponse) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Envs) > 0 {
for k := range m.Envs {
dAtA[i] = 0xa
i++
v := m.Envs[k]
mapSize := 1 + len(k) + sovApi(uint64(len(k))) + 1 + len(v) + sovApi(uint64(len(v)))
i = encodeVarintApi(dAtA, i, uint64(mapSize))
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(len(k)))
i += copy(dAtA[i:], k)
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(len(v)))
i += copy(dAtA[i:], v)
}
}
if len(m.Mounts) > 0 {
for _, msg := range m.Mounts {
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
if len(m.Devices) > 0 {
for _, msg := range m.Devices {
dAtA[i] = 0x1a
i++
i = encodeVarintApi(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
if len(m.Annotations) > 0 {
for k := range m.Annotations {
dAtA[i] = 0x22
i++
v := m.Annotations[k]
mapSize := 1 + len(k) + sovApi(uint64(len(k))) + 1 + len(v) + sovApi(uint64(len(v)))
i = encodeVarintApi(dAtA, i, uint64(mapSize))
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(len(k)))
i += copy(dAtA[i:], k)
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(len(v)))
i += copy(dAtA[i:], v)
}
}
return i, nil
}
func (m *Mount) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Mount) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.ContainerPath) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.ContainerPath)))
i += copy(dAtA[i:], m.ContainerPath)
}
if len(m.HostPath) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.HostPath)))
i += copy(dAtA[i:], m.HostPath)
}
if m.ReadOnly {
dAtA[i] = 0x18
i++
if m.ReadOnly {
dAtA[i] = 1
} else {
dAtA[i] = 0
}
i++
}
return i, nil
}
func (m *DeviceSpec) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *DeviceSpec) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.ContainerPath) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.ContainerPath)))
i += copy(dAtA[i:], m.ContainerPath)
}
if len(m.HostPath) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.HostPath)))
i += copy(dAtA[i:], m.HostPath)
}
if len(m.Permissions) > 0 {
dAtA[i] = 0x1a
i++
i = encodeVarintApi(dAtA, i, uint64(len(m.Permissions)))
i += copy(dAtA[i:], m.Permissions)
}
return i, nil
}
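// encodeVarintApi writes v at dAtA[offset:] as a base-128 varint: 7 bits per
// byte, least-significant group first, with the high bit set on every byte
// except the last. It returns the offset just past the encoded value. For
// example, 300 (0b1_0010_1100) is written as the two bytes 0xAC 0x02.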
func encodeVarintApi(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *RegisterRequest) Size() (n int) {
var l int
_ = l
l = len(m.Version)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
l = len(m.Endpoint)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
l = len(m.ResourceName)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
return n
}
func (m *Empty) Size() (n int) {
var l int
_ = l
return n
}
func (m *ListAndWatchResponse) Size() (n int) {
var l int
_ = l
if len(m.Devices) > 0 {
for _, e := range m.Devices {
l = e.Size()
n += 1 + l + sovApi(uint64(l))
}
}
return n
}
func (m *Device) Size() (n int) {
var l int
_ = l
l = len(m.ID)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
l = len(m.Health)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
return n
}
func (m *AllocateRequest) Size() (n int) {
var l int
_ = l
if len(m.DevicesIDs) > 0 {
for _, s := range m.DevicesIDs {
l = len(s)
n += 1 + l + sovApi(uint64(l))
}
}
return n
}
func (m *AllocateResponse) Size() (n int) {
var l int
_ = l
if len(m.Envs) > 0 {
for k, v := range m.Envs {
_ = k
_ = v
mapEntrySize := 1 + len(k) + sovApi(uint64(len(k))) + 1 + len(v) + sovApi(uint64(len(v)))
n += mapEntrySize + 1 + sovApi(uint64(mapEntrySize))
}
}
if len(m.Mounts) > 0 {
for _, e := range m.Mounts {
l = e.Size()
n += 1 + l + sovApi(uint64(l))
}
}
if len(m.Devices) > 0 {
for _, e := range m.Devices {
l = e.Size()
n += 1 + l + sovApi(uint64(l))
}
}
if len(m.Annotations) > 0 {
for k, v := range m.Annotations {
_ = k
_ = v
mapEntrySize := 1 + len(k) + sovApi(uint64(len(k))) + 1 + len(v) + sovApi(uint64(len(v)))
n += mapEntrySize + 1 + sovApi(uint64(mapEntrySize))
}
}
return n
}
func (m *Mount) Size() (n int) {
var l int
_ = l
l = len(m.ContainerPath)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
l = len(m.HostPath)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
if m.ReadOnly {
n += 2
}
return n
}
func (m *DeviceSpec) Size() (n int) {
var l int
_ = l
l = len(m.ContainerPath)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
l = len(m.HostPath)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
l = len(m.Permissions)
if l > 0 {
n += 1 + l + sovApi(uint64(l))
}
return n
}
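// sovApi returns how many bytes the varint encoding of x occupies. sozApi
// does the same for a zigzag-encoded signed value, mapping 0, -1, 1, -2, ...
// to 0, 1, 2, 3, ... so that small negative numbers stay small on the wire.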
func sovApi(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozApi(x uint64) (n int) {
return sovApi(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *RegisterRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&RegisterRequest{`,
`Version:` + fmt.Sprintf("%v", this.Version) + `,`,
`Endpoint:` + fmt.Sprintf("%v", this.Endpoint) + `,`,
`ResourceName:` + fmt.Sprintf("%v", this.ResourceName) + `,`,
`}`,
}, "")
return s
}
func (this *Empty) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Empty{`,
`}`,
}, "")
return s
}
func (this *ListAndWatchResponse) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&ListAndWatchResponse{`,
`Devices:` + strings.Replace(fmt.Sprintf("%v", this.Devices), "Device", "Device", 1) + `,`,
`}`,
}, "")
return s
}
func (this *Device) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Device{`,
`ID:` + fmt.Sprintf("%v", this.ID) + `,`,
`Health:` + fmt.Sprintf("%v", this.Health) + `,`,
`}`,
}, "")
return s
}
func (this *AllocateRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&AllocateRequest{`,
`DevicesIDs:` + fmt.Sprintf("%v", this.DevicesIDs) + `,`,
`}`,
}, "")
return s
}
func (this *AllocateResponse) String() string {
if this == nil {
return "nil"
}
keysForEnvs := make([]string, 0, len(this.Envs))
for k := range this.Envs {
keysForEnvs = append(keysForEnvs, k)
}
github_com_gogo_protobuf_sortkeys.Strings(keysForEnvs)
mapStringForEnvs := "map[string]string{"
for _, k := range keysForEnvs {
mapStringForEnvs += fmt.Sprintf("%v: %v,", k, this.Envs[k])
}
mapStringForEnvs += "}"
keysForAnnotations := make([]string, 0, len(this.Annotations))
for k := range this.Annotations {
keysForAnnotations = append(keysForAnnotations, k)
}
github_com_gogo_protobuf_sortkeys.Strings(keysForAnnotations)
mapStringForAnnotations := "map[string]string{"
for _, k := range keysForAnnotations {
mapStringForAnnotations += fmt.Sprintf("%v: %v,", k, this.Annotations[k])
}
mapStringForAnnotations += "}"
s := strings.Join([]string{`&AllocateResponse{`,
`Envs:` + mapStringForEnvs + `,`,
`Mounts:` + strings.Replace(fmt.Sprintf("%v", this.Mounts), "Mount", "Mount", 1) + `,`,
`Devices:` + strings.Replace(fmt.Sprintf("%v", this.Devices), "DeviceSpec", "DeviceSpec", 1) + `,`,
`Annotations:` + mapStringForAnnotations + `,`,
`}`,
}, "")
return s
}
func (this *Mount) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Mount{`,
`ContainerPath:` + fmt.Sprintf("%v", this.ContainerPath) + `,`,
`HostPath:` + fmt.Sprintf("%v", this.HostPath) + `,`,
`ReadOnly:` + fmt.Sprintf("%v", this.ReadOnly) + `,`,
`}`,
}, "")
return s
}
func (this *DeviceSpec) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&DeviceSpec{`,
`ContainerPath:` + fmt.Sprintf("%v", this.ContainerPath) + `,`,
`HostPath:` + fmt.Sprintf("%v", this.HostPath) + `,`,
`Permissions:` + fmt.Sprintf("%v", this.Permissions) + `,`,
`}`,
}, "")
return s
}
func valueToStringApi(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
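// The Unmarshal methods below all share one shape: repeatedly read a varint
// key, split it into a field number (key >> 3) and a wire type (key & 0x7),
// decode known fields by number, and forward unknown fields to skipApi.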
func (m *RegisterRequest) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: RegisterRequest: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: RegisterRequest: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Version = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Endpoint", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Endpoint = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ResourceName", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.ResourceName = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *Empty) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Empty: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Empty: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *ListAndWatchResponse) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {<|fim▁hole|> break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: ListAndWatchResponse: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: ListAndWatchResponse: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Devices", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Devices = append(m.Devices, &Device{})
if err := m.Devices[len(m.Devices)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *Device) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Device: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Device: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.ID = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Health", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Health = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *AllocateRequest) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: AllocateRequest: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: AllocateRequest: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field DevicesIDs", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.DevicesIDs = append(m.DevicesIDs, string(dAtA[iNdEx:postIndex]))
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *AllocateResponse) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: AllocateResponse: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: AllocateResponse: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Envs", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Envs == nil {
m.Envs = make(map[string]string)
}
var mapkey string
var mapvalue string
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthApi
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var stringLenmapvalue uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapvalue |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapvalue := int(stringLenmapvalue)
if intStringLenmapvalue < 0 {
return ErrInvalidLengthApi
}
postStringIndexmapvalue := iNdEx + intStringLenmapvalue
if postStringIndexmapvalue > l {
return io.ErrUnexpectedEOF
}
mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
iNdEx = postStringIndexmapvalue
} else {
iNdEx = entryPreIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Envs[mapkey] = mapvalue
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Mounts", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Mounts = append(m.Mounts, &Mount{})
if err := m.Mounts[len(m.Mounts)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Devices", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Devices = append(m.Devices, &DeviceSpec{})
if err := m.Devices[len(m.Devices)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Annotations", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Annotations == nil {
m.Annotations = make(map[string]string)
}
var mapkey string
var mapvalue string
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthApi
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var stringLenmapvalue uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapvalue |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapvalue := int(stringLenmapvalue)
if intStringLenmapvalue < 0 {
return ErrInvalidLengthApi
}
postStringIndexmapvalue := iNdEx + intStringLenmapvalue
if postStringIndexmapvalue > l {
return io.ErrUnexpectedEOF
}
mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
iNdEx = postStringIndexmapvalue
} else {
iNdEx = entryPreIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.Annotations[mapkey] = mapvalue
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *Mount) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Mount: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Mount: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ContainerPath", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.ContainerPath = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field HostPath", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.HostPath = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field ReadOnly", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.ReadOnly = bool(v != 0)
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *DeviceSpec) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: DeviceSpec: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: DeviceSpec: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ContainerPath", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.ContainerPath = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field HostPath", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.HostPath = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Permissions", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowApi
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Permissions = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipApi(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
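// skipApi returns the number of bytes occupied by the field whose key starts
// at dAtA[0], using only the wire type: varints are scanned to their final
// byte, fixed64/fixed32 advance 8/4 bytes, length-delimited fields advance by
// their length prefix, and (deprecated) groups are skipped recursively until
// the matching end-group marker.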
func skipApi(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowApi
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowApi
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowApi
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthApi
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowApi
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipApi(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthApi = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowApi = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("api.proto", fileDescriptorApi) }
var fileDescriptorApi = []byte{
// 594 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x5d, 0x8b, 0xd3, 0x40,
0x14, 0x6d, 0xd2, 0xdd, 0x6e, 0x7b, 0xdb, 0xdd, 0x2d, 0x63, 0x91, 0x10, 0x35, 0x94, 0x88, 0x50,
0x10, 0xd3, 0xb5, 0x3e, 0x28, 0x22, 0x62, 0xa5, 0x15, 0x96, 0xf5, 0xa3, 0xc6, 0x07, 0x1f, 0xcb,
0x34, 0x1d, 0x9b, 0xc1, 0x64, 0x26, 0x66, 0x26, 0x85, 0xbe, 0xf9, 0x13, 0xfc, 0x19, 0xfe, 0x94,
0x7d, 0xf4, 0xd1, 0x47, 0xb7, 0xfe, 0x0e, 0x41, 0x3a, 0x49, 0xfa, 0x11, 0x8a, 0x22, 0xf8, 0x96,
0x7b, 0xe6, 0x9e, 0xc9, 0xb9, 0x27, 0xf7, 0x04, 0x6a, 0x38, 0xa2, 0x4e, 0x14, 0x73, 0xc9, 0x51,
0x63, 0x4a, 0xe6, 0xd4, 0x23, 0x51, 0x90, 0xcc, 0x28, 0x33, 0xef, 0xcd, 0xa8, 0xf4, 0x93, 0x89,
0xe3, 0xf1, 0xb0, 0x3b, 0xe3, 0x33, 0xde, 0x55, 0x4d, 0x93, 0xe4, 0x83, 0xaa, 0x54, 0xa1, 0x9e,
0x52, 0xb2, 0x1d, 0xc0, 0xa9, 0x4b, 0x66, 0x54, 0x48, 0x12, 0xbb, 0xe4, 0x53, 0x42, 0x84, 0x44,
0x06, 0x1c, 0xcd, 0x49, 0x2c, 0x28, 0x67, 0x86, 0xd6, 0xd6, 0x3a, 0x35, 0x37, 0x2f, 0x91, 0x09,
0x55, 0xc2, 0xa6, 0x11, 0xa7, 0x4c, 0x1a, 0xba, 0x3a, 0x5a, 0xd7, 0xe8, 0x36, 0x1c, 0xc7, 0x44,
0xf0, 0x24, 0xf6, 0xc8, 0x98, 0xe1, 0x90, 0x18, 0x65, 0xd5, 0xd0, 0xc8, 0xc1, 0xd7, 0x38, 0x24,
0xf6, 0x11, 0x1c, 0x0e, 0xc3, 0x48, 0x2e, 0xec, 0x17, 0xd0, 0x7a, 0x49, 0x85, 0xec, 0xb3, 0xe9,
0x7b, 0x2c, 0x3d, 0xdf, 0x25, 0x22, 0xe2, 0x4c, 0x10, 0xe4, 0xc0, 0x51, 0x3a, 0x8d, 0x30, 0xb4,
0x76, 0xb9, 0x53, 0xef, 0xb5, 0x9c, 0xed, 0xe9, 0x9c, 0x81, 0x2a, 0xdc, 0xbc, 0xc9, 0x3e, 0x83,
0x4a, 0x0a, 0xa1, 0x13, 0xd0, 0xcf, 0x07, 0x99, 0x60, 0x9d, 0x0e, 0xd0, 0x75, 0xa8, 0xf8, 0x04,
0x07, 0xd2, 0xcf, 0x94, 0x66, 0x95, 0x7d, 0x1f, 0x4e, 0xfb, 0x41, 0xc0, 0x3d, 0x2c, 0x49, 0x3e,
0xb0, 0x05, 0x90, 0xdd, 0x77, 0x3e, 0x48, 0xdf, 0x5b, 0x73, 0xb7, 0x10, 0xfb, 0x97, 0x0e, 0xcd,
0x0d, 0x27, 0x53, 0xfa, 0x04, 0x0e, 0x08, 0x9b, 0xe7, 0x32, 0x3b, 0xbb, 0x32, 0x8b, 0xdd, 0xce,
0x90, 0xcd, 0xc5, 0x90, 0xc9, 0x78, 0xe1, 0x2a, 0x16, 0xba, 0x0b, 0x95, 0x90, 0x27, 0x4c, 0x0a,
0x43, 0x57, 0xfc, 0x6b, 0xbb, 0xfc, 0x57, 0xab, 0x33, 0x37, 0x6b, 0x41, 0xbd, 0x8d, 0x29, 0x65,
0xd5, 0x6d, 0xec, 0x33, 0xe5, 0x5d, 0x44, 0xbc, 0xb5, 0x31, 0xe8, 0x2d, 0xd4, 0x31, 0x63, 0x5c,
0x62, 0x49, 0x39, 0x13, 0xc6, 0x81, 0xe2, 0x75, 0xff, 0xa2, 0xb2, 0xbf, 0x61, 0xa4, 0x62, 0xb7,
0xef, 0x30, 0x1f, 0x42, 0x6d, 0x3d, 0x06, 0x6a, 0x42, 0xf9, 0x23, 0x59, 0x64, 0x7e, 0xaf, 0x1e,
0x51, 0x0b, 0x0e, 0xe7, 0x38, 0x48, 0x48, 0xe6, 0x77, 0x5a, 0x3c, 0xd6, 0x1f, 0x69, 0xe6, 0x53,
0x68, 0x16, 0x6f, 0xfe, 0x17, 0xbe, 0xed, 0xc3, 0xa1, 0x32, 0x04, 0xdd, 0x81, 0x13, 0x8f, 0x33,
0x89, 0x29, 0x23, 0xf1, 0x38, 0xc2, 0xd2, 0xcf, 0xf8, 0xc7, 0x6b, 0x74, 0x84, 0xa5, 0x8f, 0x6e,
0x40, 0xcd, 0xe7, 0x42, 0xa6, 0x1d, 0xd9, 0x9e, 0xae, 0x80, 0xfc, 0x30, 0x26, 0x78, 0x3a, 0xe6,
0x2c, 0x58, 0xa8, 0x1d, 0xad, 0xba, 0xd5, 0x15, 0xf0, 0x86, 0x05, 0x0b, 0x3b, 0x06, 0xd8, 0x98,
0xf9, 0x5f, 0x5e, 0xd7, 0x86, 0x7a, 0x44, 0xe2, 0x90, 0x0a, 0xa1, 0xbe, 0x43, 0x1a, 0x8a, 0x6d,
0xa8, 0x37, 0x82, 0x46, 0x9a, 0xc0, 0x58, 0xf9, 0x83, 0x9e, 0x41, 0x35, 0x4f, 0x24, 0xba, 0xb5,
0xfb, 0xc1, 0x0a, 0x49, 0x35, 0x0b, 0x5b, 0x93, 0x46, 0xab, 0xd4, 0xfb, 0xaa, 0x41, 0x23, 0x1d,
0x63, 0xa4, 0x0e, 0xd0, 0x05, 0x34, 0xb6, 0xd3, 0x86, 0xf6, 0xf1, 0x4c, 0x7b, 0x17, 0xdc, 0x17,
0x4f, 0xbb, 0x74, 0xa6, 0xa1, 0x0b, 0xa8, 0xe6, 0x8b, 0x53, 0xd4, 0x57, 0x08, 0x96, 0x69, 0xfd,
0x79, 0xdf, 0xec, 0xd2, 0xf3, 0x9b, 0x97, 0x57, 0x96, 0xf6, 0xfd, 0xca, 0x2a, 0x7d, 0x5e, 0x5a,
0xda, 0xe5, 0xd2, 0xd2, 0xbe, 0x2d, 0x2d, 0xed, 0xc7, 0xd2, 0xd2, 0xbe, 0xfc, 0xb4, 0x4a, 0x93,
0x8a, 0xfa, 0x47, 0x3d, 0xf8, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x67, 0x68, 0xfd, 0xfd, 0xed, 0x04,
0x00, 0x00,
}<|fim▁end|> | |
<|file_name|>highlighter.py<|end_file_name|><|fim▁begin|>import re
from PyQt4.QtGui import QSyntaxHighlighter, QColor, QFont, QTextCharFormat
from PyQt4.QtCore import Qt
from .mdx_strkundr import DEL_RE, INS_RE, STRONG_RE, EMPH_RE
class MikiHighlighter(QSyntaxHighlighter):
WORDS = r'(?iu)[\w\']+'
def __init__(self, parent=None):
super(MikiHighlighter, self).__init__(parent)
baseFontSize = 12
NUM = 15
self.patterns = []
regexp = [0] * NUM
font = [0] * NUM
color = [0] * NUM
# 0: html tags - <pre></pre>
regexp[0] = '</?[^>]+>'
font[0] = QFont("monospace", baseFontSize, -1)
color[0] = QColor("#A40000")
# 1: h1 - #
regexp[1] = '^#[^#]+'
color[1] = QColor("#4E9A06")
font[1] = QFont("decorative", 2*baseFontSize, QFont.Bold)
# 2: h2 - ##
regexp[2] = '^##[^#]+'
color[2] = QColor("#4E9A06")
font[2] = QFont("serif", 5.0/3*baseFontSize, QFont.Bold)
# 3: h3 - ###
regexp[3] = '^###[^#]+'
color[3] = QColor("#4E9A06")
font[3] = QFont("serif", 4.0/3*baseFontSize, QFont.Bold)
# 4: h4 and more - ####
regexp[4] = '^####.+'
color[4] = QColor("#4E9A06")
font[4] = QFont("serif", baseFontSize, QFont.Bold)
# 5: html symbols - >
regexp[5] = '&[^; ].+;'
color[5] = QColor("#A40000")
font[5] = QFont("monospace", baseFontSize, -1)
# 6: html comments - <!-- -->
regexp[6] = '<!--.+-->'
color[6] = QColor("#888A85")
font[6] = QFont(None, baseFontSize, -1)
# 7: delete - ~~delete~~
regexp[7] = DEL_RE
color[7] = QColor("#888A85")
font[7] = QFont(None, baseFontSize, -1)
# 8: insert - __insert__
regexp[8] = INS_RE
font[8] = QFont(None, baseFontSize, -1)
font[8].setUnderline(True)
# 9: strong - **strong**
regexp[9] = STRONG_RE
color[9] = QColor("#F57900")
font[9] = QFont(None, baseFontSize, QFont.Bold)
# 10: emphasis - //emphasis//
regexp[10] = EMPH_RE
color[10] = QColor("#F57900")
font[10] = QFont(None, baseFontSize, -1, True)
# 11: links - (links) after [] or links after []:
regexp[11] = r'(?<=(\]\())[^\(\)]*(?=\))'
font[11] = QFont(None, baseFontSize, -1, True)
font[11].setUnderline(True)
#.setUnderlineColor("#204A87")
# 12: link/image references - [] or ![]
regexp[12] = r'!?\[[^\[\]]*\]'
color[12] = QColor("#204A87")
font[12] = QFont(None, baseFontSize, -1)
# 13: blockquotes and lists - > or - or *
regexp[13] = r'(^>+)|(^- )|(^\* )'
color[13] = QColor("#F57900")
font[13] = QFont(None, baseFontSize, -1)
# 14: fence - ``` or ~~~
regexp[14] = '^(?:~{3,}|`{3,}).*$'
color[14] = QColor("#F57900")
font[14] = QFont(None, baseFontSize, QFont.Bold)
for i in range(NUM):
p = re.compile(regexp[i])
f = QTextCharFormat()
if font[i] != 0:
f.setFont(font[i])
if color[i] != 0:
f.setForeground(color[i])
self.patterns.append((p, f))
self.speller = parent.speller
fenced_font = QFont("monospace", baseFontSize, -1)
self.fenced_block = re.compile("^(?:~{3,}|`{3,}).*$")
self.fenced_format = QTextCharFormat()
self.fenced_format.setFont(fenced_font)
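# Block-state convention used by highlightBlock below: state 0 means the line
# is outside a fenced code block, state 1 means inside one; Qt carries the
# state over to the next line via setCurrentBlockState().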
def highlightSpellcheck(self, text):
for word_object in re.finditer(self.WORDS, str(text)):
if not word_object.group():
# don't bother with empty words
continue
if self.speller and not self.speller.check(word_object.group()):
current_format = self.format(word_object.start())
current_format.setUnderlineColor(Qt.red)
current_format.setUnderlineStyle(QTextCharFormat.SpellCheckUnderline)
self.setFormat(word_object.start(),
word_object.end() - word_object.start(), current_format)
def highlightBlock(self, text):
# highlight patterns
for i in range(0, len(self.patterns)):
p = self.patterns[i]
for match in p[0].finditer(text):
self.setFormat(
match.start(), match.end() - match.start(), p[1])
# escape highlights in fenced_block
m = self.fenced_block.match(text)
self.setCurrentBlockState(0)
if self.previousBlockState() != 1:
if m:<|fim▁hole|> else:
if m:
self.setCurrentBlockState(0)
else:
self.setCurrentBlockState(1)
self.setFormat(0, len(text), self.fenced_format)
self.highlightSpellcheck(text)<|fim▁end|> | self.setCurrentBlockState(1) |
<|file_name|>polls.py<|end_file_name|><|fim▁begin|>import factory
import factory.django
from faker import Faker
from machina.core.db.models import get_model<|fim▁hole|>from machina.test.factories.auth import UserFactory
from machina.test.factories.conversation import TopicFactory
faker = Faker()
TopicPoll = get_model('forum_polls', 'TopicPoll')
TopicPollOption = get_model('forum_polls', 'TopicPollOption')
TopicPollVote = get_model('forum_polls', 'TopicPollVote')
class TopicPollFactory(factory.django.DjangoModelFactory):
topic = factory.SubFactory(TopicFactory)
question = faker.text(max_nb_chars=200)
class Meta:
model = TopicPoll
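# SubFactory links cascade on instantiation, so calling TopicPollVoteFactory()
# below builds the whole dependent chain (topic, poll, option, voter) in one go.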
class TopicPollOptionFactory(factory.django.DjangoModelFactory):
poll = factory.SubFactory(TopicPollFactory)
text = faker.text(max_nb_chars=100)
class Meta:
model = TopicPollOption
class TopicPollVoteFactory(factory.django.DjangoModelFactory):
poll_option = factory.SubFactory(TopicPollOptionFactory)
voter = factory.SubFactory(UserFactory)
class Meta:
model = TopicPollVote<|fim▁end|> | |
<|file_name|>index.rs<|end_file_name|><|fim▁begin|>extern crate iron;
extern crate staticfile;
extern crate mount;
use iron::{status, Iron, Request, Response, IronResult, IronError};
use iron::mime::Mime;
use staticfile::Static;
use mount::Mount;
use std::process::{Command, Output};
use std::error::Error;
#[derive(Debug)]
struct ServerError(String);
impl ::std::fmt::Display for ServerError {
#[inline]
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
self.0.fmt(f)
}
}
impl Error for ServerError {
fn description(&self) -> &str {
&*self.0
}
}
impl ::std::convert::From<&'static str> for ServerError {
fn from(s: &'static str) -> ServerError { ServerError(s.to_owned()) }
}
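// Handles GET /server?module=<name> by running modules/shell_files/<name>.sh
// and returning its stdout as JSON. Note that `module` is interpolated into
// the path unchecked, so a caller could smuggle in `../`; a hardened
// deployment should whitelist module names before executing anything.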
fn serve(req: &mut Request) -> IronResult<Response> {
match req.url.query {
Some(ref param) if param.starts_with("module=") => {
let module = ¶m[7..];
match Command::new(format!("modules/shell_files/{}.sh", module)).output() {
Ok(Output { stdout, .. }) => Ok(Response::with((status::Ok, stdout, "application/json".parse::<Mime>().unwrap()))),
Err(err) => Err(IronError::new(err, status::InternalServerError)),
}
},
Some(_) => Err(IronError::new::<ServerError, _>("module parameter required".into(), status::BadRequest)),
None => Err(IronError::new::<ServerError, _>("object not found".into(), status::NotFound)),
}
}
<|fim▁hole|>
Iron::new(root).http("localhost:8081").unwrap();
}<|fim▁end|> | fn main() {
let mut root = Mount::new();
root.mount("/", Static::new("../"))
.mount("/server", serve); |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "content_edit_proj.settings")
<|fim▁hole|>
execute_from_command_line(sys.argv)<|fim▁end|> | from django.core.management import execute_from_command_line |
<|file_name|>derives-span-Clone-enum.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
struct Error;
#[derive(Clone)]
enum Enum {
A(
Error //~ ERROR
)
}
fn main() {}<|fim▁end|> | // except according to those terms.
|
<|file_name|>voice.cpp<|end_file_name|><|fim▁begin|>// ---------------------------------------------------------------------------
// This file is part of reSID, a MOS6581 SID emulator engine.
// Copyright (C) 2004 Dag Lem <[email protected]>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// ---------------------------------------------------------------------------
#define __VOICE_CC__
#include "voice.h"
// ----------------------------------------------------------------------------
// Constructor.
// ----------------------------------------------------------------------------
Voice::Voice()
{
set_chip_model(MOS6581);
}
// ----------------------------------------------------------------------------
// Set chip model.
// ----------------------------------------------------------------------------
void Voice::set_chip_model(chip_model model)
{
wave.set_chip_model(model);
if (model == MOS6581) {
// The waveform D/A converter introduces a DC offset in the signal
// to the envelope multiplying D/A converter. The "zero" level of
// the waveform D/A converter can be found as follows:
//
// Measure the "zero" voltage of voice 3 on the SID audio output
// pin, routing only voice 3 to the mixer ($d417 = $0b, $d418 =
// $0f, all other registers zeroed).
//
// Then set the sustain level for voice 3 to maximum and search for
// the waveform output value yielding the same voltage as found
// above. This is done by trying out different waveform output
// values until the correct value is found, e.g. with the following
// program:
//
// lda #$08
// sta $d412
// lda #$0b
// sta $d417
// lda #$0f
// sta $d418
// lda #$f0
// sta $d414
// lda #$21
// sta $d412
// lda #$01
// sta $d40e
//
// ldx #$00
// lda #$38 ; Tweak this to find the "zero" level
//l cmp $d41b
// bne l
// stx $d40e ; Stop frequency counter - freeze waveform output
// brk
//
// The waveform output range is 0x000 to 0xfff, so the "zero"
// level should ideally have been 0x800. In the measured chip, the
// waveform output "zero" level was found to be 0x380 (i.e. $d41b<|fim▁hole|>
// The envelope multiplying D/A converter introduces another DC
// offset. This is isolated by the following measurements:
//
// * The "zero" output level of the mixer at full volume is 5.44V.
// * Routing one voice to the mixer at full volume yields
// 6.75V at maximum voice output (wave = 0xfff, sustain = 0xf)
// 5.94V at "zero" voice output (wave = any, sustain = 0x0)
// 5.70V at minimum voice output (wave = 0x000, sustain = 0xf)
// * The DC offset of one voice is (5.94V - 5.44V) = 0.50V
// * The dynamic range of one voice is |6.75V - 5.70V| = 1.05V
// * The DC offset is thus 0.50V/1.05V ~ 1/2 of the dynamic range.
//
// Note that by removing the DC offset, we get the following ranges for
// one voice:
// y > 0: (6.75V - 5.44V) - 0.50V = 0.81V
// y < 0: (5.70V - 5.44V) - 0.50V = -0.24V
// The scaling of the voice amplitude is not symmetric about y = 0;
// this follows from the DC level in the waveform output.
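// In the 20-bit voice output scale (12-bit waveform x 8-bit envelope), this
// offset is half the waveform range (0x800) at maximum envelope (0xff):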
voice_DC = 0x800*0xff;
}
else {
// No DC offsets in the MOS8580.
wave_zero = 0x800;
voice_DC = 0;
}
}
// ----------------------------------------------------------------------------
// Set sync source.
// ----------------------------------------------------------------------------
void Voice::set_sync_source(Voice* source)
{
wave.set_sync_source(&source->wave);
}
// ----------------------------------------------------------------------------
// Register functions.
// ----------------------------------------------------------------------------
void Voice::writeCONTROL_REG(reg8 control)
{
wave.writeCONTROL_REG(control);
envelope.writeCONTROL_REG(control);
}
// ----------------------------------------------------------------------------
// SID reset.
// ----------------------------------------------------------------------------
void Voice::reset()
{
wave.reset();
envelope.reset();
}<|fim▁end|> | // = 0x38) at 5.94V.
wave_zero = 0x380; |
<|file_name|>zh.js<|end_file_name|><|fim▁begin|><|fim▁hole|>Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'toolbar', 'zh', {
toolbarCollapse: '摺疊工具列',
toolbarExpand: '展開工具列',
toolbarGroups: {
document: '文件',
clipboard: '剪貼簿/復原',
editing: '編輯選項',
forms: '格式',
basicstyles: '基本樣式',
paragraph: '段落',
links: '連結',
insert: '插入',
styles: '樣式',
colors: '顏色',
tools: '工具'
},
toolbars: '編輯器工具列'
} );<|fim▁end|> | /*
|
<|file_name|>UserService.java<|end_file_name|><|fim▁begin|>package de.leif.ffmanagementsuite.service;
import de.leif.ffmanagementsuite.domain.Authority;
import de.leif.ffmanagementsuite.domain.User;
import de.leif.ffmanagementsuite.repository.AuthorityRepository;
import de.leif.ffmanagementsuite.config.Constants;
import de.leif.ffmanagementsuite.repository.UserRepository;
import de.leif.ffmanagementsuite.security.AuthoritiesConstants;
import de.leif.ffmanagementsuite.security.SecurityUtils;
import de.leif.ffmanagementsuite.service.util.RandomUtil;
import de.leif.ffmanagementsuite.service.dto.UserDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.CacheManager;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;
/**
* Service class for managing users.
*/
@Service
@Transactional
public class UserService {
private final Logger log = LoggerFactory.getLogger(UserService.class);
private final UserRepository userRepository;
private final PasswordEncoder passwordEncoder;
private final AuthorityRepository authorityRepository;
private final CacheManager cacheManager;
public UserService(UserRepository userRepository, PasswordEncoder passwordEncoder, AuthorityRepository authorityRepository, CacheManager cacheManager) {
this.userRepository = userRepository;
this.passwordEncoder = passwordEncoder;
this.authorityRepository = authorityRepository;
this.cacheManager = cacheManager;
}
public Optional<User> activateRegistration(String key) {
log.debug("Activating user for activation key {}", key);
return userRepository.findOneByActivationKey(key)
.map(user -> {
// activate given user for the registration key.
user.setActivated(true);
user.setActivationKey(null);
cacheManager.getCache("users").evict(user.getLogin());
log.debug("Activated user: {}", user);
return user;
});
}
public Optional<User> completePasswordReset(String newPassword, String key) {
log.debug("Reset user password for reset key {}", key);
return userRepository.findOneByResetKey(key)
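// a reset key is only honoured for 24 hours (86400 seconds) after it was issued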
.filter(user -> user.getResetDate().isAfter(Instant.now().minusSeconds(86400)))
.map(user -> {
user.setPassword(passwordEncoder.encode(newPassword));
user.setResetKey(null);
user.setResetDate(null);
cacheManager.getCache("users").evict(user.getLogin());
return user;
});
}
public Optional<User> requestPasswordReset(String mail) {
return userRepository.findOneByEmailIgnoreCase(mail)
.filter(User::getActivated)
.map(user -> {
user.setResetKey(RandomUtil.generateResetKey());
user.setResetDate(Instant.now());
cacheManager.getCache("users").evict(user.getLogin());
return user;
});
}
public User createUser(String login, String password, String firstName, String lastName, String email,
String imageUrl, String langKey) {
User newUser = new User();
Authority authority = authorityRepository.findOne(AuthoritiesConstants.USER);
Set<Authority> authorities = new HashSet<>();
String encryptedPassword = passwordEncoder.encode(password);<|fim▁hole|> // new user gets initially a generated password
newUser.setPassword(encryptedPassword);
newUser.setFirstName(firstName);
newUser.setLastName(lastName);
newUser.setEmail(email);
newUser.setImageUrl(imageUrl);
newUser.setLangKey(langKey);
// new user is not active
newUser.setActivated(false);
// new user gets registration key
newUser.setActivationKey(RandomUtil.generateActivationKey());
authorities.add(authority);
newUser.setAuthorities(authorities);
userRepository.save(newUser);
log.debug("Created Information for User: {}", newUser);
return newUser;
}
public User createUser(UserDTO userDTO) {
User user = new User();
user.setLogin(userDTO.getLogin());
user.setFirstName(userDTO.getFirstName());
user.setLastName(userDTO.getLastName());
user.setEmail(userDTO.getEmail());
user.setImageUrl(userDTO.getImageUrl());
if (userDTO.getLangKey() == null) {
user.setLangKey(Constants.DEFAULT_LANGUAGE); // default language
} else {
user.setLangKey(userDTO.getLangKey());
}
if (userDTO.getAuthorities() != null) {
Set<Authority> authorities = new HashSet<>();
userDTO.getAuthorities().forEach(
authority -> authorities.add(authorityRepository.findOne(authority))
);
user.setAuthorities(authorities);
}
String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword());
user.setPassword(encryptedPassword);
user.setResetKey(RandomUtil.generateResetKey());
user.setResetDate(Instant.now());
user.setActivated(true);
userRepository.save(user);
log.debug("Created Information for User: {}", user);
return user;
}
/**
* Update basic information (first name, last name, email, language) for the current user.
*
* @param firstName first name of user
* @param lastName last name of user
* @param email email id of user
* @param langKey language key
* @param imageUrl image URL of user
*/
public void updateUser(String firstName, String lastName, String email, String langKey, String imageUrl) {
userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> {
user.setFirstName(firstName);
user.setLastName(lastName);
user.setEmail(email);
user.setLangKey(langKey);
user.setImageUrl(imageUrl);
cacheManager.getCache("users").evict(user.getLogin());
log.debug("Changed Information for User: {}", user);
});
}
/**
* Update all information for a specific user, and return the modified user.
*
* @param userDTO user to update
* @return updated user
*/
public Optional<UserDTO> updateUser(UserDTO userDTO) {
return Optional.of(userRepository
.findOne(userDTO.getId()))
.map(user -> {
user.setLogin(userDTO.getLogin());
user.setFirstName(userDTO.getFirstName());
user.setLastName(userDTO.getLastName());
user.setEmail(userDTO.getEmail());
user.setImageUrl(userDTO.getImageUrl());
user.setActivated(userDTO.isActivated());
user.setLangKey(userDTO.getLangKey());
Set<Authority> managedAuthorities = user.getAuthorities();
managedAuthorities.clear();
userDTO.getAuthorities().stream()
.map(authorityRepository::findOne)
.forEach(managedAuthorities::add);
cacheManager.getCache("users").evict(user.getLogin());
log.debug("Changed Information for User: {}", user);
return user;
})
.map(UserDTO::new);
}
public void deleteUser(String login) {
userRepository.findOneByLogin(login).ifPresent(user -> {
userRepository.delete(user);
cacheManager.getCache("users").evict(login);
log.debug("Deleted User: {}", user);
});
}
public void changePassword(String password) {
userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> {
String encryptedPassword = passwordEncoder.encode(password);
user.setPassword(encryptedPassword);
cacheManager.getCache("users").evict(user.getLogin());
log.debug("Changed password for User: {}", user);
});
}
@Transactional(readOnly = true)
public Page<UserDTO> getAllManagedUsers(Pageable pageable) {
return userRepository.findAllByLoginNot(pageable, Constants.ANONYMOUS_USER).map(UserDTO::new);
}
@Transactional(readOnly = true)
public Optional<User> getUserWithAuthoritiesByLogin(String login) {
return userRepository.findOneWithAuthoritiesByLogin(login);
}
@Transactional(readOnly = true)
public User getUserWithAuthorities(Long id) {
return userRepository.findOneWithAuthoritiesById(id);
}
@Transactional(readOnly = true)
public User getUserWithAuthorities() {
return userRepository.findOneWithAuthoritiesByLogin(SecurityUtils.getCurrentUserLogin()).orElse(null);
}
/**
* Not activated users should be automatically deleted after 3 days.
* <p>
* This is scheduled to get fired everyday, at 01:00 (am).
*/
@Scheduled(cron = "0 0 1 * * ?")
public void removeNotActivatedUsers() {
List<User> users = userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(Instant.now().minus(3, ChronoUnit.DAYS));
for (User user : users) {
log.debug("Deleting not activated user {}", user.getLogin());
userRepository.delete(user);
cacheManager.getCache("users").evict(user.getLogin());
}
}
/**
* @return a list of all the authorities
*/
public List<String> getAuthorities() {
return authorityRepository.findAll().stream().map(Authority::getName).collect(Collectors.toList());
}
}<|fim▁end|> | newUser.setLogin(login); |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Copyright 2016 Andrea McIntosh
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.views import generic
from .models import Question, Choice
<|fim▁hole|> context_object_name = 'latest_question_list'
def get_queryset(self):
"""Return the last five published questions."""
return Question.objects.order_by('-pub_date')[:5]
class DetailView(generic.DetailView):
model = Question
template_name = 'polls/detail.html'
class ResultsView(generic.DetailView):
model = Question
template_name = 'polls/results.html'
def vote(request, question_id):
question = get_object_or_404(Question, pk=question_id)
try:
selected_choice = question.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
return render(request, 'polls/detail.html', {
'question': question,
'error_message': "You didn't select a choice.",
})
else:
selected_choice.votes += 1
selected_choice.save()
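# Always return an HttpResponseRedirect after successfully dealing with
# POST data, so a user hitting Back/Reload cannot submit the vote twice.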
return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))<|fim▁end|> | class IndexView(generic.ListView):
template_name = "polls/index.html" |
<|file_name|>0010_target_data_migration.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
<|fim▁hole|>
for entry in Entry.objects.all():
entry.target_amount = entry.goal.target_amount
entry.save()
class Migration(migrations.Migration):
dependencies = [
('momentum', '0009_entry_target_amount'),
]
operations = [
migrations.RunPython(populate_target_amount),
]<|fim▁end|> | def populate_target_amount(apps, schema_editor):
Entry = apps.get_model("momentum", "Entry") |
<|file_name|>motion_detector.py<|end_file_name|><|fim▁begin|># USAGE
# python motion_detector.py
# python motion_detector.py --video videos/example_01.mp4
# import the necessary packages
import argparse
import datetime
import imutils
import time
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video", help="path to the video file")
ap.add_argument("-a", "--min-area", type=int, default=500, help="minimum area size")
args = vars(ap.parse_args())
# if the video argument is None, then we are reading from webcam
if args.get("video", None) is None:
camera = cv2.VideoCapture(0)
time.sleep(0.25)
# otherwise, we are reading from a video file
else:
camera = cv2.VideoCapture(args["video"])
time.sleep(0.25)<|fim▁hole|># loop over the frames of the video
while True:
# grab the current frame and initialize the occupied/unoccupied
# text
(grabbed, frame) = camera.read()
text = "Unoccupied"
# if the frame could not be grabbed, then we have reached the end
# of the video
if not grabbed:
break
# resize the frame, convert it to grayscale, and blur it
frame = imutils.resize(frame, width=500)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (21, 21), 0)
# if the first frame is None, initialize it
if firstFrame is None:
firstFrame = gray
continue
# compute the absolute difference between the current frame and
# first frame
frameDelta = cv2.absdiff(firstFrame, gray)
thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
# dilate the thresholded image to fill in holes, then find contours
# on thresholded image
thresh = cv2.dilate(thresh, None, iterations=2)
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
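# note: this two-value unpacking matches OpenCV 2.4 (and 4.x); OpenCV 3.x
# returns (image, contours, hierarchy) and would need three-value unpacking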
# loop over the contours
for c in cnts:
# if the contour is too small, ignore it
if cv2.contourArea(c) < args["min_area"]:
continue
# compute the bounding box for the contour, draw it on the frame,
# and update the text
(x, y, w, h) = cv2.boundingRect(c)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
text = "Occupied"
# draw the text and timestamp on the frame
cv2.putText(frame, "Room Status: {}".format(text), (10, 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"),
(10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
# show the frame and record if the user presses a key
cv2.imshow("Security Feed", frame)
cv2.imshow("Thresh", thresh)
cv2.imshow("Frame Delta", frameDelta)
key = cv2.waitKey(1) & 0xFF
	# if the `q` key is pressed, break from the loop
if key == ord("q"):
break
# cleanup the camera and close any open windows
camera.release()
cv2.destroyAllWindows()<|fim▁end|> |
# initialize the first frame in the video stream
firstFrame = None
|
<|file_name|>explicit_self_xcrate_exe.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// except according to those terms.
// ignore-fast
// aux-build:explicit_self_xcrate.rs
extern crate explicit_self_xcrate;
use explicit_self_xcrate::{Foo, Bar};
pub fn main() {
let x = Bar { x: ~"hello" };
x.f();
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed |
<|file_name|>Charge.js<|end_file_name|><|fim▁begin|>/**
* Charge.js
*
 * @description :: Model for a charge, defined by a name and a scope (both strings).
* @docs :: http://sailsjs.org/#!documentation/models
*/
module.exports = {
attributes: {<|fim▁hole|> name: {
type: 'string'
},
scope: {
type: 'string'
},
}
};<|fim▁end|> | |
<|file_name|>Gpio.java<|end_file_name|><|fim▁begin|>package com.esd.phicomm.bruce.esdapp;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
/**
* Created by Bruce on 2017/1/4.
*/
class Gpio {
private String port;
public boolean output(int value) {
String command = String.format("echo %d > /sys/class/gpio_sw/%s/data\n", value, port);
try {
Runtime.getRuntime().exec(new String[] {"su", "-c", command});
return true;
} catch (IOException e) {
return false;
}
}
private String readinput(){
Process p;
String command = String.format("cat /sys/class/gpio_sw/%s/data\n", port);
try {
p = Runtime.getRuntime().exec("su");
DataOutputStream outputStream = new DataOutputStream(p.getOutputStream());
outputStream.write(command.getBytes());
outputStream.flush();
BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
StringBuilder text = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
text.append(line);
break;
}
reader.close();
return text.toString();
} catch (IOException e) {
return "";
}
}
public boolean setcfg(int cfg){
String command = String.format("echo %d > /sys/class/gpio_sw/%s/cfg\n", cfg, port);<|fim▁hole|> Runtime.getRuntime().exec(new String[] {"su", "-c", command});
return true;
} catch (IOException e) {
return false;
}
}
private String readcfg(){
Process p;
String command = String.format("cat /sys/class/gpio_sw/%s/cfg\n", port);
try {
p = Runtime.getRuntime().exec(new String[] {"su", "-c", command});
BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
StringBuilder text = new StringBuilder();
String line;
while((line = reader.readLine()) != null){
text.append(line);
text.append("\n");
}
return text.toString();
} catch (IOException e) {
return "";
}
}
public int input(){
char ch;
String cfg;
cfg = readinput();
if(cfg.isEmpty())
return -1;
else{
ch = cfg.charAt(0);
if(Character.isDigit(ch))
return Character.getNumericValue(ch);
else
return -1;
}
}
public int getcfg(){
char ch;
String cfg;
cfg = readcfg();
if(cfg.isEmpty())
return -1;
else{
ch = cfg.charAt(0);
if(Character.isDigit(ch))
return Character.getNumericValue(ch);
else
return -1;
}
}
//Constructor
Gpio(String port){
this.port = port;
}
}<|fim▁end|> | try { |
<|file_name|>yelphelper.py<|end_file_name|><|fim▁begin|># Copyright 2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and<|fim▁hole|>import subprocess
import shutil
import argparse
from .. import mlog
from ..mesonlib import has_path_sep
from . import destdir_join
from .gettext import read_linguas
parser = argparse.ArgumentParser()
parser.add_argument('command')
parser.add_argument('--id', dest='project_id')
parser.add_argument('--subdir', dest='subdir')
parser.add_argument('--installdir', dest='install_dir')
parser.add_argument('--sources', dest='sources')
parser.add_argument('--media', dest='media', default='')
parser.add_argument('--langs', dest='langs', default='')
parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
def build_pot(srcdir, project_id, sources):
# Must be relative paths
sources = [os.path.join('C', source) for source in sources]
outfile = os.path.join(srcdir, project_id + '.pot')
subprocess.call(['itstool', '-o', outfile] + sources)
def update_po(srcdir, project_id, langs):
potfile = os.path.join(srcdir, project_id + '.pot')
for lang in langs:
pofile = os.path.join(srcdir, lang, lang + '.po')
subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
def build_translations(srcdir, blddir, langs):
for lang in langs:
outdir = os.path.join(blddir, lang)
os.makedirs(outdir, exist_ok=True)
subprocess.call([
'msgfmt', os.path.join(srcdir, lang, lang + '.po'),
'-o', os.path.join(outdir, lang + '.gmo')
])
def merge_translations(blddir, sources, langs):
for lang in langs:
subprocess.call([
'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
'-o', os.path.join(blddir, lang)
] + sources)
def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks):
c_install_dir = os.path.join(install_dir, 'C', project_id)
for lang in langs + ['C']:
indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
os.makedirs(indir, exist_ok=True)
for source in sources:
infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source)
outfile = os.path.join(indir, source)
mlog.log('Installing %s to %s' % (infile, outfile))
shutil.copyfile(infile, outfile)
shutil.copystat(infile, outfile)
for m in media:
infile = os.path.join(srcdir, lang, m)
outfile = os.path.join(indir, m)
c_infile = os.path.join(srcdir, 'C', m)
if not os.path.exists(infile):
if not os.path.exists(c_infile):
mlog.warning('Media file "%s" did not exist in C directory' % m)
continue
elif symlinks:
srcfile = os.path.join(c_install_dir, m)
mlog.log('Symlinking %s to %s.' % (outfile, srcfile))
if has_path_sep(m):
os.makedirs(os.path.dirname(outfile), exist_ok=True)
try:
try:
os.symlink(srcfile, outfile)
except FileExistsError:
os.remove(outfile)
os.symlink(srcfile, outfile)
continue
except (NotImplementedError, OSError):
mlog.warning('Symlinking not supported, falling back to copying')
infile = c_infile
else:
# Lang doesn't have media file so copy it over 'C' one
infile = c_infile
mlog.log('Installing %s to %s' % (infile, outfile))
if has_path_sep(m):
os.makedirs(os.path.dirname(outfile), exist_ok=True)
shutil.copyfile(infile, outfile)
shutil.copystat(infile, outfile)
def run(args):
options = parser.parse_args(args)
langs = options.langs.split('@@') if options.langs else []
media = options.media.split('@@') if options.media else []
sources = options.sources.split('@@')
destdir = os.environ.get('DESTDIR', '')
src_subdir = os.path.join(os.environ['MESON_SOURCE_ROOT'], options.subdir)
build_subdir = os.path.join(os.environ['MESON_BUILD_ROOT'], options.subdir)
abs_sources = [os.path.join(src_subdir, 'C', source) for source in sources]
if not langs:
langs = read_linguas(src_subdir)
if options.command == 'pot':
build_pot(src_subdir, options.project_id, sources)
elif options.command == 'update-po':
build_pot(src_subdir, options.project_id, sources)
update_po(src_subdir, options.project_id, langs)
elif options.command == 'build':
if langs:
build_translations(src_subdir, build_subdir, langs)
elif options.command == 'install':
install_dir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], options.install_dir)
if langs:
build_translations(src_subdir, build_subdir, langs)
merge_translations(build_subdir, abs_sources, langs)
install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
destdir, options.project_id, options.symlinks)<|fim▁end|> | # limitations under the License.
import os |
<|file_name|>nn_NO.js<|end_file_name|><|fim▁begin|>OC.L10N.register(
"weather",
{
"Monday" : "Måndag",
"Tuesday" : "Tysdag",
"Wednesday" : "Onsdag",
"Thursday" : "Torsdag",
"Friday" : "Fredag",
"Saturday" : "Laurdag",<|fim▁hole|> "Sunday" : "Søndag",
"Weather" : "Vær",
"Save" : "Lagre",
"Add a city" : "Legg til ein by",
"Add city" : "Legg til by",
"City name" : "Bynamn",
"Add" : "Legg til",
"Cancel" : "Avbryt",
"Settings" : "Instillingar",
"Pressure" : "Trykk",
"Humidity" : "Luftfuktigheit",
"Wind" : "Vind",
"Sunrise" : "Soloppgang",
"Sunset" : "Solnedgang",
"Date" : "Date"
},
"nplurals=2; plural=(n != 1);");<|fim▁end|> | |
<|file_name|>variance.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This file infers the variance of type and lifetime parameters. The
//! algorithm is taken from Section 4 of the paper "Taming the Wildcards:
//! Combining Definition- and Use-Site Variance" published in PLDI'11 and
//! written by Altidor et al., and hereafter referred to as The Paper.
//!
//! This inference is explicitly designed *not* to consider the uses of
//! types within code. To determine the variance of type parameters
//! defined on type `X`, we only consider the definition of the type `X`
//! and the definitions of any types it references.
//!
//! We only infer variance for type parameters found on *data types*
//! like structs and enums. In these cases, there is a fairly straightforward
//! explanation for what variance means. The variance of the type
//! or lifetime parameters defines whether `T<A>` is a subtype of `T<B>`
//! (resp. `T<'a>` and `T<'b>`) based on the relationship of `A` and `B`
//! (resp. `'a` and `'b`).
//!
//! We do not infer variance for type parameters found on traits, fns,
//! or impls. Variance on trait parameters can indeed make sense
//! (and we used to compute it) but it is actually rather subtle in
//! meaning and not that useful in practice, so we removed it. See the
//! addendum for some details. Variances on fn/impl parameters, otoh,
//! doesn't make sense because these parameters are instantiated and
//! then forgotten, they don't persist in types or compiled
//! byproducts.
//!
//! ### The algorithm
//!
//! The basic idea is quite straightforward. We iterate over the types
//! defined and, for each use of a type parameter X, accumulate a
//! constraint indicating that the variance of X must be valid for the
//! variance of that use site. We then iteratively refine the variance of
//! X until all constraints are met. There is *always* a solution, because at
//! the limit we can declare all type parameters to be invariant and all
//! constraints will be satisfied.
//!
//! As a simple example, consider:
//!
//! enum Option<A> { Some(A), None }
//! enum OptionalFn<B> { Some(|B|), None }
//! enum OptionalMap<C> { Some(|C| -> C), None }
//!
//! Here, we will generate the constraints:
//!
//! 1. V(A) <= +
//! 2. V(B) <= -
//! 3. V(C) <= +
//! 4. V(C) <= -
//!
//! These indicate that (1) the variance of A must be at most covariant;
//! (2) the variance of B must be at most contravariant; and (3, 4) the
//! variance of C must be at most covariant *and* contravariant. All of these
//! results are based on a variance lattice defined as follows:
//!
//!       *      Top (bivariant)
//!    -      +
//!       o      Bottom (invariant)
//!
//! Based on this lattice, the solution V(A)=+, V(B)=-, V(C)=o is the
//! optimal solution. Note that there is always a naive solution which
//! just declares all variables to be invariant.
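//!
//! (Concretely, constraints 3 and 4 on `C` combine by taking the
//! greatest lower bound in this lattice: `glb(+, -) = o`, exactly as
//! implemented by the `glb` function at the bottom of this file.)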
//!
//! You may be wondering why fixed-point iteration is required. The reason
//! is that the variance of a use site may itself be a function of the
//! variance of other type parameters. In full generality, our constraints
//! take the form:
//!
//! V(X) <= Term
//! Term := + | - | * | o | V(X) | Term x Term
//!
//! Here the notation V(X) indicates the variance of a type/region
//! parameter `X` with respect to its defining class. `Term x Term`
//! represents the "variance transform" as defined in the paper:
//!
//! If the variance of a type variable `X` in type expression `E` is `V2`
//! and the definition-site variance of the [corresponding] type parameter
//! of a class `C` is `V1`, then the variance of `X` in the type expression
//! `C<E>` is `V3 = V1.xform(V2)`.
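//!
//! As a worked instance of the transform, take `OptionalMap<C>` above:
//! the fn type `|C| -> C` sits in covariant position (`+`) inside the
//! variant, its argument position is contravariant, and its return
//! position is covariant, so the two uses of `C` receive variances
//! `+ x - = -` and `+ x + = +` -- exactly constraints 4 and 3.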
//!
//! ### Constraints
//!
//! If I have a struct or enum with where clauses:
//!
//! struct Foo<T:Bar> { ... }
//!
//! you might wonder whether the variance of `T` with respect to `Bar`
//! affects the variance of `T` with respect to `Foo`. I claim no. The
//! reason: assume that `T` is invariant w/r/t `Bar` but covariant w/r/t
//! `Foo`. And then we have a `Foo<X>` that is upcast to `Foo<Y>`, where
//! `X <: Y`. However, while `X : Bar`, `Y : Bar` does not hold. In that
//! case, the upcast will be illegal, but not because of a variance
//! failure, but rather because the target type `Foo<Y>` is itself just
//! not well-formed. Basically we get to assume well-formedness of all
//! types involved before considering variance.
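//!
//! A short sketch of the scenario just described:
//!
//!     trait Bar { }
//!     struct Foo<T:Bar> { t: T }
//!
//! Even when `X <: Y`, upcasting `Foo<X>` to `Foo<Y>` for some `Y` that
//! does not implement `Bar` fails because `Foo<Y>` is not well-formed,
//! not because of the variance of `T`.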
//!
//! ### Addendum: Variance on traits
//!
//! As mentioned above, we used to permit variance on traits. This was
//! computed based on the appearance of trait type parameters in
//! method signatures and was used to represent the compatibility of
//! vtables in trait objects (and also "virtual" vtables or dictionary
//! in trait bounds). One complication was that variance for
//! associated types is less obvious, since they can be projected out
//! and put to myriad uses, so it's not clear when it is safe to allow
//! `X<A>::Bar` to vary (or indeed just what that means). Moreover (as
//! covered below) all inputs on any trait with an associated type had
//! to be invariant, limiting the applicability. Finally, the
//! annotations (`MarkerTrait`, `PhantomFn`) needed to ensure that all
//! trait type parameters had a variance were confusing and annoying
//! for little benefit.
//!
//! Just for historical reference, I am going to preserve some text indicating
//! how one could interpret variance and trait matching.
//!
//! #### Variance and object types
//!
//! Just as with structs and enums, we can decide the subtyping
//! relationship between two object types `&Trait<A>` and `&Trait<B>`
//! based on the relationship of `A` and `B`. Note that for object
//! types we ignore the `Self` type parameter -- it is unknown, and
//! the nature of dynamic dispatch ensures that we will always call a
//! function that expects the appropriate `Self` type. However, we
//! must be careful with the other type parameters, or else we could
//! end up calling a function that is expecting one type but provided
//! another.
//!
//! To see what I mean, consider a trait like so:
//!
//! trait ConvertTo<A> {
//! fn convertTo(&self) -> A;
//! }
//!
//! Intuitively, if we had one object `O=&ConvertTo<Object>` and another
//! `S=&ConvertTo<String>`, then `S <: O` because `String <: Object`
//! (presuming Java-like "string" and "object" types, my go-to examples
//! for subtyping). The actual algorithm would be to compare the
//! (explicit) type parameters pairwise respecting their variance: here,
//! the type parameter A is covariant (it appears only in a return
//! position), and hence we require that `String <: Object`.
//!
//! You'll note though that we did not consider the binding for the
//! (implicit) `Self` type parameter: in fact, it is unknown, so that's
//! good. The reason we can ignore that parameter is precisely because we
//! don't need to know its value until a call occurs, and at that time (as
//! you said) the dynamic nature of virtual dispatch means the code we run
//! will be correct for whatever value `Self` happens to be bound to for
//! the particular object whose method we called. `Self` is thus different
//! from `A`, because the caller requires that `A` be known in order to
//! know the return type of the method `convertTo()`. (As an aside, we
//! have rules preventing methods where `Self` appears outside of the
//! receiver position from being called via an object.)
//!
//! #### Trait variance and vtable resolution
//!
//! But traits aren't only used with objects. They're also used when
//! deciding whether a given impl satisfies a given trait bound. To set the
//! scene here, imagine I had a function:
//!
//! fn convertAll<A,T:ConvertTo<A>>(v: &[T]) {
//! ...
//! }
//!
//! Now imagine that I have an implementation of `ConvertTo` for `Object`:
//!
//! impl ConvertTo<int> for Object { ... }
//!
//! And I want to call `convertAll` on an array of strings. Suppose
//! further that for whatever reason I specifically supply the value of
//! `String` for the type parameter `T`:
//!
//! let mut vector = vec!["string", ...];
//! convertAll::<int, String>(vector);
//!
//! Is this legal? To put it another way, can we apply the `impl` for
//! `Object` to the type `String`? The answer is yes, but to see why
//! we have to expand out what will happen:
//!
//! - `convertAll` will create a pointer to one of the entries in the
//! vector, which will have type `&String`
//! - It will then call the impl of `convertTo()` that is intended
//! for use with objects. This has the type:
//!
//! fn(self: &Object) -> int
//!
//! It is ok to provide a value for `self` of type `&String` because
//! `&String <: &Object`.
//!
//! OK, so intuitively we want this to be legal, so let's bring this back
//! to variance and see whether we are computing the correct result. We
//! must first figure out how to phrase the question "is an impl for
//! `Object,int` usable where an impl for `String,int` is expected?"
//!
//! Maybe it's helpful to think of a dictionary-passing implementation of
//! type classes. In that case, `convertAll()` takes an implicit parameter
//! representing the impl. In short, we *have* an impl of type:
//!
//! V_O = ConvertTo<int> for Object
//!
//! and the function prototype expects an impl of type:
//!
//! V_S = ConvertTo<int> for String
//!
//! As with any argument, this is legal if the type of the value given
//! (`V_O`) is a subtype of the type expected (`V_S`). So is `V_O <: V_S`?
//! The answer will depend on the variance of the various parameters. In
//! this case, because the `Self` parameter is contravariant and `A` is
//! covariant, it means that:
//!
//! V_O <: V_S iff
//! int <: int
//! String <: Object
//!
//! These conditions are satisfied and so we are happy.
//!
//! #### Variance and associated types
//!
//! Traits with associated types -- or at minimum projection
//! expressions -- must be invariant with respect to all of their
//! inputs. To see why this makes sense, consider what subtyping for a
//! trait reference means:
//!
//! <T as Trait> <: <U as Trait>
//!
//! means that if I know that `T as Trait`, I also know that `U as
//! Trait`. Moreover, if you think of it as dictionary passing style,
//! it means that a dictionary for `<T as Trait>` is safe to use where
//! a dictionary for `<U as Trait>` is expected.
//!
//! The problem is that when you can project types out from `<T as
//! Trait>`, the relationship to types projected out of `<U as Trait>`
//! is completely unknown unless `T==U` (see #21726 for more
//! details). Making `Trait` invariant ensures that this is true.
//!
//! Another related reason is that if we didn't make traits with<|fim▁hole|>//! ```
//! trait Identity { type Out; fn foo(&self); }
//! impl<T> Identity for T { type Out = T; ... }
//! ```
//!
//! Now if I have `<&'static () as Identity>::Out`, this can be
//! validly derived as `&'a ()` for any `'a`:
//!
//! <&'a () as Identity> <: <&'static () as Identity>
//!        if &'static () <: &'a ()    -- Identity is contravariant in Self
//! if 'static : 'a -- Subtyping rules for relations
//!
//! This change otoh means that `<&'static () as Identity>::Out` is
//! always `&'static ()` (which might then be upcast to `&'a ()`,
//! separately). This was helpful in solving #21750.
use self::VarianceTerm::*;
use self::ParamKind::*;
use arena;
use arena::TypedArena;
use middle::resolve_lifetime as rl;
use middle::subst;
use middle::subst::{ParamSpace, FnSpace, TypeSpace, SelfSpace, VecPerParamSpace};
use middle::ty::{self, Ty};
use rustc::ast_map;
use std::fmt;
use std::rc::Rc;
use syntax::ast;
use syntax::ast_util;
use syntax::visit;
use syntax::visit::Visitor;
use util::nodemap::NodeMap;
pub fn infer_variance(tcx: &ty::ctxt) {
let krate = tcx.map.krate();
let mut arena = arena::TypedArena::new();
let terms_cx = determine_parameters_to_be_inferred(tcx, &mut arena, krate);
let constraints_cx = add_constraints_from_crate(terms_cx, krate);
solve_constraints(constraints_cx);
tcx.variance_computed.set(true);
}
// Representing terms
//
// Terms are structured as a straightforward tree. Rather than rely on
// GC, we allocate terms out of a bounded arena (the lifetime of this
// arena is the lifetime 'a that is threaded around).
//
// We assign a unique index to each type/region parameter whose variance
// is to be inferred. We refer to such variables as "inferreds". An
// `InferredIndex` is a newtype'd int representing the index of such
// a variable.
type VarianceTermPtr<'a> = &'a VarianceTerm<'a>;
#[derive(Copy, Clone, Debug)]
struct InferredIndex(usize);
#[derive(Copy, Clone)]
enum VarianceTerm<'a> {
ConstantTerm(ty::Variance),
TransformTerm(VarianceTermPtr<'a>, VarianceTermPtr<'a>),
InferredTerm(InferredIndex),
}
impl<'a> fmt::Debug for VarianceTerm<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ConstantTerm(c1) => write!(f, "{:?}", c1),
TransformTerm(v1, v2) => write!(f, "({:?} \u{00D7} {:?})", v1, v2),
InferredTerm(id) => write!(f, "[{}]", { let InferredIndex(i) = id; i })
}
}
}
// The first pass over the crate simply builds up the set of inferreds.
struct TermsContext<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
arena: &'a TypedArena<VarianceTerm<'a>>,
empty_variances: Rc<ty::ItemVariances>,
// For marker types, UnsafeCell, and other lang items where
// variance is hardcoded, records the item-id and the hardcoded
// variance.
lang_items: Vec<(ast::NodeId, Vec<ty::Variance>)>,
// Maps from the node id of a type/generic parameter to the
// corresponding inferred index.
inferred_map: NodeMap<InferredIndex>,
// Maps from an InferredIndex to the info for that variable.
inferred_infos: Vec<InferredInfo<'a>> ,
}
#[derive(Copy, Clone, Debug, PartialEq)]
enum ParamKind {
TypeParam,
RegionParam,
}
struct InferredInfo<'a> {
item_id: ast::NodeId,
kind: ParamKind,
space: ParamSpace,
index: usize,
param_id: ast::NodeId,
term: VarianceTermPtr<'a>,
// Initial value to use for this parameter when inferring
// variance. For most parameters, this is Bivariant. But for lang
// items and input type parameters on traits, it is different.
initial_variance: ty::Variance,
}
fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
arena: &'a mut TypedArena<VarianceTerm<'a>>,
krate: &ast::Crate)
-> TermsContext<'a, 'tcx> {
let mut terms_cx = TermsContext {
tcx: tcx,
arena: arena,
inferred_map: NodeMap(),
inferred_infos: Vec::new(),
lang_items: lang_items(tcx),
// cache and share the variance struct used for items with
// no type/region parameters
empty_variances: Rc::new(ty::ItemVariances {
types: VecPerParamSpace::empty(),
regions: VecPerParamSpace::empty()
})
};
visit::walk_crate(&mut terms_cx, krate);
terms_cx
}
fn lang_items(tcx: &ty::ctxt) -> Vec<(ast::NodeId,Vec<ty::Variance>)> {
let all = vec![
(tcx.lang_items.phantom_data(), vec![ty::Covariant]),
(tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]),
// Deprecated:
(tcx.lang_items.covariant_type(), vec![ty::Covariant]),
(tcx.lang_items.contravariant_type(), vec![ty::Contravariant]),
(tcx.lang_items.invariant_type(), vec![ty::Invariant]),
(tcx.lang_items.covariant_lifetime(), vec![ty::Covariant]),
(tcx.lang_items.contravariant_lifetime(), vec![ty::Contravariant]),
(tcx.lang_items.invariant_lifetime(), vec![ty::Invariant]),
];
all.into_iter()
.filter(|&(ref d,_)| d.is_some())
.filter(|&(ref d,_)| d.as_ref().unwrap().krate == ast::LOCAL_CRATE)
.map(|(d, v)| (d.unwrap().node, v))
.collect()
}
impl<'a, 'tcx> TermsContext<'a, 'tcx> {
fn add_inferreds_for_item(&mut self,
item_id: ast::NodeId,
has_self: bool,
generics: &ast::Generics)
{
/*!
* Add "inferreds" for the generic parameters declared on this
* item. This has a lot of annoying parameters because we are
* trying to drive this from the AST, rather than the
* ty::Generics, so that we can get span info -- but this
* means we must accommodate syntactic distinctions.
*/
// NB: In the code below for writing the results back into the
// tcx, we rely on the fact that all inferreds for a particular
// item are assigned continuous indices.
let inferreds_on_entry = self.num_inferred();
if has_self {
self.add_inferred(item_id, TypeParam, SelfSpace, 0, item_id);
}
for (i, p) in generics.lifetimes.iter().enumerate() {
let id = p.lifetime.id;
self.add_inferred(item_id, RegionParam, TypeSpace, i, id);
}
for (i, p) in generics.ty_params.iter().enumerate() {
self.add_inferred(item_id, TypeParam, TypeSpace, i, p.id);
}
// If this item has no type or lifetime parameters,
// then there are no variances to infer, so just
// insert an empty entry into the variance map.
// Arguably we could just leave the map empty in this
// case but it seems cleaner to be able to distinguish
// "invalid item id" from "item id with no
// parameters".
if self.num_inferred() == inferreds_on_entry {
let newly_added =
self.tcx.item_variance_map.borrow_mut().insert(
ast_util::local_def(item_id),
self.empty_variances.clone()).is_none();
assert!(newly_added);
}
}
fn add_inferred(&mut self,
item_id: ast::NodeId,
kind: ParamKind,
space: ParamSpace,
index: usize,
param_id: ast::NodeId) {
let inf_index = InferredIndex(self.inferred_infos.len());
let term = self.arena.alloc(InferredTerm(inf_index));
let initial_variance = self.pick_initial_variance(item_id, space, index);
self.inferred_infos.push(InferredInfo { item_id: item_id,
kind: kind,
space: space,
index: index,
param_id: param_id,
term: term,
initial_variance: initial_variance });
let newly_added = self.inferred_map.insert(param_id, inf_index).is_none();
assert!(newly_added);
debug!("add_inferred(item_path={}, \
item_id={}, \
kind={:?}, \
space={:?}, \
index={}, \
param_id={}, \
inf_index={:?}, \
initial_variance={:?})",
self.tcx.item_path_str(ast_util::local_def(item_id)),
item_id, kind, space, index, param_id, inf_index,
initial_variance);
}
fn pick_initial_variance(&self,
item_id: ast::NodeId,
space: ParamSpace,
index: usize)
-> ty::Variance
{
match space {
SelfSpace | FnSpace => {
ty::Bivariant
}
TypeSpace => {
match self.lang_items.iter().find(|&&(n, _)| n == item_id) {
Some(&(_, ref variances)) => variances[index],
None => ty::Bivariant
}
}
}
}
fn num_inferred(&self) -> usize {
self.inferred_infos.len()
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> {
fn visit_item(&mut self, item: &ast::Item) {
debug!("add_inferreds for item {}", self.tcx.map.node_to_string(item.id));
match item.node {
ast::ItemEnum(_, ref generics) |
ast::ItemStruct(_, ref generics) => {
self.add_inferreds_for_item(item.id, false, generics);
}
ast::ItemTrait(_, ref generics, _, _) => {
// Note: all inputs for traits are ultimately
// constrained to be invariant. See `visit_item` in
// the impl for `ConstraintContext` below.
self.add_inferreds_for_item(item.id, true, generics);
visit::walk_item(self, item);
}
ast::ItemExternCrate(_) |
ast::ItemUse(_) |
ast::ItemDefaultImpl(..) |
ast::ItemImpl(..) |
ast::ItemStatic(..) |
ast::ItemConst(..) |
ast::ItemFn(..) |
ast::ItemMod(..) |
ast::ItemForeignMod(..) |
ast::ItemTy(..) |
ast::ItemMac(..) => {
visit::walk_item(self, item);
}
}
}
}
// Constraint construction and representation
//
// The second pass over the AST determines the set of constraints.
// We walk the set of items and, for each member, generate new constraints.
struct ConstraintContext<'a, 'tcx: 'a> {
terms_cx: TermsContext<'a, 'tcx>,
// These are pointers to common `ConstantTerm` instances
covariant: VarianceTermPtr<'a>,
contravariant: VarianceTermPtr<'a>,
invariant: VarianceTermPtr<'a>,
bivariant: VarianceTermPtr<'a>,
constraints: Vec<Constraint<'a>> ,
}
/// Declares that the variable `decl_id` appears in a location with
/// variance `variance`.
#[derive(Copy, Clone)]
struct Constraint<'a> {
inferred: InferredIndex,
variance: &'a VarianceTerm<'a>,
}
fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>,
krate: &ast::Crate)
-> ConstraintContext<'a, 'tcx>
{
let covariant = terms_cx.arena.alloc(ConstantTerm(ty::Covariant));
let contravariant = terms_cx.arena.alloc(ConstantTerm(ty::Contravariant));
let invariant = terms_cx.arena.alloc(ConstantTerm(ty::Invariant));
let bivariant = terms_cx.arena.alloc(ConstantTerm(ty::Bivariant));
let mut constraint_cx = ConstraintContext {
terms_cx: terms_cx,
covariant: covariant,
contravariant: contravariant,
invariant: invariant,
bivariant: bivariant,
constraints: Vec::new(),
};
visit::walk_crate(&mut constraint_cx, krate);
constraint_cx
}
impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
fn visit_item(&mut self, item: &ast::Item) {
let did = ast_util::local_def(item.id);
let tcx = self.terms_cx.tcx;
debug!("visit_item item={}", tcx.map.node_to_string(item.id));
match item.node {
ast::ItemEnum(ref enum_definition, _) => {
let scheme = tcx.lookup_item_type(did);
// Not entirely obvious: constraints on structs/enums do not
// affect the variance of their type parameters. See discussion
// in comment at top of module.
//
// self.add_constraints_from_generics(&scheme.generics);
// Hack: If we directly call `ty::enum_variants`, it
// annoyingly takes it upon itself to run off and
// evaluate the discriminants eagerly (*grumpy* that's
// not the typical pattern). This results in double
// error messages because typeck goes off and does
// this at a later time. All we really care about is
// the types of the variant arguments, so we just call
// `ty::VariantInfo::from_ast_variant()` ourselves
// here, mainly so as to mask the differences between
// struct-like enums and so forth.
for ast_variant in &enum_definition.variants {
let variant =
ty::VariantInfo::from_ast_variant(tcx,
&**ast_variant,
/*discriminant*/ 0);
for arg_ty in &variant.args {
self.add_constraints_from_ty(&scheme.generics, *arg_ty, self.covariant);
}
}
}
ast::ItemStruct(..) => {
let scheme = tcx.lookup_item_type(did);
// Not entirely obvious: constraints on structs/enums do not
// affect the variance of their type parameters. See discussion
// in comment at top of module.
//
// self.add_constraints_from_generics(&scheme.generics);
let struct_fields = tcx.lookup_struct_fields(did);
for field_info in &struct_fields {
assert_eq!(field_info.id.krate, ast::LOCAL_CRATE);
let field_ty = tcx.node_id_to_type(field_info.id.node);
self.add_constraints_from_ty(&scheme.generics, field_ty, self.covariant);
}
}
ast::ItemTrait(..) => {
let trait_def = tcx.lookup_trait_def(did);
self.add_constraints_from_trait_ref(&trait_def.generics,
trait_def.trait_ref,
self.invariant);
}
ast::ItemExternCrate(_) |
ast::ItemUse(_) |
ast::ItemStatic(..) |
ast::ItemConst(..) |
ast::ItemFn(..) |
ast::ItemMod(..) |
ast::ItemForeignMod(..) |
ast::ItemTy(..) |
ast::ItemImpl(..) |
ast::ItemDefaultImpl(..) |
ast::ItemMac(..) => {
}
}
visit::walk_item(self, item);
}
}
/// Is `param_id` a lifetime according to `map`?
fn is_lifetime(map: &ast_map::Map, param_id: ast::NodeId) -> bool {
match map.find(param_id) {
Some(ast_map::NodeLifetime(..)) => true, _ => false
}
}
impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
fn tcx(&self) -> &'a ty::ctxt<'tcx> {
self.terms_cx.tcx
}
fn inferred_index(&self, param_id: ast::NodeId) -> InferredIndex {
match self.terms_cx.inferred_map.get(¶m_id) {
Some(&index) => index,
None => {
self.tcx().sess.bug(&format!(
"no inferred index entry for {}",
self.tcx().map.node_to_string(param_id)));
}
}
}
fn find_binding_for_lifetime(&self, param_id: ast::NodeId) -> ast::NodeId {
let tcx = self.terms_cx.tcx;
assert!(is_lifetime(&tcx.map, param_id));
match tcx.named_region_map.get(¶m_id) {
Some(&rl::DefEarlyBoundRegion(_, _, lifetime_decl_id))
=> lifetime_decl_id,
Some(_) => panic!("should not encounter non early-bound cases"),
// The lookup should only fail when `param_id` is
// itself a lifetime binding: use it as the decl_id.
None => param_id,
}
}
/// Is `param_id` a type parameter for which we infer variance?
fn is_to_be_inferred(&self, param_id: ast::NodeId) -> bool {
let result = self.terms_cx.inferred_map.contains_key(¶m_id);
        // To safeguard against invalid inferred_map constructions,
        // double-check if variance is inferred at some use of a type
        // parameter (by inspecting the parent of its binding declaration
        // to see if it is introduced by a type or by a fn/impl).
let check_result = |this:&ConstraintContext| -> bool {
let tcx = this.terms_cx.tcx;
let decl_id = this.find_binding_for_lifetime(param_id);
// Currently only called on lifetimes; double-checking that.
assert!(is_lifetime(&tcx.map, param_id));
let parent_id = tcx.map.get_parent(decl_id);
let parent = tcx.map.find(parent_id).unwrap_or_else(
|| panic!("tcx.map missing entry for id: {}", parent_id));
let is_inferred;
macro_rules! cannot_happen { () => { {
panic!("invalid parent: {} for {}",
tcx.map.node_to_string(parent_id),
tcx.map.node_to_string(param_id));
} } }
match parent {
ast_map::NodeItem(p) => {
match p.node {
ast::ItemTy(..) |
ast::ItemEnum(..) |
ast::ItemStruct(..) |
ast::ItemTrait(..) => is_inferred = true,
ast::ItemFn(..) => is_inferred = false,
_ => cannot_happen!(),
}
}
ast_map::NodeTraitItem(..) => is_inferred = false,
ast_map::NodeImplItem(..) => is_inferred = false,
_ => cannot_happen!(),
}
return is_inferred;
};
assert_eq!(result, check_result(self));
return result;
}
/// Returns a variance term representing the declared variance of the type/region parameter
/// with the given id.
fn declared_variance(&self,
param_def_id: ast::DefId,
item_def_id: ast::DefId,
kind: ParamKind,
space: ParamSpace,
index: usize)
-> VarianceTermPtr<'a> {
assert_eq!(param_def_id.krate, item_def_id.krate);
if param_def_id.krate == ast::LOCAL_CRATE {
// Parameter on an item defined within current crate:
// variance not yet inferred, so return a symbolic
// variance.
let InferredIndex(index) = self.inferred_index(param_def_id.node);
self.terms_cx.inferred_infos[index].term
} else {
// Parameter on an item defined within another crate:
// variance already inferred, just look it up.
let variances = self.tcx().item_variances(item_def_id);
let variance = match kind {
TypeParam => *variances.types.get(space, index),
RegionParam => *variances.regions.get(space, index),
};
self.constant_term(variance)
}
}
fn add_constraint(&mut self,
InferredIndex(index): InferredIndex,
variance: VarianceTermPtr<'a>) {
debug!("add_constraint(index={}, variance={:?})",
index, variance);
self.constraints.push(Constraint { inferred: InferredIndex(index),
variance: variance });
}
fn contravariant(&mut self,
variance: VarianceTermPtr<'a>)
-> VarianceTermPtr<'a> {
self.xform(variance, self.contravariant)
}
fn invariant(&mut self,
variance: VarianceTermPtr<'a>)
-> VarianceTermPtr<'a> {
self.xform(variance, self.invariant)
}
fn constant_term(&self, v: ty::Variance) -> VarianceTermPtr<'a> {
match v {
ty::Covariant => self.covariant,
ty::Invariant => self.invariant,
ty::Contravariant => self.contravariant,
ty::Bivariant => self.bivariant,
}
}
fn xform(&mut self,
v1: VarianceTermPtr<'a>,
v2: VarianceTermPtr<'a>)
-> VarianceTermPtr<'a> {
match (*v1, *v2) {
(_, ConstantTerm(ty::Covariant)) => {
// Applying a "covariant" transform is always a no-op
v1
}
(ConstantTerm(c1), ConstantTerm(c2)) => {
self.constant_term(c1.xform(c2))
}
_ => {
&*self.terms_cx.arena.alloc(TransformTerm(v1, v2))
}
}
}
fn add_constraints_from_trait_ref(&mut self,
generics: &ty::Generics<'tcx>,
trait_ref: ty::TraitRef<'tcx>,
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_trait_ref: trait_ref={:?} variance={:?}",
trait_ref,
variance);
let trait_def = self.tcx().lookup_trait_def(trait_ref.def_id);
self.add_constraints_from_substs(
generics,
trait_ref.def_id,
trait_def.generics.types.as_slice(),
trait_def.generics.regions.as_slice(),
trait_ref.substs,
variance);
}
/// Adds constraints appropriate for an instance of `ty` appearing
/// in a context with the generics defined in `generics` and
/// ambient variance `variance`
fn add_constraints_from_ty(&mut self,
generics: &ty::Generics<'tcx>,
ty: Ty<'tcx>,
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_ty(ty={:?}, variance={:?})",
ty,
variance);
match ty.sty {
ty::TyBool |
ty::TyChar | ty::TyInt(_) | ty::TyUint(_) |
ty::TyFloat(_) | ty::TyStr => {
/* leaf type -- noop */
}
ty::TyClosure(..) => {
self.tcx().sess.bug("Unexpected closure type in variance computation");
}
ty::TyRef(region, ref mt) => {
let contra = self.contravariant(variance);
self.add_constraints_from_region(generics, *region, contra);
self.add_constraints_from_mt(generics, mt, variance);
}
ty::TyBox(typ) | ty::TyArray(typ, _) | ty::TySlice(typ) => {
self.add_constraints_from_ty(generics, typ, variance);
}
ty::TyRawPtr(ref mt) => {
self.add_constraints_from_mt(generics, mt, variance);
}
ty::TyTuple(ref subtys) => {
for &subty in subtys {
self.add_constraints_from_ty(generics, subty, variance);
}
}
ty::TyEnum(def_id, substs) |
ty::TyStruct(def_id, substs) => {
let item_type = self.tcx().lookup_item_type(def_id);
// All type parameters on enums and structs should be
// in the TypeSpace.
assert!(item_type.generics.types.is_empty_in(subst::SelfSpace));
assert!(item_type.generics.types.is_empty_in(subst::FnSpace));
assert!(item_type.generics.regions.is_empty_in(subst::SelfSpace));
assert!(item_type.generics.regions.is_empty_in(subst::FnSpace));
self.add_constraints_from_substs(
generics,
def_id,
item_type.generics.types.get_slice(subst::TypeSpace),
item_type.generics.regions.get_slice(subst::TypeSpace),
substs,
variance);
}
ty::TyProjection(ref data) => {
let trait_ref = &data.trait_ref;
let trait_def = self.tcx().lookup_trait_def(trait_ref.def_id);
self.add_constraints_from_substs(
generics,
trait_ref.def_id,
trait_def.generics.types.as_slice(),
trait_def.generics.regions.as_slice(),
trait_ref.substs,
variance);
}
ty::TyTrait(ref data) => {
let poly_trait_ref =
data.principal_trait_ref_with_self_ty(self.tcx(),
self.tcx().types.err);
// The type `Foo<T+'a>` is contravariant w/r/t `'a`:
let contra = self.contravariant(variance);
self.add_constraints_from_region(generics, data.bounds.region_bound, contra);
// Ignore the SelfSpace, it is erased.
self.add_constraints_from_trait_ref(generics, poly_trait_ref.0, variance);
let projections = data.projection_bounds_with_self_ty(self.tcx(),
self.tcx().types.err);
for projection in &projections {
self.add_constraints_from_ty(generics, projection.0.ty, self.invariant);
}
}
ty::TyParam(ref data) => {
let def_id = generics.types.get(data.space, data.idx as usize).def_id;
assert_eq!(def_id.krate, ast::LOCAL_CRATE);
match self.terms_cx.inferred_map.get(&def_id.node) {
Some(&index) => {
self.add_constraint(index, variance);
}
None => {
// We do not infer variance for type parameters
// declared on methods. They will not be present
// in the inferred_map.
}
}
}
ty::TyBareFn(_, &ty::BareFnTy { ref sig, .. }) => {
self.add_constraints_from_sig(generics, sig, variance);
}
ty::TyError => {
// we encounter this when walking the trait references for object
// types, where we use TyError as the Self type
}
ty::TyInfer(..) => {
self.tcx().sess.bug(
&format!("unexpected type encountered in \
variance inference: {}", ty));
}
}
}
/// Adds constraints appropriate for a nominal type (enum, struct,
/// object, etc) appearing in a context with ambient variance `variance`
fn add_constraints_from_substs(&mut self,
generics: &ty::Generics<'tcx>,
def_id: ast::DefId,
type_param_defs: &[ty::TypeParameterDef<'tcx>],
region_param_defs: &[ty::RegionParameterDef],
substs: &subst::Substs<'tcx>,
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_substs(def_id={:?}, substs={:?}, variance={:?})",
def_id,
substs,
variance);
for p in type_param_defs {
let variance_decl =
self.declared_variance(p.def_id, def_id, TypeParam,
p.space, p.index as usize);
let variance_i = self.xform(variance, variance_decl);
let substs_ty = *substs.types.get(p.space, p.index as usize);
debug!("add_constraints_from_substs: variance_decl={:?} variance_i={:?}",
variance_decl, variance_i);
self.add_constraints_from_ty(generics, substs_ty, variance_i);
}
for p in region_param_defs {
let variance_decl =
self.declared_variance(p.def_id, def_id,
RegionParam, p.space, p.index as usize);
let variance_i = self.xform(variance, variance_decl);
let substs_r = *substs.regions().get(p.space, p.index as usize);
self.add_constraints_from_region(generics, substs_r, variance_i);
}
}
/// Adds constraints appropriate for a function with signature
/// `sig` appearing in a context with ambient variance `variance`
fn add_constraints_from_sig(&mut self,
generics: &ty::Generics<'tcx>,
sig: &ty::PolyFnSig<'tcx>,
variance: VarianceTermPtr<'a>) {
let contra = self.contravariant(variance);
for &input in &sig.0.inputs {
self.add_constraints_from_ty(generics, input, contra);
}
if let ty::FnConverging(result_type) = sig.0.output {
self.add_constraints_from_ty(generics, result_type, variance);
}
}
/// Adds constraints appropriate for a region appearing in a
/// context with ambient variance `variance`
fn add_constraints_from_region(&mut self,
_generics: &ty::Generics<'tcx>,
region: ty::Region,
variance: VarianceTermPtr<'a>) {
match region {
ty::ReEarlyBound(ref data) => {
if self.is_to_be_inferred(data.param_id) {
let index = self.inferred_index(data.param_id);
self.add_constraint(index, variance);
}
}
ty::ReStatic => { }
ty::ReLateBound(..) => {
// We do not infer variance for region parameters on
// methods or in fn types.
}
ty::ReFree(..) | ty::ReScope(..) | ty::ReInfer(..) |
ty::ReEmpty => {
// We don't expect to see anything but 'static or bound
// regions when visiting member types or method types.
self.tcx()
.sess
.bug(&format!("unexpected region encountered in variance \
inference: {:?}",
region));
}
}
}
/// Adds constraints appropriate for a mutability-type pair
/// appearing in a context with ambient variance `variance`
fn add_constraints_from_mt(&mut self,
generics: &ty::Generics<'tcx>,
mt: &ty::mt<'tcx>,
variance: VarianceTermPtr<'a>) {
match mt.mutbl {
ast::MutMutable => {
let invar = self.invariant(variance);
self.add_constraints_from_ty(generics, mt.ty, invar);
}
ast::MutImmutable => {
self.add_constraints_from_ty(generics, mt.ty, variance);
}
}
}
}
// Constraint solving
//
// The final phase iterates over the constraints, refining the variance
// for each inferred until a fixed point is reached. This will be the
// optimal solution to the constraints. The final variance for each
// inferred is then written into the `variance_map` in the tcx.
struct SolveContext<'a, 'tcx: 'a> {
terms_cx: TermsContext<'a, 'tcx>,
constraints: Vec<Constraint<'a>> ,
// Maps from an InferredIndex to the inferred value for that variable.
solutions: Vec<ty::Variance> }
fn solve_constraints(constraints_cx: ConstraintContext) {
let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
let solutions =
terms_cx.inferred_infos.iter()
.map(|ii| ii.initial_variance)
.collect();
let mut solutions_cx = SolveContext {
terms_cx: terms_cx,
constraints: constraints,
solutions: solutions
};
solutions_cx.solve();
solutions_cx.write();
}
impl<'a, 'tcx> SolveContext<'a, 'tcx> {
fn solve(&mut self) {
// Propagate constraints until a fixed point is reached. Note
// that the maximum number of iterations is 2C where C is the
// number of constraints (each variable can change values at most
        // twice). Since the number of constraints is linear in the size of the
// input, so is the inference process.
let mut changed = true;
while changed {
changed = false;
for constraint in &self.constraints {
let Constraint { inferred, variance: term } = *constraint;
let InferredIndex(inferred) = inferred;
let variance = self.evaluate(term);
let old_value = self.solutions[inferred];
let new_value = glb(variance, old_value);
if old_value != new_value {
debug!("Updating inferred {} (node {}) \
from {:?} to {:?} due to {:?}",
inferred,
self.terms_cx
.inferred_infos[inferred]
.param_id,
old_value,
new_value,
term);
self.solutions[inferred] = new_value;
changed = true;
}
}
}
}
fn write(&self) {
// Collect all the variances for a particular item and stick
// them into the variance map. We rely on the fact that we
// generate all the inferreds for a particular item
// consecutively (that is, we collect solutions for an item
// until we see a new item id, and we assume (1) the solutions
// are in the same order as the type parameters were declared
        // and (2) all solutions for a given item appear before a new
// item id).
let tcx = self.terms_cx.tcx;
let solutions = &self.solutions;
let inferred_infos = &self.terms_cx.inferred_infos;
let mut index = 0;
let num_inferred = self.terms_cx.num_inferred();
while index < num_inferred {
let item_id = inferred_infos[index].item_id;
let mut types = VecPerParamSpace::empty();
let mut regions = VecPerParamSpace::empty();
while index < num_inferred && inferred_infos[index].item_id == item_id {
let info = &inferred_infos[index];
let variance = solutions[index];
debug!("Index {} Info {} / {:?} / {:?} Variance {:?}",
index, info.index, info.kind, info.space, variance);
match info.kind {
TypeParam => { types.push(info.space, variance); }
RegionParam => { regions.push(info.space, variance); }
}
index += 1;
}
let item_variances = ty::ItemVariances {
types: types,
regions: regions
};
debug!("item_id={} item_variances={:?}",
item_id,
item_variances);
let item_def_id = ast_util::local_def(item_id);
// For unit testing: check for a special "rustc_variance"
// attribute and report an error with various results if found.
if tcx.has_attr(item_def_id, "rustc_variance") {
span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{:?}", item_variances);
}
let newly_added = tcx.item_variance_map.borrow_mut()
.insert(item_def_id, Rc::new(item_variances)).is_none();
assert!(newly_added);
}
}
fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance {
match *term {
ConstantTerm(v) => {
v
}
TransformTerm(t1, t2) => {
let v1 = self.evaluate(t1);
let v2 = self.evaluate(t2);
v1.xform(v2)
}
InferredTerm(InferredIndex(index)) => {
self.solutions[index]
}
}
}
}
// Miscellany transformations on variance
trait Xform {
fn xform(self, v: Self) -> Self;
}
impl Xform for ty::Variance {
fn xform(self, v: ty::Variance) -> ty::Variance {
// "Variance transformation", Figure 1 of The Paper
match (self, v) {
// Figure 1, column 1.
(ty::Covariant, ty::Covariant) => ty::Covariant,
(ty::Covariant, ty::Contravariant) => ty::Contravariant,
(ty::Covariant, ty::Invariant) => ty::Invariant,
(ty::Covariant, ty::Bivariant) => ty::Bivariant,
// Figure 1, column 2.
(ty::Contravariant, ty::Covariant) => ty::Contravariant,
(ty::Contravariant, ty::Contravariant) => ty::Covariant,
(ty::Contravariant, ty::Invariant) => ty::Invariant,
(ty::Contravariant, ty::Bivariant) => ty::Bivariant,
// Figure 1, column 3.
(ty::Invariant, _) => ty::Invariant,
// Figure 1, column 4.
(ty::Bivariant, _) => ty::Bivariant,
}
}
}
fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
// Greatest lower bound of the variance lattice as
// defined in The Paper:
//
    //       *
    //    -     +
    //       o
match (v1, v2) {
(ty::Invariant, _) | (_, ty::Invariant) => ty::Invariant,
(ty::Covariant, ty::Contravariant) => ty::Invariant,
(ty::Contravariant, ty::Covariant) => ty::Invariant,
(ty::Covariant, ty::Covariant) => ty::Covariant,
(ty::Contravariant, ty::Contravariant) => ty::Contravariant,
(x, ty::Bivariant) | (ty::Bivariant, x) => x,
}
}<|fim▁end|> | //! associated types invariant, then projection is no longer a
//! function with a single result. Consider:
//! |
<|file_name|>parserx86.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
##
## We define Instruction as two types: "Computing instruction" and "Control Transfer instruction"
## for a computing instruction:
## "NAME" : [ Operand_Number , [ Formula_that_modify_reg ], [ FLAG_reg_modified ]]
## for a control transfer instruction:
## "NAME" : [ Operand_Number , [ Formula_that_modify_reg ], [ DST_Addr_on_condition ]]
##
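## For example, two entries taken verbatim from the table below:
##   "add" : [2, ["operand1 = operand1 + operand2"], ["OF", "SF", "ZF", "AF", "CF", "PF"]]
##   "je"  : [1, [], ["ZF == 1 ? * operand1 : 0"]]
##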
from capstone import *
from expression import Exp
from semantic import Semantic
from copy import deepcopy
class X86:
FLAG = ["CF", "PF", "AF", "ZF", "SF", "TF", "IF", "DF", "OF"]
regs64 = ["rax", "rbx", "rcx", "rdx", "rsi", "rdi", "rbp", "rsp", "r8", "r9", "r10", "r11", "r12",
"r13", "r14", "r15", "cs", "ds", "es", "fs", "gs", "ss"]
regs32 = ["eax", "ebx", "ecx", "edx", "cs", "ds", "es", "fs", "gs", "ss", "esi", "edi", "ebp", "esp", "eip"]
Tregs64 = {
"eax" : ["rax $ 0 : 31", "rax = ( rax $ 32 : 63 ) # eax", 32],
"ax" : ["rax $ 0 : 15", "rax = ( rax $ 16 : 63 ) # ax", 16],
"ah" : ["rax $ 8 : 15", "rax = ( rax $ 16 : 63 ) # ah # ( rax $ 0 : 7 )", 8],
"al" : ["rax $ 0 : 7", "rax = ( rax $ 8 : 63 ) # al", 8],
"ebx" : ["rbx $ 0 : 31", "rbx = ( rbx $ 32 : 63 ) # ebx", 32],
"bx" : ["rbx $ 0 : 15", "rbx = ( rbx $ 16 : 63 ) # bx", 16],
"bh" : ["rbx $ 8 : 15", "rbx = ( rbx $ 16 : 63 ) # bh # ( rbx $ 0 : 7 )", 8],
"bl" : ["rbx $ 0 : 7", "rbx = ( rbx $ 8 : 63 ) # bl", 8],
"ecx" : ["rcx $ 0 : 31", "rcx = ( rcx $ 32 : 63 ) # ecx", 32],
"cx" : ["rcx $ 0 : 15", "rcx = ( rcx $ 16 : 63 ) # cx", 16],
"ch" : ["rcx $ 8 : 15", "rcx = ( rcx $ 16 : 63 ) # ch # ( rcx $ 0 : 7 )", 8],
"cl" : ["rcx $ 0 : 7", "rcx = ( rcx $ 8 : 63 ) # cl", 8],
"edx" : ["rdx $ 0 : 31", "rdx = ( rdx $ 32 : 63 ) # edx", 32],
"dx" : ["rdx $ 0 : 15", "rdx = ( rdx $ 16 : 63 ) # dx", 16],
"dh" : ["rdx $ 8 : 15", "rdx = ( rdx $ 16 : 63 ) # dh # ( rdx $ 0 : 7 )", 8],
"dl" : ["rdx $ 0 : 7", "rdx = ( rdx $ 8 : 63 ) # dl", 8],
}
Tregs32 = {
"ax" : ["eax $ 0 : 15", "eax = ( eax $ 16 : 31 ) # ax", 16],
"ah" : ["eax $ 8 : 15", "eax = ( eax $ 16 : 31 ) # ah # ( eax $ 0 : 7 )", 8],
"al" : ["eax $ 0 : 7", "eax = ( eax $ 8 : 31 ) # al", 8],
"bx" : ["ebx $ 0 : 15", "ebx = ( ebx $ 16 : 31 ) # bx", 16],
"bh" : ["ebx $ 8 : 15", "ebx = ( ebx $ 16 : 31 ) # bh # ( ebx $ 0 : 7 )", 8],
"bl" : ["ebx $ 0 : 7", "ebx = ( ebx $ 8 : 31 ) # bl", 8],
"cx" : ["ecx $ 0 : 15", "ecx = ( ecx $ 16 : 31 ) # cx", 16],
"ch" : ["ecx $ 8 : 15", "ecx = ( ecx $ 16 : 31 ) # ch # ( ecx $ 0 : 7 )", 8],
"cl" : ["ecx $ 0 : 7", "ecx = ( ecx $ 8 : 31 ) # cl", 8],
"dx" : ["edx $ 0 : 15", "edx = ( edx $ 16 : 31 ) # dx", 16],
"dh" : ["edx $ 8 : 15", "edx = ( edx $ 16 : 31 ) # dh # ( edx $ 0 : 7 )", 8],
"dl" : ["edx $ 0 : 7", "edx = ( edx $ 8 : 31 ) # dl", 8],
}
    # Instructions that modify the execution path
Control = ["ret", "iret", "int", "into", "enter", "leave", "call", "jmp", "ja", "jae", "jb", "jbe", "jc", "je","jnc", "jne", "jnp", "jp", "jg", "jge", "jl", "jle", "jno", "jns", "jo", "js"]
insn = {
# data transfer
"mov": [2, ["operand1 = operand2"], []],
"cmove": [2, ["operand1 = ( ZF == 1 ) ? operand2 : operand1"], []],
"cmovne": [2, ["operand1 = ( ZF == 0 ) ? operand2 : operand1"], []],
"cmova": [2, ["operand1 = ( ( ZF == 0 ) & ( CF == 0 ) ) ? operand2 : operand1"], []],
"cmovae": [2, ["operand1 = ( CF == 0 ) ? operand2 : operand1"], []],
"cmovb": [2, ["operand1 = ( CF == 1 ) ? operand2 : operand1"], []],
"cmovbe": [2, ["operand1 = ( ( ZF == 1 ) | ( CF == 1 ) ) ? operand2 : operand1"], []],
"cmovg": [2, ["operand1 = ( ( ZF == 0 ) & ( SF == OF ) ) ? operand2 : operand1"], []],
"cmovge": [2, ["operand1 = ( SF == OF ) ? operand2 : operand1"], []],
"cmovl": [2, ["operand1 = ( SF != OF ) ? operand2 : operand1"], []],
"cmovle": [2, ["operand1 = ( ( ZF == 1 ) & ( SF != OF ) ) ? operand2 : operand1"], []],
"cmovs": [2, ["operand1 = ( SF == 1 ) ? operand2 : operand1"], []],
"cmovp": [2, ["operand1 = ( PF == 1 ) ? operand2 : operand1"], []],
"push": [1, ["* ssp = operand1"], []],
"pop": [1, ["operand1 = * ssp"], []],
#"movsx": [2, ["operand1 = operand2 > 0 ? operand2 : operand2 & 0xffffffffffffffff"], []],
#"movzx": [2, ["operand1 = 0 & operand2"], []],
        # flag control instructions
"stc": [0, [], ["CF = 1"]],
"clc": [0, [], ["CF = 0"]],
"cmc": [0, [], ["CF = ~ CF"]],<|fim▁hole|> "std": [0, [], ["DF = 1"]],
"sti": [0, [], ["IF = 1"]],
"cli": [0, [], ["IF = 0"]],
# arithmetic
"xchg": [2, ["FIXME"], []],
"cmp": [2, ["temp = operand1 - operand2"], ["CF", "OF", "SF", "ZF", "AF", "PF"]],
"add": [2, ["operand1 = operand1 + operand2"], ["OF", "SF", "ZF", "AF", "CF", "PF"]],
"adc": [2, ["operand1 = operand1 + operand2 + CF"], ["OF", "SF", "ZF", "AF", "CF", "PF"]],
"sub": [2, ["operand1 = operand1 - operand2"], ["OF", "SF", "ZF", "AF", "CF", "PF"]],
"sbb": [2, ["operand1 = operand1 - operand2 - CF"], ["OF", "SF", "ZF", "AF", "CF", "PF"]],
"inc": [1, ["operand1 = operand1 + 1"], ["OF", "SF", "ZF", "AF", "PF"]],
"dec": [1, ["operand1 = operand1 - 1"], ["OF", "SF", "ZF", "AF", "PF"]],
"neg": [1, ["operand1 = - operand1"], ["CF", "OF", "SF", "ZF", "AF", "PF"]],
# control transfer
"ret": [1, [], ["* ssp"]],
"call": [1, [], ["* operand1"]],
"jmp": [1, [], ["* operand1"]],
"ja": [1, [], ["( ( CF == 0 ) & ( ZF == 0 ) ) ? * operand1 : 0"]],
"jae": [1, [], ["CF == 0 ? * operand1 : 0"]],
"jb": [1, [] , ["CF == 1 ? * operand1 : 0"]],
"jbe": [1, [] , ["( ( CF == 1 ) | ( ZF == 1 ) ) ? * operand1 : 0"]],
"jc": [1, [], ["CF == 1 ? * operand1 : 0"]],
"je": [1, [], ["ZF == 1 ? * operand1 : 0"]],
"jnc": [1, [], ["CF == 0 ? * operand1 : 0"]],
"jne": [1, [], ["ZF == 0 ? * operand1 : 0"]],
"jnp": [1, [], ["PF == 0 ? * operand1 : 0"]],
"jp": [1, [], ["PF == 1 ? * operand1 : 0"]],
"jg": [1, [], ["( ( ZF == 0 ) & ( SF == OF ) ) ? * operand1 : 0"]],
"jge": [1, [], ["SF == OF ? * operand1 : 0"]],
"jl": [1, [], ["SF != OF ? * operand1 : 0"]],
"jle": [1, [], ["( ( ZF == 1 ) | ( SF != OF ) ) ? * operand1 : 0"]],
"jno": [1, [], ["OF == 0 ? * operand1 : 0"]],
"jns": [1, [], ["SF == 0 ? * operand1 : 0"]],
"jo": [1, [], ["OF == 1 ? * operand1 : 0"]],
"js": [1, [], ["SF == 1 ? * operand1 : 0"]],
# logic
"and": [2, ["operand1 = operand1 & operand2"], ["CF = 0", "OF = 0", "SF", "ZF", "PF"]],
"or": [2, ["operand1 = operand1 | operand2"], ["CF = 0", "OF = 0", "SF", "ZF", "PF"]],
"xor": [2, ["operand1 = operand1 ^ operand2"], ["CF = 0","OF = 0", "SF", "ZF", "PF"]],
"not": [1, ["operand1 = ~ operand1"], []],
"test": [2, ["temp = operand1 & operand2"], ["OF = 0", "CF = 0", "SF", "ZF", "PF"]],
# segment
# others
"lea": [2, ["operand1 = & operand2"], []],
"nop": [0, [], []]
}
class ROPParserX86:
def __init__(self, gadgets, mode):
self.gadgets = gadgets
self.addrs = dict()
self.mode = mode
self.aligned = 0
self.memLoc = []
self.writeMem = {}
if mode == CS_MODE_32:
self.regs = X86.regs32 + X86.FLAG
self.Tregs = X86.Tregs32
self.aligned = 4
self.default = 32
self.sp = "esp"
self.ip = "eip"
else:
self.regs = X86.regs64 + X86.FLAG
self.Tregs = X86.Tregs64
self.aligned = 8
self.default = 64
self.sp = "rsp"
self.ip = "rip"
for k, v in X86.insn.items():
for i, s in enumerate(v[1]):
v[1][i] = s.replace("ssp", self.sp)
for i, s in enumerate(v[2]):
v[2][i] = s.replace("ssp", self.sp)
X86.insn.update({k:v})
def parse(self):
formulas = []
for gadget in self.gadgets:
self.memLoc = []
self.writeMem = {}
regs = {self.sp : Exp(self.sp)}
regs = self.parseInst(regs, gadget["insns"], 0)
if len(regs) == 0:
                # gadget cannot be parsed
continue
formulas.append(Semantic(regs, gadget["vaddr"], self.memLoc, self.writeMem))
self.addrs.update({hex(gadget["vaddr"]).replace("L",""):gadget["insns"]})
print "================================="
print "Unique gadgets parsed ", len(formulas)
return formulas
def parseInst(self, regs, insts, i):
if i >= len(insts):
return regs
prefix = insts[i]["mnemonic"]
op_str = insts[i]["op_str"].replace("*", " * ")
if prefix not in X86.insn.keys():
# unsupported ins
return {}
ins = X86.insn.get(prefix)
if prefix in X86.Control:
# control transfer ins, end of gadget
if prefix in ["ret", "call"]:
                operand1 = Exp.parseOperand(op_str.split(", ")[0], regs, self.Tregs)
dst = Exp.parseExp(ins[2][0].split())
if operand1 is None:
dst = dst.binding({"operand1":Exp.ExpL(Exp.defaultLength,0)})
else:
dst = dst.binding({"operand1":operand1})
dst = dst.binding(regs)
regs.update({self.ip : dst})
                # only the ret inst modifies the stack pointer
if prefix == "ret":
ssp = regs[self.sp]
ssp = Exp(ssp, "+", Exp(self.aligned))
if operand1 is not None:
ssp = Exp(ssp, "+", operand1)
regs.update({ self.sp :ssp})
return regs
# handle jmp
operand1 = Exp.parseOperand(op_str.split(" ")[0], regs, self.Tregs)
dst = Exp.parseExp(ins[2][0].split())
dst = dst.binding({"operand1":operand1})
dst = dst.binding(regs)
regs.update({self.ip : dst})
return regs
else:
# computing ins
operand1 = None
operand2 = None
operands = {self.sp :regs[self.sp]}
for flag in X86.FLAG:
if flag in regs.keys():
operands.update({flag:regs[flag]})
# handle special cases
if ins[0] == 1:
operand1 = Exp.parseOperand(op_str.split(", ")[0], regs, self.Tregs)
if operand1 is None:
                    return {}
operands.update({"operand1":operand1})
elif ins[0] == 2:
operand1 = Exp.parseOperand(op_str.split(", ")[0], regs, self.Tregs)
operand2 = Exp.parseOperand(op_str.split(", ")[1], regs, self.Tregs)
if operand1 is None or operand2 is None:
                    return {}
operands.update({"operand1":operand1})
operands.update({"operand2":operand2})
if prefix != "lea" and "ptr" in op_str and (operand1.getCategory() == 3 or operand2.getCategory() == 3):
if prefix not in ["cmp", "test", "push"] and "ptr" in op_str.split(", ")[0]:
self.memLoc.append(operand1)
self.writeMem.update({str(operand1):operand1})
else:
self.memLoc.append(operand1 if operand1.getCategory() == 3 else operand2)
            # construct the insn operation
if len(ins[1]) > 0:
if prefix == "lea":
reg = op_str.split(", ")[0]
addr = Exp.parseExp(op_str.split("[")[1][:-1].split())
addr = addr.binding(regs)
addr.length = Exp.defaultLength
regs.update({reg:addr})
return self.parseInst(regs, insts, i+1)
if prefix == "xchg":
op1k = op_str.split(", ")[0]
op2k = op_str.split(", ")[1]
op1v = None
op2v = None
if op2k in self.Tregs:
# subpart of register
temp = Exp.parse(self.Tregs[op2k][1], {op2k:operands["operand1"]})
for k, v in temp.items():
v.length = Exp.defaultLength
op2k = k
op2v = v
elif op2k in self.regs:
# register
operands["operand1"].length = Exp.defaultLength
op2v = operands["operand1"]
else:
# mem
op2k = str(operands["operand2"])
op2v = operands["operand1"]
if op1k in self.Tregs:
temp = Exp.parse(self.Tregs[op1k][1], {op1k:operands["operand2"]})
for k, v in temp.items():
v.length = Exp.defaultLength
op1k = k
op1v = v
elif op1k in self.regs:
operands["operand2"].length = Exp.defaultLength
op1v = operands["operand2"]
else:
op1k = str(operands["operand1"])
op1v = operands["operand2"]
regs.update({op1k:op1v})
regs.update({op2k:op2v})
return self.parseInst(regs, insts, i+1)
exps = Exp.parse(ins[1][0], operands)
for reg, val in exps.items():
                # handle the special case of xor: op1 == op2 clears the register
if prefix == "xor" and op_str.split(", ")[0] == op_str.split(", ")[1]:
val = Exp.ExpL(val.length, 0)
# temp variable, no need to assign
if reg == "temp":
val.length = max(operand1.length, operand2.length)
continue
if "*" in reg:
# this can only be push inst
val.length = Exp.defaultLength
regs.update({"[ " + str(regs[self.sp]) + " ]":val})
continue
dst = Exp.parseOperand(op_str.split(", ")[0], {}, {})
if str(dst) in self.regs:
# general purpose reg
val.length = Exp.defaultLength
regs.update({str(dst):val})
elif str(dst) in self.Tregs:
# subpart of GPRs
temp = Exp.parse(self.Tregs[str(dst)][1], {})
for k, v in temp.items():
v = v.binding(regs)
v = v.binding({str(dst):val})
v.length = Exp.defaultLength
regs.update({k:v})
else:
# mem
regs.update({str(operands["operand1"]):val})
if prefix == "push":
regs.update({self.sp :Exp(regs[self.sp], "+", Exp(self.aligned))})
if prefix == "pop":
regs.update({self.sp :Exp(regs[self.sp], "-", Exp(self.aligned))})
            # evaluate flag regs based on the exps
if len(ins[2]) != 0:
for flag in ins[2]:
tokens = flag.split()
if len(tokens) == 1:
for k, v in exps.items():
exp = Exp(v, tokens[0][:-1])
exp.length = 1
regs.update({tokens[0]:exp})
else:
f = Exp.parse(flag, {})
for k,v in f.items():
# "CF = 1"
v.length = 1
regs.update({tokens[0]:v})
return self.parseInst(regs, insts, i+1)
if __name__ == '__main__':
binarys = [b"\x8d\x4c\x32\x08\x01\xd8\x81\xc6\x34\x12\x00\x00\xc3",
b"\xbb\x01\x00\x00\x00\x29\xd8\x83\xf8\x01\x0f\x84\x0f\xf9\x01\x00\x5a\xc3"]
gadgets = []
md = Cs(CS_ARCH_X86, CS_MODE_32)
md.detail = True
for binary in binarys:
gadget = []
for decode in md.disasm(binary, 0x1000):
inst = {}
inst.update({"mnemonic": decode.mnemonic})
inst.update({"op_str": decode.op_str})
inst.update({"vaddr": decode.address})
gadget.append(inst)
gadgets.append(gadget)
p = ROPParserX86(gadgets, CS_MODE_32)
formulas = p.parse()<|fim▁end|> | "cld": [0, [], ["DF = 0"]], |
<|file_name|>strategy.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/<|fim▁hole|>
import (
"fmt"
"strconv"
"strings"
apiequality "k8s.io/apimachinery/pkg/api/equality"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/validation/field"
genericapirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/generic"
"k8s.io/apiserver/pkg/registry/rest"
apistorage "k8s.io/apiserver/pkg/storage"
"k8s.io/apiserver/pkg/storage/names"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/helper"
"k8s.io/kubernetes/pkg/api/validation"
)
// rcStrategy implements verification logic for Replication Controllers.
type rcStrategy struct {
runtime.ObjectTyper
names.NameGenerator
}
// Strategy is the default logic that applies when creating and updating Replication Controller objects.
var Strategy = rcStrategy{api.Scheme, names.SimpleNameGenerator}
// DefaultGarbageCollectionPolicy returns Orphan because that was the default
// behavior before the server-side garbage collection was implemented.
func (rcStrategy) DefaultGarbageCollectionPolicy() rest.GarbageCollectionPolicy {
return rest.OrphanDependents
}
// NamespaceScoped returns true because all Replication Controllers need to be within a namespace.
func (rcStrategy) NamespaceScoped() bool {
return true
}
// PrepareForCreate clears the status of a replication controller before creation.
func (rcStrategy) PrepareForCreate(ctx genericapirequest.Context, obj runtime.Object) {
controller := obj.(*api.ReplicationController)
controller.Status = api.ReplicationControllerStatus{}
controller.Generation = 1
}
// PrepareForUpdate clears fields that are not allowed to be set by end users on update.
func (rcStrategy) PrepareForUpdate(ctx genericapirequest.Context, obj, old runtime.Object) {
newController := obj.(*api.ReplicationController)
oldController := old.(*api.ReplicationController)
// update is not allowed to set status
newController.Status = oldController.Status
// Any changes to the spec increment the generation number, any changes to the
// status should reflect the generation number of the corresponding object. We push
// the burden of managing the status onto the clients because we can't (in general)
// know here what version of spec the writer of the status has seen. It may seem like
// we can at first -- since obj contains spec -- but in the future we will probably make
// status its own object, and even if we don't, writes may be the result of a
// read-update-write loop, so the contents of spec may not actually be the spec that
// the controller has *seen*.
if !apiequality.Semantic.DeepEqual(oldController.Spec, newController.Spec) {
newController.Generation = oldController.Generation + 1
}
}
// Validate validates a new replication controller.
func (rcStrategy) Validate(ctx genericapirequest.Context, obj runtime.Object) field.ErrorList {
controller := obj.(*api.ReplicationController)
return validation.ValidateReplicationController(controller)
}
// Canonicalize normalizes the object after validation.
func (rcStrategy) Canonicalize(obj runtime.Object) {
}
// AllowCreateOnUpdate is false for replication controllers; this means a POST is
// needed to create one.
func (rcStrategy) AllowCreateOnUpdate() bool {
return false
}
// ValidateUpdate is the default update validation for an end user.
func (rcStrategy) ValidateUpdate(ctx genericapirequest.Context, obj, old runtime.Object) field.ErrorList {
oldRc := old.(*api.ReplicationController)
newRc := obj.(*api.ReplicationController)
validationErrorList := validation.ValidateReplicationController(newRc)
updateErrorList := validation.ValidateReplicationControllerUpdate(newRc, oldRc)
errs := append(validationErrorList, updateErrorList...)
for key, value := range helper.NonConvertibleFields(oldRc.Annotations) {
parts := strings.Split(key, "/")
if len(parts) != 2 {
continue
}
brokenField := parts[1]
switch {
case strings.Contains(brokenField, "selector"):
if !apiequality.Semantic.DeepEqual(oldRc.Spec.Selector, newRc.Spec.Selector) {
errs = append(errs, field.Invalid(field.NewPath("spec").Child("selector"), newRc.Spec.Selector, "cannot update non-convertible selector"))
}
default:
errs = append(errs, &field.Error{Type: field.ErrorTypeNotFound, BadValue: value, Field: brokenField, Detail: "unknown non-convertible field"})
}
}
return errs
}
func (rcStrategy) AllowUnconditionalUpdate() bool {
return true
}
// ControllerToSelectableFields returns a field set that represents the object.
func ControllerToSelectableFields(controller *api.ReplicationController) fields.Set {
objectMetaFieldsSet := generic.ObjectMetaFieldsSet(&controller.ObjectMeta, true)
controllerSpecificFieldsSet := fields.Set{
"status.replicas": strconv.Itoa(int(controller.Status.Replicas)),
}
return generic.MergeFieldsSets(objectMetaFieldsSet, controllerSpecificFieldsSet)
}
// GetAttrs returns labels and fields of a given object for filtering purposes.
func GetAttrs(obj runtime.Object) (labels.Set, fields.Set, bool, error) {
rc, ok := obj.(*api.ReplicationController)
if !ok {
return nil, nil, false, fmt.Errorf("given object is not a replication controller.")
}
return labels.Set(rc.ObjectMeta.Labels), ControllerToSelectableFields(rc), rc.Initializers != nil, nil
}
// MatchController is the filter used by the generic etcd backend to route
// watch events from etcd to clients of the apiserver only interested in specific
// labels/fields.
func MatchController(label labels.Selector, field fields.Selector) apistorage.SelectionPredicate {
return apistorage.SelectionPredicate{
Label: label,
Field: field,
GetAttrs: GetAttrs,
}
}
type rcStatusStrategy struct {
rcStrategy
}
var StatusStrategy = rcStatusStrategy{Strategy}
func (rcStatusStrategy) PrepareForUpdate(ctx genericapirequest.Context, obj, old runtime.Object) {
newRc := obj.(*api.ReplicationController)
oldRc := old.(*api.ReplicationController)
// update is not allowed to set spec
newRc.Spec = oldRc.Spec
}
func (rcStatusStrategy) ValidateUpdate(ctx genericapirequest.Context, obj, old runtime.Object) field.ErrorList {
return validation.ValidateReplicationControllerStatusUpdate(obj.(*api.ReplicationController), old.(*api.ReplicationController))
}<|fim▁end|> |
// If you make changes to this file, you should also make the corresponding change in ReplicaSet.
package replicationcontroller |
<|file_name|>fuzzylogic.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst<|fim▁hole|>"""
Quality Control based on fuzzy logic.
"""
import logging
import numpy as np
from .core import QCCheckVar
from .gradient import gradient
from .spike import spike
from .woa_normbias import woa_normbias
from cotede.fuzzy import fuzzy_uncertainty
module_logger = logging.getLogger(__name__)
def fuzzylogic(features, cfg, require="all"):
"""
FIXME: Think about, should I return 0, or have an assert, and at qc.py
all qc tests are applied with a try, and in case it fails it flag
0s.
"""
require = cfg.get("require", require)
if (require == "all") and not np.all([f in features for f in cfg["features"]]):
        module_logger.warning(
            "Not all features ({}) required by fuzzy logic are available".format(
cfg["features"].keys()
)
)
raise KeyError
uncertainty = fuzzy_uncertainty(
data=features, features=cfg["features"], output=cfg["output"], require=require
)
return uncertainty
class FuzzyLogic(QCCheckVar):
def set_features(self):
self.features = {}
for v in [f for f in self.cfg["features"] if f not in self.features]:
if v == "woa_bias":
woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
self.features[v] = woa_comparison["woa_bias"]
elif v == "woa_normbias":
woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
self.features[v] = woa_comparison["woa_normbias"]
elif v == "spike":
self.features[v] = spike(self.data[self.varname])
elif v == "gradient":
self.features[v] = gradient(self.data[self.varname])
self.features["fuzzylogic"] = fuzzylogic(self.features, self.cfg)
def test(self):
self.flags = {}
cfg = self.cfg
flag = np.zeros(np.shape(self.data[self.varname]), dtype="i1")
uncertainty = self.features["fuzzylogic"]
# FIXME: As it is now, it will have no zero flag value. Think about cases
# where some values in a profile would not be estimated, hence flag=0
        # I needed to use np.nonzero because now uncertainty is a masked array,
# to accept when a feature is masked.
flag[np.nonzero(uncertainty <= 0.29)] = 1
flag[np.nonzero((uncertainty > 0.29) & (uncertainty <= 0.34))] = 2
flag[np.nonzero((uncertainty > 0.34) & (uncertainty <= 0.72))] = 3
flag[np.nonzero(uncertainty > 0.72)] = 4
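        # Flag convention (IOC-style): 1 = good, 2 = probably good,
        # 3 = probably bad, 4 = bad.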
self.flags["fuzzylogic"] = flag<|fim▁end|> | |
<|file_name|>assignment5.py<|end_file_name|><|fim▁begin|>import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn import tree
from subprocess import call
# https://archive.ics.uci.edu/ml/machine-learning-databases/mushroom/agaricus-lepiota.names
#
# TODO: Load up the mushroom dataset into dataframe 'X'
# Verify you did it properly.
# Indices shouldn't be doubled.
# Header information is on the dataset's website at the UCI ML Repo
# Check NA Encoding
X = pd.read_csv('Datasets/agaricus-lepiota.data', names=['label', 'cap-shape', 'cap-surface', 'cap-color',
'bruises', 'odor', 'gill-attachment',
'gill-spacing', 'gill-size', 'gill-color',
'stalk-shape', 'stalk-root',
'stalk-surface-above-ring',
'stalk-surface-below-ring', 'stalk-color-above-ring',
                                                         'stalk-color-below-ring', 'veil-type', 'veil-color',
                                                         'ring-number', 'ring-type', 'spore-print-color', 'population',
'habitat'], header=None)
# INFO: An easy way to show which rows have nans in them
# print X[pd.isnull(X).any(axis=1)]
#
# TODO: Go ahead and drop any row with a nan
X.replace(to_replace='?', value=np.NaN, inplace=True)
X.dropna(axis=0, inplace=True)
print(X.shape)
#
# TODO: Copy the labels out of the dset into variable 'y' then Remove
# them from X. Encode the labels, using the .map() trick we showed
# you in Module 5 -- here poisonous:0 and edible:1
X['label'] = X['label'].map({'e': 1, 'p': 0})
y = X['label'].copy()
X.drop(labels=['label'], axis=1, inplace=True)
#
# TODO: Encode the entire dataset using dummies
X = pd.get_dummies(X)
#
# TODO: Split your data into test / train sets
# Your test size can be 30% with random_state 7
# Use variable names: X_train, X_test, y_train, y_test
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
#
# TODO: Create an DT classifier. No need to set any parameters<|fim▁hole|>model = tree.DecisionTreeClassifier()
#
# TODO: train the classifier on the training data / labels:
# TODO: score the classifier on the testing data / labels:
model.fit(X_train, y_train)
score = model.score(X_test, y_test)
print('High-Dimensionality Score: %f' % round((score * 100), 3))
#
# TODO: Use the code on the courses SciKit-Learn page to output a .DOT file
# Then render the .DOT to .PNGs. Ensure you have graphviz installed.
# If not, `brew install graphviz`. If you can't, use: http://webgraphviz.com/
tree.export_graphviz(model.tree_, out_file='tree.dot', feature_names=X.columns)<|fim▁end|> | |
<|file_name|>internet_protocol.py<|end_file_name|><|fim▁begin|># -*- test-case-name: twisted.test.test_factories,twisted.internet.test.test_protocol -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.<|fim▁hole|>Start here if you are looking to write a new protocol implementation for
Twisted. The Protocol class contains some introductory material.
"""
#from __future__ import division, absolute_import
#import random
from zope.interface import implementer
#from twisted.python import log, failure, components
#from twisted.internet import interfaces, error, defer
from . import internet_interfaces as interfaces
@implementer(interfaces.IProtocolFactory, interfaces.ILoggingContext)
class Factory:
"""
This is a factory which produces protocols.
By default, buildProtocol will create a protocol of the class given in
self.protocol.
"""
# put a subclass of Protocol here:
protocol = None
numPorts = 0
noisy = True
@classmethod
def forProtocol(cls, protocol, *args, **kwargs):
"""
Create a factory for the given protocol.
It sets the C{protocol} attribute and returns the constructed factory
instance.
@param protocol: A L{Protocol} subclass
@param args: Positional arguments for the factory.
@param kwargs: Keyword arguments for the factory.
@return: A L{Factory} instance wired up to C{protocol}.
"""
factory = cls(*args, **kwargs)
factory.protocol = protocol
return factory
def logPrefix(self):
"""
Describe this factory for log messages.
"""
return self.__class__.__name__
def doStart(self):
"""Make sure startFactory is called.
Users should not call this function themselves!
"""
if not self.numPorts:
if self.noisy:
log.msg("Starting factory %r" % self)
self.startFactory()
self.numPorts = self.numPorts + 1
def doStop(self):
"""Make sure stopFactory is called.
Users should not call this function themselves!
"""
if self.numPorts == 0:
# this shouldn't happen, but does sometimes and this is better
# than blowing up in assert as we did previously.
return
self.numPorts = self.numPorts - 1
if not self.numPorts:
if self.noisy:
log.msg("Stopping factory %r" % self)
self.stopFactory()
def startFactory(self):
"""This will be called before I begin listening on a Port or Connector.
It will only be called once, even if the factory is connected
to multiple ports.
This can be used to perform 'unserialization' tasks that
are best put off until things are actually running, such
as connecting to a database, opening files, etcetera.
"""
def stopFactory(self):
"""This will be called before I stop listening on all Ports/Connectors.
This can be overridden to perform 'shutdown' tasks such as disconnecting
database connections, closing files, etc.
It will be called, for example, before an application shuts down,
if it was connected to a port. User code should not call this function
directly.
"""
def buildProtocol(self, addr):
"""Create an instance of a subclass of Protocol.
The returned instance will handle input on an incoming server
connection, and an attribute \"factory\" pointing to the creating
factory.
Override this method to alter how Protocol instances get created.
@param addr: an object implementing L{twisted.internet.interfaces.IAddress}
"""
p = self.protocol()
p.factory = self
return p
class BaseProtocol:
"""
This is the abstract superclass of all protocols.
Some methods have helpful default implementations here so that they can
easily be shared, but otherwise the direct subclasses of this class are more
interesting, L{Protocol} and L{ProcessProtocol}.
"""
connected = 0
transport = None
def makeConnection(self, transport):
"""Make a connection to a transport and a server.
This sets the 'transport' attribute of this Protocol, and calls the
connectionMade() callback.
"""
self.connected = 1
self.transport = transport
self.connectionMade()
def connectionMade(self):
"""Called when a connection is made.
This may be considered the initializer of the protocol, because
it is called when the connection is completed. For clients,
this is called once the connection to the server has been
established; for servers, this is called after an accept() call
stops blocking and a socket has been received. If you need to
send any greeting or initial message, do it here.
"""
#connectionDone=failure.Failure(error.ConnectionDone())
#connectionDone.cleanFailure()
connectionDone = None
@implementer(interfaces.IProtocol, interfaces.ILoggingContext)
class Protocol(BaseProtocol):
"""
This is the base class for streaming connection-oriented protocols.
If you are going to write a new connection-oriented protocol for Twisted,
start here. Any protocol implementation, either client or server, should
be a subclass of this class.
The API is quite simple. Implement L{dataReceived} to handle both
event-based and synchronous input; output can be sent through the
'transport' attribute, which is to be an instance that implements
L{twisted.internet.interfaces.ITransport}. Override C{connectionLost} to be
notified when the connection ends.
Some subclasses exist already to help you write common types of protocols:
see the L{twisted.protocols.basic} module for a few of them.
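
    For example, a minimal echoing protocol (a sketch) could look like::

        class Echo(Protocol):
            def dataReceived(self, data):
                # send every received chunk straight back
                self.transport.write(data)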
"""
def logPrefix(self):
"""
Return a prefix matching the class name, to identify log messages
related to this protocol instance.
"""
return self.__class__.__name__
def dataReceived(self, data):
"""Called whenever data is received.
Use this method to translate to a higher-level message. Usually, some
callback will be made upon the receipt of each complete protocol
message.
@param data: a string of indeterminate length. Please keep in mind
that you will probably need to buffer some data, as partial
(or multiple) protocol messages may be received! I recommend
that unit tests for protocols call through to this method with
differing chunk sizes, down to one byte at a time.
"""
def connectionLost(self, reason=connectionDone):
"""Called when the connection is shut down.
Clear any circular references here, and any external references
to this Protocol. The connection has been closed.
@type reason: L{twisted.python.failure.Failure}
"""<|fim▁end|> |
"""
Standard implementations of Twisted protocol-related interfaces.
|
<|file_name|>_hastagbase.py<|end_file_name|><|fim▁begin|>#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Rule that checks for an object with a particular tag.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ...ggettext import gettext as _
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
#
# HasTag
#
#-------------------------------------------------------------------------
class HasTagBase(Rule):
"""
Rule that checks for an object with a particular tag.
"""
labels = [ 'Tag:' ]
name = 'Objects with the <tag>'
description = "Matches objects with the given tag"
category = _('General filters')
def prepare(self, db):
"""
Prepare the rule. Things we want to do just once.
"""
self.tag_handle = None
tag = db.get_tag_from_name(self.list[0])
if tag is not None:
self.tag_handle = tag.get_handle()
def apply(self, db, obj):
"""
Apply the rule. Return True for a match.
"""
if self.tag_handle is None:<|fim▁hole|> return False
return self.tag_handle in obj.get_tag_list()<|fim▁end|> | |
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::{help::Spawner, Framing, FramingDecoded, FramingEncodedFinal, Protocol};
use futures::stream::Stream;
use futures::{future, FutureExt};
use std::ffi::CStr;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
pub trait ClientFactory {
type Api: ?Sized;
fn new<P, T>(protocol: P, transport: T) -> Arc<Self::Api>
where
P: Protocol<Frame = T> + 'static,
T: Transport + Sync,
P::Deserializer: Send,
{
let spawner = crate::NoopSpawner;
Self::with_spawner(protocol, transport, spawner)
}
fn with_spawner<P, T, S>(protocol: P, transport: T, spawner: S) -> Arc<Self::Api>
where
P: Protocol<Frame = T> + 'static,
T: Transport + Sync,
P::Deserializer: Send,
S: Spawner;
}
pub trait Transport: Framing + Send + Sized + 'static {
fn call(
&self,
service_name: &'static CStr,
fn_name: &'static CStr,
req: FramingEncodedFinal<Self>,
) -> Pin<Box<dyn Future<Output = Result<FramingDecoded<Self>, anyhow::Error>> + Send + 'static>>;<|fim▁hole|> _service_name: &'static CStr,
_fn_name: &'static CStr,
_req: FramingEncodedFinal<Self>,
) -> Pin<
Box<
dyn Future<
Output = Result<
(
FramingDecoded<Self>,
Pin<
Box<
dyn Stream<Item = Result<FramingDecoded<Self>, anyhow::Error>>
+ Send
+ 'static,
>,
>,
),
anyhow::Error,
>,
> + Send
+ 'static,
>,
> {
future::err(anyhow::Error::msg(
"Streaming is not supported by this transport",
))
.boxed()
}
fn create_interaction(&self, _method_name: &'static CStr) -> Result<Self, anyhow::Error> {
anyhow::bail!("Interactions are not supported by this transport");
}
}<|fim▁end|> |
fn call_stream(
&self, |
<|file_name|>filters.py<|end_file_name|><|fim▁begin|>from abc import ABCMeta, abstractmethod
import six
from django.db.models import Q
from dimagi.utils.chunked import chunked
class DomainFilter(six.with_metaclass(ABCMeta)):
@abstractmethod
def get_filters(self, domain_name):
"""Return a list of filters. Each filter will be applied to a queryset independently
of the others."""
raise NotImplementedError()
class SimpleFilter(DomainFilter):
def __init__(self, filter_kwarg):
self.filter_kwarg = filter_kwarg
def get_filters(self, domain_name):
return [Q(**{self.filter_kwarg: domain_name})]
class UsernameFilter(DomainFilter):
def get_filters(self, domain_name):
"""
:return: A generator of filters each filtering for at most 500 users.
"""
from corehq.apps.users.dbaccessors.all_commcare_users import get_all_usernames_by_domain
usernames = get_all_usernames_by_domain(domain_name)
for chunk in chunked(usernames, 500):
            user_filter = Q()
            for username in chunk:
                user_filter |= Q(username__iexact=username)
            yield user_filter
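
# Usage sketch (`MyModel` stands in for any Django model with a `username`
# column); each yielded Q is applied as an independent queryset filter:
#
#     for q in UsernameFilter().get_filters("my-domain"):
#         matched = MyModel.objects.filter(q)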
class UserIDFilter(DomainFilter):
def __init__(self, user_id_field, include_web_users=True):
self.user_id_field = user_id_field<|fim▁hole|> def get_filters(self, domain_name):
"""
:return: A generator of filters each filtering for at most 1000 users.
"""
from corehq.apps.users.dbaccessors.all_commcare_users import get_all_user_ids_by_domain
user_ids = get_all_user_ids_by_domain(domain_name, include_web_users=self.include_web_users)
for chunk in chunked(user_ids, 1000):
query_kwarg = '{}__in'.format(self.user_id_field)
yield Q(**{query_kwarg: chunk})<|fim▁end|> | self.include_web_users = include_web_users
|
<|file_name|>file_to_gcs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class FileToGoogleCloudStorageOperator(BaseOperator):<|fim▁hole|> :param src: Path to the local file
:type src: string
:param dst: Destination path within the specified bucket
:type dst: string
:param bucket: The bucket to upload to
:type bucket: string
:param google_cloud_storage_conn_id: The Airflow connection ID to upload with
:type google_cloud_storage_conn_id: string
:param mime_type: The mime-type string
:type mime_type: string
:param delegate_to: The account to impersonate, if any
:type delegate_to: string
"""
template_fields = ('src', 'dst', 'bucket')
@apply_defaults
def __init__(self,
src,
dst,
bucket,
google_cloud_storage_conn_id='google_cloud_storage_default',
mime_type='application/octet-stream',
delegate_to=None,
*args,
**kwargs):
super(FileToGoogleCloudStorageOperator, self).__init__(*args, **kwargs)
self.src = src
self.dst = dst
self.bucket = bucket
self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
self.mime_type = mime_type
self.delegate_to = delegate_to
def execute(self, context):
"""
Uploads the file to Google cloud storage
"""
hook = GoogleCloudStorageHook(
google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
delegate_to=self.delegate_to)
hook.upload(
bucket=self.bucket,
object=self.dst,
mime_type=self.mime_type,
filename=self.src)<|fim▁end|> | """
Uploads a file to Google Cloud Storage
|
<|file_name|>store.tsx<|end_file_name|><|fim▁begin|>import {createStore, combineReducers} from 'redux';
import {todos} from '../reducers/todos';
export function configureStore() {<|fim▁hole|> const store = createStore(combineReducers({todos}));
return store;
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
__doc__ = """
# General Concepts
## Introduction
`MySDK` provides a set of objects that allow the manipulation of ReST entities very easily. It deals with all possible CRUD operations.
It is based on the library `Bambou`, which defines all these low-level operations in a single place.
`Bambou` is composed of the following important classes:
* `bambou.NURESTSession`
Class representing an authenticated session.
* `bambou.NURESTObject`
Parent class of all ReST entities. All ReST exposed object objects inherit from this class.
* `bambou.NURESTFetcher`
Class used to get children of a `bambou.NURESTObject`.
* `bambou.NURESTPushCenter`
Class that deals with intercepting and rerouting ReST Push Notifications.
> There are more objects in `Bambou`, but you don't need to know all of them for now.
## NURESTSession
The `bambou.NURESTSession` represents some user credentials coupled with an API URL. All ReST calls are done using
the current active session. `bambou.NURESTSession` is an abstract class that must be reimplemented by anything using `Bambou`.
In a `MySDK` built on Bambou, you use a class named `mysdk.v3_2.MySession`, which will be used in the following examples.
#!python
        session = MySession(username="user", password="secret", enterprise="organization", api_url="https://server")
session.start()
# your script
When you start the session, a ReST call will be sent to the API endpoint in order to get the API key.
If the credentials are valid, the attribute `MySDK.v3_2.MySession.root` will be populated with information such as your name,<|fim▁hole|>calls need to be done in the context of your account (for instance, your `/enterprises` are different from another account's `/enterprises`)
It is also possible to create sub sessions with the python statement `with`:
#!python
cspsession = MySession(username="user", password="secret", enterprise="organization", api_url="https://server")
adminsession = MySession(username="admin", password="secret", enterprise="enterprise", api_url="https://server")
cspsession.start()
# this part of the code will use the CSP root user
with adminsession.start():
# this code block will be executed as admin of `enterprise`
# back to csp root session
> You **must** use `start()` when using the `with` statement, even if the session has already been started in the main context.
## NURESTObject
`bambou.NURESTObject` is the parent class of all `MySDK` entities.
### ReST Names
All `bambou.NURESTObject` subclasses implements a given method that will return the actual ReST name of the objects. For instance, the ReST name of an Unicorn object is `unicorn`.
These names are used to forge the correct URI when doing CRUD operations on them.
> ReST names can be used as unique resource identifier for a given object.
> ReST names are auto generated. You never need to manually define them.
### ReST API URI Generation
`bambou.NURESTObject` is able to forge all the URI needed to interact with the server through the ReST API.
For instance, if an object with a ReST name set to `object` needs to get the list of children with ReST name set to `subobject`, `Bambou` will use the following endpoint URL:
`GET {api_base_url}/objects/{id}/subobjects`
If an object with a ReST name set to `entity` needs to fetch itself, the generated URL will be
`GET {api_base_url}/entities/{id}`
> `Bambou` automagically deals with plurals.
> The ReST base URL is pulled from the current active `bambou.NURESTSession`.
> URIs are auto generated. You never need to deal with them manually.
### Exposing ReST Attributes
Exposed attributes will be converted and sent to the server when you do CRUD operations. That way, if an object has an attribute `name`, it can be marked as a ReST attribute.
When saving the object, the value of `name` will be put into the generated JSON structure that will be sent to the server, or automatically populated from a JSON structure that is coming from the server.
Not only can the attribute be exposed, but also its type and other information, like whether it is read-only, its allowed values, its format, its default value and so on.
> Exposing ReST attributes is auto generated. You never need to manually expose new attributes.
### CRUD Operations
`bambou.NURESTObject` allows performing all sorts of CRUD operations (a short sketch follows the notes below):
* `bambou.NURESTObject.fetch`
* `bambou.NURESTObject.save`
* `bambou.NURESTObject.delete`
* `bambou.NURESTObject.create_child`
* `bambou.NURESTObject.assign`
* `bambou.NURESTObject.instantiate_child`
> All these methods require the current `bambou.NURESTObject` to have a valid `bambou.NURESTObject.ID`.
> You may notice that there is no creation method. Creation is always happening from a parent object and is done using `create_child`.
> You may notice that an optional parameter `callback` is present. This is because `MySDK` can work completely asynchronously.
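Here is a minimal sketch of a typical flow, reusing the `Enterprise` and `Metadata` entities shown later in this document (attribute values are illustrative):

        #!python
        enterprise = Enterprise(id="xxxx-xxxx-xxxx-xxxx")
        enterprise.fetch()                 # populate the object from the server
        enterprise.name = "renamed"
        enterprise.save()                  # push the modification
        metadata = Metadata(name="note", blob="hello world!")
        enterprise.create_child(metadata)  # creation always goes through a parent
        enterprise.delete()                # remove the object on the server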
### Converting to and from a Python Dictionary
`bambou.NURESTObject` allows quick and easy conversion to and from Python dictionaries:
* `bambou.NURESTObject.from_dict`
* `bambou.NURESTObject.to_dict`
> you never need to perform the actual JSON conversion when sending info to the server. `bambou.NURESTConnection` will do that automatically, but you can use these methods to print an object, or to copy information from one object into another.
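For example (a sketch; the available attributes depend on the entity):

        #!python
        enterprise = Enterprise(name="organization")
        data = enterprise.to_dict()    # plain Python dictionary
        clone = Enterprise()
        clone.from_dict(data)          # clone now carries the same attribute values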
## NURESTFetcher
`bambou.NURESTFetcher` is a class allowing a `bambou.NURESTObject` to fetch its children. All `bambou.NURESTObject` instances have one or more fetchers, unless they are final objects in the model hierarchy. `bambou.NURESTFetcher` provides a lot of possibilities regarding the way you want to get a given children list. It can deal with simple object fetching, pagination, filtering, request headers, grouping etc.
### Fetching Children List
`bambou.NURESTFetcher` has three important methods, illustrated in the sketch below:
* `bambou.NURESTFetcher.fetch`
* `bambou.NURESTFetcher.get`
* `bambou.NURESTFetcher.get_first`
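A quick sketch, assuming the root object exposes an `enterprises` fetcher:

        #!python
        all_enterprises = session.root.enterprises.get()
        one_enterprise = session.root.enterprises.get_first(filter="name == 'organization'")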
### Discussion about Fetchers
Fetchers are a powerful concept that makes the process of getting child objects completely generic and code-friendly. `bambou.NURESTObject` provides methods that allow you to deal programmatically with its fetchers in a completely generic way.
* `bambou.NURESTObject.fetcher_for_rest_name`
* `bambou.NURESTObject.fetchers`
* `bambou.NURESTObject.children_rest_names`
This allows completely abstract programmatic operations on any object.
For instance, the following function will add a new `MySDK.v3_2.Metadata` to the entire hierarchy of children of a given object, limited to children created after a certain date:
#!python
        def apply_metadata_to_all_children(root_object, metadata, filter=None):
# Loop on all declared children fetchers
for fetcher in root_object.fetchers:
# Fetch the list of the children
children = fetcher.get(filter=filter)
# Loop on all fetched children
for child in children:
# Add the metadata to the current children
child.create_child(metadata)
# Start over recursively on the children of the current child
apply_metadata_to_all_children(child, metadata)
enterprise = Enterprise(id="xxxx-xxxx-xxx-xxxx")
metadata = Metadata(name="my metadata", blob="hello world!")
apply_metadata_to_all_children(enterprise, metadata, filter="creationDate > '01-01-2015'")
## NURESTPushCenter
The API supports client-side push through a long polling connection. ReST clients can connect to that channel and will get a notification as soon as they or someone else in the system changes something. These events are filtered by permissions, which means that if someone changes a property of an object you cannot see, you won't get notified. `MySDK` provides the `bambou.NURESTPushCenter`, which encapsulates all the logic to deal with the event channel. It runs in its own thread and will call registered callbacks when it receives a push.
A `bambou.NURESTPushCenter` is automatically created with each `bambou.NURESTSession` and it is available from the attribute `bambou.NURESTSession.push_center`.
#!python
session = MySession(username="user", password="secret", enterprise="organization", api_url="https://server")
session.start()
session.push_center.start()
> You need to explicitly start the push center.
### Using the NURESTPushCenter
Only the following methods are important:
* `bambou.NURESTPushCenter.start`
* `bambou.NURESTPushCenter.add_delegate`
* `bambou.NURESTPushCenter.remove_delegate`
### Example
Here is a really simple code sample that will print the push data on every push:
#!python
from MySDK import *
from pprint import pprint
from time import sleep
session = MySession(username="csproot", password="secret", enterprise="csp", api_url="https://server")
session.start()
def on_receive_push(data):
pprint(data);
session.push_center.add_delegate(on_receive_push);
session.push_center.start()
# default stupid run loop. don't do that in real life :)
while True:
sleep(1000)
## Conclusion
Now you know the basics of `Bambou`, and therefore of `MySDK`. Remember that all objects in `MySDK` are subclasses of `bambou.NURESTObject`, so they **all** work exactly the same way.
There is a lot more to know about `Bambou`, like the asynchronous mode, auto model parsing, and easy controller creation thanks to introspection. We'll cover these in a separate advanced section.
"""
try:
import requests
requests.packages.urllib3.disable_warnings()
except:
pass
import logging
bambou_logger = logging.getLogger('bambou')
pushcenter_logger = logging.getLogger('pushcenter')
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
bambou_logger.addHandler(NullHandler())
__all__ = ['NURESTRootObject', 'NURESTConnection', 'NURESTModelController', 'NURESTFetcher', 'NURESTLoginController', 'NURESTObject', 'NURESTPushCenter', 'NURESTRequest', 'NURESTResponse', 'NURESTSession', 'BambouConfig']
from bambou.nurest_session import NURESTSession
from bambou.nurest_root_object import NURESTRootObject
from bambou.nurest_connection import NURESTConnection
from bambou.nurest_fetcher import NURESTFetcher
from bambou.nurest_login_controller import NURESTLoginController
from bambou.nurest_object import NURESTObject
from bambou.nurest_push_center import NURESTPushCenter
from bambou.nurest_request import NURESTRequest
from bambou.nurest_response import NURESTResponse
from bambou.nurest_modelcontroller import NURESTModelController
from bambou.config import BambouConfig<|fim▁end|> | your phone number, your avatar, your enterprise name and ID etc. This `user` is the root object of everything as all subsequent |
<|file_name|>transform.py<|end_file_name|><|fim▁begin|>'''
Geometry Transform<|fim▁hole|>'''<|fim▁end|> | |
<|file_name|>brewers.py<|end_file_name|><|fim▁begin|>from tableacc import TableAcc
class Brewers(TableAcc):
def __init__(self):
super(Brewers, self).__init__(
table_name='brewers',
cols=['id', 'name', 'location_id'],
upsert_proc='brewerupsert')
def bylocation_id(self, location_id, order_by=""):
return self._select(
cols=["id", "name"],
where="location_id = %s",
order_by=order_by,<|fim▁hole|> return self._view(view_name='brewerswithrecs',
where="location_id = %s",
order_by=order_by,
param=[location_id])
if __name__ == "__main__":
br = Brewers()
for r in br.has_recommended_beers_by_loc(15):
print r<|fim▁end|> | param=[location_id] )
def has_recommended_beers_by_loc(self, location_id, order_by=""): |
<|file_name|>MutableTriple.java<|end_file_name|><|fim▁begin|>package com.springdemo.learningMVC.common.src.main.java.com.common.tuple;
/**
* A mutable triple consisting of three {@code Object} elements.
* <p>
* Not #ThreadSafe#
*
* @param <L> the left element type
* @param <M> the middle element type
* @param <R> the right element type
* @version $Id: MutableTriple.java 290 2014-10-27 08:48:18Z $
*/
public class MutableTriple<L, M, R> extends Triple<L, M, R> {
/**
* Serialization version
*/
private static final long serialVersionUID = 1L;
/**
* Left object
*/
public L left;
/**
* Middle object
*/
public M middle;
/**
* Right object
*/
public R right;
/**
     * Obtains a mutable triple of three objects, inferring the generic types.
* <p>
* This factory allows the triple to be created using inference to
* obtain the generic types.
*
* @param <L> the left element type
* @param <M> the middle element type
* @param <R> the right element type
* @param left the left element, may be null
* @param middle the middle element, may be null
* @param right the right element, may be null
* @return a triple formed from the three parameters, not null
*/
public static <L, M, R> MutableTriple<L, M, R> of(
final L left, final M middle, final R right) {
return new MutableTriple<>(left, middle, right);
}
/**
* Create a new triple instance of three nulls.
*/
public MutableTriple() {
super();
}
/**
* Create a new triple instance.
*
* @param left the left value, may be null
* @param middle the middle value, may be null
* @param right the right value, may be null
*/
public MutableTriple(final L left, final M middle, final R right) {
super();
this.left = left;
this.middle = middle;
this.right = right;
}
//-----------------------------------------------------------------------
/**
* {@inheritDoc}
*/
@Override
public L getLeft() {
return left;
}
/**
* Sets the left element of the triple.
*
* @param left the new value of the left element, may be null
*/
public void setLeft(final L left) {
this.left = left;<|fim▁hole|>
/**
* {@inheritDoc}
*/
@Override
public M getMiddle() {
return middle;
}
/**
* Sets the middle element of the triple.
*
* @param middle the new value of the middle element, may be null
*/
public void setMiddle(final M middle) {
this.middle = middle;
}
/**
* {@inheritDoc}
*/
@Override
public R getRight() {
return right;
}
/**
* Sets the right element of the triple.
*
* @param right the new value of the right element, may be null
*/
public void setRight(final R right) {
this.right = right;
}
}<|fim▁end|> | } |
<|file_name|>java1.py<|end_file_name|><|fim▁begin|>import os
import pynja
import repo
<|fim▁hole|> sources = [
"com/java1/Counter.java",
]
with self.java_compile_ex(sources) as task:
task.workingDir = os.path.join(self.projectDir, "source")
self.jar_create("java1.jar")<|fim▁end|> | @pynja.project
class java1(repo.JavaProject):
def emit(self): |
<|file_name|>status_library.py<|end_file_name|><|fim▁begin|>from robot.libraries.BuiltIn import BuiltIn
<|fim▁hole|><|fim▁end|> | def fail_with_traceback(traceback_message):
BuiltIn().fail(traceback_message) |
<|file_name|>TypedMap.java<|end_file_name|><|fim▁begin|>/*
* !++
* QDS - Quick Data Signalling Library
* !-
* Copyright (C) 2002 - 2021 Devexperts LLC
* !-
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
* If a copy of the MPL was not distributed with this file, You can obtain one at
* http://mozilla.org/MPL/2.0/.
* !__
*/
package com.devexperts.util;
import java.util.IdentityHashMap;
import java.util.Map;
/**
* Typed thread-safe key-value map where different values have different types and are distinguished
* by globally-unique keys that are instances of {@link TypedKey}.
*/
@SuppressWarnings({"unchecked"})
public class TypedMap {
private final Map<TypedKey<?>, Object> values = new IdentityHashMap<>();
/**
* Returns the value to which the specified key is mapped,
* or {@code null} if this map contains no mapping for the key.
*/<|fim▁hole|> /**
* Changes the specified value with the specified key in this map.
* If the map previously contained a mapping for the key, the old value is replaced by the specified value.
*/
public synchronized <T> void set(TypedKey<T> key, T value) {
values.put(key, value);
}
@Override
public String toString() {
return values.toString();
}
}<|fim▁end|> | public synchronized <T> T get(TypedKey<T> key) {
return (T) values.get(key);
}
|
<|file_name|>ScanSetup.py<|end_file_name|><|fim▁begin|>from Screen import Screen
from ServiceScan import ServiceScan
from Components.config import config, ConfigSubsection, ConfigSelection, \
ConfigYesNo, ConfigInteger, getConfigListEntry, ConfigSlider, ConfigEnableDisable
from Components.ActionMap import NumberActionMap, ActionMap
from Components.Sources.StaticText import StaticText
from Components.SystemInfo import SystemInfo
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager, getConfigSatlist
from Components.Label import Label
from Tools.HardwareInfo import HardwareInfo
from Tools.Transponder import getChannelNumber, supportedChannels, channel2frequency
from Screens.InfoBar import InfoBar
from Screens.MessageBox import MessageBox
from enigma import eTimer, eDVBFrontendParametersSatellite, eComponentScan, \
eDVBSatelliteEquipmentControl, eDVBFrontendParametersTerrestrial, \
eDVBFrontendParametersCable, eConsoleAppContainer, eDVBResourceManager
def buildTerTransponder(frequency,
inversion=2, bandwidth = 7000000, fechigh = 6, feclow = 6,
modulation = 2, transmission = 2, guard = 4,
hierarchy = 4, system = 0, plp_id = 0):
# print "freq", frequency, "inv", inversion, "bw", bandwidth, "fech", fechigh, "fecl", feclow, "mod", modulation, "tm", transmission, "guard", guard, "hierarchy", hierarchy
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = frequency
parm.inversion = inversion
parm.bandwidth = bandwidth
parm.code_rate_HP = fechigh
parm.code_rate_LP = feclow
parm.modulation = modulation
parm.transmission_mode = transmission
parm.guard_interval = guard
parm.hierarchy = hierarchy
parm.system = system
parm.plp_id = plp_id
return parm
def getInitialTransponderList(tlist, pos):
list = nimmanager.getTransponders(pos)
for x in list:
if x[0] == 0: #SAT
parm = eDVBFrontendParametersSatellite()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.polarisation = x[3]
parm.fec = x[4]
parm.inversion = x[7]
parm.orbital_position = pos
parm.system = x[5]
parm.modulation = x[6]
parm.rolloff = x[8]
parm.pilot = x[9]
tlist.append(parm)
def getInitialCableTransponderList(tlist, nim):
list = nimmanager.getTranspondersCable(nim)
for x in list:
if x[0] == 1: #CABLE
parm = eDVBFrontendParametersCable()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.modulation = x[3]
parm.fec_inner = x[4]
parm.inversion = x[5]
parm.system = x[6]
tlist.append(parm)
def getInitialTerrestrialTransponderList(tlist, region, tsystem = eDVBFrontendParametersTerrestrial.System_DVB_T_T2, skip_t2 = False):
list = nimmanager.getTranspondersTerrestrial(region)
#self.transponders[self.parsedTer].append((2,freq,bw,const,crh,crl,guard,transm,hierarchy,inv))
#def buildTerTransponder(frequency, inversion = 2, bandwidth = 3, fechigh = 6, feclow = 6,
#modulation = 2, transmission = 2, guard = 4, hierarchy = 4):
for x in list:
if x[0] == 2: #TERRESTRIAL
if skip_t2 and x[10] == eDVBFrontendParametersTerrestrial.System_DVB_T2:
# Should be searching on TerrestrialTransponderSearchSupport.
continue
if tsystem == eDVBFrontendParametersTerrestrial.System_DVB_T_T2:
parm = buildTerTransponder(x[1], x[9], x[2], x[4], x[5], x[3], x[7], x[6], x[8], x[10], x[11])
elif x[10] == eDVBFrontendParametersTerrestrial.System_DVB_T_T2 or x[10] == tsystem:
parm = buildTerTransponder(x[1], x[9], x[2], x[4], x[5], x[3], x[7], x[6], x[8], tsystem, x[11])
else:
continue
tlist.append(parm)
cable_bands = {
"DVBC_BAND_EU_VHF_I" : 1 << 0,
"DVBC_BAND_EU_MID" : 1 << 1,
"DVBC_BAND_EU_VHF_III" : 1 << 2,
"DVBC_BAND_EU_SUPER" : 1 << 3,
"DVBC_BAND_EU_HYPER" : 1 << 4,
"DVBC_BAND_EU_UHF_IV" : 1 << 5,
"DVBC_BAND_EU_UHF_V" : 1 << 6,
"DVBC_BAND_US_LO" : 1 << 7,
"DVBC_BAND_US_MID" : 1 << 8,
"DVBC_BAND_US_HI" : 1 << 9,
"DVBC_BAND_US_SUPER" : 1 << 10,
"DVBC_BAND_US_HYPER" : 1 << 11,
}
cable_autoscan_nimtype = {
'SSH108' : 'ssh108',
'TT3L10' : 'tt3l10',
'TURBO' : 'vuplus_turbo_c'
}
terrestrial_autoscan_nimtype = {
'SSH108' : 'ssh108_t2_scan',
'TT3L10' : 'tt3l10_t2_scan',
'TURBO' : 'vuplus_turbo_t'
}
def GetDeviceId(filter, nim_idx):
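	# Walk the NIM slots counting tuners whose model name matches 'filter';
	# the device id alternates between 0 and 1 so twin tuners on the same
	# card get distinct ids.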
tuners={}
device_id = 0
socket_id = 0
for nim in nimmanager.nim_slots:
name_token = nim.description.split(' ')
name = name_token[-1][4:-1]
if name == filter:
if socket_id == nim_idx:
break
if device_id:
device_id = 0
else:
device_id = 1
socket_id += 1
return device_id
class CableTransponderSearchSupport:
def __init__(self):
pass
def tryGetRawFrontend(self, feid):
res_mgr = eDVBResourceManager.getInstance()
if res_mgr:
			raw_channel = res_mgr.allocateRawChannel(feid)
if raw_channel:
frontend = raw_channel.getFrontend()
if frontend:
frontend.closeFrontend() # immediate close...
del frontend
del raw_channel
return True
return False
def cableTransponderSearchSessionClosed(self, *val):
print "cableTransponderSearchSessionClosed, val", val
self.cable_search_container.appClosed.remove(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.remove(self.getCableTransponderData)
if val and len(val):
if val[0]:
self.setCableTransponderSearchResult(self.__tlist)
else:
self.cable_search_container.sendCtrlC()
self.setCableTransponderSearchResult(None)
self.cable_search_container = None
self.cable_search_session = None
self.__tlist = None
self.cableTransponderSearchFinished()
def cableTransponderSearchClosed(self, retval):
print "cableTransponderSearch finished", retval
self.cable_search_session.close(True)
def getCableTransponderData(self, str):
#prepend any remaining data from the previous call
str = self.remainingdata + str
		#split into lines
lines = str.split('\n')
#'str' should end with '\n', so when splitting, the last line should be empty. If this is not the case, we received an incomplete line
if len(lines[-1]):
#remember this data for next time
self.remainingdata = lines[-1]
lines = lines[0:-1]
else:
self.remainingdata = ""
for line in lines:
data = line.split()
if len(data):
if data[0] == 'OK' and data[4] != 'NOT_IMPLEMENTED':
print str
parm = eDVBFrontendParametersCable()
qam = { "QAM16" : parm.Modulation_QAM16,
"QAM32" : parm.Modulation_QAM32,
"QAM64" : parm.Modulation_QAM64,
"QAM128" : parm.Modulation_QAM128,
"QAM256" : parm.Modulation_QAM256 }
inv = { "INVERSION_OFF" : parm.Inversion_Off,
"INVERSION_ON" : parm.Inversion_On,
"INVERSION_AUTO" : parm.Inversion_Unknown }
fec = { "FEC_AUTO" : parm.FEC_Auto,
"FEC_1_2" : parm.FEC_1_2,
"FEC_2_3" : parm.FEC_2_3,
"FEC_3_4" : parm.FEC_3_4,
"FEC_5_6" : parm.FEC_5_6,
"FEC_7_8" : parm.FEC_7_8,
"FEC_8_9" : parm.FEC_8_9,
"FEC_3_5" : parm.FEC_3_5,
"FEC_4_5" : parm.FEC_4_5,
"FEC_9_10" : parm.FEC_9_10,
"FEC_NONE" : parm.FEC_None }
parm.frequency = int(data[1])
parm.symbol_rate = int(data[2])
parm.fec_inner = fec[data[3]]
parm.modulation = qam[data[4]]
parm.inversion = inv[data[5]]
self.__tlist.append(parm)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n"
tmpstr += data[1].isdigit() and "%s MHz " % (int(data[1]) / 1000.) or data[1]
tmpstr += data[0]
self.cable_search_session["text"].setText(tmpstr)
def startCableTransponderSearch(self, nim_idx):
def GetCommand(nim_idx):
global cable_autoscan_nimtype
try:
nim_name = nimmanager.getNimName(nim_idx)
if nim_name is not None and nim_name != "":
device_id = ""
nim_name = nim_name.split(' ')[-1][4:-1]
if nim_name == 'TT3L10':
try:
device_id = GetDeviceId('TT3L10', nim_idx)
device_id = "--device=%s" % (device_id)
except Exception, err:
print "GetCommand ->", err
device_id = "--device=0"
command = "%s %s" % (cable_autoscan_nimtype[nim_name], device_id)
return command
except Exception, err:
print "GetCommand ->", err
return "tda1002x"
if not self.tryGetRawFrontend(nim_idx):
self.session.nav.stopService()
if not self.tryGetRawFrontend(nim_idx):
if self.session.pipshown:
self.session.infobar.showPiP()
if not self.tryGetRawFrontend(nim_idx):
self.cableTransponderSearchFinished()
return
self.__tlist = []
self.remainingdata = ""
self.cable_search_container = eConsoleAppContainer()
self.cable_search_container.appClosed.append(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.append(self.getCableTransponderData)
cableConfig = config.Nims[nim_idx].cable
tunername = nimmanager.getNimName(nim_idx)
try:
bus = nimmanager.getI2CDevice(nim_idx)
if bus is None:
print "ERROR: could not get I2C device for nim", nim_idx, "for cable transponder search"
bus = 2
except:
# older API
if nim_idx < 2:
if HardwareInfo().get_device_name() == "dm500hd":
bus = 2
else:
bus = nim_idx
else:
if nim_idx == 2:
bus = 2 # DM8000 first nim is /dev/i2c/2
else:
bus = 4 # DM8000 second nim is /dev/i2c/4
bin_name = None
if tunername == "CXD1981":
bin_name = "CXD1981"
cmd = "cxd1978 --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
elif tunername.startswith("Sundtek"):
bin_name = "mediaclient"
cmd = "/opt/bin/mediaclient --blindscan %d" % nim_idx
else:
bin_name = GetCommand(nim_idx)
cmd = "%(BIN_NAME)s --init --scan --verbose --wakeup --inv 2 --bus %(BUS)d" % {'BIN_NAME':bin_name, 'BUS':bus}
if cableConfig.scan_type.value == "bands":
cmd += " --scan-bands "
bands = 0
if cableConfig.scan_band_EU_VHF_I.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_I"]
if cableConfig.scan_band_EU_MID.value:
bands |= cable_bands["DVBC_BAND_EU_MID"]
if cableConfig.scan_band_EU_VHF_III.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_III"]
if cableConfig.scan_band_EU_UHF_IV.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_IV"]
if cableConfig.scan_band_EU_UHF_V.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_V"]
if cableConfig.scan_band_EU_SUPER.value:
bands |= cable_bands["DVBC_BAND_EU_SUPER"]
if cableConfig.scan_band_EU_HYPER.value:
bands |= cable_bands["DVBC_BAND_EU_HYPER"]
if cableConfig.scan_band_US_LOW.value:
bands |= cable_bands["DVBC_BAND_US_LO"]
if cableConfig.scan_band_US_MID.value:
bands |= cable_bands["DVBC_BAND_US_MID"]
if cableConfig.scan_band_US_HIGH.value:
bands |= cable_bands["DVBC_BAND_US_HI"]
if cableConfig.scan_band_US_SUPER.value:
bands |= cable_bands["DVBC_BAND_US_SUPER"]
if cableConfig.scan_band_US_HYPER.value:
bands |= cable_bands["DVBC_BAND_US_HYPER"]
cmd += str(bands)
else:
cmd += " --scan-stepsize "
cmd += str(cableConfig.scan_frequency_steps.value)
if cableConfig.scan_mod_qam16.value:
cmd += " --mod 16"
if cableConfig.scan_mod_qam32.value:
cmd += " --mod 32"
if cableConfig.scan_mod_qam64.value:
cmd += " --mod 64"
if cableConfig.scan_mod_qam128.value:
cmd += " --mod 128"
if cableConfig.scan_mod_qam256.value:
cmd += " --mod 256"
if cableConfig.scan_sr_6900.value:
cmd += " --sr 6900000"
if cableConfig.scan_sr_6875.value:
cmd += " --sr 6875000"
if cableConfig.scan_sr_ext1.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext1.value)
cmd += "000"
if cableConfig.scan_sr_ext2.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext2.value)
cmd += "000"
print bin_name, " CMD is", cmd
self.cable_search_container.execute(cmd)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n..."
self.cable_search_session = self.session.openWithCallback(self.cableTransponderSearchSessionClosed, MessageBox, tmpstr, MessageBox.TYPE_INFO)
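# Mix-in that drives an external terrestrial blind-scan helper via
# eConsoleAppContainer and collects the transponders it reports.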
class TerrestrialTransponderSearchSupport:
def terrestrialTransponderSearchSessionClosed(self, *val):
print "TerrestrialTransponderSearchSessionClosed, val", val
self.terrestrial_search_container.appClosed.remove(self.terrestrialTransponderSearchClosed)
self.terrestrial_search_container.dataAvail.remove(self.getTerrestrialTransponderData)
if val and len(val):
if val[0]:
self.setTerrestrialTransponderSearchResult(self.__tlist)
else:
self.terrestrial_search_container.sendCtrlC()
self.setTerrestrialTransponderSearchResult(None)
self.terrestrial_search_container = None
self.terrestrial_search_session = None
self.__tlist = None
self.terrestrialTransponderSearchFinished()
def terrestrialTransponderSearchClosed(self, retval):
if self.terrestrial_tunerName.startswith("Sundtek"):
self.terrestrial_search_session.close(True)
else:
self.setTerrestrialTransponderData()
opt = self.terrestrialTransponderGetOpt()
if opt is None:
print "terrestrialTransponderSearch finished", retval
self.terrestrial_search_session.close(True)
else:
(freq, bandWidth) = opt
self.terrestrialTransponderSearch(freq, bandWidth)
def getTerrestrialTransponderData(self, str):
print str
if self.terrestrial_tunerName.startswith("Sundtek"):
str = self.remaining_data + str
lines = str.split('\n')
if len(lines[-1]):
self.remaining_data = lines[-1]
lines = lines[0:-1]
else:
self.remaining_data = ""
for line in lines:
data = line.split()
if len(data):
if 'MODE' in data[0]:
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = int(data[3])
parm.bandwidth = self.terrestrialTransponderconvBandwidth_P(int(data[5]))
parm.inversion = parm.Inversion_Unknown
parm.code_rate_HP = parm.FEC_Auto
parm.code_rate_LP = parm.FEC_Auto
parm.modulation = parm.Modulation_Auto
parm.transmission_mode = parm.TransmissionMode_Auto
parm.guard_interval = parm.GuardInterval_Auto
parm.hierarchy = parm.Hierarchy_Auto
parm.system = 'DVB-T2' in data[1] and parm.System_DVB_T_T2 or parm.System_DVB_T
parm.plp_id = 0
self.__tlist.append(parm)
tmpstr = _("Try to find used transponders in terrestrial network... please wait...")
tmpstr += "\n\n"
if 'MODE' in line:
tmpstr += data[3]
tmpstr += " kHz "
else:
tmpstr += _(line)
self.terrestrial_search_session["text"].setText(tmpstr)
else:
self.terrestrial_search_data += str
def setTerrestrialTransponderData(self):
data = self.terrestrial_search_data.split()
if len(data):
# print "[setTerrestrialTransponderData] data : ", data
if data[0] == 'OK':
# DVB-T : OK frequency bandwidth delivery system -1<|fim▁hole|> parm.bandwidth = int(data[2])
parm.inversion = parm.Inversion_Unknown
parm.code_rate_HP = parm.FEC_Auto
parm.code_rate_LP = parm.FEC_Auto
parm.modulation = parm.Modulation_Auto
parm.transmission_mode = parm.TransmissionMode_Auto
parm.guard_interval = parm.GuardInterval_Auto
parm.hierarchy = parm.Hierarchy_Auto
parm.system = parm.System_DVB_T
parm.plp_id = 0
self.__tlist.append(parm)
else:
plp_list = data[5:]
plp_num = int(data[4])
if len(plp_list) > plp_num:
plp_list = plp_list[:plp_num]
for plp in plp_list:
(plp_id, plp_type) = plp.split(':')
if plp_type == '0': # common PLP:
continue
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = int(data[1])
parm.bandwidth = self.terrestrialTransponderconvBandwidth_P(int(data[2]))
parm.inversion = parm.Inversion_Unknown
parm.code_rate_HP = parm.FEC_Auto
parm.code_rate_LP = parm.FEC_Auto
parm.modulation = parm.Modulation_Auto
parm.transmission_mode = parm.TransmissionMode_Auto
parm.guard_interval = parm.GuardInterval_Auto
parm.hierarchy = parm.Hierarchy_Auto
parm.system = parm.System_DVB_T2
parm.plp_id = int(plp_id)
self.__tlist.append(parm)
tmpstr = _("Try to find used transponders in terrestrial network... please wait...")
tmpstr += "\n\n"
tmpstr += data[1][:-3]
tmpstr += " kHz "
tmpstr += data[0]
self.terrestrial_search_session["text"].setText(tmpstr)
def terrestrialTransponderInitSearchList(self, searchList, region):
tpList = nimmanager.getTranspondersTerrestrial(region)
for x in tpList:
if x[0] == 2: #TERRESTRIAL
freq = x[1] # frequency
bandWidth = self.terrestrialTransponderConvBandwidth_I(x[2]) # bandWidth
parm = (freq, bandWidth)
searchList.append(parm)
def terrestrialTransponderConvBandwidth_I(self, _bandWidth):
bandWidth = {
eDVBFrontendParametersTerrestrial.Bandwidth_8MHz : 8000000,
eDVBFrontendParametersTerrestrial.Bandwidth_7MHz : 7000000,
eDVBFrontendParametersTerrestrial.Bandwidth_6MHz : 6000000,
eDVBFrontendParametersTerrestrial.Bandwidth_5MHz : 5000000,
eDVBFrontendParametersTerrestrial.Bandwidth_1_712MHz : 1712000,
eDVBFrontendParametersTerrestrial.Bandwidth_10MHz : 10000000,
}.get(_bandWidth, 8000000)
return bandWidth
def terrestrialTransponderconvBandwidth_P(self, _bandWidth):
bandWidth = {
8000000 : eDVBFrontendParametersTerrestrial.Bandwidth_8MHz,
7000000 : eDVBFrontendParametersTerrestrial.Bandwidth_7MHz,
6000000 : eDVBFrontendParametersTerrestrial.Bandwidth_6MHz,
5000000 : eDVBFrontendParametersTerrestrial.Bandwidth_5MHz,
1712000 : eDVBFrontendParametersTerrestrial.Bandwidth_1_712MHz,
10000000 : eDVBFrontendParametersTerrestrial.Bandwidth_10MHz,
}.get(_bandWidth, eDVBFrontendParametersTerrestrial.Bandwidth_8MHz)
return bandWidth
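# The two helpers above are inverses: ConvBandwidth_I maps the bandwidth enum
# to Hz, convBandwidth_P maps Hz back to the enum; both default to 8 MHz.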
def terrestrialTransponderGetOpt(self):
if len(self.terrestrial_search_list) > 0:
return self.terrestrial_search_list.pop(0)
else:
return None
def terrestrialTransponderGetCmd(self, nim_idx):
global terrestrial_autoscan_nimtype
try:
if self.terrestrial_tunerName is not None and self.terrestrial_tunerName != "":
device_id = ""
tunerName = self.terrestrial_tunerName.split(' ')[-1][4:-1]
if tunerName == 'TT3L10':
try:
device_id = GetDeviceId('TT3L10', nim_idx)
device_id = "--device %s" % (device_id)
except Exception, err:
print "terrestrialTransponderGetCmd ->", err
device_id = "--device 0"
command = "%s %s" % (terrestrial_autoscan_nimtype[tunerName], device_id)
return command
except Exception, err:
print "terrestrialTransponderGetCmd ->", err
return ""
def startTerrestrialTransponderSearch(self, nim_idx, region):
if not self.tryGetRawFrontend(nim_idx):
self.session.nav.stopService()
if not self.tryGetRawFrontend(nim_idx):
if self.session.pipshown:
self.session.infobar.showPiP()
if not self.tryGetRawFrontend(nim_idx):
self.terrestrialTransponderSearchFinished()
return
self.__tlist = [ ]
self.remaining_data = ""
self.terrestrial_search_container = eConsoleAppContainer()
self.terrestrial_search_container.appClosed.append(self.terrestrialTransponderSearchClosed)
self.terrestrial_search_container.dataAvail.append(self.getTerrestrialTransponderData)
self.terrestrial_tunerName = nimmanager.getNimName(nim_idx)
if self.terrestrial_tunerName.startswith("Sundtek"):
cmd = "/opt/bin/mediaclient --blindscan /dev/dvb/adapter0/frontend%d" % nim_idx
print "SCAN CMD : ",cmd
self.terrestrial_search_container.execute(cmd)
else:
self.terrestrial_search_binName = self.terrestrialTransponderGetCmd(nim_idx)
self.terrestrial_search_bus = nimmanager.getI2CDevice(nim_idx)
if self.terrestrial_search_bus is None:
# print "ERROR: could not get I2C device for nim", nim_idx, "for terrestrial transponder search"
self.terrestrial_search_bus = 2
self.terrestrial_search_list = []
self.terrestrialTransponderInitSearchList(self.terrestrial_search_list, region)
(freq, bandWidth) = self.terrestrialTransponderGetOpt()
self.terrestrialTransponderSearch(freq, bandWidth)
tmpstr = _("Try to find used transponders in terrestrial network... please wait...")
tmpstr += "\n\n..."
self.terrestrial_search_session = self.session.openWithCallback(self.terrestrialTransponderSearchSessionClosed, MessageBox, tmpstr, MessageBox.TYPE_INFO)
def terrestrialTransponderSearch(self, freq, bandWidth):
self.terrestrial_search_data = ""
cmd = "%s --freq %d --bw %d --bus %d --ds 2" % (self.terrestrial_search_binName, freq, bandWidth, self.terrestrial_search_bus)
print "SCAN CMD : ",cmd
self.terrestrial_search_container.execute(cmd)
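# Manual scan screen: builds a transponder list from the selected tuner and
# scan type and hands it over to ServiceScan.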
class ScanSetup(ConfigListScreen, Screen, CableTransponderSearchSupport, TerrestrialTransponderSearchSupport):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Manual Scan"))
self.finished_cb = None
self.updateSatList()
self.service = session.nav.getCurrentService()
self.feinfo = None
self.networkid = 0
frontendData = None
if self.service is not None:
self.feinfo = self.service.frontendInfo()
frontendData = self.feinfo and self.feinfo.getAll(True)
self.ter_channel_input = False
self.ter_tnumber = None
self.createConfig(frontendData)
del self.feinfo
del self.service
self.session.postScanService = session.nav.getCurrentlyPlayingServiceOrGroup()
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Scan"))
self["actions"] = NumberActionMap(["SetupActions", "MenuActions", "ColorActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"red": self.keyCancel,
"green": self.keyGo,
"menu": self.doCloseRecursive,
}, -2)
self.statusTimer = eTimer()
self.statusTimer.callback.append(self.updateStatus)
#self.statusTimer.start(5000, True)
self.list = []
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Manual Scan"))
if not self.scan_nims.value == "":
self.createSetup()
self["introduction"] = Label(_("Press OK to start the scan"))
else:
self["introduction"] = Label(_("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def updateSatList(self):
self.satList = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.satList.append(nimmanager.getSatListForNim(slot.slot))
else:
self.satList.append(None)
def createSetup(self):
self.list = []
self.multiscanlist = []
index_to_scan = int(self.scan_nims.value)
print "ID: ", index_to_scan
self.tunerEntry = getConfigListEntry(_("Tuner"), self.scan_nims)
self.list.append(self.tunerEntry)
if self.scan_nims == [ ]:
return
self.typeOfScanEntry = None
self.typeOfInputEntry = None
self.systemEntry = None
self.modulationEntry = None
self.preDefSatList = None
self.TerrestrialTransponders = None
self.TerrestrialRegionEntry = None
self.TerrestrialCompleteEntry = None
nim = nimmanager.nim_slots[index_to_scan]
if nim.isCompatible("DVB-S"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_type)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-C"):
if config.Nims[index_to_scan].cable.scan_type.value != "provider": # only show predefined transponder if in provider mode
if self.scan_typecable.value == "predefined_transponder":
self.scan_typecable.value = self.cable_toggle[self.last_scan_typecable]
self.last_scan_typecable = self.scan_typecable.value
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typecable)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-T"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typeterrestrial)
self.list.append(self.typeOfScanEntry)
if self.scan_typeterrestrial.value == "single_transponder":
self.typeOfInputEntry = getConfigListEntry(_("Use frequency or channel"), self.scan_input_as)
if self.ter_channel_input:
self.list.append(self.typeOfInputEntry)
else:
self.scan_input_as.value = self.scan_input_as.choices[0]
self.scan_networkScan.value = False
if nim.isCompatible("DVB-S"):
if self.scan_type.value == "single_transponder":
self.updateSatList()
if nim.isCompatible("DVB-S2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_sat.system)
self.list.append(self.systemEntry)
else:
# downgrade to dvb-s, in case a -s2 config was active
self.scan_sat.system.value = eDVBFrontendParametersSatellite.System_DVB_S
self.list.append(getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan]))
self.list.append(getConfigListEntry(_('Frequency'), self.scan_sat.frequency))
self.list.append(getConfigListEntry(_('Inversion'), self.scan_sat.inversion))
self.list.append(getConfigListEntry(_('Symbol rate'), self.scan_sat.symbolrate))
self.list.append(getConfigListEntry(_('Polarization'), self.scan_sat.polarization))
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec))
elif self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec_s2))
self.modulationEntry = getConfigListEntry(_('Modulation'), self.scan_sat.modulation)
self.list.append(self.modulationEntry)
self.list.append(getConfigListEntry(_('Roll-off'), self.scan_sat.rolloff))
self.list.append(getConfigListEntry(_('Pilot'), self.scan_sat.pilot))
elif self.scan_type.value == "predefined_transponder" and self.satList[index_to_scan]:
self.updateSatList()
self.preDefSatList = getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan])
self.list.append(self.preDefSatList)
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
self.predefinedTranspondersList(sat[0])
self.list.append(getConfigListEntry(_('Transponder'), self.preDefTransponders))
elif self.scan_type.value == "single_satellite":
self.updateSatList()
print self.scan_satselection[index_to_scan]
self.list.append(getConfigListEntry(_("Satellite"), self.scan_satselection[index_to_scan]))
self.scan_networkScan.value = True
elif "multisat" in self.scan_type.value:
tlist = []
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in SatList:
if self.Satexists(tlist, x[0]) == 0:
tlist.append(x[0])
sat = ConfigEnableDisable(default = "_yes" in self.scan_type.value and True or False)
configEntry = getConfigListEntry(nimmanager.getSatDescription(x[0]), sat)
self.list.append(configEntry)
self.multiscanlist.append((x[0], sat))
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.list.append(getConfigListEntry(_("Frequency"), self.scan_cab.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_cab.inversion))
self.list.append(getConfigListEntry(_("Symbol rate"), self.scan_cab.symbolrate))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_cab.modulation))
self.list.append(getConfigListEntry(_("FEC"), self.scan_cab.fec))
elif self.scan_typecable.value == "predefined_transponder":
self.predefinedCabTranspondersList()
self.list.append(getConfigListEntry(_('Transponder'), self.CableTransponders))
if config.Nims[index_to_scan].cable.scan_networkid.value:
self.networkid = config.Nims[index_to_scan].cable.scan_networkid.value
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
if nim.isCompatible("DVB-T2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_ter.system)
self.list.append(self.systemEntry)
else:
self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
if self.ter_channel_input and self.scan_input_as.value == "channel":
channel = getChannelNumber(self.scan_ter.frequency.value*1000, self.ter_tnumber)
if channel:
self.scan_ter.channel.value = int(channel.replace("+","").replace("-",""))
self.list.append(getConfigListEntry(_("Channel"), self.scan_ter.channel))
else:
prev_val = self.scan_ter.frequency.value
self.scan_ter.frequency.value = channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)/1000
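# 474000 appears to be the helper's fallback for unknown channels,
# so restore the previous frequency in that case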
if self.scan_ter.frequency.value == 474000:
self.scan_ter.frequency.value = prev_val
self.list.append(getConfigListEntry(_("Frequency"), self.scan_ter.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_ter.inversion))
self.list.append(getConfigListEntry(_("Bandwidth"), self.scan_ter.bandwidth))
self.list.append(getConfigListEntry(_("Code rate HP"), self.scan_ter.fechigh))
self.list.append(getConfigListEntry(_("Code rate LP"), self.scan_ter.feclow))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_ter.modulation))
self.list.append(getConfigListEntry(_("Transmission mode"), self.scan_ter.transmission))
self.list.append(getConfigListEntry(_("Guard interval"), self.scan_ter.guard))
self.list.append(getConfigListEntry(_("Hierarchy info"), self.scan_ter.hierarchy))
if self.scan_ter.system.value == eDVBFrontendParametersTerrestrial.System_DVB_T2:
self.list.append(getConfigListEntry(_('PLP ID'), self.scan_ter.plp_id))
elif self.scan_typeterrestrial.value == "predefined_transponder":
if nim.isCompatible("DVB-T2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_ter.system)
self.list.append(self.systemEntry)
else:
self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
self.TerrestrialRegion = self.terrestrial_nims_regions[index_to_scan]
self.TerrestrialRegionEntry = getConfigListEntry(_('Region'), self.TerrestrialRegion)
self.list.append(self.TerrestrialRegionEntry)
self.predefinedTerrTranspondersList()
self.list.append(getConfigListEntry(_('Transponder'), self.TerrestrialTransponders))
elif self.scan_typeterrestrial.value == "complete":
self.TerrestrialRegion = self.terrestrial_nims_regions[index_to_scan]
if nimmanager.getNimName(nim.slot).startswith("Sundtek"):
self.TerrestrialCompleteEntry = getConfigListEntry(_('Scan options'), self.scan_ter_complete_type)
self.list.append(self.TerrestrialCompleteEntry)
if self.TerrestrialCompleteEntry is None or self.scan_ter_complete_type.value == "extended":
if nim.isCompatible("DVB-T2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_ter.system)
self.list.append(self.systemEntry)
else:
self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
self.TerrestrialRegionEntry = getConfigListEntry(_('Region'), self.TerrestrialRegion)
self.list.append(self.TerrestrialRegionEntry)
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
self.list.append(getConfigListEntry(_("Only free scan"), self.scan_onlyfree))
self["config"].list = self.list
self["config"].l.setList(self.list)
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
def newConfig(self):
cur = self["config"].getCurrent()
print "cur is", cur
if cur == self.typeOfScanEntry or \
cur == self.typeOfInputEntry or \
cur == self.tunerEntry or \
cur == self.systemEntry or \
cur == self.preDefSatList or \
cur == self.TerrestrialRegionEntry or \
cur == self.TerrestrialCompleteEntry or \
(self.modulationEntry and self.systemEntry[1].value == eDVBFrontendParametersSatellite.System_DVB_S2 and cur == self.modulationEntry):
self.createSetup()
def createConfig(self, frontendData):
defaultSat = {
"orbpos": 192,
"system": eDVBFrontendParametersSatellite.System_DVB_S,
"frequency": 11836,
"inversion": eDVBFrontendParametersSatellite.Inversion_Unknown,
"symbolrate": 27500,
"polarization": eDVBFrontendParametersSatellite.Polarisation_Horizontal,
"fec": eDVBFrontendParametersSatellite.FEC_Auto,
"fec_s2": eDVBFrontendParametersSatellite.FEC_9_10,
"modulation": eDVBFrontendParametersSatellite.Modulation_QPSK }
defaultCab = {
"frequency": 466,
"inversion": eDVBFrontendParametersCable.Inversion_Unknown,
"modulation": eDVBFrontendParametersCable.Modulation_QAM64,
"fec": eDVBFrontendParametersCable.FEC_Auto,
"symbolrate": 6900,
"system": eDVBFrontendParametersCable.System_DVB_C_ANNEX_A }
defaultTer = {
"frequency" : 474000,
"inversion" : eDVBFrontendParametersTerrestrial.Inversion_Unknown,
"bandwidth" : 8000000,
"fechigh" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"feclow" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"modulation" : eDVBFrontendParametersTerrestrial.Modulation_Auto,
"transmission_mode" : eDVBFrontendParametersTerrestrial.TransmissionMode_Auto,
"guard_interval" : eDVBFrontendParametersTerrestrial.GuardInterval_Auto,
"hierarchy": eDVBFrontendParametersTerrestrial.Hierarchy_Auto,
"system": eDVBFrontendParametersTerrestrial.System_DVB_T,
"plp_id": 0 }
if frontendData is not None:
ttype = frontendData.get("tuner_type", "UNKNOWN")
if ttype == "DVB-S":
defaultSat["system"] = frontendData.get("system", eDVBFrontendParametersSatellite.System_DVB_S)
defaultSat["frequency"] = frontendData.get("frequency", 0) / 1000
defaultSat["inversion"] = frontendData.get("inversion", eDVBFrontendParametersSatellite.Inversion_Unknown)
defaultSat["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultSat["polarization"] = frontendData.get("polarization", eDVBFrontendParametersSatellite.Polarisation_Horizontal)
if defaultSat["system"] == eDVBFrontendParametersSatellite.System_DVB_S2:
defaultSat["fec_s2"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["rolloff"] = frontendData.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35)
defaultSat["pilot"] = frontendData.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown)
else:
defaultSat["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["modulation"] = frontendData.get("modulation", eDVBFrontendParametersSatellite.Modulation_QPSK)
defaultSat["orbpos"] = frontendData.get("orbital_position", 0)
elif ttype == "DVB-C":
defaultCab["frequency"] = frontendData.get("frequency", 0) / 1000
defaultCab["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultCab["inversion"] = frontendData.get("inversion", eDVBFrontendParametersCable.Inversion_Unknown)
defaultCab["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersCable.FEC_Auto)
defaultCab["modulation"] = frontendData.get("modulation", eDVBFrontendParametersCable.Modulation_QAM16)
defaultCab["system"] = frontendData.get("system", eDVBFrontendParametersCable.System_DVB_C_ANNEX_A)
elif ttype == "DVB-T":
defaultTer["frequency"] = frontendData.get("frequency", 47400000) / 1000
defaultTer["inversion"] = frontendData.get("inversion", eDVBFrontendParametersTerrestrial.Inversion_Unknown)
defaultTer["bandwidth"] = frontendData.get("bandwidth", 8000000)
defaultTer["fechigh"] = frontendData.get("code_rate_hp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["feclow"] = frontendData.get("code_rate_lp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["modulation"] = frontendData.get("constellation", eDVBFrontendParametersTerrestrial.Modulation_Auto)
defaultTer["transmission_mode"] = frontendData.get("transmission_mode", eDVBFrontendParametersTerrestrial.TransmissionMode_Auto)
defaultTer["guard_interval"] = frontendData.get("guard_interval", eDVBFrontendParametersTerrestrial.GuardInterval_Auto)
defaultTer["hierarchy"] = frontendData.get("hierarchy_information", eDVBFrontendParametersTerrestrial.Hierarchy_Auto)
defaultTer["system"] = frontendData.get("system", eDVBFrontendParametersTerrestrial.System_DVB_T)
defaultTer["plp_id"] = frontendData.get("plp_id", 0)
self.scan_sat = ConfigSubsection()
self.scan_cab = ConfigSubsection()
self.scan_ter = ConfigSubsection()
nim_list = []
# collect all nims which are *not* set to "nothing"
for n in nimmanager.nim_slots:
if n.config_mode == "nothing":
continue
if n.config_mode == "advanced" and len(nimmanager.getSatListForNim(n.slot)) < 1:
continue
if n.config_mode in ("loopthrough_internal", "loopthrough_external", "satposdepends"):
root_id = nimmanager.sec.getRoot(n.slot_id, int(n.config.connectedTo.value))
if n.type == nimmanager.nim_slots[root_id].type: # check if connected from a DVB-S to DVB-S2 Nim or vice versa
continue
nim_list.append((str(n.slot), n.friendly_full_description))
self.scan_nims = ConfigSelection(choices = nim_list)
if frontendData is not None and len(nim_list) > 0:
self.scan_nims.value = str(frontendData.get("tuner_number", nim_list[0][0]))
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-T"):
self.ter_tnumber = slot.slot
if self.ter_tnumber is not None:
self.ter_channel_input = supportedChannels(self.ter_tnumber)
# status
self.scan_snr = ConfigSlider()
self.scan_snr.enabled = False
self.scan_agc = ConfigSlider()
self.scan_agc.enabled = False
self.scan_ber = ConfigSlider()
self.scan_ber.enabled = False
# sat
self.scan_sat.system = ConfigSelection(default = defaultSat["system"], choices = [
(eDVBFrontendParametersSatellite.System_DVB_S, _("DVB-S")),
(eDVBFrontendParametersSatellite.System_DVB_S2, _("DVB-S2"))])
self.scan_sat.frequency = ConfigInteger(default = defaultSat["frequency"], limits = (1, 99999))
self.scan_sat.inversion = ConfigSelection(default = defaultSat["inversion"], choices = [
(eDVBFrontendParametersSatellite.Inversion_Off, _("Off")),
(eDVBFrontendParametersSatellite.Inversion_On, _("On")),
(eDVBFrontendParametersSatellite.Inversion_Unknown, _("Auto"))])
self.scan_sat.symbolrate = ConfigInteger(default = defaultSat["symbolrate"], limits = (1, 99999))
self.scan_sat.polarization = ConfigSelection(default = defaultSat["polarization"], choices = [
(eDVBFrontendParametersSatellite.Polarisation_Horizontal, _("horizontal")),
(eDVBFrontendParametersSatellite.Polarisation_Vertical, _("vertical")),
(eDVBFrontendParametersSatellite.Polarisation_CircularLeft, _("circular left")),
(eDVBFrontendParametersSatellite.Polarisation_CircularRight, _("circular right"))])
self.scan_sat.fec = ConfigSelection(default = defaultSat["fec"], choices = [
(eDVBFrontendParametersSatellite.FEC_Auto, _("Auto")),
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_None, _("None"))])
self.scan_sat.fec_s2 = ConfigSelection(default = defaultSat["fec_s2"], choices = [
(eDVBFrontendParametersSatellite.FEC_Auto, _("Auto")),
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_3_5, "3/5"),
(eDVBFrontendParametersSatellite.FEC_4_5, "4/5"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_8_9, "8/9"),
(eDVBFrontendParametersSatellite.FEC_9_10, "9/10")])
self.scan_sat.modulation = ConfigSelection(default = defaultSat["modulation"], choices = [
(eDVBFrontendParametersSatellite.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersSatellite.Modulation_8PSK, "8PSK"),
(eDVBFrontendParametersSatellite.Modulation_16APSK, "16APSK"),
(eDVBFrontendParametersSatellite.Modulation_32APSK, "32APSK")])
self.scan_sat.rolloff = ConfigSelection(default = defaultSat.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35), choices = [
(eDVBFrontendParametersSatellite.RollOff_alpha_0_35, "0.35"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_25, "0.25"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_20, "0.20"),
(eDVBFrontendParametersSatellite.RollOff_auto, _("Auto"))])
self.scan_sat.pilot = ConfigSelection(default = defaultSat.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown), choices = [
(eDVBFrontendParametersSatellite.Pilot_Off, _("Off")),
(eDVBFrontendParametersSatellite.Pilot_On, _("On")),
(eDVBFrontendParametersSatellite.Pilot_Unknown, _("Auto"))])
# cable
self.scan_cab.frequency = ConfigInteger(default = defaultCab["frequency"], limits = (50, 999))
self.scan_cab.inversion = ConfigSelection(default = defaultCab["inversion"], choices = [
(eDVBFrontendParametersCable.Inversion_Off, _("Off")),
(eDVBFrontendParametersCable.Inversion_On, _("On")),
(eDVBFrontendParametersCable.Inversion_Unknown, _("Auto"))])
self.scan_cab.modulation = ConfigSelection(default = defaultCab["modulation"], choices = [
(eDVBFrontendParametersCable.Modulation_QAM16, "16-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM32, "32-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM64, "64-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM128, "128-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM256, "256-QAM")])
self.scan_cab.fec = ConfigSelection(default = defaultCab["fec"], choices = [
(eDVBFrontendParametersCable.FEC_Auto, _("Auto")),
(eDVBFrontendParametersCable.FEC_1_2, "1/2"),
(eDVBFrontendParametersCable.FEC_2_3, "2/3"),
(eDVBFrontendParametersCable.FEC_3_4, "3/4"),
(eDVBFrontendParametersCable.FEC_5_6, "5/6"),
(eDVBFrontendParametersCable.FEC_7_8, "7/8"),
(eDVBFrontendParametersCable.FEC_8_9, "8/9"),
(eDVBFrontendParametersCable.FEC_3_5, "3/5"),
(eDVBFrontendParametersCable.FEC_4_5, "4/5"),
(eDVBFrontendParametersCable.FEC_9_10, "9/10"),
(eDVBFrontendParametersCable.FEC_None, _("None"))])
self.scan_cab.symbolrate = ConfigInteger(default = defaultCab["symbolrate"], limits = (1, 99999))
self.scan_cab.system = ConfigSelection(default = defaultCab["system"], choices = [
(eDVBFrontendParametersCable.System_DVB_C_ANNEX_A, _("DVB-C")),
(eDVBFrontendParametersCable.System_DVB_C_ANNEX_C, _("DVB-C ANNEX C"))])
# terrestrial
self.scan_ter.frequency = ConfigInteger(default = defaultTer["frequency"], limits = (50000, 999000))
self.scan_ter.channel = ConfigInteger(default = 21, limits = (1, 99))
self.scan_ter.inversion = ConfigSelection(default = defaultTer["inversion"], choices = [
(eDVBFrontendParametersTerrestrial.Inversion_Off, _("Off")),
(eDVBFrontendParametersTerrestrial.Inversion_On, _("On")),
(eDVBFrontendParametersTerrestrial.Inversion_Unknown, _("Auto"))])
# WORKAROUND: we can't use BW-auto
self.scan_ter.bandwidth = ConfigSelection(default = defaultTer["bandwidth"], choices = [
(1712000, "1.712MHz"),
(5000000, "5MHz"),
(6000000, "6MHz"),
(7000000, "7MHz"),
(8000000, "8MHz"),
(10000000,"10MHz")
])
#, (eDVBFrontendParametersTerrestrial.Bandwidth_Auto, _("Auto"))))
self.scan_ter.fechigh = ConfigSelection(default = defaultTer["fechigh"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_6_7, "6/7"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_8_9, "8/9"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.feclow = ConfigSelection(default = defaultTer["feclow"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_6_7, "6/7"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_8_9, "8/9"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.modulation = ConfigSelection(default = defaultTer["modulation"], choices = [
(eDVBFrontendParametersTerrestrial.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM16, "QAM16"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM64, "QAM64"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM256, "QAM256"),
(eDVBFrontendParametersTerrestrial.Modulation_Auto, _("Auto"))])
self.scan_ter.transmission = ConfigSelection(default = defaultTer["transmission_mode"], choices = [
(eDVBFrontendParametersTerrestrial.TransmissionMode_1k, "1K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_2k, "2K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_4k, "4K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_8k, "8K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_16k, "16K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_32k, "32K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_Auto, _("Auto"))])
self.scan_ter.guard = ConfigSelection(default = defaultTer["guard_interval"], choices = [
(eDVBFrontendParametersTerrestrial.GuardInterval_1_32, "1/32"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_16, "1/16"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_8, "1/8"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_4, "1/4"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_128, "1/128"),
(eDVBFrontendParametersTerrestrial.GuardInterval_19_128, "19/128"),
(eDVBFrontendParametersTerrestrial.GuardInterval_19_256, "19/256"),
(eDVBFrontendParametersTerrestrial.GuardInterval_Auto, _("Auto"))])
self.scan_ter.hierarchy = ConfigSelection(default = defaultTer["hierarchy"], choices = [
(eDVBFrontendParametersTerrestrial.Hierarchy_None, _("None")),
(eDVBFrontendParametersTerrestrial.Hierarchy_1, "1"),
(eDVBFrontendParametersTerrestrial.Hierarchy_2, "2"),
(eDVBFrontendParametersTerrestrial.Hierarchy_4, "4"),
(eDVBFrontendParametersTerrestrial.Hierarchy_Auto, _("Auto"))])
self.scan_ter.system = ConfigSelection(default = defaultTer["system"], choices = [
(eDVBFrontendParametersTerrestrial.System_DVB_T_T2, _("Auto")),
(eDVBFrontendParametersTerrestrial.System_DVB_T, _("DVB-T")),
(eDVBFrontendParametersTerrestrial.System_DVB_T2, _("DVB-T2"))])
self.scan_ter.plp_id = ConfigInteger(default = defaultTer["plp_id"], limits = (0, 255))
self.scan_scansat = {}
for sat in nimmanager.satList:
#print sat[1]
self.scan_scansat[sat[0]] = ConfigYesNo(default = False)
self.scan_satselection = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.scan_satselection.append(getConfigSatlist(defaultSat["orbpos"], self.satList[slot.slot]))
else:
self.scan_satselection.append(None)
self.terrestrial_nims_regions = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-T"):
self.terrestrial_nims_regions.append(self.getTerrestrialRegionsList(slot.slot))
else:
self.terrestrial_nims_regions.append(None)
if frontendData is not None and ttype == "DVB-S" and self.predefinedTranspondersList(defaultSat["orbpos"]) is not None:
defaultSatSearchType = "predefined_transponder"
else:
defaultSatSearchType = "single_transponder"
if frontendData is not None and ttype == "DVB-T" and self.predefinedTerrTranspondersList() is not None:
defaultTerrSearchType = "predefined_transponder"
else:
defaultTerrSearchType = "single_transponder"
if frontendData is not None and ttype == "DVB-C" and self.predefinedCabTranspondersList() is not None:
defaultCabSearchType = "predefined_transponder"
else:
defaultCabSearchType = "single_transponder"
self.scan_type = ConfigSelection(default = defaultSatSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("single_satellite", _("Single satellite")), ("multisat", _("Multisat")), ("multisat_yes", _("Multisat all select"))])
self.scan_typecable = ConfigSelection(default = defaultCabSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("complete", _("Complete"))])
self.last_scan_typecable = "single_transponder"
self.cable_toggle = {"single_transponder":"complete", "complete":"single_transponder"}
self.scan_typeterrestrial = ConfigSelection(default = defaultTerrSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("complete", _("Complete"))])
self.scan_input_as = ConfigSelection(default = "channel", choices = [("frequency", _("Frequency")), ("channel", _("Channel"))])
self.scan_ter_complete_type = ConfigSelection(default = "all", choices = [("all", _("All frequency")), ("extended", _("Extended"))])
self.scan_clearallservices = ConfigSelection(default = "no", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.scan_onlyfree = ConfigYesNo(default = False)
self.scan_networkScan = ConfigYesNo(default = False)
return True
def keyLeft(self):
ConfigListScreen.keyLeft(self)
self.newConfig()
def keyRight(self):
ConfigListScreen.keyRight(self)
self.newConfig()
def handleKeyFileCallback(self, answer):
ConfigListScreen.handleKeyFileCallback(self, answer)
self.newConfig()
def updateStatus(self):
print "updatestatus"
def addSatTransponder(self, tlist, frequency, symbol_rate, polarisation, fec, inversion, orbital_position, system, modulation, rolloff, pilot):
print "Add Sat: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(polarisation) + " fec: " + str(fec) + " inversion: " + str(inversion) + " modulation: " + str(modulation) + " system: " + str(system) + " rolloff" + str(rolloff) + " pilot" + str(pilot)
print "orbpos: " + str(orbital_position)
parm = eDVBFrontendParametersSatellite()
parm.modulation = modulation
parm.system = system
parm.frequency = frequency * 1000
parm.symbol_rate = symbol_rate * 1000
parm.polarisation = polarisation
parm.fec = fec
parm.inversion = inversion
parm.orbital_position = orbital_position
parm.rolloff = rolloff
parm.pilot = pilot
tlist.append(parm)
def addCabTransponder(self, tlist, frequency, symbol_rate, modulation, fec, inversion):
print "Add Cab: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(modulation) + " fec: " + str(fec) + " inversion: " + str(inversion)
parm = eDVBFrontendParametersCable()
parm.frequency = frequency
parm.symbol_rate = symbol_rate
parm.modulation = modulation
parm.fec_inner = fec
parm.inversion = inversion
tlist.append(parm)
def addTerTransponder(self, tlist, *args, **kwargs):
tlist.append(buildTerTransponder(*args, **kwargs))
def keyGo(self):
infoBarInstance = InfoBar.instance
if infoBarInstance:
infoBarInstance.checkTimeshiftRunning(self.keyGoCheckTimeshiftCallback)
else:
self.keyGoCheckTimeshiftCallback(True)
def keyGoCheckTimeshiftCallback(self, answer):
START_SCAN = 0
SEARCH_CABLE_TRANSPONDERS = 1
SEARCH_TERRESTRIAL2_TRANSPONDERS = 2
if not answer or self.scan_nims.value == "":
return
tlist = []
flags = None
removeAll = True
action = START_SCAN
index_to_scan = int(self.scan_nims.value)
if self.scan_nims == [ ]:
self.session.open(MessageBox, _("No tuner is enabled!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
return
nim = nimmanager.nim_slots[index_to_scan]
print "nim", nim.slot
if nim.isCompatible("DVB-S"):
print "is compatible with DVB-S"
if self.scan_type.value == "single_transponder":
# these lists are generated for each tuner, so this has to work.
assert len(self.satList) > index_to_scan
assert len(self.scan_satselection) > index_to_scan
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
# however, the satList itself could be empty. in that case, "index" is 0 (for "None").
if len(nimsats):
orbpos = nimsats[selsatidx][0]
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
fec = self.scan_sat.fec.value
else:
fec = self.scan_sat.fec_s2.value
print "add sat transponder"
self.addSatTransponder(tlist, self.scan_sat.frequency.value,
self.scan_sat.symbolrate.value,
self.scan_sat.polarization.value,
fec,
self.scan_sat.inversion.value,
orbpos,
self.scan_sat.system.value,
self.scan_sat.modulation.value,
self.scan_sat.rolloff.value,
self.scan_sat.pilot.value)
removeAll = False
elif self.scan_type.value == "predefined_transponder":
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
if len(nimsats):
orbpos = nimsats[selsatidx][0]
tps = nimmanager.getTransponders(orbpos)
if len(tps) and len(tps) > self.preDefTransponders.index:
tp = tps[self.preDefTransponders.index]
self.addSatTransponder(tlist, tp[1] / 1000, tp[2] / 1000, tp[3], tp[4], tp[7], orbpos, tp[5], tp[6], tp[8], tp[9])
removeAll = False
elif self.scan_type.value == "single_satellite":
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
getInitialTransponderList(tlist, sat[0])
elif "multisat" in self.scan_type.value:
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in self.multiscanlist:
if x[1].value:
print " " + str(x[0])
getInitialTransponderList(tlist, x[0])
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.addCabTransponder(tlist, self.scan_cab.frequency.value*1000,
self.scan_cab.symbolrate.value*1000,
self.scan_cab.modulation.value,
self.scan_cab.fec.value,
self.scan_cab.inversion.value)
removeAll = False
elif self.scan_typecable.value == "predefined_transponder":
tps = nimmanager.getTranspondersCable(index_to_scan)
if len(tps) and len(tps) > self.CableTransponders.index:
tp = tps[self.CableTransponders.index]
# 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
self.addCabTransponder(tlist, tp[1], tp[2], tp[3], tp[4], tp[5])
removeAll = False
elif self.scan_typecable.value == "complete":
if config.Nims[index_to_scan].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, index_to_scan)
else:
action = SEARCH_CABLE_TRANSPONDERS
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
if self.scan_input_as.value == "channel":
frequency = channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)
else:
frequency = self.scan_ter.frequency.value * 1000
self.addTerTransponder(tlist,
frequency,
inversion = self.scan_ter.inversion.value,
bandwidth = self.scan_ter.bandwidth.value,
fechigh = self.scan_ter.fechigh.value,
feclow = self.scan_ter.feclow.value,
modulation = self.scan_ter.modulation.value,
transmission = self.scan_ter.transmission.value,
guard = self.scan_ter.guard.value,
hierarchy = self.scan_ter.hierarchy.value,
system = self.scan_ter.system.value,
plp_id = self.scan_ter.plp_id.value)
removeAll = False
elif self.scan_typeterrestrial.value == "predefined_transponder":
if self.TerrestrialTransponders is not None:
region = self.terrestrial_nims_regions[index_to_scan].value
tps = nimmanager.getTranspondersTerrestrial(region)
if len(tps) and len(tps) > self.TerrestrialTransponders.index:
tp = tps[self.TerrestrialTransponders.index]
tlist.append(buildTerTransponder(tp[1], tp[9], tp[2], tp[4], tp[5], tp[3], tp[7], tp[6], tp[8], self.scan_ter.system.value, tp[11]))
removeAll = False
elif self.scan_typeterrestrial.value == "complete":
skip_t2 = False
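# skip_t2 = True leaves DVB-T2 transponders out of the initial list,
# presumably because the external helper/blindscan finds them itself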
if nimmanager.getNimName(nim.slot).startswith("Sundtek") and self.scan_ter_complete_type.value == "all":
action = SEARCH_TERRESTRIAL2_TRANSPONDERS
elif SystemInfo["Blindscan_t2_available"]:
skip_t2 = True
if nim.isCompatible("DVB-T2"):
if len(self.terrestrialTransponderGetCmd(nim.slot)):
action = SEARCH_TERRESTRIAL2_TRANSPONDERS
else:
skip_t2 = False
getInitialTerrestrialTransponderList(tlist, self.TerrestrialRegion.value, int(self.scan_ter.system.value), skip_t2)
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if tmp != "no" and not removeAll:
flags |= eComponentScan.scanDontRemoveUnscanned
if self.scan_onlyfree.value:
flags |= eComponentScan.scanOnlyFree
for x in self["config"].list:
x[1].save()
if action == START_SCAN:
self.startScan(tlist, flags, index_to_scan, self.networkid)
elif action == SEARCH_CABLE_TRANSPONDERS:
self.flags = flags
self.feid = index_to_scan
self.tlist = []
self.startCableTransponderSearch(self.feid)
elif action == SEARCH_TERRESTRIAL2_TRANSPONDERS:
self.flags = flags
self.feid = index_to_scan
self.tlist = tlist
self.startTerrestrialTransponderSearch(self.feid, nimmanager.getTerrestrialDescription(self.feid))
def setCableTransponderSearchResult(self, tlist):
self.tlist = tlist
def cableTransponderSearchFinished(self):
if self.tlist is None:
self.tlist = []
else:
self.startScan(self.tlist, self.flags, self.feid)
def setTerrestrialTransponderSearchResult(self, tlist):
self.tlist = tlist
def terrestrialTransponderSearchFinished(self):
if self.tlist is None:
self.tlist = []
else:
self.startScan(self.tlist, self.flags, self.feid)
def predefinedTranspondersList(self, orbpos):
default = None
if orbpos is not None:
list = []
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
fec = self.scan_sat.fec_s2.value
else:
fec = self.scan_sat.fec.value
compare = [0, self.scan_sat.frequency.value*1000, self.scan_sat.symbolrate.value*1000, self.scan_sat.polarization.value, fec]
i = 0
tps = nimmanager.getTransponders(orbpos)
for tp in tps:
if tp[0] == 0:
if default is None and self.compareTransponders(tp, compare):
default = str(i)
list.append((str(i), self.humanReadableTransponder(tp)))
i += 1
self.preDefTransponders = ConfigSelection(choices = list, default = default)
return default
def humanReadableTransponder(self, tp):
if tp[3] in range (4) and tp[4] in range (11):
pol_list = ['H','V','L','R']
fec_list = ['Auto','1/2','2/3','3/4','5/6','7/8','8/9','3/5','4/5','9/10','None']
return str(tp[1] / 1000) + " " + pol_list[tp[3]] + " " + str(tp[2] / 1000) + " " + fec_list[tp[4]]
return _("Invalid transponder data")
def compareTransponders(self, tp, compare):
frequencyTolerance = 2000 #2 MHz
symbolRateTolerance = 10
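# tp[4] == 0 is FEC 'Auto' in the predefined list and matches any selection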
return abs(tp[1] - compare[1]) <= frequencyTolerance and abs(tp[2] - compare[2]) <= symbolRateTolerance and tp[3] == compare[3] and (not tp[4] or tp[4] == compare[4])
def predefinedTerrTranspondersList(self):
default = None
list = []
compare = [2, self.scan_ter.frequency.value*1000]
i = 0
index_to_scan = int(self.scan_nims.value)
channels = supportedChannels(index_to_scan)
region = self.terrestrial_nims_regions[index_to_scan].value
tps = nimmanager.getTranspondersTerrestrial(region)
for tp in tps:
if tp[0] == 2: #TERRESTRIAL
channel = ''
if channels:
channel = _(' (Channel %s)') % (getChannelNumber(tp[1], index_to_scan))
if default is None and self.compareTerrTransponders(tp, compare):
default = str(i)
list.append((str(i), '%s MHz %s' % (str(tp[1] / 1000000), channel)))
i += 1
print "channel", channel
self.TerrestrialTransponders = ConfigSelection(choices = list, default = default)
return default
def compareTerrTransponders(self, tp, compare):
frequencyTolerance = 1000000 #1 MHz
return abs(tp[1] - compare[1]) <= frequencyTolerance
def getTerrestrialRegionsList(self, index_to_scan = None):
default = None
list = []
if index_to_scan is None:
index_to_scan = int(self.scan_nims.value)
defaultRegionForNIM = nimmanager.getTerrestrialDescription(index_to_scan)
for r in nimmanager.terrestrialsList:
if default is None and r[0] == defaultRegionForNIM:
default = r[0]
list.append((r[0], r[0][:46]))
return ConfigSelection(choices = list, default = default)
def predefinedCabTranspondersList(self):
default = None
list = []
# 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
compare = [1, self.scan_cab.frequency.value*1000, self.scan_cab.symbolrate.value*1000, self.scan_cab.modulation.value, self.scan_cab.fec.value, self.scan_cab.inversion.value, self.scan_cab.system.value]
i = 0
index_to_scan = int(self.scan_nims.value)
tps = nimmanager.getTranspondersCable(index_to_scan)
for tp in tps:
if tp[0] == 1: #CABLE
if default is None and self.compareCabTransponders(tp, compare):
default = str(i)
list.append((str(i), self.humanReadableCabTransponder(tp)))
i += 1
self.CableTransponders = ConfigSelection(choices = list, default = default)
return default
def humanReadableCabTransponder(self, tp):
if tp[3] in range (7) and (tp[4] in range (10) or tp[4] == 15):
mod_list = ['Auto', '16-QAM','32-QAM','64-QAM','128-QAM', '256-QAM', 'Auto']
fec_list = {0:"Auto", 1:'1/2', 2:'2/3', 3:'3/4', 4:'5/6', 5:'7/8', 6:'8/9', 7:'3/5', 8:'4/5', 9:'9/10', 15:'None'}
print str(tp[1]/1000) + " MHz " + fec_list[tp[4]] + " " + str(tp[2]/1000) + " " + mod_list[tp[3]]
return str(tp[1]/1000) + " MHz " + fec_list[tp[4]] + " " + str(tp[2]/1000) + " " + mod_list[tp[3]]
return _("Invalid transponder data")
def compareCabTransponders(self, tp, compare):
frequencyTolerance = 1000000 #1 MHz
symbolRateTolerance = 10
return abs(tp[1] - compare[1]) <= frequencyTolerance and abs(tp[2] - compare[2]) <= symbolRateTolerance and tp[3] == compare[3] and (not tp[4] or tp[4] == compare[4])
def startScan(self, tlist, flags, feid, networkid = 0):
if len(tlist):
# flags |= eComponentScan.scanSearchBAT
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
self.session.openWithCallback(self.startScanCallback, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def startScanCallback(self, answer=True):
if answer:
self.doCloseRecursive()
def keyCancel(self):
self.session.nav.playService(self.session.postScanService)
for x in self["config"].list:
x[1].cancel()
self.close()
def doCloseRecursive(self):
self.session.nav.playService(self.session.postScanService)
self.closeRecursive()
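# Automatic scan screen: scans every enabled tuner; the first tuner that
# provides a network gets priority for scanning that network.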
class ScanSimple(ConfigListScreen, Screen, CableTransponderSearchSupport, TerrestrialTransponderSearchSupport):
def getNetworksForNim(self, nim):
if nim.isCompatible("DVB-S"):
networks = nimmanager.getSatListForNim(nim.slot)
elif nim.isCompatible("DVB-C"):
networks = nimmanager.getTranspondersCable(nim.slot)
if not networks and config.Nims[nim.slot].configMode.value == "enabled":
networks = [ nim.type ]
elif nim.isCompatible("DVB-T"):
networks = [nimmanager.getTerrestrialDescription(nim.slot)]
if not nimmanager.somethingConnected(nim.slot):
networks = []
elif not nim.empty:
networks = [ nim.type ] # "DVB-C" or "DVB-T". TODO: separate networks for different C/T tuners, if we want to support that.
else:
# empty tuners provide no networks.
networks = [ ]
return networks
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Automatic Scan"))
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Scan"))
self["actions"] = ActionMap(["SetupActions", "MenuActions", "ColorActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"menu": self.doCloseRecursive,
"red": self.keyCancel,
"green": self.keyGo,
}, -2)
self.session.postScanService = session.nav.getCurrentlyPlayingServiceOrGroup()
self.list = []
tlist = []
known_networks = [ ]
nims_to_scan = [ ]
self.finished_cb = None
for nim in nimmanager.nim_slots:
# collect networks provided by this tuner
need_scan = False
networks = self.getNetworksForNim(nim)
print "nim %d provides" % nim.slot, networks
print "known:", known_networks
# we only need to scan on the first tuner which provides a network.
# this gives the first tuner for each network priority for scanning.
for x in networks:
if x not in known_networks:
need_scan = True
# Terrestrial delivers a string which is not correctly handled - FIX ME
# print x, "not in ", known_networks
known_networks.append(x)
# don't offer to scan nims if nothing is connected
if not nimmanager.somethingConnected(nim.slot):
need_scan = False
if need_scan:
nims_to_scan.append(nim)
# we save the config elements to use them on keyGo
self.nim_enable = [ ]
if len(nims_to_scan):
self.scan_networkScan = ConfigYesNo(default = True)
self.scan_clearallservices = ConfigSelection(default = "yes", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
for nim in nims_to_scan:
nimconfig = ConfigYesNo(default = True)
nimconfig.nim_index = nim.slot
self.nim_enable.append(nimconfig)
self.list.append(getConfigListEntry(_("Scan ") + nim.slot_name + " (" + nim.friendly_type + ")", nimconfig))
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Automatic scan"))
self["footer"] = Label(_("Press OK to scan"))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def keyGo(self):
InfoBarInstance = InfoBar.instance
if InfoBarInstance:
InfoBarInstance.checkTimeshiftRunning(self.keyGoCheckTimeshiftCallback)
else:
self.keyGoCheckTimeshiftCallback(True)
def keyGoCheckTimeshiftCallback(self, answer):
if answer:
self.scanList = []
self.known_networks = set()
self.nim_iter = 0
self.buildTransponderList()
def buildTransponderList(self): # this method is called multiple times because of asynchronous stuff
APPEND_NOW = 0
SEARCH_CABLE_TRANSPONDERS = 1
SEARCH_TERRESTRIAL2_TRANSPONDERS = 2
action = APPEND_NOW
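# fetch the next per-nim enable flag (py2 and/or idiom); None once all nims were handled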
n = self.nim_iter < len(self.nim_enable) and self.nim_enable[self.nim_iter] or None
self.nim_iter += 1
if n:
if n.value: # check if nim is enabled
flags = 0
nim = nimmanager.nim_slots[n.nim_index]
networks = set(self.getNetworksForNim(nim))
networkid = 0
# don't scan anything twice: drop networks an earlier tuner already covers
networks.difference_update(self.known_networks)
self.known_networks.update(networks)
tlist = [ ]
if nim.isCompatible("DVB-S"):
# get initial transponders for each satellite to be scanned
for sat in networks:
getInitialTransponderList(tlist, sat[0])
elif nim.isCompatible("DVB-C"):
if config.Nims[nim.slot].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, nim.slot)
else:
action = SEARCH_CABLE_TRANSPONDERS
networkid = config.Nims[nim.slot].cable.scan_networkid.value
elif nim.isCompatible("DVB-T"):
skip_t2 = False
if SystemInfo["Blindscan_t2_available"]:
skip_t2 = True
if nim.isCompatible("DVB-T2"):
if len(self.terrestrialTransponderGetCmd(nim.slot)):
action = SEARCH_TERRESTRIAL2_TRANSPONDERS
else:
skip_t2 = False
getInitialTerrestrialTransponderList(tlist, nimmanager.getTerrestrialDescription(nim.slot), skip_t2=skip_t2)
else:
assert False
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if action == APPEND_NOW:
self.scanList.append({"transponders": tlist, "feid": nim.slot, "flags": flags})
elif action == SEARCH_CABLE_TRANSPONDERS:
self.flags = flags
self.feid = nim.slot
self.networkid = networkid
self.startCableTransponderSearch(nim.slot)
return
elif action == SEARCH_TERRESTRIAL2_TRANSPONDERS:
self.tlist = tlist
self.flags = flags
self.feid = nim.slot
self.startTerrestrialTransponderSearch(nim.slot, nimmanager.getTerrestrialDescription(nim.slot))
return
else:
assert False
self.buildTransponderList() # recursive call of this function !!!
return
# when we are here, then the recursion is finished and all enabled nims are checked
# so we now start the real transponder scan
self.startScan(self.scanList)
def startScan(self, scanList):
if len(scanList):
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, scanList = scanList)
else:
self.session.open(ServiceScan, scanList = scanList)
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def setCableTransponderSearchResult(self, tlist):
if tlist is not None:
self.scanList.append({"transponders": tlist, "feid": self.feid, "flags": self.flags})
def cableTransponderSearchFinished(self):
self.buildTransponderList()
def setTerrestrialTransponderSearchResult(self, tlist):
if tlist is not None:
self.tlist.extend(tlist)
if self.tlist is not None:
self.scanList.append({"transponders": self.tlist, "feid": self.feid, "flags": self.flags})
def terrestrialTransponderSearchFinished(self):
self.buildTransponderList()
def keyCancel(self):
self.session.nav.playService(self.session.postScanService)
self.close()
def doCloseRecursive(self):
self.session.nav.playService(self.session.postScanService)
self.closeRecursive()
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0<|fim▁end|> | # DVB-T2 : OK frequency bandwidth delivery system number_of_plp plp_id0:plp_type0
if data[3] == 1: # DVB-T
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = int(data[1]) |
<|file_name|>price_history_pack.py<|end_file_name|><|fim▁begin|>import numpy as np
class PriceHistoryPack(object):
def __init__(self, input_seq_len, num_features, target_seq_len):
super(PriceHistoryPack, self).__init__()
self.sku_ids = []
self.XX = np.empty((0, input_seq_len, num_features))<|fim▁hole|> self.seq_mask = np.empty((0, input_seq_len))
def update(self, sku_id, inputs, targets, input_seq_len):
self.sku_ids.append(sku_id)
inputs_len = len(inputs)
self.sequence_lens.append(inputs_len)
# build current mask with zeros and ones
cur_mask = np.zeros(input_seq_len)
cur_mask[:inputs_len] = 1 # mark only the first inputs_len timesteps as valid
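# pad only at the tail of the time axis (axis 0); the feature axis (axis 1) keeps its width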
xx_padded = np.pad(inputs, ((0, input_seq_len - inputs_len), (0, 0)), mode='constant', constant_values=0.)
# here targets do NOT need to be padded because we do not have a sequence to sequence model
# yy_padded = np.pad(targets, (0, series_max_len - len(targets)), mode='constant', constant_values=0.)
assert len(xx_padded) == input_seq_len
self.XX = np.vstack((self.XX, xx_padded[np.newaxis]))
self.YY = np.vstack((self.YY, targets[np.newaxis]))
self.seq_mask = np.vstack((self.seq_mask, cur_mask[np.newaxis]))
def get_data(self, fraction=None, random_state=None):
# from sklearn.model_selection import train_test_split
skuIds, xx, yy, seqLens, seqMask = np.array(self.sku_ids), self.XX, self.YY, np.array(
self.sequence_lens), self.seq_mask
if fraction is None:
return skuIds, xx, yy, seqLens, seqMask
else:
random_state = np.random if random_state is None else random_state
cur_len = len(skuIds)
assert cur_len == len(xx) and cur_len == len(yy) and cur_len == len(seqLens) and cur_len == len(seqMask)
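# note: np.random.choice samples with replacement by default, so indices may repeat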
random_inds = random_state.choice(cur_len, int(cur_len * fraction))
return skuIds[random_inds], xx[random_inds], yy[random_inds], seqLens[random_inds], seqMask[random_inds]
def save(self, filepath, fraction=None, random_state=None):
if fraction is None:
np.savez(filepath, sku_ids=self.sku_ids, inputs=self.XX, targets=self.YY,
sequence_lengths=self.sequence_lens,
sequence_masks=self.seq_mask)
else:
skuIds, xx, yy, seqLens, seqMask = self.get_data(fraction=fraction, random_state=random_state)
np.savez(filepath, sku_ids=skuIds, inputs=xx, targets=yy, sequence_lengths=seqLens, sequence_masks=seqMask)<|fim▁end|> | self.YY = np.empty((0, target_seq_len))
self.sequence_lens = [] |
<|file_name|>portugal_weather_forecast.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This program collects Portugal weather forecasts from IPMA and uploads them to the Orion Context Broker.
It fetches the list of stations on the fly from
- http://api.ipma.pt/json/locations.json.
Legal notes:
- http://www.ipma.pt/en/siteinfo/index.html?page=index.xml
Examples:
- get the weather forecast from IPMA:
curl -X GET --header 'Accept: application/json' \
'http://api.ipma.pt/json/alldata/1110600.json'
AsyncIO name convention:
async def name - entry point for asynchronous data processing/HTTP requests and post-processing
async def name_bounded - intermediate step that limits the number of parallel workers
async def name_one - worker coroutine
"""
from aiohttp import ClientSession, ClientConnectorError
from argparse import ArgumentTypeError, ArgumentParser
from asyncio import Semaphore, ensure_future, gather, run, TimeoutError as ToE, set_event_loop_policy
from copy import deepcopy
from datetime import datetime, timedelta
from pytz import timezone
from re import sub
from requests import get, exceptions
from sys import stdout
from time import sleep
from uvloop import EventLoopPolicy
from yajl import dumps, loads
from yaml import safe_load as load
import logging
default_latest = False # preserve only latest values
default_limit_entities = 50 # amount of entities per 1 request to Orion
default_limit_source = 10 # amount of parallel request to IPMA<|fim▁hole|>default_limit_target = 50 # amount of parallel request to Orion
default_log_level = 'INFO'
default_orion = 'http://orion:1026' # Orion Context Broker endpoint
default_timeout = -1 # if value != -1, then work as a service
http_ok = [200, 201, 204]
log_levels = ['ERROR', 'INFO', 'DEBUG']
logger = None
logger_req = None
stations = dict() # preprocessed list of stations
tz = timezone('UTC')
tz_wet = 'Europe/Lisbon'
tz_azot = 'Atlantic/Azores'
tz_azot_codes = ['3490100', '3480200', '3470100', '3460200', '3450200', '3440100', '3420300', '3410100']
url_observation = 'http://api.ipma.pt/json/alldata/{}.json'
url_stations = 'http://api.ipma.pt/json/locations.json'
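# e.g. url_observation.format('1110600') -> 'http://api.ipma.pt/json/alldata/1110600.json'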
template = {
'id': 'urn:ngsi-ld:WeatherForecast:Portugal-WeatherForecast-',
'type': 'WeatherForecast',
'address': {
'type': 'PostalAddress',
'value': {
'addressCountry': 'PT',
'addressLocality': None,
'postalCode': None
}
},
'dateIssued': {
'type': 'DateTime',
'value': None
},
'dataProvider': {
'type': 'Text',
'value': 'FIWARE'
},
'dateRetrieved': {
'type': 'DateTime',
'value': None
},
'dayMaximum': {
'type': 'StructuredValue',
'value': {
'temperature': None
}
},
'dayMinimum': {
'type': 'StructuredValue',
'value': {
'temperature': None
}
},
'feelsLikeTemperature': {
'type': 'Number',
'value': None
},
'precipitationProbability': {
'type': 'Number',
'value': None
},
'relativeHumidity': {
'type': 'Number',
'value': None
},
'source': {
'type': 'URL',
'value': 'http://www.ipma.pt'
},
'temperature': {
'type': 'Number',
'value': None
},
'validFrom': {
'type': 'DateTime',
'value': None
},
'validTo': {
'type': 'DateTime',
'value': None
},
'validity': {
'type': 'Text',
'value': None
},
'weatherType': {
'type': 'Text',
'value': None
},
'windDirection': {
'type': 'Number',
'value': None
},
'windSpeed': {
'type': 'Number',
'value': None
}
}
def check_entity(forecast, item):
if item in forecast:
if forecast[item] != '-99.0' and forecast[item] != -99:
return forecast[item]
return None
def decode_weather_type(item):
out = {
0: None,
1: 'clearSky',
2: 'partlyCloudy',
3: 'sunnyIntervals',
4: 'cloudy',
5: 'highClouds',
6: 'showers',
7: 'lightShowers',
8: 'heavyShowers',
9: 'rain',
10: 'lightRain',
11: 'heavyRain',
12: 'intermittentRain',
13: 'intermittentLightRain',
14: 'intermittentHeavyRain',
15: 'drizzle',
16: 'mist',
17: 'fog',
18: 'snow',
19: 'thunderstorms',
20: 'showersAndThunderstorms',
21: 'hail',
22: 'frost',
23: 'rainAndThunderstorms',
24: 'convectiveClouds',
25: 'partlyCloudy',
26: 'fog',
27: 'cloudy'
}.get(item, None)
if out is None and item != 0:
logger.error('Unknown value of WeatherType detected, %s', item)
return out
def decode_wind_direction(item):
"""
North: 180
North-West: 135
West: 90
South-West: 45
South: 0
South-East: -45
East: -90
North-East: -135
"""
out = {
'9': 180,
'8': 135,
'7': 90,
'6': 45,
'5': 0,
'4': -45,
'3': -90,
'2': -135,
'N': 180,
'NW': 135,
'W': 90,
'SW': 45,
'S': 0,
'SE': -45,
'E': -90,
'NE': -135
}.get(item, None)
if out is None:
logger.error('Unknown value of WindDirection detected, %s', item)
return out # note: 0 (south) is a valid value, so do not use 'out if out else None' here
async def collect():
logger.debug('Collecting data from IPMA started')
tasks = list()
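# a semaphore bounds the number of concurrent IPMA requests to limit_source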
sem = Semaphore(limit_source)
async with ClientSession() as session:
for station in stations:
task = ensure_future(collect_bounded(station, sem, session))
tasks.append(task)
result = await gather(*tasks)
result = [item for item in result if item is not False] # drop stations that failed to collect
logger.debug('Collecting data from IPMA ended')
return result
async def collect_bounded(station, sem, session):
async with sem:
return await collect_one(station, session)
async def collect_one(station, session):
try:
async with session.get(stations[station]['url']) as response:
result = await response.text()
status = response.status
except ClientConnectorError:
logger.error('Collecting data from IPMA station %s failed due to the connection problem', station)
return False
except ToE:
logger.error('Collecting data from IPMA station %s failed due to the timeout problem', station)
return False
if status not in http_ok:
logger.error('Collecting data from IPMA station %s failed due to the return code %s', station, status)
return False
content = loads(result)
result = dict()
result['id'] = station
result['retrieved'] = datetime.now().replace(microsecond=0)
result['forecasts'] = dict()
today = datetime.now(tz).strftime("%Y-%m-%d") + 'T00:00:00'
tomorrow = (datetime.now(tz) + timedelta(days=1)).strftime("%Y-%m-%d") + 'T00:00:00'
for forecast in content:
if forecast['idPeriodo'] != 24:
continue
date = forecast['dataPrev']
if date not in [today, tomorrow]:
continue
result['forecasts'][date] = dict()
result['forecasts'][date]['feelsLikeTemperature'] = check_entity(forecast, 'utci')
result['forecasts'][date]['issued'] = datetime.strptime(forecast['dataUpdate'], '%Y-%m-%dT%H:%M:%S')
result['forecasts'][date]['period'] = forecast['idPeriodo']
result['forecasts'][date]['precipitationProbability'] = check_entity(forecast, 'probabilidadePrecipita')
result['forecasts'][date]['relativeHumidity'] = check_entity(forecast, 'hR')
result['forecasts'][date]['temperature'] = check_entity(forecast, 'tMed')
result['forecasts'][date]['tMax'] = check_entity(forecast, 'tMax')
result['forecasts'][date]['tMin'] = check_entity(forecast, 'tMin')
result['forecasts'][date]['weatherType'] = check_entity(forecast, 'idTipoTempo')
result['forecasts'][date]['windDirection'] = check_entity(forecast, 'ddVento')
result['forecasts'][date]['windSpeed'] = check_entity(forecast, 'ffVento')
return result
def log_level_to_int(log_level_string):
if log_level_string not in log_levels:
message = 'invalid choice: {0} (choose from {1})'.format(log_level_string, log_levels)
raise ArgumentTypeError(message)
return getattr(logging, log_level_string, logging.ERROR)
async def post(body):
logger.debug('Posting data to Orion started')
tasks = list()
headers = {
'Content-Type': 'application/json'
}
if service:
headers['FIWARE-SERVICE'] = service
if path:
headers['FIWARE-SERVICEPATH'] = path
sem = Semaphore(limit_target)
# splitting list to list of lists to fit into limits
block = 0
items = 0
body_divided = dict()
body_divided[0] = list()
while True:
if len(body) > 0:
if items < limit_entities:
body_divided[block].append(body.pop())
items += 1
else:
items = 0
block += 1
body_divided[block] = list()
else:
break
async with ClientSession() as session:
for item in body_divided:
task = ensure_future(post_bounded(body_divided[item], headers, sem, session))
tasks.append(task)
response = await gather(*tasks)
response = list(set(response))
if True in response:
response.remove(True)
for item in response:
logger.error('Posting data to Orion failed due to the %s', item)
logger.debug('Posting data to Orion ended')
async def post_bounded(item, headers, sem, session):
async with sem:
return await post_one(item, headers, session)
async def post_one(item, headers, session):
payload = {
'actionType': 'APPEND',
'entities': item
}
payload = dumps(payload)
url = orion + '/v2/op/update'
try:
async with session.post(url, headers=headers, data=payload) as response:
status = response.status
except ClientConnectorError:
return 'connection problem'
except ToE:
return 'timeout problem'
if status not in http_ok:
return 'response code ' + str(status)
return True
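# For reference, each batch update is equivalent to the following request
# (illustrative values; the FIWARE-SERVICE/FIWARE-SERVICEPATH headers are optional):
#   curl -X POST 'http://orion:1026/v2/op/update' \
#        -H 'Content-Type: application/json' \
#        -d '{"actionType": "APPEND", "entities": [...]}'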
async def prepare_schema(source):
logger.debug('Schema preparation started')
tasks = list()
for item in source:
task = ensure_future(prepare_schema_one(item))
tasks.append(task)
result = await gather(*tasks)
logger.debug('Schema preparation ended')
return [j for i in result for j in i]
async def prepare_schema_one(source):
result = list()
id_local = source['id']
today = datetime.now(tz).strftime("%Y-%m-%d") + 'T00:00:00'
tomorrow = (datetime.now(tz) + timedelta(days=1)).strftime("%Y-%m-%d") + 'T00:00:00'
retrieved = source['retrieved'].replace(tzinfo=tz).isoformat().replace('+00:00', 'Z')
for date in source['forecasts']:
item = deepcopy(template)
forecast = source['forecasts'][date]
issued = forecast['issued'].replace(tzinfo=tz).isoformat().replace('+00:00', 'Z')
forecast_date = datetime.strptime(date, '%Y-%m-%dT00:00:00')
valid_from = forecast_date.replace(tzinfo=tz)
valid_to = valid_from + timedelta(hours=24)
valid_from_iso = valid_from.isoformat().replace('+00:00', 'Z')
valid_from_short = valid_from.strftime('%H:%M:%S')
valid_to_iso = valid_to.isoformat().replace('+00:00', 'Z')
valid_to_short = valid_to.strftime('%H:%M:%S')
if latest:
if date == today:
item['id'] = item['id'] + id_local + '_today_' + valid_from_short + '_' + valid_to_short
if date == tomorrow:
item['id'] = item['id'] + id_local + '_tomorrow_' + valid_from_short + '_' + valid_to_short
else:
item['id'] = item['id'] + id_local + '_' + valid_from_iso + '_' + valid_to_iso
item['address']['value']['addressLocality'] = stations[id_local]['addressLocality']
item['address']['value']['postalCode'] = stations[id_local]['postalCode']
item['dateIssued']['value'] = issued
item['dateRetrieved']['value'] = retrieved
if forecast['tMax'] is not None:
item['dayMaximum']['value']['temperature'] = float(forecast['tMax'])
else:
del item['dayMaximum']
if forecast['tMin'] is not None:
item['dayMinimum']['value']['temperature'] = float(forecast['tMin'])
else:
del item['dayMinimum']
if forecast['feelsLikeTemperature'] is not None:
item['feelsLikeTemperature']['value'] = float(forecast['feelsLikeTemperature'])
else:
del item['feelsLikeTemperature']
if forecast['precipitationProbability'] is not None:
item['precipitationProbability']['value'] = float(forecast['precipitationProbability']) / 100 # IPMA gives percent, schema expects 0-1
else:
del item['precipitationProbability']
if forecast['relativeHumidity'] is not None:
item['relativeHumidity']['value'] = float(forecast['relativeHumidity'])
else:
del item['relativeHumidity']
if forecast['temperature'] is not None:
item['temperature']['value'] = float(forecast['temperature'])
else:
del item['temperature']
item['validFrom']['value'] = valid_from_iso
item['validTo']['value'] = valid_to_iso
item['validity']['value'] = valid_from_iso + '/' + valid_to_iso
if forecast['weatherType'] is not None:
item['weatherType']['value'] = decode_weather_type(forecast['weatherType'])
if item['weatherType']['value'] is None:
del item['weatherType']
if forecast['windDirection'] is not None:
item['windDirection']['value'] = decode_wind_direction(forecast['windDirection'])
if item['windDirection']['value'] is None:
del item['windDirection']
if forecast['windSpeed'] is not None:
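# ffVento is presumably reported in km/h; 0.28 approximates the km/h -> m/s factor (1 km/h ~ 0.278 m/s)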
item['windSpeed']['value'] = round(float(forecast['windSpeed']) * 0.28, 2)
else:
del item['windSpeed']
result.append(item)
return result
def reply_status():
logger.info('Orion: %s', orion)
logger.info('FIWARE Service: %s', service)
logger.info('FIWARE Service-Path: %s', path)
logger.info('Timeout: %s', str(timeout))
logger.info('Stations: %s', str(len(stations)))
logger.info('Latest: %s', str(latest))
logger.info('Limit_source: %s', str(limit_source))
logger.info('Limit_target: %s', str(limit_target))
logger.info('Log level: %s', args.log_level)
logger.info('Started')
def sanitize(str_in):
return sub(r"[<(>)\"\'=;-]", "", str_in)
def setup_logger():
local_logger = logging.getLogger('root')
local_logger.setLevel(log_level_to_int(args.log_level))
handler = logging.StreamHandler(stdout)
handler.setLevel(log_level_to_int(args.log_level))
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%dT%H:%M:%SZ')
handler.setFormatter(formatter)
local_logger.addHandler(handler)
local_logger_req = logging.getLogger('requests')
local_logger_req.setLevel(logging.WARNING)
return local_logger, local_logger_req
def setup_stations(stations_limit):
result = dict()
limit_on = False
limit_off = False
resp = None
if 'include' in stations_limit:
limit_on = True
if 'exclude' in stations_limit:
limit_off = True
try:
resp = get(url_stations)
except exceptions.ConnectionError:
logger.error('Collecting the list of stations from IPMA failed due to the connection problem')
exit(1)
if resp.status_code not in http_ok:
logger.error('Collecting the list of stations from IPMA failed due to the return code %s', resp.status_code)
exit(1)
content = loads(resp.text)
for station in content:
station_code = str(station['globalIdLocal'])
if limit_on:
if station_code not in stations_limit['include']:
continue
if limit_off:
if station_code in stations_limit['exclude']:
continue
result[station_code] = dict()
result[station_code]['postalCode'] = station_code
result[station_code]['addressLocality'] = sanitize(station['local'])
result[station_code]['url'] = url_observation.format(station_code)
if station_code in tz_azot_codes:
result[station_code]['timezone'] = tz_azot
else:
result[station_code]['timezone'] = tz_wet
if limit_on:
if len(result) != len(stations_limit['include']):
logger.error('Errors in the list of stations detected')
exit(1)
return result
def setup_stations_config(f):
local_stations = dict()
if f:
try:
with open(f, 'r', encoding='utf8') as fh:
content = fh.read()
config = sub(r'-.*\n?', setup_config_re, content)
source = load(config)
if 'exclude' in source and 'include' in source:
logging.error('Config file must not contain both include and exclude sections')
exit(1)
if 'exclude' in source:
local_stations['exclude'] = list()
for item in source['exclude']:
local_stations['exclude'].append(item)
if 'include' in source:
local_stations['include'] = list()
for item in source['include']:
local_stations['include'].append(item)
except TypeError:
logging.error('Config file is empty or wrong')
exit(1)
except FileNotFoundError:
logging.error('Config file not found')
exit(1)
return local_stations
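# A station config file (hypothetical example) holds either an include or an exclude list:
#   include:
#     - 1110600
#     - 1020500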
def setup_config_re(station):
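"""Rewrite a matched '- item' line as "- 'item'" so numeric station codes stay strings."""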
fix = sub('-', '', station.group()).strip()
return "- '{}'\n".format(fix)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--config',
dest='config',
help='YAML file with list of stations to be collected or excluded from collecting')
parser.add_argument('--latest',
action='store_true',
default=default_latest,
dest='latest',
help='Collect only latest forecast')
parser.add_argument('--limit-entities',
default=default_limit_entities,
dest='limit_entities',
help='Limit amount of entities per 1 request to orion')
parser.add_argument('--limit-source',
default=default_limit_source,
dest='limit_source',
help='Limit amount of parallel requests to IPMA')
parser.add_argument('--limit-target',
default=default_limit_target,
dest='limit_target',
help='Limit amount of parallel requests to Orion')
parser.add_argument('--log-level',
default=default_log_level,
dest='log_level',
help='Set the logging output level. {0}'.format(log_levels),
nargs='?')
parser.add_argument('--orion',
action='store',
default=default_orion,
dest='orion',
help='Orion Context Broker endpoint')
parser.add_argument('--path',
action='store',
dest='path',
help='FIWARE Service Path')
parser.add_argument('--service',
action='store',
dest="service",
help='FIWARE Service')
parser.add_argument('--timeout',
action='store',
default=default_timeout,
dest='timeout',
help='Run as a service')
args = parser.parse_args()
latest = args.latest
limit_entities = int(args.limit_entities)
limit_source = int(args.limit_source)
limit_target = int(args.limit_target)
orion = args.orion
timeout = int(args.timeout)
if 'path' in args:
path = args.path
if 'service' in args:
service = args.service
logger, logger_req = setup_logger()
set_event_loop_policy(EventLoopPolicy())
res = setup_stations_config(args.config)
stations = setup_stations(res)
reply_status()
while True:
res = run(collect())
if res:
res = run(prepare_schema(res))
run(post(res))
if timeout == -1:
break
else:
logger.debug('Sleeping for the %s seconds', timeout)
sleep(timeout)
logger.info('Ended')
exit(0)<|fim▁end|> | |
<|file_name|>safe_test.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Irfan Adilovic
// This software is licensed under the terms of the MIT license.
// See LICENSE for the text of the license.
#include <iostream>
#include <iomanip>
#include <limits>
#include <stdint.h>
#include <cstdio>
#include <typeinfo>
#include <inttypes.h>
using std::cout;
using std::cerr;
using std::endl;
using std::bad_cast;
#undef assert // for testability
//#define assert(e) ((e) ? (void)0 : ((void)printf ("%s:%u: failed assertion `%s'\n", __FILE__, __LINE__, #e)))
#define assert(e) ((e) ? (void)0 : ((void)printf ("failed assertion `%s'\n", #e)))
#define assert_op(e1, op, e2) ((e1) op (e2) ? (void)0 : ((void)(cout << "failed assertion `" << #e1 << " " #op " " << #e2 << ": [" << (e1) << "] isn't " #op " [" << (e2) << "]\n")))
#define assert_eq(e1, e2) assert_op(e1, ==, e2)
#define assert_ne(e1, e2) assert_op(e1, !=, e2)
#include "safe_ops.h"
using namespace safe_ops;
#define MAX(T) numeric_limits_compat<T>::max()
#define MIN(T) numeric_limits_compat<T>::lowest()
#ifdef SAFE_USE_INT128
std::ostream& operator<<(std::ostream& os, uint128_t t) {
os << std::hex << "0x" << std::setw(16) <<std::setfill('0') << (uint64_t)(t>>64);
return os << std::setw(16) << std::setfill('0') << (uint64_t)(t) << std::dec;
}
std::ostream& operator<<(std::ostream& os, int128_t t) {
return os << (uint128_t)t;
}
#endif
#if __cplusplus < 201103L
struct Lambda {
void operator ()(int result) { cout << "lambda: " << (result < 0 ? "under" : "over") << "flow detected\n"; };
} lambda;
#endif
const char *progname = "";
struct FakeLogger {
void log(const char *level, const char *str) { printf("[log]\t%s\t%s\t%s\n", progname, level, str); }
};
int main(int, char **argv) {
progname = argv[0];
printf(" If a 'safe' test were to fail, the assertion text would contain 'safe(' and would be easy to identify. E.g.:\n");
assert(safe(1) > 2);
// precision tests
safe_ops::intmax_t intmax = MAX(safe_ops::intmax_t);
assert(safe(intmax-1) < safe((safe_ops::uintmax_t)(intmax))); // promotion to float/double due to signed/unsigned comparison
assert(safe(intmax-2)+1 < safe(intmax)); // promotion to float/double due to addition
#define assert_safe_eq(s, result) assert_eq((s).value(), result)
#define safe_arith_assert_impl(x, arith_op, y, z, nosafe_eq_op) \
assert_safe_eq(safe(x) arith_op y, z); \
assert_safe_eq(x arith_op safe(y), z); \
assert_op(x arith_op (y), nosafe_eq_op, (z))
#define safe_arith_assert(x, op, y, z, nosafe_eq_op) \
safe_arith_assert_impl(x, op, y, z, nosafe_eq_op)
#define safe_arith_assert_comm(x, op, y, z, nosafe_eq_op) \
safe_arith_assert_impl(x, op, y, z, nosafe_eq_op); \
safe_arith_assert_impl(y, op, x, z, nosafe_eq_op)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wtype-limits"
#define gen_safe_arith_tests_nomod(T1, T2, AddMultT, SubT, DivModT) \
safe_arith_assert_comm(MAX(T1), +, MAX(T2), (AddMultT)(MAX(T1)) + MAX(T2), !=); \
safe_arith_assert_comm(MIN(T1), +, MIN(T2), (AddMultT)(MIN(T1)) + MIN(T2), !=); \
safe_arith_assert( MIN(T1), -, MAX(T2), (SubT) (MIN(T1)) - MAX(T2), !=); \
safe_arith_assert( MAX(T1), -, MIN(T2), (SubT) (MAX(T1)) - MIN(T2), !=); \
safe_arith_assert_comm(MAX(T1), *, MAX(T2), (AddMultT)(MAX(T1)) * MAX(T2), !=); \
safe_arith_assert_comm(MIN(T1), *, MIN(T2), (AddMultT)(MIN(T1)) * MIN(T2), !=); \
safe_arith_assert( MIN(T1), /, (T2)1, (DivModT) (MIN(T1)) / (T2)1 , ==); \
safe_arith_assert( MAX(T1), /, (T2)-1, (DivModT) (MAX(T1)) / (T2)-1 , ==)
#define gen_safe_arith_tests(T1, T2, AddMultT, SubT, DivModT) \
gen_safe_arith_tests_nomod(T1, T2, AddMultT, SubT, DivModT); \
safe_arith_assert( MIN(T1), %, (T2)1, (DivModT) (MIN(T1)) % (T2)1 , ==); \
safe_arith_assert( MAX(T1), %, (T2)-1, (DivModT) (MAX(T1)) % (T2)-1 , ==)
// there will be cases where T2 is unsigned and div/mod with (unsigned)-1 is tested for.
// any failures there shall be ignored.
#define gen_safe_arith_tests_float2(T1, T2, AddMultT, SubT, DivModT) \
gen_safe_arith_tests_nomod(T1, T2, AddMultT, SubT, DivModT); \
safe_arith_assert( MIN(T1), /, (T2)1e-30, (DivModT) (MIN(T1)) / (T2)1e-30 , ==); \
safe_arith_assert( MAX(T1), /, (T2)-1e-30, (DivModT) (MAX(T1)) / (T2)-1e-30 , ==)
// the arithmetic tests generator states basically:
// 1) + and * shall be tested commutatively
// 2) +, * and - shall always fail to produce a mathematically correct result without safe_t
// 3) / and % shall always produce the same result as safe_t, which is assumed to be mathematically correct
// to fulfill 2), appropriate MIN/MAX values are always chosen to overflow non-safe_t calculations.
printf(" safe_arith test int/int: no asserts expected\n");
gen_safe_arith_tests(int, int, long, long, int);
printf(" safe_arith test unsigned/unsigned: 5 asserts expected, unsigned lowest()\n");
gen_safe_arith_tests(unsigned, unsigned, unsigned long, long, unsigned);
printf(" safe_arith test int/unsigned: 4 asserts expected, unsigned lowest() and negative division\n");
gen_safe_arith_tests(int, unsigned, long, long, long);
printf(" safe_arith test unsigned/int: 3 asserts expected, unsigned lowest() and negative division\n");
gen_safe_arith_tests(unsigned, int, long, long, long);
printf(" safe_arith test int/long: no asserts expected\n");
gen_safe_arith_tests(int, long, float, float, long);
printf(" safe_arith test long/int: no asserts expected\n");
gen_safe_arith_tests(long, int, float, float, long);
printf(" safe_arith test float/uint64_t: 8 asserts expected, int too small to make a difference and 0-multiplication\n");
gen_safe_arith_tests_nomod(float, uint64_t, double, double, float);
printf(" safe_arith test uint64_t/float: 9 asserts expected, int too small to make a difference, 0-multiplication and non-zero division by small number\n");
gen_safe_arith_tests_float2(uint64_t, float, double, double, double);
printf(" safe_arith test float/float: 2 asserts expected, non-zero division by small number\n");
gen_safe_arith_tests_float2(float, float, double, double, double);
printf(" safe_arith test double/double: 2 asserts expected, non-zero division by small number\n");
gen_safe_arith_tests_float2(double, double, long double, long double, long double);
#ifdef SAFE_USE_INT128
printf(" safe_arith test float/int128_t: no asserts expected\n");
gen_safe_arith_tests_nomod(float, int128_t, double, double, float); // float/int128 fits in float
printf(" safe_arith test int128_t/float: 2 asserts expected, non-zero division by small number\n");
gen_safe_arith_tests_float2(int128_t, float, double, double, double);
printf(" safe_arith test float/uint128_t: 6 asserts expected, unsigned lowest ops and unsigned -1 division\n");
gen_safe_arith_tests_nomod(float, uint128_t, double, double, double); // float/uint128 fits in double
printf(" safe_arith test uint128_t/float: 7 asserts expected, unsigned lowest ops and divisions\n");
gen_safe_arith_tests_float2(uint128_t, float, double, double, double);
#endif
#pragma GCC diagnostic pop
// ad-hoc cross-functional test: safe_arith + policy_throw + safe-safe operator
try {
assert(safe(1) + safe(1) == safe(2)); // test both cmp and arith safe-safe operators
(int)(safe(MAX(int)).pthrow() + safe(1).passert());
} catch (...) {
cout << "ad-hoc test 'policy vs operator+': caught expected bad_cast: MAX(int) + 1 is no longer an int\n";
}
// cout << "# Testing " << x << ' ' << #op << ' ' << y << endl;
#define safe_cmp_assert_impl(x, op, y) \
assert(x op y); \
assert(safe(x) op y); \
assert(x op safe(y));
#define safe_cmp_assert(x, op, y, rev_op) \
safe_cmp_assert_impl(x, op, y) \
safe_cmp_assert_impl(y, rev_op, x)
#define safe_cmp_assert2(x, op, op2, y, rev_op, rev_op2) \
safe_cmp_assert(x, op, y, rev_op) \
safe_cmp_assert(x, op2, y, rev_op2)
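// Each safe_cmp_assert_impl expands to 3 checks (native, safe lhs, safe rhs);
// safe_cmp_assert runs both operand orders (6 checks) and safe_cmp_assert2
// covers two operators in both orders (12 checks).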
cerr << "sizeof(int): " << sizeof(int) << endl;
cerr << "sizeof(long): " << sizeof(long) << endl;
cerr << std::boolalpha;
#define cerr_trait(type, trait) \
cerr << "std::" #trait "<" #type ">::value : " << std::trait<type>::value << endl
#define cerr_traits(type) \
cerr << "sizeof(" #type "): " << sizeof(type) << endl; \
cerr_trait(type, is_signed); \
cerr_trait(type, is_unsigned)
// cerr_traits(long long);
// cerr_traits(unsigned long long);
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-compare"<|fim▁hole|>printf(" expecting 0u >/>= -1 to fail, but safe variants to succeed...\n");
safe_cmp_assert2(0u, >, >=, -1, <, <=);
printf(" expecting 1ul >/>= -1 to fail, but safe variants to succeed...\n");
safe_cmp_assert2(1ul, >, >=, -1, <, <=);
printf(" expecting 4294967296ul >/>= -1 to fail, but safe variants to succeed...\n");
safe_cmp_assert2(4294967296ul, >, >=, -1, <, <=);
printf(" expecting 1ul >/>= -1l to fail, but safe variants to succeed...\n");
safe_cmp_assert2(1ul, >, >=, -1l, <, <=);
printf(" expecting 2147483648u >/!= -2147483648 to fail, but safe variants to succeed...\n");
safe_cmp_assert2(2147483648u, >, !=, (-2147483647-1), <, !=);
//NB: http://stackoverflow.com/questions/35130890/understanding-231-and-231-integer-promotion
printf(" expecting 4294967295u >/!= -1 to fail, but safe variants to succeed...\n");
safe_cmp_assert2(4294967295u, >, !=, -1, <, !=);
#pragma GCC diagnostic pop
printf(" no failures expected in any of the floating point comparisons (neither native nor 'safe')...\n");
safe_cmp_assert2(MAX(float), <, <=, MAX(double), >, >=);
safe_cmp_assert2(MAX(double), <, <=, MAX(long double), >, >=);
#ifdef SAFE_USE_INT128
safe_cmp_assert2(MAX(int128_t), <, <=, MAX(float), >, >=);
// special casing:
safe_cmp_assert2(MAX(uint128_t), >, >=, MAX(float), <, <=);
// conversion to float yields inf so it works mathematically correct
safe_cmp_assert2(MAX(uint128_t), <, <=, MAX(double), >, >=);
safe_cmp_assert2(MAX(uint128_t), <, <=, MAX(long double), >, >=);
#endif
printf("safe_cmp tests passed\n");
int i;
int result = 0;
const long lmax = MAX(long);
const long lmin = MIN(long);
printf(" safe_cast_assert: expecting two asserts...\n");
i = safe(lmax).passert(); // usage through safe generator + .policy modifier
i = safe_t<long, policy_assert>(lmin); // usage through direct safe_t instantiation
i = safe_cast_assert<int>(0l); // usage through safe_cast_* helpers
printf(" safe_cast_result: expecting no asserts...\n");
result = 0;
i = safe(lmax).presult(&result);
assert(result == 1);
result = 0;
i = safe_t<long, policy_result, int*>(lmin, &result);
assert(result == -1);
result = 0;
safe_cast_result<int>(0l, &result);
assert(result == 0); // actually: unmodified
printf(" safe_cast_lambda: expecting two 'lambda: ...' messages...\n");
#if __cplusplus >= 201103L
auto lambda = [](int result){ cout << "lambda: " << (result < 0 ? "under" : "over") << "flow detected\n"; };
i = safe(lmax).pexec(lambda);
i = safe_t<long, policy_exec, decltype(lambda)>(lmin, lambda);
#else
// look before int main(), in global scope, there is a conditional definition of lambda old-style
i = safe(lmax).pexec(lambda);
i = safe_t<long, policy_exec, Lambda>(lmin, lambda);
#endif
safe_cast_exec<int>(0l, lambda);
printf(" safe_cast_log: expecting two log entries...\n");
FakeLogger logger;
i = safe(lmax).plog(&logger);
i = safe_t<long, policy_log, FakeLogger*>(lmin, &logger);
safe_cast_log<int>(0l, &logger);
printf(" safe_cast_throw: expecting two bad_casts...\n");
try {
i = safe(lmax).pthrow();
assert("unreachable after throw" == NULL);
} catch (bad_cast &) {
printf ("bad_cast caught due to overflow\n");
}
try {
i = safe_t<long, policy_throw>(lmin);
assert("unreachable after throw" == NULL);
} catch (bad_cast &) {
printf ("bad_cast caught due to underflow\n");
}
safe_cast_throw<int>(0l);
(void)i; // silence compiler warnings
printf("non-truncating safe_cast functional tests passed\n");
printf("truncating full-coverage tests following:\n");
#define generic_expect(T1, T2, SmallerPositive, SmallerNegative) \
assert_eq((T1)safe((T2)(0)), 0); \
assert_eq((T1)safe(MAX(T2)), (T1)MAX(SmallerPositive)); \
assert_eq((T1)safe(MIN(T2)), (T1)MIN(SmallerNegative)); \
assert_eq((T1)MAX(T2), (T1)MAX(SmallerPositive)); \
assert_eq((T1)MIN(T2), (T1)MIN(SmallerNegative))
// the latter two assertions will obviously produce expected failures
#define expect_smaller_larger(Smaller, Larger) generic_expect(Smaller, Larger, Smaller, Smaller)
#define expect_larger_smaller(Larger, Smaller) generic_expect(Larger, Smaller, Smaller, Smaller)
#define expect_lower_higher(Lower, Higher) generic_expect(Lower, Higher, Lower, Higher)
#define expect_higher_lower(Higher, Lower) generic_expect(Higher, Lower, Lower, Higher)
#define expect_smaller_larger2(Smaller, Larger) \
expect_smaller_larger(Smaller, Larger); \
expect_larger_smaller(Larger, Smaller)
#define expect_lower_higher2(Lower, Higher) \
expect_lower_higher(Lower, Higher); \
expect_higher_lower(Higher, Lower)
expect_smaller_larger2(float, long double);
expect_smaller_larger2(int, double);
expect_smaller_larger2(unsigned, double);
printf("floating point tests passed\n");
/// naive size comparison is "wrong"
expect_smaller_larger2(uint64_t, float);
expect_smaller_larger2(int64_t, float);
/// naive size comparison is 'equal'
expect_smaller_larger2(uint32_t, float);
expect_smaller_larger2(int32_t, float);
printf("naive sizeof tests passed (float <-> int64/32_t)\n");
/// ints greater than float
#ifdef SAFE_USE_INT128
expect_lower_higher2(float, uint128_t);
expect_smaller_larger2(int128_t, float);
/// int128_t is less than float, but test special-case handling regardless
printf("extreme sizeof tests passed (float <-> safe_[u]int128_t)\n");
#endif
/// integers
expect_smaller_larger(int, int);
expect_smaller_larger(unsigned, unsigned);
expect_lower_higher2(int, unsigned);
printf("same size integral tests passed\n");
expect_smaller_larger2(int32_t, int64_t);
expect_smaller_larger2(uint32_t, uint64_t);
expect_smaller_larger2(uint32_t, int64_t);
expect_lower_higher2(int32_t, uint64_t);
printf("different size integral tests passed\n");
return 0;
}<|fim▁end|> | printf(" expecting 1u >/>= int8_t(-1) to fail, but safe variants to succeed...\n");
safe_cmp_assert2(1u, >, >=, int8_t(-1), <, <=);
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from "./msg-repo";
export * from "./imsg-repo";
export * from "./msg-repo-mock";<|fim▁end|> | export * from "./msg"; |
<|file_name|>lxde.py<|end_file_name|><|fim▁begin|>#
# lxde.py
#
# Copyright (C) 2010 Fabio Erculiani
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from installclass import BaseInstallClass
from constants import *
from product import *
from flags import flags
import os, types
import iutil
import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)
import installmethod
from sabayon import Entropy
from sabayon.livecd import LiveCDCopyBackend
class InstallClass(BaseInstallClass):
id = "sabayon_lxde"
name = N_("Kogaion LXDE")
_pixmap_dirs = os.getenv("PIXMAPPATH", "/usr/share/pixmaps").split(":")
for _pix_dir in _pixmap_dirs:
_pix_path = os.path.join(_pix_dir, "lxde.png")
if os.path.isfile(_pix_path):
pixmap = _pix_path
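# dmrc selects the session name the display manager writes to the user's ~/.dmrc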
dmrc = "LXDE"
if Entropy().is_sabayon_steambox():
dmrc = "steambox"
_description = N_("Select this installation type for a default installation "
"with the LXDE desktop environment. "<|fim▁hole|>
if not Entropy().is_installed("lxde-base/lxde-common"):
hidden = 1
def configure(self, anaconda):
BaseInstallClass.configure(self, anaconda)
BaseInstallClass.setDefaultPartitioning(self,
anaconda.storage, anaconda.platform)
def setSteps(self, anaconda):
BaseInstallClass.setSteps(self, anaconda)
anaconda.dispatch.skipStep("welcome", skip = 1)
#anaconda.dispatch.skipStep("network", skip = 1)
def getBackend(self):
return LiveCDCopyBackend
def productMatches(self, oldprod):
if oldprod is None:
return False
if oldprod.startswith(productName):
return True
return False
def versionMatches(self, oldver):
try:
oldVer = float(oldver)
newVer = float(productVersion)
except ValueError:
return True
return newVer >= oldVer
def __init__(self):
BaseInstallClass.__init__(self)<|fim▁end|> | "A small lightweight and functional working environment at your service.")
_descriptionFields = (productName,)
sortPriority = 10000 |
<|file_name|>parse.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate criterion;
extern crate rand;
extern crate rs_poker;
use criterion::Criterion;
use rs_poker::holdem::RangeParser;
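// Range shorthand under test: "AKo" = ace-king offsuit, "22+" = pocket pairs
// from deuces up, "32+" = connectors upward, "A2+" = any ace (readings assumed
// from the benchmark names below).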
fn parse_ako(c: &mut Criterion) {
c.bench_function("Parse AKo", |b| {
b.iter(|| RangeParser::parse_one("AKo"));
});
}
fn parse_pairs(c: &mut Criterion) {
c.bench_function("Parse pairs (22+)", |b| {
b.iter(|| RangeParser::parse_one("22+"));<|fim▁hole|>}
fn parse_connectors(c: &mut Criterion) {
c.bench_function("Parse connectors (32+)", |b| {
b.iter(|| RangeParser::parse_one("32+"));
});
}
fn parse_plus(c: &mut Criterion) {
c.bench_function("Parse plus (A2+)", |b| {
b.iter(|| RangeParser::parse_one("A2+"));
});
}
criterion_group!(
benches,
parse_ako,
parse_pairs,
parse_connectors,
parse_plus
);
criterion_main!(benches);<|fim▁end|> | }); |
<|file_name|>permissions_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env vpython
# Copyright 2020 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import logging
import sys
import unittest
import test_env
test_env.setup_test_env()
from proto import realms_config_pb2
from realms import permissions
from test_support import test_case
class BuilderTest(test_case.TestCase):
def setUp(self):
super(BuilderTest, self).setUp()
self.builder = permissions.Builder('rev')
self.permission = self.builder.permission
self.include = self.builder.include
self.role = self.builder.role
def check(self, perms=None, roles=None):
db = self.builder.finish()
self.assertEquals(db.revision, 'rev')
if perms is not None:
self.assertEquals(sorted(db.permissions), perms)
if roles is not None:
self.assertEquals(
db.roles,
{n: permissions.Role(n, perms) for n, perms in roles.items()})
def test_empty(self):
self.check([], {})
def test_permissions_only(self):
self.permission('luci.dev.p1')
self.permission('luci.dev.p2')
self.permission('luci.dev.p1') # redeclaration is ok
self.check(perms=['luci.dev.p1', 'luci.dev.p2'])
def test_bad_permission_name(self):
with self.assertRaises(ValueError):
self.permission('luci.dev')
with self.assertRaises(ValueError):
self.permission('luci.dev.something.something')
def test_simple_role(self):
self.role('role/dev.a', [
self.permission('luci.dev.p1'),
self.permission('luci.dev.p2'),
])
self.check(
perms=['luci.dev.p1', 'luci.dev.p2'],
roles={'role/dev.a': ('luci.dev.p1', 'luci.dev.p2')})
def test_complex_role(self):
self.role('role/dev.a', [
self.permission('luci.dev.p1'),
self.permission('luci.dev.p2'),
])
self.role('role/dev.b', [
self.permission('luci.dev.p2'),
self.permission('luci.dev.p3'),
self.include('role/dev.a'),
])<|fim▁hole|> perms=['luci.dev.p1', 'luci.dev.p2', 'luci.dev.p3'],
roles={
'role/dev.a': ('luci.dev.p1', 'luci.dev.p2'),
'role/dev.b': ('luci.dev.p1', 'luci.dev.p2', 'luci.dev.p3'),
})
def test_role_redeclaration(self):
self.role('role/dev.a', [])
with self.assertRaises(ValueError):
self.role('role/dev.a', [])
def test_bad_role_name(self):
with self.assertRaises(ValueError):
self.role('zzz/role', [])
def test_referencing_undeclared_role(self):
with self.assertRaises(ValueError):
self.include('role/zzz')
def test_non_idempotent_perm(self):
self.permission('luci.dev.p1')
self.permission('luci.dev.p1')
with self.assertRaises(ValueError):
self.permission('luci.dev.p1', internal=True)
class HardcodedDBTest(test_case.TestCase):
def test_can_be_built(self):
db = permissions.db()
for b in db.implicit_root_bindings('proj'):
self.assertIsInstance(b, realms_config_pb2.Binding)
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.FATAL)
unittest.main()<|fim▁end|> | self.check( |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.